/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 *
 * SPDX-License-Identifier:	GPL-2.0
 */

#include <common.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_init.h"

#define GET_MAX_VALUE(x, y)	(((x) > (y)) ? (x) : (y))
#define CEIL_DIVIDE(x, y)	\
	((((x) - ((x) / (y)) * (y)) == 0) ? ((x) / (y) - 1) : ((x) / (y)))

#define TIME_2_CLOCK_CYCLES	CEIL_DIVIDE

#define GET_CS_FROM_MASK(mask)	(cs_mask2_num[mask])
#define CS_CBE_VALUE(cs_num)	(cs_cbe_reg[cs_num])

u32 window_mem_addr = 0;
u32 phy_reg0_val = 0;
u32 phy_reg1_val = 8;
u32 phy_reg2_val = 0;
u32 phy_reg3_val = 0xa;
enum hws_ddr_freq init_freq = DDR_FREQ_667;
enum hws_ddr_freq low_freq = DDR_FREQ_LOW_FREQ;
enum hws_ddr_freq medium_freq;
u32 debug_dunit = 0;
u32 odt_additional = 1;
u32 *dq_map_table = NULL;
u32 odt_config = 1;

#if defined(CONFIG_ARMADA_38X) || defined(CONFIG_ALLEYCAT3) || \
	defined(CONFIG_ARMADA_39X)
u32 is_pll_before_init = 0, is_adll_calib_before_init = 0, is_dfs_in_init = 0;
u32 dfs_low_freq = 130;
#else
u32 is_pll_before_init = 0, is_adll_calib_before_init = 1, is_dfs_in_init = 0;
u32 dfs_low_freq = 100;
#endif
u32 g_rtt_nom_c_s0, g_rtt_nom_c_s1;
u8 calibration_update_control;	/* 2 external only, 1 is internal only */

enum hws_result training_result[MAX_STAGE_LIMIT][MAX_INTERFACE_NUM];
enum auto_tune_stage training_stage = INIT_CONTROLLER;
u32 finger_test = 0, p_finger_start = 11, p_finger_end = 64,
	n_finger_start = 11, n_finger_end = 64,
	p_finger_step = 3, n_finger_step = 3;
u32 clamp_tbl[] = { 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3 };

/* Initialized to 0xff; this variable is defined by the user in debug mode */
u32 mode2_t = 0xff;
u32 xsb_validate_type = 0;
u32 xsb_validation_base_address = 0xf000;
u32 first_active_if = 0;
u32 dfs_low_phy1 = 0x1f;
u32 multicast_id = 0;
int use_broadcast = 0;
struct hws_tip_freq_config_info *freq_info_table = NULL;
u8 is_cbe_required = 0;
u32 debug_mode = 0;
u32 delay_enable = 0;
int rl_mid_freq_wa = 0;

u32 effective_cs = 0;

u32 mask_tune_func = (SET_MEDIUM_FREQ_MASK_BIT |
		      WRITE_LEVELING_MASK_BIT |
		      LOAD_PATTERN_2_MASK_BIT |
		      READ_LEVELING_MASK_BIT |
		      SET_TARGET_FREQ_MASK_BIT | WRITE_LEVELING_TF_MASK_BIT |
		      READ_LEVELING_TF_MASK_BIT |
		      CENTRALIZATION_RX_MASK_BIT | CENTRALIZATION_TX_MASK_BIT);

void ddr3_print_version(void)
{
	printf(DDR3_TIP_VERSION_STRING);
}

static int ddr3_tip_ddr3_training_main_flow(u32 dev_num);
static int ddr3_tip_write_odt(u32 dev_num, enum hws_access_type access_type,
			      u32 if_id, u32 cl_value, u32 cwl_value);
static int ddr3_tip_ddr3_auto_tune(u32 dev_num);
static int is_bus_access_done(u32 dev_num, u32 if_id,
			      u32 dunit_reg_adrr, u32 bit);
#ifdef ODT_TEST_SUPPORT
static int odt_test(u32 dev_num, enum hws_algo_type algo_type);
#endif

int adll_calibration(u32 dev_num, enum hws_access_type access_type,
		     u32 if_id, enum hws_ddr_freq frequency);
static int ddr3_tip_set_timing(u32 dev_num, enum hws_access_type access_type,
			       u32 if_id, enum hws_ddr_freq frequency);

static struct page_element page_param[] = {
	/*
	 * 8bits		16 bits
	 * page-size(K)		page-size(K)	mask
	 */
	{ 1, 2, 2},
	/* 512M */
	{ 1, 2, 3},
	/* 1G */
	{ 1, 2, 0},
	/* 2G */
	{ 1, 2, 4},
	/* 4G */
	{ 2, 2, 5}
	/* 8G */
};

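/*
 * Per-die size code used by ddr3_tip_configure_cs(): bits [1:0] are
 * programmed into the per-CS area-size field of SDRAM_ACCESS_CONTROL_REG
 * and bit [2] into the per-CS high size bit.
 */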
static u8 mem_size_config[MEM_SIZE_LAST] = {
	0x2,			/* 512Mbit */
	0x3,			/* 1Gbit */
	0x0,			/* 2Gbit */
	0x4,			/* 4Gbit */
	0x5			/* 8Gbit */
};

static u8 cs_mask2_num[] = { 0, 0, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3 };

static struct reg_data odpg_default_value[] = {
	{0x1034, 0x38000, MASK_ALL_BITS},
	{0x1038, 0x0, MASK_ALL_BITS},
	{0x10b0, 0x0, MASK_ALL_BITS},
	{0x10b8, 0x0, MASK_ALL_BITS},
	{0x10c0, 0x0, MASK_ALL_BITS},
	{0x10f0, 0x0, MASK_ALL_BITS},
	{0x10f4, 0x0, MASK_ALL_BITS},
	{0x10f8, 0xff, MASK_ALL_BITS},
	{0x10fc, 0xffff, MASK_ALL_BITS},
	{0x1130, 0x0, MASK_ALL_BITS},
	{0x1830, 0x2000000, MASK_ALL_BITS},
	{0x14d0, 0x0, MASK_ALL_BITS},
	{0x14d4, 0x0, MASK_ALL_BITS},
	{0x14d8, 0x0, MASK_ALL_BITS},
	{0x14dc, 0x0, MASK_ALL_BITS},
	{0x1454, 0x0, MASK_ALL_BITS},
	{0x1594, 0x0, MASK_ALL_BITS},
	{0x1598, 0x0, MASK_ALL_BITS},
	{0x159c, 0x0, MASK_ALL_BITS},
	{0x15a0, 0x0, MASK_ALL_BITS},
	{0x15a4, 0x0, MASK_ALL_BITS},
	{0x15a8, 0x0, MASK_ALL_BITS},
	{0x15ac, 0x0, MASK_ALL_BITS},
	{0x1604, 0x0, MASK_ALL_BITS},
	{0x1608, 0x0, MASK_ALL_BITS},
	{0x160c, 0x0, MASK_ALL_BITS},
	{0x1610, 0x0, MASK_ALL_BITS},
	{0x1614, 0x0, MASK_ALL_BITS},
	{0x1618, 0x0, MASK_ALL_BITS},
	{0x1624, 0x0, MASK_ALL_BITS},
	{0x1690, 0x0, MASK_ALL_BITS},
	{0x1694, 0x0, MASK_ALL_BITS},
	{0x1698, 0x0, MASK_ALL_BITS},
	{0x169c, 0x0, MASK_ALL_BITS},
	{0x14b8, 0x6f67, MASK_ALL_BITS},
	{0x1630, 0x0, MASK_ALL_BITS},
	{0x1634, 0x0, MASK_ALL_BITS},
	{0x1638, 0x0, MASK_ALL_BITS},
	{0x163c, 0x0, MASK_ALL_BITS},
	{0x16b0, 0x0, MASK_ALL_BITS},
	{0x16b4, 0x0, MASK_ALL_BITS},
	{0x16b8, 0x0, MASK_ALL_BITS},
	{0x16bc, 0x0, MASK_ALL_BITS},
	{0x16c0, 0x0, MASK_ALL_BITS},
	{0x16c4, 0x0, MASK_ALL_BITS},
	{0x16c8, 0x0, MASK_ALL_BITS},
	{0x16cc, 0x1, MASK_ALL_BITS},
	{0x16f0, 0x1, MASK_ALL_BITS},
	{0x16f4, 0x0, MASK_ALL_BITS},
	{0x16f8, 0x0, MASK_ALL_BITS},
	{0x16fc, 0x0, MASK_ALL_BITS}
};

static int ddr3_tip_bus_access(u32 dev_num, enum hws_access_type interface_access,
			       u32 if_id, enum hws_access_type phy_access,
			       u32 phy_id, enum hws_ddr_phy phy_type, u32 reg_addr,
			       u32 data_value, enum hws_operation oper_type);
static int ddr3_tip_pad_inv(u32 dev_num, u32 if_id);
static int ddr3_tip_rank_control(u32 dev_num, u32 if_id);

/*
 * Update global training parameters by data from user
 */
int ddr3_tip_tune_training_params(u32 dev_num,
				  struct tune_train_params *params)
{
	if (params->ck_delay != -1)
		ck_delay = params->ck_delay;
	if (params->ck_delay_16 != -1)
		ck_delay_16 = params->ck_delay_16;
	if (params->phy_reg3_val != -1)
		phy_reg3_val = params->phy_reg3_val;

	return MV_OK;
}

/*
 * Configure CS
 */
int ddr3_tip_configure_cs(u32 dev_num, u32 if_id, u32 cs_num, u32 enable)
{
	u32 data, addr_hi, data_high;
	u32 mem_index;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (enable == 1) {
		data = (tm->interface_params[if_id].bus_width ==
			BUS_WIDTH_8) ? 0 : 1;
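		/*
		 * SDRAM_ACCESS_CONTROL_REG holds one nibble per CS, as
		 * programmed below: bits [1:0] select the device width
		 * (x8/x16), bits [3:2] the low part of the area-size code;
		 * bit (16 + cs) enables address select mode and bit
		 * (20 + cs) carries the size-code MSB.
		 */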
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      SDRAM_ACCESS_CONTROL_REG, (data << (cs_num * 4)),
			      0x3 << (cs_num * 4)));
		mem_index = tm->interface_params[if_id].memory_size;

		addr_hi = mem_size_config[mem_index] & 0x3;
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      SDRAM_ACCESS_CONTROL_REG,
			      (addr_hi << (2 + cs_num * 4)),
			      0x3 << (2 + cs_num * 4)));

		data_high = (mem_size_config[mem_index] & 0x4) >> 2;
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      SDRAM_ACCESS_CONTROL_REG,
			      data_high << (20 + cs_num), 1 << (20 + cs_num)));

		/* Enable Address Select Mode */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      SDRAM_ACCESS_CONTROL_REG, 1 << (16 + cs_num),
			      1 << (16 + cs_num)));
	}
	switch (cs_num) {
	case 0:
	case 1:
	case 2:
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      DDR_CONTROL_LOW_REG, (enable << (cs_num + 11)),
			      1 << (cs_num + 11)));
		break;
	case 3:
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      DDR_CONTROL_LOW_REG, (enable << 15), 1 << 15));
		break;
	}

	return MV_OK;
}

/*
 * Calculate number of CS
 */
static int calc_cs_num(u32 dev_num, u32 if_id, u32 *cs_num)
{
	u32 cs;
	u32 bus_cnt;
	u32 cs_count;
	u32 cs_bitmask;
	u32 curr_cs_num = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES(); bus_cnt++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
		cs_count = 0;
		cs_bitmask = tm->interface_params[if_id].
			as_bus_params[bus_cnt].cs_bitmask;
		for (cs = 0; cs < MAX_CS_NUM; cs++) {
			if ((cs_bitmask >> cs) & 1)
				cs_count++;
		}

		if (curr_cs_num == 0) {
			curr_cs_num = cs_count;
		} else if (cs_count != curr_cs_num) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("CS number is different per bus (IF %d BUS %d cs_num %d curr_cs_num %d)\n",
					   if_id, bus_cnt, cs_count,
					   curr_cs_num));
			return MV_NOT_SUPPORTED;
		}
	}
	*cs_num = curr_cs_num;

	return MV_OK;
}

/*
 * Init Controller Flow
 */
int hws_ddr3_tip_init_controller(u32 dev_num, struct init_cntr_param *init_cntr_prm)
{
	u32 if_id;
	u32 cs_num;
	u32 t_refi = 0, t_hclk = 0, t_ckclk = 0, t_faw = 0, t_pd = 0,
		t_wr = 0, t2t = 0, txpdll = 0;
	u32 data_value = 0, bus_width = 0, page_size = 0, cs_cnt = 0,
		mem_mask = 0, bus_index = 0;
	enum hws_speed_bin speed_bin_index = SPEED_BIN_DDR_2133N;
	enum hws_mem_size memory_size = MEM_2G;
	enum hws_ddr_freq freq = init_freq;
	u32 cs_mask = 0;
	u32 cl_value = 0, cwl_val = 0;
	u32 refresh_interval_cnt = 0, bus_cnt = 0, adll_tap = 0;
	enum hws_access_type access_type = ACCESS_TYPE_UNICAST;
	u32 data_read[MAX_INTERFACE_NUM];
	struct hws_topology_map *tm = ddr3_get_topology_map();

	DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
			  ("Init_controller, do_mrs_phy=%d, is_ctrl64_bit=%d\n",
			   init_cntr_prm->do_mrs_phy,
			   init_cntr_prm->is_ctrl64_bit));

	if (init_cntr_prm->init_phy == 1) {
		CHECK_STATUS(ddr3_tip_configure_phy(dev_num));
	}

	if (generic_init_controller == 1) {
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
					  ("active IF %d\n", if_id));
			mem_mask = 0;
			for (bus_index = 0;
			     bus_index < GET_TOPOLOGY_NUM_OF_BUSES();
			     bus_index++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
				mem_mask |=
					tm->interface_params[if_id].
					as_bus_params[bus_index].mirror_enable_bitmask;
			}

			if (mem_mask != 0) {
				CHECK_STATUS(ddr3_tip_if_write
					     (dev_num, ACCESS_TYPE_MULTICAST,
					      if_id, CS_ENABLE_REG, 0,
					      0x8));
			}

			memory_size =
				tm->interface_params[if_id].
				memory_size;
			speed_bin_index =
				tm->interface_params[if_id].
				speed_bin_index;
			freq = init_freq;
			t_refi =
				(tm->interface_params[if_id].
				 interface_temp ==
				 HWS_TEMP_HIGH) ? TREFI_HIGH : TREFI_LOW;
			t_refi *= 1000;	/* psec */
			DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
					  ("mem_size %d speed_bin_ind %d freq %d t_refi %d\n",
					   memory_size, speed_bin_index, freq,
					   t_refi));
			/* HCLK & CK CLK in 2:1 [ps] */
			/* t_ckclk is external clock */
			t_ckclk = (MEGA / freq_val[freq]);
			/* t_hclk is internal clock */
			t_hclk = 2 * t_ckclk;
			refresh_interval_cnt = t_refi / t_hclk;	/* no units */
			bus_width =
				(DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask)
				 == 1) ? (16) : (32);

			if (init_cntr_prm->is_ctrl64_bit)
				bus_width = 64;

			data_value =
				(refresh_interval_cnt | 0x4000 |
				 ((bus_width ==
				   32) ? 0x8000 : 0) | 0x1000000) & ~(1 << 26);

			/* Interface Bus Width */
			/* SRMode */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      SDRAM_CONFIGURATION_REG, data_value,
				      0x100ffff));

			/* Interleave first command pre-charge enable (TBD) */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      SDRAM_OPEN_PAGE_CONTROL_REG, (1 << 10),
				      (1 << 10)));

			/* PHY configuration */
			/*
			 * Postamble Length = 1.5cc, Addresscntl to clk skew
			 * 1/2, Preamble length normal, parallel ADLL enable
			 */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      DRAM_PHY_CONFIGURATION, 0x28, 0x3e));
			if (init_cntr_prm->is_ctrl64_bit) {
				/* positive edge */
				CHECK_STATUS(ddr3_tip_if_write
					     (dev_num, access_type, if_id,
					      DRAM_PHY_CONFIGURATION, 0x0,
					      0xff80));
			}

			/* calibration block disable */
			/* Xbar Read buffer select (for Internal access) */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      CALIB_MACHINE_CTRL_REG, 0x1200c,
				      0x7dffe01c));
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      CALIB_MACHINE_CTRL_REG,
				      calibration_update_control << 3, 0x3 << 3));

			/* Pad calibration control - enable */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      CALIB_MACHINE_CTRL_REG, 0x1, 0x1));

			cs_mask = 0;
			data_value = 0x7;
			/*
			 * Address ctrl - Part of the Generic code
			 * The next configuration is done:
			 * 1) Memory Size
			 * 2) Bus_width
			 * 3) CS#
			 * 4) Page Number
			 * 5) t_faw
			 * Per Dunit get from the Map_topology the parameters:
			 * Bus_width
			 * t_faw is per Dunit not per CS
			 */
			page_size =
				(tm->interface_params[if_id].
				 bus_width ==
				 BUS_WIDTH_8) ? page_param[memory_size].
				page_size_8bit : page_param[memory_size].
				page_size_16bit;

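			/*
			 * Illustrative conversion (values assumed for the
			 * example only): with freq_val[freq] = 667, t_ckclk =
			 * MEGA / 667 = 1499 ps; for a speed-bin tFAW of
			 * 30000 ps, TIME_2_CLOCK_CYCLES(30000, 1499) yields
			 * 20, i.e. the rounded-up cycle count minus one,
			 * which is then shifted into bits [30:24] below.
			 */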
			t_faw =
				(page_size == 1) ? speed_bin_table(speed_bin_index,
								   SPEED_BIN_TFAW1K)
				: speed_bin_table(speed_bin_index,
						  SPEED_BIN_TFAW2K);

			data_value = TIME_2_CLOCK_CYCLES(t_faw, t_ckclk);
			data_value = data_value << 24;
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      SDRAM_ACCESS_CONTROL_REG, data_value,
				      0x7f000000));

			data_value =
				(tm->interface_params[if_id].
				 bus_width == BUS_WIDTH_8) ? 0 : 1;

			/* create merge cs mask for all cs available in dunit */
			for (bus_cnt = 0;
			     bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES();
			     bus_cnt++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
				cs_mask |=
					tm->interface_params[if_id].
					as_bus_params[bus_cnt].cs_bitmask;
			}
			DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
					  ("Init_controller IF %d cs_mask %d\n",
					   if_id, cs_mask));
			/*
			 * Configure the next upon the Map Topology - If the
			 * Dunit is CS0 Configure CS0, if it is multi CS
			 * configure them both: The Bus_width is the
			 * Memory Bus width - x8 or x16
			 */
			for (cs_cnt = 0; cs_cnt < NUM_OF_CS; cs_cnt++) {
				ddr3_tip_configure_cs(dev_num, if_id, cs_cnt,
						      ((cs_mask & (1 << cs_cnt)) ? 1
						       : 0));
			}

			if (init_cntr_prm->do_mrs_phy) {
				/*
				 * MR0 - Part of the Generic code
				 * The next configuration is done:
				 * 1) Burst Length
				 * 2) CAS Latency
				 * get for each dunit its Speed_bin &
				 * Target Frequency; from both parameters
				 * get the appropriate Cas_l from the CL table
				 */
				cl_value =
					tm->interface_params[if_id].
					cas_l;
				cwl_val =
					tm->interface_params[if_id].
					cas_wl;
				DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
						  ("cl_value 0x%x cwl_val 0x%x\n",
						   cl_value, cwl_val));

				data_value =
					((cl_mask_table[cl_value] & 0x1) << 2) |
					((cl_mask_table[cl_value] & 0xe) << 3);
				CHECK_STATUS(ddr3_tip_if_write
					     (dev_num, access_type, if_id,
					      MR0_REG, data_value,
					      (0x7 << 4) | (1 << 2)));
				CHECK_STATUS(ddr3_tip_if_write
					     (dev_num, access_type, if_id,
					      MR0_REG, twr_mask_table[t_wr + 1],
					      0xe00));

				/*
				 * MR1: Set RTT and DIC Design GL values
				 * configured by user
				 */
				CHECK_STATUS(ddr3_tip_if_write
					     (dev_num, ACCESS_TYPE_MULTICAST,
					      PARAM_NOT_CARE, MR1_REG,
					      g_dic | g_rtt_nom, 0x266));

				/* MR2 - Part of the Generic code */
				/*
				 * The next configuration is done:
				 * 1) SRT
				 * 2) CAS Write Latency
				 */
				data_value = (cwl_mask_table[cwl_val] << 3);
				data_value |=
					((tm->interface_params[if_id].
					  interface_temp ==
					  HWS_TEMP_HIGH) ? (1 << 7) : 0);
				CHECK_STATUS(ddr3_tip_if_write
					     (dev_num, access_type, if_id,
					      MR2_REG, data_value,
					      (0x7 << 3) | (0x1 << 7) | (0x3 << 9)));
			}

			ddr3_tip_write_odt(dev_num, access_type, if_id,
					   cl_value, cwl_val);
			ddr3_tip_set_timing(dev_num, access_type, if_id, freq);

			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      DUNIT_CONTROL_HIGH_REG, 0x177,
				      0x1000177));

			if (init_cntr_prm->is_ctrl64_bit) {
				/* disable 0.25 cc delay */
				CHECK_STATUS(ddr3_tip_if_write
					     (dev_num, access_type, if_id,
					      DUNIT_CONTROL_HIGH_REG, 0x0,
					      0x800));
			}

			/* reset bit 7 */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      DUNIT_CONTROL_HIGH_REG,
				      (init_cntr_prm->msys_init << 7), (1 << 7)));

			if (mode2_t != 0xff) {
				t2t = mode2_t;
			} else {
				/* calculate number of CS (per interface) */
				CHECK_STATUS(calc_cs_num
					     (dev_num, if_id, &cs_num));
				t2t = (cs_num == 1) ? 0 : 1;
			}

			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      DDR_CONTROL_LOW_REG, t2t << 3,
				      0x3 << 3));
			/* move the block to ddr3_tip_set_timing - start */
			t_pd = GET_MAX_VALUE(t_ckclk * 3,
					     speed_bin_table(speed_bin_index,
							     SPEED_BIN_TPD));
			t_pd = TIME_2_CLOCK_CYCLES(t_pd, t_ckclk);
			txpdll = GET_MAX_VALUE(t_ckclk * 10, 24);
			txpdll = CEIL_DIVIDE((txpdll - 1), t_ckclk);
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      DDR_TIMING_REG, txpdll << 4,
				      0x1f << 4));
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      DDR_TIMING_REG, 0x28 << 9, 0x3f << 9));
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      DDR_TIMING_REG, 0xa << 21, 0xff << 21));

			/* move the block to ddr3_tip_set_timing - end */
			/* AUTO_ZQC_TIMING */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      TIMING_REG, (AUTO_ZQC_TIMING | (2 << 20)),
				      0x3fffff));
			CHECK_STATUS(ddr3_tip_if_read
				     (dev_num, access_type, if_id,
				      DRAM_PHY_CONFIGURATION, data_read, 0x30));
			data_value =
				(data_read[if_id] == 0) ? (1 << 11) : 0;
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      DUNIT_CONTROL_HIGH_REG, data_value,
				      (1 << 11)));

			/* Set Active control for ODT write transactions */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, ACCESS_TYPE_MULTICAST,
				      PARAM_NOT_CARE, 0x1494, g_odt_config,
				      MASK_ALL_BITS));
		}
	} else {
#ifdef STATIC_ALGO_SUPPORT
		CHECK_STATUS(ddr3_tip_static_init_controller(dev_num));
#if defined(CONFIG_ARMADA_38X) || defined(CONFIG_ARMADA_39X)
		CHECK_STATUS(ddr3_tip_static_phy_init_controller(dev_num));
#endif
#endif /* STATIC_ALGO_SUPPORT */
	}

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		CHECK_STATUS(ddr3_tip_rank_control(dev_num, if_id));

		if (init_cntr_prm->do_mrs_phy) {
			CHECK_STATUS(ddr3_tip_pad_inv(dev_num, if_id));
		}

		/* Pad calibration control - disable */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      CALIB_MACHINE_CTRL_REG, 0x0, 0x1));
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      CALIB_MACHINE_CTRL_REG,
			      calibration_update_control << 3, 0x3 << 3));
	}

	CHECK_STATUS(ddr3_tip_enable_init_sequence(dev_num));

	if (delay_enable != 0) {
		adll_tap = MEGA / (freq_val[freq] * 64);
		ddr3_tip_cmd_addr_init_delay(dev_num, adll_tap);
	}

	return MV_OK;
}

/*
 * Load Topology map
 */
int hws_ddr3_tip_load_topology_map(u32 dev_num, struct hws_topology_map *tm)
{
	enum hws_speed_bin speed_bin_index;
	enum hws_ddr_freq freq = DDR_FREQ_LIMIT;
	u32 if_id;

	freq_val[DDR_FREQ_LOW_FREQ] = dfs_low_freq;
	tm = ddr3_get_topology_map();
	CHECK_STATUS(ddr3_tip_get_first_active_if
		     ((u8)dev_num, tm->if_act_mask,
		      &first_active_if));
	DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
			  ("board IF_Mask=0x%x num_of_bus_per_interface=0x%x\n",
			   tm->if_act_mask,
			   tm->num_of_bus_per_interface));

	/*
	 * if CL, CWL values are missing in topology map, then fill them
	 * according to speedbin tables
	 */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		speed_bin_index =
			tm->interface_params[if_id].speed_bin_index;
		/* TBD memory frequency of interface 0 only is used ! */
		freq = tm->interface_params[first_active_if].memory_freq;

		DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
				  ("speed_bin_index =%d freq=%d cl=%d cwl=%d\n",
				   speed_bin_index, freq_val[freq],
				   tm->interface_params[if_id].
				   cas_l,
				   tm->interface_params[if_id].
				   cas_wl));

		if (tm->interface_params[if_id].cas_l == 0) {
			tm->interface_params[if_id].cas_l =
				cas_latency_table[speed_bin_index].cl_val[freq];
		}

		if (tm->interface_params[if_id].cas_wl == 0) {
			tm->interface_params[if_id].cas_wl =
				cas_write_latency_table[speed_bin_index].cl_val[freq];
		}
	}

	return MV_OK;
}

/*
 * RANK Control Flow
 */
static int ddr3_tip_rank_control(u32 dev_num, u32 if_id)
{
	u32 data_value = 0, bus_cnt;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (bus_cnt = 1; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES(); bus_cnt++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
		if ((tm->interface_params[if_id].
		     as_bus_params[0].cs_bitmask !=
		     tm->interface_params[if_id].
		     as_bus_params[bus_cnt].cs_bitmask) ||
		    (tm->interface_params[if_id].
		     as_bus_params[0].mirror_enable_bitmask !=
		     tm->interface_params[if_id].
		     as_bus_params[bus_cnt].mirror_enable_bitmask))
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("WARNING: Wrong configuration for pup #%d - CS mask and CS mirroring for all pups should be the same\n",
					   bus_cnt));
	}

	data_value |= tm->interface_params[if_id].
		as_bus_params[0].cs_bitmask;
	data_value |= tm->interface_params[if_id].
		as_bus_params[0].mirror_enable_bitmask << 4;

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id, RANK_CTRL_REG,
		      data_value, 0xff));

	return MV_OK;
}

/*
 * PAD Inverse Flow
 */
static int ddr3_tip_pad_inv(u32 dev_num, u32 if_id)
{
	u32 bus_cnt, data_value, ck_swap_pup_ctrl;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES(); bus_cnt++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
		if (tm->interface_params[if_id].
		    as_bus_params[bus_cnt].is_dqs_swap == 1) {
			/* dqs swap */
			ddr3_tip_bus_read_modify_write(dev_num, ACCESS_TYPE_UNICAST,
						       if_id, bus_cnt,
						       DDR_PHY_DATA,
						       PHY_CONTROL_PHY_REG, 0xc0,
						       0xc0);
		}

		if (tm->interface_params[if_id].
		    as_bus_params[bus_cnt].is_ck_swap == 1) {
			if (bus_cnt <= 1)
				data_value = 0x5 << 2;
			else
				data_value = 0xa << 2;

			/* mask equals data */
			/* ck swap pup is only control pup #0 ! */
			ck_swap_pup_ctrl = 0;
			ddr3_tip_bus_read_modify_write(dev_num, ACCESS_TYPE_UNICAST,
						       if_id, ck_swap_pup_ctrl,
						       DDR_PHY_CONTROL,
						       PHY_CONTROL_PHY_REG,
						       data_value, data_value);
		}
	}

	return MV_OK;
}

/*
 * Run Training Flow
 */
int hws_ddr3_tip_run_alg(u32 dev_num, enum hws_algo_type algo_type)
{
	int ret = MV_OK, ret_tune = MV_OK;

#ifdef ODT_TEST_SUPPORT
	if (finger_test == 1)
		return odt_test(dev_num, algo_type);
#endif

	if (algo_type == ALGO_TYPE_DYNAMIC) {
		ret = ddr3_tip_ddr3_auto_tune(dev_num);
	} else {
#ifdef STATIC_ALGO_SUPPORT
		{
			enum hws_ddr_freq freq;
			freq = init_freq;

			/* add to mask */
			if (is_adll_calib_before_init != 0) {
				printf("with adll calib before init\n");
				adll_calibration(dev_num, ACCESS_TYPE_MULTICAST,
						 0, freq);
			}
			/*
			 * Frequency per interface is not relevant,
			 * only interface 0
			 */
			ret = ddr3_tip_run_static_alg(dev_num,
						      freq);
		}
#endif
	}

	if (ret != MV_OK) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("Run_alg: tuning failed %d\n", ret));
	}

	return ret;
}

#ifdef ODT_TEST_SUPPORT
/*
 * ODT Test
 */
static int odt_test(u32 dev_num, enum hws_algo_type algo_type)
{
	int ret = MV_OK, ret_tune = MV_OK;
	int pfinger_val = 0, nfinger_val;

	for (pfinger_val = p_finger_start; pfinger_val <= p_finger_end;
	     pfinger_val += p_finger_step) {
		for (nfinger_val = n_finger_start; nfinger_val <= n_finger_end;
		     nfinger_val += n_finger_step) {
			if (finger_test != 0) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
						  ("pfinger_val %d nfinger_val %d\n",
						   pfinger_val, nfinger_val));
				p_finger = pfinger_val;
				n_finger = nfinger_val;
			}

			if (algo_type == ALGO_TYPE_DYNAMIC) {
				ret = ddr3_tip_ddr3_auto_tune(dev_num);
			} else {
				/*
				 * Frequency per interface is not relevant,
				 * only interface 0
				 */
				ret = ddr3_tip_run_static_alg(dev_num,
							      init_freq);
			}
		}
	}

	if (ret_tune != MV_OK) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("Run_alg: tuning failed %d\n", ret_tune));
		ret = (ret == MV_OK) ? ret_tune : ret;
	}

	return ret;
}
#endif

/*
 * Select Controller
 */
int hws_ddr3_tip_select_ddr_controller(u32 dev_num, int enable)
{
	if (config_func_info[dev_num].tip_dunit_mux_select_func != NULL) {
		return config_func_info[dev_num].
			tip_dunit_mux_select_func((u8)dev_num, enable);
	}

	return MV_FAIL;
}

/*
 * Dunit Register Write
 */
int ddr3_tip_if_write(u32 dev_num, enum hws_access_type interface_access,
		      u32 if_id, u32 reg_addr, u32 data_value, u32 mask)
{
	if (config_func_info[dev_num].tip_dunit_write_func != NULL) {
		return config_func_info[dev_num].
			tip_dunit_write_func((u8)dev_num, interface_access,
					     if_id, reg_addr,
					     data_value, mask);
	}

	return MV_FAIL;
}

/*
 * Dunit Register Read
 */
int ddr3_tip_if_read(u32 dev_num, enum hws_access_type interface_access,
		     u32 if_id, u32 reg_addr, u32 *data, u32 mask)
{
	if (config_func_info[dev_num].tip_dunit_read_func != NULL) {
		return config_func_info[dev_num].
			tip_dunit_read_func((u8)dev_num, interface_access,
					    if_id, reg_addr,
					    data, mask);
	}

	return MV_FAIL;
}

/*
 * Dunit Register Polling
 */
int ddr3_tip_if_polling(u32 dev_num, enum hws_access_type access_type,
			u32 if_id, u32 exp_value, u32 mask, u32 offset,
			u32 poll_tries)
{
	u32 poll_cnt = 0, interface_num = 0, start_if, end_if;
	u32 read_data[MAX_INTERFACE_NUM];
	int ret;
	int is_fail = 0, is_if_fail;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (access_type == ACCESS_TYPE_MULTICAST) {
		start_if = 0;
		end_if = MAX_INTERFACE_NUM - 1;
	} else {
		start_if = if_id;
		end_if = if_id;
	}

	for (interface_num = start_if; interface_num <= end_if; interface_num++) {
		/* polling bit 3 for n times */
		VALIDATE_ACTIVE(tm->if_act_mask, interface_num);

		is_if_fail = 0;
		for (poll_cnt = 0; poll_cnt < poll_tries; poll_cnt++) {
			ret =
				ddr3_tip_if_read(dev_num, ACCESS_TYPE_UNICAST,
						 interface_num, offset, read_data,
						 mask);
			if (ret != MV_OK)
				return ret;

			if (read_data[interface_num] == exp_value)
				break;
		}

		if (poll_cnt >= poll_tries) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("max poll IF #%d\n", interface_num));
			is_fail = 1;
			is_if_fail = 1;
		}

		training_result[training_stage][interface_num] =
			(is_if_fail == 1) ? TEST_FAILED : TEST_SUCCESS;
	}

	return (is_fail == 0) ? MV_OK : MV_FAIL;
}

/*
 * Bus read access
 */
int ddr3_tip_bus_read(u32 dev_num, u32 if_id,
		      enum hws_access_type phy_access, u32 phy_id,
		      enum hws_ddr_phy phy_type, u32 reg_addr, u32 *data)
{
	u32 bus_index = 0;
	u32 data_read[MAX_INTERFACE_NUM];
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (phy_access == ACCESS_TYPE_MULTICAST) {
		for (bus_index = 0; bus_index < GET_TOPOLOGY_NUM_OF_BUSES();
		     bus_index++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
			CHECK_STATUS(ddr3_tip_bus_access
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id, ACCESS_TYPE_UNICAST,
				      bus_index, phy_type, reg_addr, 0,
				      OPERATION_READ));
			CHECK_STATUS(ddr3_tip_if_read
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      PHY_REG_FILE_ACCESS, data_read,
				      MASK_ALL_BITS));
			data[bus_index] = (data_read[if_id] & 0xffff);
		}
	} else {
		CHECK_STATUS(ddr3_tip_bus_access
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      phy_access, phy_id, phy_type, reg_addr, 0,
			      OPERATION_READ));
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      PHY_REG_FILE_ACCESS, data_read, MASK_ALL_BITS));

		/*
		 * only 16 lsb bit are valid in Phy (each register is different,
		 * some can actually be less than 16 bits)
		 */
		*data = (data_read[if_id] & 0xffff);
	}

	return MV_OK;
}

/*
 * Bus write access
 */
int ddr3_tip_bus_write(u32 dev_num, enum hws_access_type interface_access,
		       u32 if_id, enum hws_access_type phy_access,
		       u32 phy_id, enum hws_ddr_phy phy_type, u32 reg_addr,
		       u32 data_value)
{
	CHECK_STATUS(ddr3_tip_bus_access
		     (dev_num, interface_access, if_id, phy_access,
		      phy_id, phy_type, reg_addr, data_value, OPERATION_WRITE));

	return MV_OK;
}

/*
 * Bus access routine (relevant for both read & write)
 */
static int ddr3_tip_bus_access(u32 dev_num, enum hws_access_type interface_access,
			       u32 if_id, enum hws_access_type phy_access,
			       u32 phy_id, enum hws_ddr_phy phy_type, u32 reg_addr,
			       u32 data_value, enum hws_operation oper_type)
{
	u32 addr_low = 0x3f & reg_addr;
	u32 addr_hi = ((0xc0 & reg_addr) >> 6);
	u32 data_p1 =
		(oper_type << 30) + (addr_hi << 28) + (phy_access << 27) +
		(phy_type << 26) + (phy_id << 22) + (addr_low << 16) +
		(data_value & 0xffff);
	u32 data_p2 = data_p1 + (1 << 31);
	u32 start_if, end_if;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, interface_access, if_id, PHY_REG_FILE_ACCESS,
		      data_p1, MASK_ALL_BITS));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, interface_access, if_id, PHY_REG_FILE_ACCESS,
		      data_p2, MASK_ALL_BITS));

	if (interface_access == ACCESS_TYPE_UNICAST) {
		start_if = if_id;
		end_if = if_id;
	} else {
		start_if = 0;
		end_if = MAX_INTERFACE_NUM - 1;
	}

	/* polling for read/write execution done */
	for (if_id = start_if; if_id <= end_if; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		CHECK_STATUS(is_bus_access_done
			     (dev_num, if_id, PHY_REG_FILE_ACCESS, 31));
	}

	return MV_OK;
}

/*
 * Check bus access done
 */
static int is_bus_access_done(u32 dev_num, u32 if_id, u32 dunit_reg_adrr,
			      u32 bit)
{
	u32 rd_data = 1;
	u32 cnt = 0;
	u32 data_read[MAX_INTERFACE_NUM];

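	/*
	 * Read the register and spin until the requested bit clears or
	 * MAX_POLLING_ITERATIONS is exceeded; MV_FAIL is returned on a
	 * timeout.
	 */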
	CHECK_STATUS(ddr3_tip_if_read
		     (dev_num, ACCESS_TYPE_UNICAST, if_id, dunit_reg_adrr,
		      data_read, MASK_ALL_BITS));
	rd_data = data_read[if_id];
	rd_data &= (1 << bit);

	while (rd_data != 0) {
		if (cnt++ >= MAX_POLLING_ITERATIONS)
			break;

		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      dunit_reg_adrr, data_read, MASK_ALL_BITS));
		rd_data = data_read[if_id];
		rd_data &= (1 << bit);
	}

	if (cnt < MAX_POLLING_ITERATIONS)
		return MV_OK;
	else
		return MV_FAIL;
}

/*
 * Phy read-modify-write
 */
int ddr3_tip_bus_read_modify_write(u32 dev_num, enum hws_access_type access_type,
				   u32 interface_id, u32 phy_id,
				   enum hws_ddr_phy phy_type, u32 reg_addr,
				   u32 data_value, u32 reg_mask)
{
	u32 data_val = 0, if_id, start_if, end_if;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (access_type == ACCESS_TYPE_MULTICAST) {
		start_if = 0;
		end_if = MAX_INTERFACE_NUM - 1;
	} else {
		start_if = interface_id;
		end_if = interface_id;
	}

	for (if_id = start_if; if_id <= end_if; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		CHECK_STATUS(ddr3_tip_bus_read
			     (dev_num, if_id, ACCESS_TYPE_UNICAST, phy_id,
			      phy_type, reg_addr, &data_val));
		data_value = (data_val & (~reg_mask)) | (data_value & reg_mask);
		CHECK_STATUS(ddr3_tip_bus_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      ACCESS_TYPE_UNICAST, phy_id, phy_type, reg_addr,
			      data_value));
	}

	return MV_OK;
}

/*
 * ADLL Calibration
 */
int adll_calibration(u32 dev_num, enum hws_access_type access_type,
		     u32 if_id, enum hws_ddr_freq frequency)
{
	struct hws_tip_freq_config_info freq_config_info;
	u32 bus_cnt = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/* Reset Diver_b assert -> de-assert */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, if_id, SDRAM_CONFIGURATION_REG,
		      0, 0x10000000));
	mdelay(10);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, if_id, SDRAM_CONFIGURATION_REG,
		      0x10000000, 0x10000000));

	if (config_func_info[dev_num].tip_get_freq_config_info_func != NULL) {
		CHECK_STATUS(config_func_info[dev_num].
			     tip_get_freq_config_info_func((u8)dev_num, frequency,
							   &freq_config_info));
	} else {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("tip_get_freq_config_info_func is NULL"));
		return MV_NOT_INITIALIZED;
	}

	for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES(); bus_cnt++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
		CHECK_STATUS(ddr3_tip_bus_read_modify_write
			     (dev_num, access_type, if_id, bus_cnt,
			      DDR_PHY_DATA, BW_PHY_REG,
			      freq_config_info.bw_per_freq << 8, 0x700));
		CHECK_STATUS(ddr3_tip_bus_read_modify_write
			     (dev_num, access_type, if_id, bus_cnt,
			      DDR_PHY_DATA, RATE_PHY_REG,
			      freq_config_info.rate_per_freq, 0x7));
	}

	/* DUnit to Phy drive post edge, ADLL reset assert de-assert */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, if_id, DRAM_PHY_CONFIGURATION,
		      0, (0x80000000 | 0x40000000)));
	mdelay(100 / (freq_val[frequency] / freq_val[DDR_FREQ_LOW_FREQ]));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, if_id, DRAM_PHY_CONFIGURATION,
		      (0x80000000 | 0x40000000), (0x80000000 | 0x40000000)));

	/* polling for ADLL Done */
	if (ddr3_tip_if_polling(dev_num, access_type, if_id,
				0x3ff03ff, 0x3ff03ff, PHY_LOCK_STATUS_REG,
				MAX_POLLING_ITERATIONS) != MV_OK) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("Freq_set: DDR3 poll failed(1)"));
	}

	/* pup data_pup reset assert -> deassert */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, if_id, SDRAM_CONFIGURATION_REG,
		      0, 0x60000000));
	mdelay(10);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, if_id, SDRAM_CONFIGURATION_REG,
		      0x60000000, 0x60000000));

	return MV_OK;
}

int ddr3_tip_freq_set(u32 dev_num, enum hws_access_type access_type,
		      u32 if_id, enum hws_ddr_freq frequency)
{
	u32 cl_value = 0, cwl_value = 0, mem_mask = 0, val = 0,
		bus_cnt = 0, t_hclk = 0, t_wr = 0,
		refresh_interval_cnt = 0, cnt_id;
	u32 t_refi = 0, end_if, start_if;
	u32 bus_index = 0;
	int is_dll_off = 0;
	enum hws_speed_bin speed_bin_index = 0;
	struct hws_tip_freq_config_info freq_config_info;
	enum hws_result *flow_result = training_result[training_stage];
	u32 adll_tap = 0;
	u32 cs_mask[MAX_INTERFACE_NUM];
	struct hws_topology_map *tm = ddr3_get_topology_map();

	DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
			  ("dev %d access %d IF %d freq %d\n", dev_num,
			   access_type, if_id, frequency));

	if (frequency == DDR_FREQ_LOW_FREQ)
		is_dll_off = 1;
	if (access_type == ACCESS_TYPE_MULTICAST) {
		start_if = 0;
		end_if = MAX_INTERFACE_NUM - 1;
	} else {
		start_if = if_id;
		end_if = if_id;
	}

	/* calculate interface cs mask - Oferb 4/11 */
	/* speed bin can be different for each interface */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		/* cs enable is active low */
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		cs_mask[if_id] = CS_BIT_MASK;
		training_result[training_stage][if_id] = TEST_SUCCESS;
		ddr3_tip_calc_cs_mask(dev_num, if_id, effective_cs,
				      &cs_mask[if_id]);
	}

	/* speed bin can be different for each interface */
	/*
	 * moti b - need to remove the loop for multicast access functions
	 * and loop the unicast access functions
	 */
	for (if_id = start_if; if_id <= end_if; if_id++) {
		if (IS_ACTIVE(tm->if_act_mask, if_id) == 0)
			continue;

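		/*
		 * Per-interface frequency switch, as implemented below:
		 * pick CL/CWL for the target frequency, block new
		 * transactions and enter self-refresh, reprogram the PLL
		 * divider and the refresh/CL/CWL/tWR fields, reset the PHY
		 * ADLLs and wait for lock, restore timing, then exit
		 * self-refresh, issue a refresh command and release the
		 * DFS block.
		 */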
		flow_result[if_id] = TEST_SUCCESS;
		speed_bin_index =
			tm->interface_params[if_id].speed_bin_index;
		if (tm->interface_params[if_id].memory_freq ==
		    frequency) {
			cl_value =
				tm->interface_params[if_id].cas_l;
			cwl_value =
				tm->interface_params[if_id].cas_wl;
		} else {
			cl_value =
				cas_latency_table[speed_bin_index].cl_val[frequency];
			cwl_value =
				cas_write_latency_table[speed_bin_index].
				cl_val[frequency];
		}

		DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
				  ("Freq_set dev 0x%x access 0x%x if 0x%x freq 0x%x speed %d:\n\t",
				   dev_num, access_type, if_id,
				   frequency, speed_bin_index));

		for (cnt_id = 0; cnt_id < DDR_FREQ_LIMIT; cnt_id++) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
					  ("%d ",
					   cas_latency_table[speed_bin_index].
					   cl_val[cnt_id]));
		}

		DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, ("\n"));
		mem_mask = 0;
		for (bus_index = 0; bus_index < GET_TOPOLOGY_NUM_OF_BUSES();
		     bus_index++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
			mem_mask |=
				tm->interface_params[if_id].
				as_bus_params[bus_index].mirror_enable_bitmask;
		}

		if (mem_mask != 0) {
			/* motib redundant in KW28 */
			CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
						       if_id,
						       CS_ENABLE_REG, 0, 0x8));
		}

		/* dll state after exiting SR */
		if (is_dll_off == 1) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      DFS_REG, 0x1, 0x1));
		} else {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      DFS_REG, 0, 0x1));
		}

		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      DUNIT_MMASK_REG, 0, 0x1));
		/* DFS - block transactions */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      DFS_REG, 0x2, 0x2));

		/* disable ODT in case of dll off */
		if (is_dll_off == 1) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      0x1874, 0, 0x244));
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      0x1884, 0, 0x244));
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      0x1894, 0, 0x244));
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      0x18a4, 0, 0x244));
		}

		/* DFS - Enter Self-Refresh */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id, DFS_REG, 0x4,
			      0x4));
		/* polling on self refresh entry */
		if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST,
					if_id, 0x8, 0x8, DFS_REG,
					MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("Freq_set: DDR3 poll failed on SR entry\n"));
		}

		/* PLL configuration */
		if (config_func_info[dev_num].tip_set_freq_divider_func != NULL) {
			config_func_info[dev_num].
				tip_set_freq_divider_func(dev_num, if_id,
							  frequency);
		}

		/* PLL configuration End */

		/* adjust t_refi to new frequency */
		t_refi = (tm->interface_params[if_id].interface_temp ==
			  HWS_TEMP_HIGH) ? TREFI_HIGH : TREFI_LOW;
		t_refi *= 1000;	/* psec */

		/* HCLK in [ps] */
		t_hclk = MEGA / (freq_val[frequency] / 2);
		refresh_interval_cnt = t_refi / t_hclk;	/* no units */
		val = 0x4000 | refresh_interval_cnt;
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      SDRAM_CONFIGURATION_REG, val, 0x7fff));

		/* DFS - CL/CWL/WR parameters after exiting SR */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id, DFS_REG,
			      (cl_mask_table[cl_value] << 8), 0xf00));
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id, DFS_REG,
			      (cwl_mask_table[cwl_value] << 12), 0x7000));
		t_wr = speed_bin_table(speed_bin_index, SPEED_BIN_TWR);
		t_wr = (t_wr / 1000);
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id, DFS_REG,
			      (twr_mask_table[t_wr + 1] << 16), 0x70000));

		/* Restore original RTT values if returning from DLL OFF mode */
		if (is_dll_off == 1) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id, 0x1874,
				      g_dic | g_rtt_nom, 0x266));
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id, 0x1884,
				      g_dic | g_rtt_nom, 0x266));
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id, 0x1894,
				      g_dic | g_rtt_nom, 0x266));
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id, 0x18a4,
				      g_dic | g_rtt_nom, 0x266));
		}

		/* Reset Diver_b assert -> de-assert */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      SDRAM_CONFIGURATION_REG, 0, 0x10000000));
		mdelay(10);
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      SDRAM_CONFIGURATION_REG, 0x10000000, 0x10000000));

		/* Adll configuration function of process and Frequency */
		if (config_func_info[dev_num].tip_get_freq_config_info_func != NULL) {
			CHECK_STATUS(config_func_info[dev_num].
				     tip_get_freq_config_info_func(dev_num, frequency,
								   &freq_config_info));
		}
		/* TBD check milo5 using device ID ? */
		for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES();
		     bus_cnt++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
			CHECK_STATUS(ddr3_tip_bus_read_modify_write
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id, bus_cnt, DDR_PHY_DATA,
				      0x92,
				      freq_config_info.bw_per_freq << 8
				      /*freq_mask[dev_num][frequency] << 8 */
				      , 0x700));
			CHECK_STATUS(ddr3_tip_bus_read_modify_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      bus_cnt, DDR_PHY_DATA, 0x94,
				      freq_config_info.rate_per_freq, 0x7));
		}

		/* DUnit to Phy drive post edge, ADLL reset assert de-assert */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      DRAM_PHY_CONFIGURATION, 0,
			      (0x80000000 | 0x40000000)));
		mdelay(100 / (freq_val[frequency] / freq_val[DDR_FREQ_LOW_FREQ]));
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      DRAM_PHY_CONFIGURATION, (0x80000000 | 0x40000000),
			      (0x80000000 | 0x40000000)));

		/* polling for ADLL Done */
		if (ddr3_tip_if_polling
		    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x3ff03ff,
		     0x3ff03ff, PHY_LOCK_STATUS_REG,
		     MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("Freq_set: DDR3 poll failed(1)\n"));
		}

		/* pup data_pup reset assert -> deassert */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      SDRAM_CONFIGURATION_REG, 0, 0x60000000));
		mdelay(10);
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      SDRAM_CONFIGURATION_REG, 0x60000000, 0x60000000));

		/* Set proper timing params before exiting Self-Refresh */
		ddr3_tip_set_timing(dev_num, access_type, if_id, frequency);
		if (delay_enable != 0) {
			adll_tap = MEGA / (freq_val[frequency] * 64);
			ddr3_tip_cmd_addr_init_delay(dev_num, adll_tap);
		}

		/* Exit SR */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id, DFS_REG, 0,
			      0x4));
		if (ddr3_tip_if_polling
		    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x8, DFS_REG,
		     MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("Freq_set: DDR3 poll failed(2)"));
		}

		/* Refresh Command */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      SDRAM_OPERATION_REG, 0x2, 0xf1f));
		if (ddr3_tip_if_polling
		    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1f,
		     SDRAM_OPERATION_REG, MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("Freq_set: DDR3 poll failed(3)"));
		}

		/* Release DFS Block */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id, DFS_REG, 0,
			      0x2));
		/* Controller to MBUS Retry - normal */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id, DUNIT_MMASK_REG,
			      0x1, 0x1));

		/* MR0: Burst Length 8, CL, Auto_precharge 0x16cc */
		val =
			((cl_mask_table[cl_value] & 0x1) << 2) |
			((cl_mask_table[cl_value] & 0xe) << 3);
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id, MR0_REG,
			      val, (0x7 << 4) | (1 << 2)));
		/* MR2: CWL = 10, Auto Self-Refresh - disable */
		val = (cwl_mask_table[cwl_value] << 3);
		/*
		 * nklein 24.10.13 - should not be here - leave value as set in
		 * the init configuration val |= (1 << 9);
		 * val |= ((tm->interface_params[if_id].
		 * interface_temp == HWS_TEMP_HIGH) ?
		 * (1 << 7) : 0);
		 */
		/* nklein 24.10.13 - see above comment */
		CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
					       if_id, MR2_REG,
					       val, (0x7 << 3)));

		/* ODT TIMING */
		val = ((cl_value - cwl_value + 1) << 4) |
			((cl_value - cwl_value + 6) << 8) |
			((cl_value - 1) << 12) | ((cl_value + 6) << 16);
		CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
					       if_id, ODT_TIMING_LOW,
					       val, 0xffff0));
		val = 0x71 | ((cwl_value - 1) << 8) | ((cwl_value + 5) << 12);
		CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
					       if_id, ODT_TIMING_HI_REG,
					       val, 0xffff));

		/* ODT Active */
		CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
					       if_id,
					       DUNIT_ODT_CONTROL_REG,
					       0xf, 0xf));

		/* re-write CL */
		val = ((cl_mask_table[cl_value] & 0x1) << 2) |
			((cl_mask_table[cl_value] & 0xe) << 3);
		CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
					       0, MR0_REG, val,
					       (0x7 << 4) | (1 << 2)));

		/* re-write CWL */
		val = (cwl_mask_table[cwl_value] << 3);
		CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask, MRS2_CMD,
						    val, (0x7 << 3)));
		CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
					       0, MR2_REG, val, (0x7 << 3)));

		if (mem_mask != 0) {
			CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
						       if_id,
						       CS_ENABLE_REG,
						       1 << 3, 0x8));
		}
	}

	return MV_OK;
}

/*
 * Set ODT values
 */
static int ddr3_tip_write_odt(u32 dev_num, enum hws_access_type access_type,
			      u32 if_id, u32 cl_value, u32 cwl_value)
{
	/* ODT TIMING */
	u32 val = (cl_value - cwl_value + 6);

	val = ((cl_value - cwl_value + 1) << 4) | ((val & 0xf) << 8) |
		(((cl_value - 1) & 0xf) << 12) |
		(((cl_value + 6) & 0xf) << 16) | (((val & 0x10) >> 4) << 21);
	val |= (((cl_value - 1) >> 4) << 22) | (((cl_value + 6) >> 4) << 23);

	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       ODT_TIMING_LOW, val, 0xffff0));
	val = 0x71 | ((cwl_value - 1) << 8) | ((cwl_value + 5) << 12);
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       ODT_TIMING_HI_REG, val, 0xffff));
	if (odt_additional == 1) {
		CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
					       if_id,
					       SDRAM_ODT_CONTROL_HIGH_REG,
					       0xf, 0xf));
	}

	/* ODT Active */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       DUNIT_ODT_CONTROL_REG, 0xf, 0xf));

	return MV_OK;
}

/*
 * Set Timing values for training
 */
static int ddr3_tip_set_timing(u32 dev_num, enum hws_access_type access_type,
			       u32 if_id, enum hws_ddr_freq frequency)
{
	u32 t_ckclk = 0, t_ras = 0;
	u32 t_rcd = 0, t_rp = 0, t_wr = 0, t_wtr = 0, t_rrd = 0, t_rtp = 0,
		t_rfc = 0, t_mod = 0;
	u32 val = 0, page_size = 0;
	enum hws_speed_bin speed_bin_index;
	enum hws_mem_size memory_size = MEM_2G;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	speed_bin_index = tm->interface_params[if_id].speed_bin_index;
	memory_size = tm->interface_params[if_id].memory_size;
	page_size =
		(tm->interface_params[if_id].bus_width ==
		 BUS_WIDTH_8) ? page_param[memory_size].
		page_size_8bit : page_param[memory_size].page_size_16bit;
	t_ckclk = (MEGA / freq_val[frequency]);
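	/*
	 * The speed-bin values below are in picoseconds; tRRD, tRTP and
	 * tWTR are first clamped to at least four clock periods with
	 * GET_MAX_VALUE, then every value is converted to clock cycles
	 * with TIME_2_CLOCK_CYCLES before being packed into the SDRAM
	 * timing registers.
	 */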
	t_rrd = (page_size == 1) ? speed_bin_table(speed_bin_index,
						   SPEED_BIN_TRRD1K) :
		speed_bin_table(speed_bin_index, SPEED_BIN_TRRD2K);
	t_rrd = GET_MAX_VALUE(t_ckclk * 4, t_rrd);
	t_rtp = GET_MAX_VALUE(t_ckclk * 4, speed_bin_table(speed_bin_index,
							   SPEED_BIN_TRTP));
	t_wtr = GET_MAX_VALUE(t_ckclk * 4, speed_bin_table(speed_bin_index,
							   SPEED_BIN_TWTR));
	t_ras = TIME_2_CLOCK_CYCLES(speed_bin_table(speed_bin_index,
						    SPEED_BIN_TRAS),
				    t_ckclk);
	t_rcd = TIME_2_CLOCK_CYCLES(speed_bin_table(speed_bin_index,
						    SPEED_BIN_TRCD),
				    t_ckclk);
	t_rp = TIME_2_CLOCK_CYCLES(speed_bin_table(speed_bin_index,
						   SPEED_BIN_TRP),
				   t_ckclk);
	t_wr = TIME_2_CLOCK_CYCLES(speed_bin_table(speed_bin_index,
						   SPEED_BIN_TWR),
				   t_ckclk);
	t_wtr = TIME_2_CLOCK_CYCLES(t_wtr, t_ckclk);
	t_rrd = TIME_2_CLOCK_CYCLES(t_rrd, t_ckclk);
	t_rtp = TIME_2_CLOCK_CYCLES(t_rtp, t_ckclk);
	t_rfc = TIME_2_CLOCK_CYCLES(rfc_table[memory_size] * 1000, t_ckclk);
	t_mod = GET_MAX_VALUE(t_ckclk * 24, 15000);
	t_mod = TIME_2_CLOCK_CYCLES(t_mod, t_ckclk);

	/* SDRAM Timing Low */
	val = (t_ras & 0xf) | (t_rcd << 4) | (t_rp << 8) | (t_wr << 12) |
		(t_wtr << 16) | (((t_ras & 0x30) >> 4) << 20) | (t_rrd << 24) |
		(t_rtp << 28);
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       SDRAM_TIMING_LOW_REG, val, 0xff3fffff));

	/* SDRAM Timing High */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       SDRAM_TIMING_HIGH_REG,
				       t_rfc & 0x7f, 0x7f));
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       SDRAM_TIMING_HIGH_REG,
				       0x180, 0x180));
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       SDRAM_TIMING_HIGH_REG,
				       0x600, 0x600));
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       SDRAM_TIMING_HIGH_REG,
				       0x1800, 0xf800));
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       SDRAM_TIMING_HIGH_REG,
				       ((t_rfc & 0x380) >> 7) << 16, 0x70000));
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       SDRAM_TIMING_HIGH_REG, 0,
				       0x380000));
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       SDRAM_TIMING_HIGH_REG,
				       (t_mod & 0xf) << 25, 0x1e00000));
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       SDRAM_TIMING_HIGH_REG,
				       (t_mod >> 4) << 30, 0xc0000000));
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       SDRAM_TIMING_HIGH_REG,
				       0x16000000, 0x1e000000));
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       SDRAM_TIMING_HIGH_REG,
				       0x40000000, 0xc0000000));

	return MV_OK;
}

/*
 * Mode Read
 */
int hws_ddr3_tip_mode_read(u32 dev_num, struct mode_info *mode_info)
{
	u32 ret;

	ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			       MR0_REG, mode_info->reg_mr0, MASK_ALL_BITS);
	if (ret != MV_OK)
		return ret;

	ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			       MR1_REG, mode_info->reg_mr1, MASK_ALL_BITS);
	if (ret != MV_OK)
		return ret;

	ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			       MR2_REG, mode_info->reg_mr2, MASK_ALL_BITS);
	if (ret != MV_OK)
		return ret;

	ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			       MR3_REG, mode_info->reg_mr2, MASK_ALL_BITS);
	if (ret != MV_OK)
		return ret;

	ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			       READ_DATA_SAMPLE_DELAY, mode_info->read_data_sample,
			       MASK_ALL_BITS);
	if (ret != MV_OK)
		return ret;

	ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			       READ_DATA_READY_DELAY, mode_info->read_data_ready,
			       MASK_ALL_BITS);
	if (ret != MV_OK)
		return ret;

	return MV_OK;
}

/*
 * Get first active IF
 */
int ddr3_tip_get_first_active_if(u8 dev_num, u32 interface_mask,
				 u32 *interface_id)
{
	u32 if_id;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		if (interface_mask & (1 << if_id)) {
			*interface_id = if_id;
			break;
		}
	}

	return MV_OK;
}

/*
 * Write CS Result
 */
int ddr3_tip_write_cs_result(u32 dev_num, u32 offset)
{
	u32 if_id, bus_num, cs_bitmask, data_val, cs_num;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (bus_num = 0; bus_num < tm->num_of_bus_per_interface;
		     bus_num++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_num);
			cs_bitmask =
				tm->interface_params[if_id].
				as_bus_params[bus_num].cs_bitmask;
			if (cs_bitmask != effective_cs) {
				cs_num = GET_CS_FROM_MASK(cs_bitmask);
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_num,
						  DDR_PHY_DATA,
						  offset +
						  CS_REG_VALUE(effective_cs),
						  &data_val);
				ddr3_tip_bus_write(dev_num,
						   ACCESS_TYPE_UNICAST,
						   if_id,
						   ACCESS_TYPE_UNICAST,
						   bus_num, DDR_PHY_DATA,
						   offset +
						   CS_REG_VALUE(cs_num),
						   data_val);
			}
		}
	}

	return MV_OK;
}

/*
 * Write MRS
 */
int ddr3_tip_write_mrs_cmd(u32 dev_num, u32 *cs_mask_arr, u32 cmd,
			   u32 data, u32 mask)
{
	u32 if_id, reg;
	struct hws_topology_map *tm = ddr3_get_topology_map();

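	/*
	 * Update the MR shadow register, then trigger the MRS command on
	 * each active interface through SDRAM_OPERATION_REG and poll until
	 * the operation field clears.
	 */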
	reg = (cmd == MRS1_CMD) ? MR1_REG : MR2_REG;
	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
				       PARAM_NOT_CARE, reg, data, mask));
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      SDRAM_OPERATION_REG,
			      (cs_mask_arr[if_id] << 8) | cmd, 0xf1f));
	}

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST, if_id, 0,
					0x1f, SDRAM_OPERATION_REG,
					MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("write_mrs_cmd: Poll cmd fail"));
		}
	}

	return MV_OK;
}

/*
 * Reset XSB Read FIFO
 */
int ddr3_tip_reset_fifo_ptr(u32 dev_num)
{
	u32 if_id = 0;

	/* Configure PHY reset value to 0 in order to "clean" the FIFO */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
				       if_id, 0x15c8, 0, 0xff000000));
	/*
	 * Move PHY to RL mode (only in RL mode the PHY overrides FIFO values
	 * during FIFO reset)
	 */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
				       if_id, TRAINING_SW_2_REG,
				       0x1, 0x9));
	/* In order that above configuration will influence the PHY */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
				       if_id, 0x15b0,
				       0x80000000, 0x80000000));
	/* Reset read fifo assertion */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
				       if_id, 0x1400, 0, 0x40000000));
	/* Reset read fifo deassertion */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
				       if_id, 0x1400,
				       0x40000000, 0x40000000));
	/* Move PHY back to functional mode */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
				       if_id, TRAINING_SW_2_REG,
				       0x8, 0x9));
	/* Stop training machine */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
				       if_id, 0x15b4, 0x10000, 0x10000));

	return MV_OK;
}

/*
 * Reset Phy registers
 */
int ddr3_tip_ddr3_reset_phy_regs(u32 dev_num)
{
	u32 if_id, phy_id, cs;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (phy_id = 0; phy_id < tm->num_of_bus_per_interface;
		     phy_id++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, phy_id);
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id, ACCESS_TYPE_UNICAST,
				      phy_id, DDR_PHY_DATA,
				      WL_PHY_REG +
				      CS_REG_VALUE(effective_cs),
				      phy_reg0_val));
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
				      RL_PHY_REG + CS_REG_VALUE(effective_cs),
				      phy_reg2_val));
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
				      READ_CENTRALIZATION_PHY_REG +
				      CS_REG_VALUE(effective_cs), phy_reg3_val));
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
				      WRITE_CENTRALIZATION_PHY_REG +
				      CS_REG_VALUE(effective_cs), phy_reg3_val));
		}
	}

	/* Set Receiver Calibration value */
	for (cs = 0; cs < MAX_CS_NUM; cs++) {
		/* PHY register 0xdb bits[5:0] - configure to 63 */
		CHECK_STATUS(ddr3_tip_bus_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      DDR_PHY_DATA, CSN_IOB_VREF_REG(cs), 63));
	}

	return MV_OK;
}

/*
 * Restore Dunit registers
 */
int ddr3_tip_restore_dunit_regs(u32 dev_num)
{
	u32 index_cnt;

	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
				       PARAM_NOT_CARE, CALIB_MACHINE_CTRL_REG,
				       0x1, 0x1));
	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
				       PARAM_NOT_CARE, CALIB_MACHINE_CTRL_REG,
				       calibration_update_control << 3,
				       0x3 << 3));
	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
				       PARAM_NOT_CARE,
				       ODPG_WRITE_READ_MODE_ENABLE_REG,
				       0xffff, MASK_ALL_BITS));

	for (index_cnt = 0; index_cnt < ARRAY_SIZE(odpg_default_value);
	     index_cnt++) {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      odpg_default_value[index_cnt].reg_addr,
			      odpg_default_value[index_cnt].reg_data,
			      odpg_default_value[index_cnt].reg_mask));
	}

	return MV_OK;
}

/*
 * Auto tune main flow
 */
static int ddr3_tip_ddr3_training_main_flow(u32 dev_num)
{
	enum hws_ddr_freq freq = init_freq;
	struct init_cntr_param init_cntr_prm;
	int ret = MV_OK;
	u32 if_id;
	u32 max_cs = hws_ddr3_tip_max_cs_get();
	struct hws_topology_map *tm = ddr3_get_topology_map();

#ifndef EXCLUDE_SWITCH_DEBUG
	if (debug_training == DEBUG_LEVEL_TRACE) {
		CHECK_STATUS(print_device_info((u8)dev_num));
	}
#endif

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		CHECK_STATUS(ddr3_tip_ddr3_reset_phy_regs(dev_num));
	}
	/* Reset to 0 after each loop so a stale value is not used later */
	effective_cs = 0;

	freq = init_freq;
	if (is_pll_before_init != 0) {
		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			config_func_info[dev_num].tip_set_freq_divider_func(
				(u8)dev_num, if_id, freq);
		}
	}

	if (is_adll_calib_before_init != 0) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("with adll calib before init\n"));
		adll_calibration(dev_num, ACCESS_TYPE_MULTICAST, 0, freq);
	}

	if (is_reg_dump != 0) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("Dump before init controller\n"));
		ddr3_tip_reg_dump(dev_num);
	}

	if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
		training_stage = INIT_CONTROLLER;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("INIT_CONTROLLER_MASK_BIT\n"));
		init_cntr_prm.do_mrs_phy = 1;
		init_cntr_prm.is_ctrl64_bit = 0;
		init_cntr_prm.init_phy = 1;
		init_cntr_prm.msys_init = 0;
		ret = hws_ddr3_tip_init_controller(dev_num, &init_cntr_prm);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("hws_ddr3_tip_init_controller failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

#ifdef STATIC_ALGO_SUPPORT
	if (mask_tune_func & STATIC_LEVELING_MASK_BIT) {
		training_stage = STATIC_LEVELING;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("STATIC_LEVELING_MASK_BIT\n"));
		ret = ddr3_tip_run_static_alg(dev_num, freq);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_run_static_alg failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}
#endif

	if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) {
		training_stage = SET_LOW_FREQ;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("SET_LOW_FREQ_MASK_BIT %d\n",
				   freq_val[low_freq]));
		ret = ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST,
					PARAM_NOT_CARE, low_freq);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_freq_set failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & LOAD_PATTERN_MASK_BIT) {
			training_stage = LOAD_PATTERN;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("LOAD_PATTERN_MASK_BIT #%d\n",
					   effective_cs));
			ret = ddr3_tip_load_all_pattern_to_mem(dev_num);
			if (is_reg_dump != 0)
				ddr3_tip_reg_dump(dev_num);
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_load_all_pattern_to_mem failure CS #%d\n",
						   effective_cs));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}
	/* Reset to 0 after each loop so a stale value is not used later */
	effective_cs = 0;

	if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) {
		training_stage = SET_MEDIUM_FREQ;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("SET_MEDIUM_FREQ_MASK_BIT %d\n",
				   freq_val[medium_freq]));
		ret = ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST,
					PARAM_NOT_CARE, medium_freq);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_freq_set failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	if (mask_tune_func & WRITE_LEVELING_MASK_BIT) {
		training_stage = WRITE_LEVELING;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("WRITE_LEVELING_MASK_BIT\n"));
		if ((rl_mid_freq_wa == 0) || (freq_val[medium_freq] == 533)) {
			ret = ddr3_tip_dynamic_write_leveling(dev_num);
		} else {
			/* Use old WL */
			ret = ddr3_tip_legacy_dynamic_write_leveling(dev_num);
		}

		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_dynamic_write_leveling failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) {
			training_stage = LOAD_PATTERN_2;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("LOAD_PATTERN_2_MASK_BIT CS #%d\n",
					   effective_cs));
			ret = ddr3_tip_load_all_pattern_to_mem(dev_num);
			if (is_reg_dump != 0)
				ddr3_tip_reg_dump(dev_num);
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_load_all_pattern_to_mem failure CS #%d\n",
						   effective_cs));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}
	/* Reset to 0 after each loop so a stale value is not used later */
	effective_cs = 0;

	if (mask_tune_func & READ_LEVELING_MASK_BIT) {
		training_stage = READ_LEVELING;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("READ_LEVELING_MASK_BIT\n"));
		if ((rl_mid_freq_wa == 0) || (freq_val[medium_freq] == 533)) {
			ret = ddr3_tip_dynamic_read_leveling(dev_num, medium_freq);
		} else {
			/* Use old RL */
			ret =
			    ddr3_tip_legacy_dynamic_read_leveling(dev_num);
		}

		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_dynamic_read_leveling failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
		training_stage = WRITE_LEVELING_SUPP;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("WRITE_LEVELING_SUPP_MASK_BIT\n"));
		ret = ddr3_tip_dynamic_write_leveling_supp(dev_num);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_dynamic_write_leveling_supp failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & PBS_RX_MASK_BIT) {
			training_stage = PBS_RX;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("PBS_RX_MASK_BIT CS #%d\n",
					   effective_cs));
			ret = ddr3_tip_pbs_rx(dev_num);
			if (is_reg_dump != 0)
				ddr3_tip_reg_dump(dev_num);
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_pbs_rx failure CS #%d\n",
						   effective_cs));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & PBS_TX_MASK_BIT) {
			training_stage = PBS_TX;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("PBS_TX_MASK_BIT CS #%d\n",
					   effective_cs));
			ret = ddr3_tip_pbs_tx(dev_num);
			if (is_reg_dump != 0)
				ddr3_tip_reg_dump(dev_num);
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_pbs_tx failure CS #%d\n",
						   effective_cs));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}
	/* Reset to 0 after each loop so a stale value is not used later */
	effective_cs = 0;
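	/*
	 * The stages above ran at the medium frequency; from here on the
	 * controller switches to the target frequency taken from the
	 * topology, and the frequency-dependent leveling stages (the *_TF
	 * variants, presumably "target frequency") are repeated at that
	 * speed.
	 */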
	if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
		training_stage = SET_TARGET_FREQ;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("SET_TARGET_FREQ_MASK_BIT %d\n",
				   freq_val[tm->
					    interface_params[first_active_if].
					    memory_freq]));
		ret = ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST,
					PARAM_NOT_CARE,
					tm->interface_params[first_active_if].
					memory_freq);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_freq_set failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
		training_stage = WRITE_LEVELING_TF;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("WRITE_LEVELING_TF_MASK_BIT\n"));
		ret = ddr3_tip_dynamic_write_leveling(dev_num);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_dynamic_write_leveling TF failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	if (mask_tune_func & LOAD_PATTERN_HIGH_MASK_BIT) {
		training_stage = LOAD_PATTERN_HIGH;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("LOAD_PATTERN_HIGH\n"));
		ret = ddr3_tip_load_all_pattern_to_mem(dev_num);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_load_all_pattern_to_mem failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
		training_stage = READ_LEVELING_TF;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("READ_LEVELING_TF_MASK_BIT\n"));
		ret = ddr3_tip_dynamic_read_leveling(dev_num, tm->
						     interface_params[first_active_if].
						     memory_freq);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_dynamic_read_leveling TF failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	if (mask_tune_func & DM_PBS_TX_MASK_BIT) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("DM_PBS_TX_MASK_BIT\n"));
	}

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
			training_stage = VREF_CALIBRATION;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("VREF\n"));
			ret = ddr3_tip_vref(dev_num);
			if (is_reg_dump != 0) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("VREF Dump\n"));
				ddr3_tip_reg_dump(dev_num);
			}
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_vref failure\n"));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}
	/* Reset to 0 after each loop so a stale value is not used later */
	effective_cs = 0;

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
			training_stage = CENTRALIZATION_RX;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("CENTRALIZATION_RX_MASK_BIT CS #%d\n",
					   effective_cs));
			ret = ddr3_tip_centralization_rx(dev_num);
			if (is_reg_dump != 0)
				ddr3_tip_reg_dump(dev_num);
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_centralization_rx failure CS #%d\n",
						   effective_cs));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}
	/* Reset to 0 after each loop so a stale value is not used later */
	effective_cs = 0;

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
			training_stage = WRITE_LEVELING_SUPP_TF;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("WRITE_LEVELING_SUPP_TF_MASK_BIT CS #%d\n",
					   effective_cs));
			ret = ddr3_tip_dynamic_write_leveling_supp(dev_num);
			if (is_reg_dump != 0)
				ddr3_tip_reg_dump(dev_num);
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_dynamic_write_leveling_supp TF failure CS #%d\n",
						   effective_cs));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}
	/* Reset to 0 after each loop so a stale value is not used later */
	effective_cs = 0;

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
			training_stage = CENTRALIZATION_TX;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("CENTRALIZATION_TX_MASK_BIT CS #%d\n",
					   effective_cs));
			ret = ddr3_tip_centralization_tx(dev_num);
			if (is_reg_dump != 0)
				ddr3_tip_reg_dump(dev_num);
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_centralization_tx failure CS #%d\n",
						   effective_cs));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}
	/* Reset to 0 after each loop so a stale value is not used later */
	effective_cs = 0;

	DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("restore registers to default\n"));
	/* Restore register values */
	CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));

	if (is_reg_dump != 0)
		ddr3_tip_reg_dump(dev_num);

	return MV_OK;
}

/*
 * DDR3 Dynamic training flow
 */
static int ddr3_tip_ddr3_auto_tune(u32 dev_num)
{
	u32 if_id, stage, ret;
	int is_if_fail = 0, is_auto_tune_fail = 0;

	training_stage = INIT_CONTROLLER;

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		for (stage = 0; stage < MAX_STAGE_LIMIT; stage++)
			training_result[stage][if_id] = NO_TEST_DONE;
	}

	ret = ddr3_tip_ddr3_training_main_flow(dev_num);

	/* Activate XSB test */
	if (xsb_validate_type != 0) {
		run_xsb_test(dev_num, xsb_validation_base_address, 1, 1,
			     0x1024);
	}

	if (is_reg_dump != 0)
		ddr3_tip_reg_dump(dev_num);

	/* Print log */
	CHECK_STATUS(ddr3_tip_print_log(dev_num, window_mem_addr));

	if (ret != MV_OK) {
		CHECK_STATUS(ddr3_tip_print_stability_log(dev_num));
	}

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		is_if_fail = 0;
		for (stage = 0; stage < MAX_STAGE_LIMIT; stage++) {
			if (training_result[stage][if_id] == TEST_FAILED)
				is_if_fail = 1;
		}
		if (is_if_fail == 1) {
			is_auto_tune_fail = 1;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("Auto Tune failed for IF %d\n",
					   if_id));
		}
	}

	if ((ret == MV_FAIL) || (is_auto_tune_fail == 1))
		return MV_FAIL;
	else
		return MV_OK;
}

/*
 * Enable init sequence
 */
int ddr3_tip_enable_init_sequence(u32 dev_num)
{
	int is_fail = 0;
	u32 if_id = 0, mem_mask = 0, bus_index = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/* Enable init sequence */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 0,
				       SDRAM_INIT_CONTROL_REG, 0x1, 0x1));

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);

		if (ddr3_tip_if_polling
		    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1,
		     SDRAM_INIT_CONTROL_REG,
		     MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("polling failed IF %d\n",
					   if_id));
			is_fail = 1;
			continue;
		}

		mem_mask = 0;
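		/*
		 * OR together the mirror_enable_bitmask of every active bus
		 * on this interface; if any bit is set, multi-CS operation
		 * is disabled below.
		 */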
		for (bus_index = 0; bus_index < GET_TOPOLOGY_NUM_OF_BUSES();
		     bus_index++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
			mem_mask |=
				tm->interface_params[if_id].
				as_bus_params[bus_index].mirror_enable_bitmask;
		}

		if (mem_mask != 0) {
			/* Disable Multi CS */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, ACCESS_TYPE_MULTICAST,
				      if_id, CS_ENABLE_REG, 1 << 3,
				      1 << 3));
		}
	}

	return (is_fail == 0) ? MV_OK : MV_FAIL;
}

int ddr3_tip_register_dq_table(u32 dev_num, u32 *table)
{
	dq_map_table = table;

	return MV_OK;
}

/*
 * Check if pup search is locked
 */
int ddr3_tip_is_pup_lock(u32 *pup_buf, enum hws_training_result read_mode)
{
	u32 bit_start = 0, bit_end = 0, bit_id;

	if (read_mode == RESULT_PER_BIT) {
		bit_start = 0;
		bit_end = BUS_WIDTH_IN_BITS - 1;
	} else {
		bit_start = 0;
		bit_end = 0;
	}

	for (bit_id = bit_start; bit_id <= bit_end; bit_id++) {
		if (GET_LOCK_RESULT(pup_buf[bit_id]) == 0)
			return 0;
	}

	return 1;
}

/*
 * Get minimum buffer value
 */
u8 ddr3_tip_get_buf_min(u8 *buf_ptr)
{
	u8 min_val = 0xff;
	u8 cnt = 0;

	for (cnt = 0; cnt < BUS_WIDTH_IN_BITS; cnt++) {
		if (buf_ptr[cnt] < min_val)
			min_val = buf_ptr[cnt];
	}

	return min_val;
}

/*
 * Get maximum buffer value
 */
u8 ddr3_tip_get_buf_max(u8 *buf_ptr)
{
	u8 max_val = 0;
	u8 cnt = 0;

	for (cnt = 0; cnt < BUS_WIDTH_IN_BITS; cnt++) {
		if (buf_ptr[cnt] > max_val)
			max_val = buf_ptr[cnt];
	}

	return max_val;
}

/*
 * The following functions return memory parameters:
 * bus and device width, device size
 */

u32 hws_ddr3_get_bus_width(void)
{
	struct hws_topology_map *tm = ddr3_get_topology_map();

	return (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask) ==
		1) ? 16 : 32;
}

u32 hws_ddr3_get_device_width(u32 if_id)
{
	struct hws_topology_map *tm = ddr3_get_topology_map();

	return (tm->interface_params[if_id].bus_width ==
		BUS_WIDTH_8) ?
		8 : 16;
}

u32 hws_ddr3_get_device_size(u32 if_id)
{
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (tm->interface_params[if_id].memory_size >=
	    MEM_SIZE_LAST) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("Error: Wrong device size of Cs: %d",
				   tm->interface_params[if_id].memory_size));
		return 0;
	} else {
		return 1 << tm->interface_params[if_id].memory_size;
	}
}

int hws_ddr3_calc_mem_cs_size(u32 if_id, u32 cs, u32 *cs_size)
{
	u32 cs_mem_size, dev_size;

	dev_size = hws_ddr3_get_device_size(if_id);
	if (dev_size != 0) {
		cs_mem_size = ((hws_ddr3_get_bus_width() /
				hws_ddr3_get_device_width(if_id)) * dev_size);

		/*
		 * The result is the CS size expressed in GBytes * 16
		 * (i.e. in units of 64 MB) to avoid floating point math.
		 */

		if (cs_mem_size == 2) {
			*cs_size = _128M;
		} else if (cs_mem_size == 4) {
			*cs_size = _256M;
		} else if (cs_mem_size == 8) {
			*cs_size = _512M;
		} else if (cs_mem_size == 16) {
			*cs_size = _1G;
		} else if (cs_mem_size == 32) {
			*cs_size = _2G;
		} else {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("Error: Wrong Memory size of Cs: %d", cs));
			return MV_FAIL;
		}
		return MV_OK;
	} else {
		return MV_FAIL;
	}
}

int hws_ddr3_cs_base_adr_calc(u32 if_id, u32 cs, u32 *cs_base_addr)
{
	u32 cs_mem_size = 0;
#ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
	u32 physical_mem_size;
	u32 max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE;
#endif

	if (hws_ddr3_calc_mem_cs_size(if_id, cs, &cs_mem_size) != MV_OK)
		return MV_FAIL;

#ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
	struct hws_topology_map *tm = ddr3_get_topology_map();
	/*
	 * If the number of address pins does not allow using the maximum
	 * memory size defined in the topology, the memory size is limited
	 * by DEVICE_MAX_DRAM_ADDRESS_SIZE.
	 */
	physical_mem_size =
		mv_hwsmem_size[tm->interface_params[0].memory_size];

	if (hws_ddr3_get_device_width(cs) == 16) {
		/*
		 * A 16-bit memory device can be twice as large - the least
		 * significant address pin is not needed.
		 */
		max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE * 2;
	}

	if (physical_mem_size > max_mem_size) {
		cs_mem_size = max_mem_size *
			(hws_ddr3_get_bus_width() /
			 hws_ddr3_get_device_width(if_id));
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("Updated Physical Mem size is from 0x%x to %x\n",
				   physical_mem_size,
				   DEVICE_MAX_DRAM_ADDRESS_SIZE));
	}
#endif

	/* Calculate the CS base address */
	*cs_base_addr = ((cs_mem_size) * cs) & 0xffff0000;

	return MV_OK;
}
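
/*
 * Usage sketch for the two helpers above (illustrative only, not called
 * anywhere in this file):
 *
 *	u32 cs_size, cs_base;
 *
 *	if (hws_ddr3_calc_mem_cs_size(0, 1, &cs_size) == MV_OK &&
 *	    hws_ddr3_cs_base_adr_calc(0, 1, &cs_base) == MV_OK)
 *		printf("CS1: size 0x%x, base 0x%x\n", cs_size, cs_base);
 *
 * Worked example, assuming a 32-bit bus built from 8-bit 4Gbit devices and
 * that the topology memory_size enum counts upward from 512Mbit = 0:
 * dev_size = 1 << 3 = 8 (in 512Mbit units), so
 * cs_mem_size = (32 / 8) * 8 = 32, i.e. 2 GB (the value is the size in
 * GBytes * 16), *cs_size becomes _2G and CS1 is placed at base 1 * 2GB,
 * masked to a 64KB-aligned 32-bit address.
 */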