// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include "ddr3_init.h"
#include "mv_ddr_common.h"

#define GET_CS_FROM_MASK(mask)	(cs_mask2_num[mask])
#define CS_CBE_VALUE(cs_num)	(cs_cbe_reg[cs_num])

u32 window_mem_addr = 0;
u32 phy_reg0_val = 0;
u32 phy_reg1_val = 8;
u32 phy_reg2_val = 0;
u32 phy_reg3_val = PARAM_UNDEFINED;
enum hws_ddr_freq low_freq = DDR_FREQ_LOW_FREQ;
enum hws_ddr_freq medium_freq;
u32 debug_dunit = 0;
u32 odt_additional = 1;
u32 *dq_map_table = NULL;

/* In case of DDR4, do not run the ddr3_tip_write_additional_odt_setting function - MC ODT is always 'on'.
 * In the DDR4 case the terminations are rttWR and rttPARK, so the ODT must always be 'on': 0x1498 = 0xf.
 */
u32 odt_config = 1;

u32 nominal_avs;
u32 extension_avs;

u32 is_pll_before_init = 0, is_adll_calib_before_init = 1, is_dfs_in_init = 0;
u32 dfs_low_freq;

u32 g_rtt_nom_cs0, g_rtt_nom_cs1;
u8 calibration_update_control;	/* 2 - external only, 1 - internal only */

enum hws_result training_result[MAX_STAGE_LIMIT][MAX_INTERFACE_NUM];
enum auto_tune_stage training_stage = INIT_CONTROLLER;
u32 finger_test = 0, p_finger_start = 11, p_finger_end = 64,
	n_finger_start = 11, n_finger_end = 64,
	p_finger_step = 3, n_finger_step = 3;
u32 clamp_tbl[] = { 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3 };

/* Initialized to 0xff; this variable is defined by the user in debug mode */
u32 mode_2t = 0xff;
u32 xsb_validate_type = 0;
u32 xsb_validation_base_address = 0xf000;
u32 first_active_if = 0;
u32 dfs_low_phy1 = 0x1f;
u32 multicast_id = 0;
int use_broadcast = 0;
struct hws_tip_freq_config_info *freq_info_table = NULL;
u8 is_cbe_required = 0;
u32 debug_mode = 0;
u32 delay_enable = 0;
int rl_mid_freq_wa = 0;

u32 effective_cs = 0;

u32 vref_init_val = 0x4;
u32 ck_delay = PARAM_UNDEFINED;

/* Design guidelines parameters */
u32 g_zpri_data = PARAM_UNDEFINED; /* controller data - P drive strength */
u32 g_znri_data = PARAM_UNDEFINED; /* controller data - N drive strength */
u32 g_zpri_ctrl = PARAM_UNDEFINED; /* controller C/A - P drive strength */
u32 g_znri_ctrl = PARAM_UNDEFINED; /* controller C/A - N drive strength */

u32 g_zpodt_data = PARAM_UNDEFINED; /* controller data - P ODT */
u32 g_znodt_data = PARAM_UNDEFINED; /* controller data - N ODT */
u32 g_zpodt_ctrl = PARAM_UNDEFINED; /* controller C/A - P ODT */
u32 g_znodt_ctrl = PARAM_UNDEFINED; /* controller C/A - N ODT */

u32 g_odt_config = PARAM_UNDEFINED;
u32 g_rtt_nom = PARAM_UNDEFINED;
u32 g_rtt_wr = PARAM_UNDEFINED;
u32 g_dic = PARAM_UNDEFINED;
u32 g_rtt_park = PARAM_UNDEFINED;

u32 mask_tune_func = (SET_MEDIUM_FREQ_MASK_BIT |
		      WRITE_LEVELING_MASK_BIT |
		      LOAD_PATTERN_2_MASK_BIT |
		      READ_LEVELING_MASK_BIT |
		      SET_TARGET_FREQ_MASK_BIT |
		      WRITE_LEVELING_TF_MASK_BIT |
		      READ_LEVELING_TF_MASK_BIT |
		      CENTRALIZATION_RX_MASK_BIT |
		      CENTRALIZATION_TX_MASK_BIT);

static int ddr3_tip_ddr3_training_main_flow(u32 dev_num);
static int ddr3_tip_write_odt(u32 dev_num, enum hws_access_type access_type,
			      u32 if_id, u32 cl_value, u32 cwl_value);
static int ddr3_tip_ddr3_auto_tune(u32 dev_num);

#ifdef ODT_TEST_SUPPORT
static int odt_test(u32 dev_num, enum hws_algo_type algo_type);
#endif

int adll_calibration(u32 dev_num, enum hws_access_type access_type,
		     u32 if_id, enum hws_ddr_freq frequency);
static int ddr3_tip_set_timing(u32 dev_num, enum hws_access_type access_type, 102 u32 if_id, enum hws_ddr_freq frequency); 103 104 static struct page_element page_tbl[] = { 105 /* 106 * 8bits 16 bits 107 * page-size(K) page-size(K) mask 108 */ 109 { 1, 2, 2}, 110 /* 512M */ 111 { 1, 2, 3}, 112 /* 1G */ 113 { 1, 2, 0}, 114 /* 2G */ 115 { 1, 2, 4}, 116 /* 4G */ 117 { 2, 2, 5}, 118 /* 8G */ 119 {0, 0, 0}, /* TODO: placeholder for 16-Mbit die capacity */ 120 {0, 0, 0}, /* TODO: placeholder for 32-Mbit die capacity */ 121 {0, 0, 0}, /* TODO: placeholder for 12-Mbit die capacity */ 122 {0, 0, 0} /* TODO: placeholder for 24-Mbit die capacity */ 123 124 }; 125 126 struct page_element *mv_ddr_page_tbl_get(void) 127 { 128 return &page_tbl[0]; 129 } 130 131 static u8 mem_size_config[MV_DDR_DIE_CAP_LAST] = { 132 0x2, /* 512Mbit */ 133 0x3, /* 1Gbit */ 134 0x0, /* 2Gbit */ 135 0x4, /* 4Gbit */ 136 0x5, /* 8Gbit */ 137 0x0, /* TODO: placeholder for 16-Mbit die capacity */ 138 0x0, /* TODO: placeholder for 32-Mbit die capacity */ 139 0x0, /* TODO: placeholder for 12-Mbit die capacity */ 140 0x0 /* TODO: placeholder for 24-Mbit die capacity */ 141 }; 142 143 static u8 cs_mask2_num[] = { 0, 0, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3 }; 144 145 static struct reg_data odpg_default_value[] = { 146 {0x1034, 0x38000, MASK_ALL_BITS}, 147 {0x1038, 0x0, MASK_ALL_BITS}, 148 {0x10b0, 0x0, MASK_ALL_BITS}, 149 {0x10b8, 0x0, MASK_ALL_BITS}, 150 {0x10c0, 0x0, MASK_ALL_BITS}, 151 {0x10f0, 0x0, MASK_ALL_BITS}, 152 {0x10f4, 0x0, MASK_ALL_BITS}, 153 {0x10f8, 0xff, MASK_ALL_BITS}, 154 {0x10fc, 0xffff, MASK_ALL_BITS}, 155 {0x1130, 0x0, MASK_ALL_BITS}, 156 {0x1830, 0x2000000, MASK_ALL_BITS}, 157 {0x14d0, 0x0, MASK_ALL_BITS}, 158 {0x14d4, 0x0, MASK_ALL_BITS}, 159 {0x14d8, 0x0, MASK_ALL_BITS}, 160 {0x14dc, 0x0, MASK_ALL_BITS}, 161 {0x1454, 0x0, MASK_ALL_BITS}, 162 {0x1594, 0x0, MASK_ALL_BITS}, 163 {0x1598, 0x0, MASK_ALL_BITS}, 164 {0x159c, 0x0, MASK_ALL_BITS}, 165 {0x15a0, 0x0, MASK_ALL_BITS}, 166 {0x15a4, 0x0, MASK_ALL_BITS}, 167 {0x15a8, 0x0, MASK_ALL_BITS}, 168 {0x15ac, 0x0, MASK_ALL_BITS}, 169 {0x1604, 0x0, MASK_ALL_BITS}, 170 {0x1608, 0x0, MASK_ALL_BITS}, 171 {0x160c, 0x0, MASK_ALL_BITS}, 172 {0x1610, 0x0, MASK_ALL_BITS}, 173 {0x1614, 0x0, MASK_ALL_BITS}, 174 {0x1618, 0x0, MASK_ALL_BITS}, 175 {0x1624, 0x0, MASK_ALL_BITS}, 176 {0x1690, 0x0, MASK_ALL_BITS}, 177 {0x1694, 0x0, MASK_ALL_BITS}, 178 {0x1698, 0x0, MASK_ALL_BITS}, 179 {0x169c, 0x0, MASK_ALL_BITS}, 180 {0x14b8, 0x6f67, MASK_ALL_BITS}, 181 {0x1630, 0x0, MASK_ALL_BITS}, 182 {0x1634, 0x0, MASK_ALL_BITS}, 183 {0x1638, 0x0, MASK_ALL_BITS}, 184 {0x163c, 0x0, MASK_ALL_BITS}, 185 {0x16b0, 0x0, MASK_ALL_BITS}, 186 {0x16b4, 0x0, MASK_ALL_BITS}, 187 {0x16b8, 0x0, MASK_ALL_BITS}, 188 {0x16bc, 0x0, MASK_ALL_BITS}, 189 {0x16c0, 0x0, MASK_ALL_BITS}, 190 {0x16c4, 0x0, MASK_ALL_BITS}, 191 {0x16c8, 0x0, MASK_ALL_BITS}, 192 {0x16cc, 0x1, MASK_ALL_BITS}, 193 {0x16f0, 0x1, MASK_ALL_BITS}, 194 {0x16f4, 0x0, MASK_ALL_BITS}, 195 {0x16f8, 0x0, MASK_ALL_BITS}, 196 {0x16fc, 0x0, MASK_ALL_BITS} 197 }; 198 199 /* MR cmd and addr definitions */ 200 struct mv_ddr_mr_data mr_data[] = { 201 {MRS0_CMD, MR0_REG}, 202 {MRS1_CMD, MR1_REG}, 203 {MRS2_CMD, MR2_REG}, 204 {MRS3_CMD, MR3_REG} 205 }; 206 207 static int ddr3_tip_pad_inv(u32 dev_num, u32 if_id); 208 static int ddr3_tip_rank_control(u32 dev_num, u32 if_id); 209 210 /* 211 * Update global training parameters by data from user 212 */ 213 int ddr3_tip_tune_training_params(u32 dev_num, 214 struct tune_train_params *params) 215 { 216 if 
(params->ck_delay != PARAM_UNDEFINED) 217 ck_delay = params->ck_delay; 218 if (params->phy_reg3_val != PARAM_UNDEFINED) 219 phy_reg3_val = params->phy_reg3_val; 220 if (params->g_rtt_nom != PARAM_UNDEFINED) 221 g_rtt_nom = params->g_rtt_nom; 222 if (params->g_rtt_wr != PARAM_UNDEFINED) 223 g_rtt_wr = params->g_rtt_wr; 224 if (params->g_dic != PARAM_UNDEFINED) 225 g_dic = params->g_dic; 226 if (params->g_odt_config != PARAM_UNDEFINED) 227 g_odt_config = params->g_odt_config; 228 if (params->g_zpri_data != PARAM_UNDEFINED) 229 g_zpri_data = params->g_zpri_data; 230 if (params->g_znri_data != PARAM_UNDEFINED) 231 g_znri_data = params->g_znri_data; 232 if (params->g_zpri_ctrl != PARAM_UNDEFINED) 233 g_zpri_ctrl = params->g_zpri_ctrl; 234 if (params->g_znri_ctrl != PARAM_UNDEFINED) 235 g_znri_ctrl = params->g_znri_ctrl; 236 if (params->g_zpodt_data != PARAM_UNDEFINED) 237 g_zpodt_data = params->g_zpodt_data; 238 if (params->g_znodt_data != PARAM_UNDEFINED) 239 g_znodt_data = params->g_znodt_data; 240 if (params->g_zpodt_ctrl != PARAM_UNDEFINED) 241 g_zpodt_ctrl = params->g_zpodt_ctrl; 242 if (params->g_znodt_ctrl != PARAM_UNDEFINED) 243 g_znodt_ctrl = params->g_znodt_ctrl; 244 if (params->g_rtt_park != PARAM_UNDEFINED) 245 g_rtt_park = params->g_rtt_park; 246 247 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 248 ("DGL parameters: 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X\n", 249 g_zpri_data, g_znri_data, g_zpri_ctrl, g_znri_ctrl, g_zpodt_data, g_znodt_data, 250 g_zpodt_ctrl, g_znodt_ctrl, g_rtt_nom, g_dic, g_odt_config, g_rtt_wr)); 251 252 return MV_OK; 253 } 254 255 /* 256 * Configure CS 257 */ 258 int ddr3_tip_configure_cs(u32 dev_num, u32 if_id, u32 cs_num, u32 enable) 259 { 260 u32 data, addr_hi, data_high; 261 u32 mem_index; 262 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 263 264 if (enable == 1) { 265 data = (tm->interface_params[if_id].bus_width == 266 MV_DDR_DEV_WIDTH_8BIT) ? 
0 : 1; 267 CHECK_STATUS(ddr3_tip_if_write 268 (dev_num, ACCESS_TYPE_UNICAST, if_id, 269 SDRAM_ADDR_CTRL_REG, (data << (cs_num * 4)), 270 0x3 << (cs_num * 4))); 271 mem_index = tm->interface_params[if_id].memory_size; 272 273 addr_hi = mem_size_config[mem_index] & 0x3; 274 CHECK_STATUS(ddr3_tip_if_write 275 (dev_num, ACCESS_TYPE_UNICAST, if_id, 276 SDRAM_ADDR_CTRL_REG, 277 (addr_hi << (2 + cs_num * 4)), 278 0x3 << (2 + cs_num * 4))); 279 280 data_high = (mem_size_config[mem_index] & 0x4) >> 2; 281 CHECK_STATUS(ddr3_tip_if_write 282 (dev_num, ACCESS_TYPE_UNICAST, if_id, 283 SDRAM_ADDR_CTRL_REG, 284 data_high << (20 + cs_num), 1 << (20 + cs_num))); 285 286 /* Enable Address Select Mode */ 287 CHECK_STATUS(ddr3_tip_if_write 288 (dev_num, ACCESS_TYPE_UNICAST, if_id, 289 SDRAM_ADDR_CTRL_REG, 1 << (16 + cs_num), 290 1 << (16 + cs_num))); 291 } 292 switch (cs_num) { 293 case 0: 294 case 1: 295 case 2: 296 CHECK_STATUS(ddr3_tip_if_write 297 (dev_num, ACCESS_TYPE_UNICAST, if_id, 298 DUNIT_CTRL_LOW_REG, (enable << (cs_num + 11)), 299 1 << (cs_num + 11))); 300 break; 301 case 3: 302 CHECK_STATUS(ddr3_tip_if_write 303 (dev_num, ACCESS_TYPE_UNICAST, if_id, 304 DUNIT_CTRL_LOW_REG, (enable << 15), 1 << 15)); 305 break; 306 } 307 308 return MV_OK; 309 } 310 311 /* 312 * Calculate number of CS 313 */ 314 int calc_cs_num(u32 dev_num, u32 if_id, u32 *cs_num) 315 { 316 u32 cs; 317 u32 bus_cnt; 318 u32 cs_count; 319 u32 cs_bitmask; 320 u32 curr_cs_num = 0; 321 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 322 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 323 324 for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) { 325 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt); 326 cs_count = 0; 327 cs_bitmask = tm->interface_params[if_id]. 
328 as_bus_params[bus_cnt].cs_bitmask; 329 for (cs = 0; cs < MAX_CS_NUM; cs++) { 330 if ((cs_bitmask >> cs) & 1) 331 cs_count++; 332 } 333 334 if (curr_cs_num == 0) { 335 curr_cs_num = cs_count; 336 } else if (cs_count != curr_cs_num) { 337 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 338 ("CS number is different per bus (IF %d BUS %d cs_num %d curr_cs_num %d)\n", 339 if_id, bus_cnt, cs_count, 340 curr_cs_num)); 341 return MV_NOT_SUPPORTED; 342 } 343 } 344 *cs_num = curr_cs_num; 345 346 return MV_OK; 347 } 348 349 /* 350 * Init Controller Flow 351 */ 352 int hws_ddr3_tip_init_controller(u32 dev_num, struct init_cntr_param *init_cntr_prm) 353 { 354 u32 if_id; 355 u32 cs_num; 356 u32 t_ckclk = 0, t_wr = 0, t2t = 0; 357 u32 data_value = 0, cs_cnt = 0, 358 mem_mask = 0, bus_index = 0; 359 enum hws_speed_bin speed_bin_index = SPEED_BIN_DDR_2133N; 360 u32 cs_mask = 0; 361 u32 cl_value = 0, cwl_val = 0; 362 u32 bus_cnt = 0, adll_tap = 0; 363 enum hws_access_type access_type = ACCESS_TYPE_UNICAST; 364 u32 data_read[MAX_INTERFACE_NUM]; 365 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 366 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 367 enum hws_ddr_freq freq = tm->interface_params[0].memory_freq; 368 369 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, 370 ("Init_controller, do_mrs_phy=%d, is_ctrl64_bit=%d\n", 371 init_cntr_prm->do_mrs_phy, 372 init_cntr_prm->is_ctrl64_bit)); 373 374 if (init_cntr_prm->init_phy == 1) { 375 CHECK_STATUS(ddr3_tip_configure_phy(dev_num)); 376 } 377 378 if (generic_init_controller == 1) { 379 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 380 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 381 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, 382 ("active IF %d\n", if_id)); 383 mem_mask = 0; 384 for (bus_index = 0; 385 bus_index < octets_per_if_num; 386 bus_index++) { 387 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_index); 388 mem_mask |= 389 tm->interface_params[if_id]. 390 as_bus_params[bus_index].mirror_enable_bitmask; 391 } 392 393 if (mem_mask != 0) { 394 CHECK_STATUS(ddr3_tip_if_write 395 (dev_num, ACCESS_TYPE_MULTICAST, 396 if_id, DUAL_DUNIT_CFG_REG, 0, 397 0x8)); 398 } 399 400 speed_bin_index = 401 tm->interface_params[if_id]. 
				speed_bin_index;

			/* t_ckclk is external clock */
			t_ckclk = (MEGA / freq_val[freq]);

			if (MV_DDR_IS_HALF_BUS_DRAM_MODE(tm->bus_act_mask, octets_per_if_num))
				data_value = (0x4000 | 0 | 0x1000000) & ~(1 << 26);
			else
				data_value = (0x4000 | 0x8000 | 0x1000000) & ~(1 << 26);

			/* Interface Bus Width */
			/* SRMode */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      SDRAM_CFG_REG, data_value,
				      0x100c000));

			/* Interleave first command pre-charge enable (TBD) */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      SDRAM_OPEN_PAGES_CTRL_REG, (1 << 10),
				      (1 << 10)));

			/* Reset divider_b assert -> de-assert */
			CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
						       SDRAM_CFG_REG,
						       0x0 << PUP_RST_DIVIDER_OFFS,
						       PUP_RST_DIVIDER_MASK << PUP_RST_DIVIDER_OFFS));

			CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
						       SDRAM_CFG_REG,
						       0x1 << PUP_RST_DIVIDER_OFFS,
						       PUP_RST_DIVIDER_MASK << PUP_RST_DIVIDER_OFFS));

			/* PHY configuration */
			/*
			 * Postamble Length = 1.5cc, Addresscntl to clk skew
			 * = 0.5cc, Preamble length normal, parallel ADLL enable
			 */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      DRAM_PHY_CFG_REG, 0x28, 0x3e));
			if (init_cntr_prm->is_ctrl64_bit) {
				/* positive edge */
				CHECK_STATUS(ddr3_tip_if_write
					     (dev_num, access_type, if_id,
					      DRAM_PHY_CFG_REG, 0x0,
					      0xff80));
			}

			/* calibration block disable */
			/* Xbar Read buffer select (for Internal access) */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      MAIN_PADS_CAL_MACH_CTRL_REG, 0x1200c,
				      0x7dffe01c));
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      MAIN_PADS_CAL_MACH_CTRL_REG,
				      calibration_update_control << 3, 0x3 << 3));

			/* Pad calibration control - enable */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      MAIN_PADS_CAL_MACH_CTRL_REG, 0x1, 0x1));
			if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) < MV_TIP_REV_3) {
				/* DDR3 rank ctrl - part of the generic code */
				/* CS1 mirroring enable + w/a for JIRA DUNIT-14581 */
				CHECK_STATUS(ddr3_tip_if_write
					     (dev_num, access_type, if_id,
					      DDR3_RANK_CTRL_REG, 0x27, MASK_ALL_BITS));
			}

			cs_mask = 0;
			data_value = 0x7;
			/*
			 * Address ctrl - Part of the Generic code
			 * The next configuration is done:
			 * 1) Memory Size
			 * 2) Bus_width
			 * 3) CS#
			 * 4) Page Number
			 * Per Dunit get from the Map_topology the parameters:
			 * Bus_width
			 */

			data_value =
				(tm->interface_params[if_id].
				 bus_width == MV_DDR_DEV_WIDTH_8BIT) ? 0 : 1;

			/* create merge cs mask for all cs available in dunit */
			for (bus_cnt = 0;
			     bus_cnt < octets_per_if_num;
			     bus_cnt++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
				cs_mask |=
					tm->interface_params[if_id].
					as_bus_params[bus_cnt].cs_bitmask;
			}
			DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
					  ("Init_controller IF %d cs_mask %d\n",
					   if_id, cs_mask));
			/*
			 * Configure the next upon the Map Topology - If the
			 * Dunit is CS0, configure CS0; if it is multi-CS,
			 * configure them both. The Bus_width is the
			 * Memory Bus width - x8 or x16
			 */
			for (cs_cnt = 0; cs_cnt < NUM_OF_CS; cs_cnt++) {
				ddr3_tip_configure_cs(dev_num, if_id, cs_cnt,
						      ((cs_mask & (1 << cs_cnt)) ?
1 513 : 0)); 514 } 515 516 if (init_cntr_prm->do_mrs_phy) { 517 /* 518 * MR0 \96 Part of the Generic code 519 * The next configuration is done: 520 * 1) Burst Length 521 * 2) CAS Latency 522 * get for each dunit what is it Speed_bin & 523 * Target Frequency. From those both parameters 524 * get the appropriate Cas_l from the CL table 525 */ 526 cl_value = 527 tm->interface_params[if_id]. 528 cas_l; 529 cwl_val = 530 tm->interface_params[if_id]. 531 cas_wl; 532 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, 533 ("cl_value 0x%x cwl_val 0x%x\n", 534 cl_value, cwl_val)); 535 536 t_wr = time_to_nclk(speed_bin_table 537 (speed_bin_index, 538 SPEED_BIN_TWR), t_ckclk); 539 540 data_value = 541 ((cl_mask_table[cl_value] & 0x1) << 2) | 542 ((cl_mask_table[cl_value] & 0xe) << 3); 543 CHECK_STATUS(ddr3_tip_if_write 544 (dev_num, access_type, if_id, 545 MR0_REG, data_value, 546 (0x7 << 4) | (1 << 2))); 547 CHECK_STATUS(ddr3_tip_if_write 548 (dev_num, access_type, if_id, 549 MR0_REG, twr_mask_table[t_wr] << 9, 550 0x7 << 9)); 551 552 /* 553 * MR1: Set RTT and DIC Design GL values 554 * configured by user 555 */ 556 CHECK_STATUS(ddr3_tip_if_write 557 (dev_num, ACCESS_TYPE_MULTICAST, 558 PARAM_NOT_CARE, MR1_REG, 559 g_dic | g_rtt_nom, 0x266)); 560 561 /* MR2 - Part of the Generic code */ 562 /* 563 * The next configuration is done: 564 * 1) SRT 565 * 2) CAS Write Latency 566 */ 567 data_value = (cwl_mask_table[cwl_val] << 3); 568 data_value |= 569 ((tm->interface_params[if_id]. 570 interface_temp == 571 MV_DDR_TEMP_HIGH) ? (1 << 7) : 0); 572 data_value |= g_rtt_wr; 573 CHECK_STATUS(ddr3_tip_if_write 574 (dev_num, access_type, if_id, 575 MR2_REG, data_value, 576 (0x7 << 3) | (0x1 << 7) | (0x3 << 577 9))); 578 } 579 580 ddr3_tip_write_odt(dev_num, access_type, if_id, 581 cl_value, cwl_val); 582 ddr3_tip_set_timing(dev_num, access_type, if_id, freq); 583 584 if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) < MV_TIP_REV_3) { 585 CHECK_STATUS(ddr3_tip_if_write 586 (dev_num, access_type, if_id, 587 DUNIT_CTRL_HIGH_REG, 0x1000119, 588 0x100017F)); 589 } else { 590 CHECK_STATUS(ddr3_tip_if_write 591 (dev_num, access_type, if_id, 592 DUNIT_CTRL_HIGH_REG, 0x600177 | 593 (init_cntr_prm->is_ctrl64_bit ? 594 CPU_INTERJECTION_ENA_SPLIT_ENA << CPU_INTERJECTION_ENA_OFFS : 595 CPU_INTERJECTION_ENA_SPLIT_DIS << CPU_INTERJECTION_ENA_OFFS), 596 0x1600177 | CPU_INTERJECTION_ENA_MASK << 597 CPU_INTERJECTION_ENA_OFFS)); 598 } 599 600 /* reset bit 7 */ 601 CHECK_STATUS(ddr3_tip_if_write 602 (dev_num, access_type, if_id, 603 DUNIT_CTRL_HIGH_REG, 604 (init_cntr_prm->msys_init << 7), (1 << 7))); 605 606 if (mode_2t != 0xff) { 607 t2t = mode_2t; 608 } else { 609 /* calculate number of CS (per interface) */ 610 CHECK_STATUS(calc_cs_num 611 (dev_num, if_id, &cs_num)); 612 t2t = (cs_num == 1) ? 0 : 1; 613 } 614 615 CHECK_STATUS(ddr3_tip_if_write 616 (dev_num, access_type, if_id, 617 DUNIT_CTRL_LOW_REG, t2t << 3, 618 0x3 << 3)); 619 CHECK_STATUS(ddr3_tip_if_write 620 (dev_num, access_type, if_id, 621 DDR_TIMING_REG, 0x28 << 9, 0x3f << 9)); 622 CHECK_STATUS(ddr3_tip_if_write 623 (dev_num, access_type, if_id, 624 DDR_TIMING_REG, 0xa << 21, 0xff << 21)); 625 626 /* move the block to ddr3_tip_set_timing - end */ 627 /* AUTO_ZQC_TIMING */ 628 CHECK_STATUS(ddr3_tip_if_write 629 (dev_num, access_type, if_id, 630 ZQC_CFG_REG, (AUTO_ZQC_TIMING | (2 << 20)), 631 0x3fffff)); 632 CHECK_STATUS(ddr3_tip_if_read 633 (dev_num, access_type, if_id, 634 DRAM_PHY_CFG_REG, data_read, 0x30)); 635 data_value = 636 (data_read[if_id] == 0) ? 
(1 << 11) : 0; 637 CHECK_STATUS(ddr3_tip_if_write 638 (dev_num, access_type, if_id, 639 DUNIT_CTRL_HIGH_REG, data_value, 640 (1 << 11))); 641 642 /* Set Active control for ODT write transactions */ 643 CHECK_STATUS(ddr3_tip_if_write 644 (dev_num, ACCESS_TYPE_MULTICAST, 645 PARAM_NOT_CARE, 0x1494, g_odt_config, 646 MASK_ALL_BITS)); 647 648 if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) == MV_TIP_REV_3) { 649 CHECK_STATUS(ddr3_tip_if_write 650 (dev_num, access_type, if_id, 651 0x14a8, 0x900, 0x900)); 652 /* wa: controls control sub-phy outputs floating during self-refresh */ 653 CHECK_STATUS(ddr3_tip_if_write 654 (dev_num, access_type, if_id, 655 0x16d0, 0, 0x8000)); 656 } 657 } 658 } 659 660 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 661 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 662 CHECK_STATUS(ddr3_tip_rank_control(dev_num, if_id)); 663 664 if (init_cntr_prm->do_mrs_phy) { 665 CHECK_STATUS(ddr3_tip_pad_inv(dev_num, if_id)); 666 } 667 668 /* Pad calibration control - disable */ 669 CHECK_STATUS(ddr3_tip_if_write 670 (dev_num, access_type, if_id, 671 MAIN_PADS_CAL_MACH_CTRL_REG, 0x0, 0x1)); 672 CHECK_STATUS(ddr3_tip_if_write 673 (dev_num, access_type, if_id, 674 MAIN_PADS_CAL_MACH_CTRL_REG, 675 calibration_update_control << 3, 0x3 << 3)); 676 } 677 678 679 if (delay_enable != 0) { 680 adll_tap = MEGA / (freq_val[freq] * 64); 681 ddr3_tip_cmd_addr_init_delay(dev_num, adll_tap); 682 } 683 684 return MV_OK; 685 } 686 687 /* 688 * Rank Control Flow 689 */ 690 static int ddr3_tip_rev2_rank_control(u32 dev_num, u32 if_id) 691 { 692 u32 data_value = 0, bus_cnt = 0; 693 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 694 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 695 696 for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) { 697 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt); 698 data_value |= tm->interface_params[if_id].as_bus_params[bus_cnt]. 699 cs_bitmask; 700 701 if (tm->interface_params[if_id].as_bus_params[bus_cnt]. 702 mirror_enable_bitmask == 1) { 703 /* 704 * Check mirror_enable_bitmask 705 * If it is enabled, CS + 4 bit in a word to be '1' 706 */ 707 if ((tm->interface_params[if_id].as_bus_params[bus_cnt]. 708 cs_bitmask & 0x1) != 0) { 709 data_value |= tm->interface_params[if_id]. 710 as_bus_params[bus_cnt]. 711 mirror_enable_bitmask << 4; 712 } 713 714 if ((tm->interface_params[if_id].as_bus_params[bus_cnt]. 715 cs_bitmask & 0x2) != 0) { 716 data_value |= tm->interface_params[if_id]. 717 as_bus_params[bus_cnt]. 718 mirror_enable_bitmask << 5; 719 } 720 721 if ((tm->interface_params[if_id].as_bus_params[bus_cnt]. 722 cs_bitmask & 0x4) != 0) { 723 data_value |= tm->interface_params[if_id]. 724 as_bus_params[bus_cnt]. 725 mirror_enable_bitmask << 6; 726 } 727 728 if ((tm->interface_params[if_id].as_bus_params[bus_cnt]. 729 cs_bitmask & 0x8) != 0) { 730 data_value |= tm->interface_params[if_id]. 731 as_bus_params[bus_cnt]. 
732 mirror_enable_bitmask << 7; 733 } 734 } 735 } 736 737 CHECK_STATUS(ddr3_tip_if_write 738 (dev_num, ACCESS_TYPE_UNICAST, if_id, DDR3_RANK_CTRL_REG, 739 data_value, 0xff)); 740 741 return MV_OK; 742 } 743 744 static int ddr3_tip_rev3_rank_control(u32 dev_num, u32 if_id) 745 { 746 u32 data_value = 0, bus_cnt; 747 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 748 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 749 750 for (bus_cnt = 1; bus_cnt < octets_per_if_num; bus_cnt++) { 751 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt); 752 if ((tm->interface_params[if_id]. 753 as_bus_params[0].cs_bitmask != 754 tm->interface_params[if_id]. 755 as_bus_params[bus_cnt].cs_bitmask) || 756 (tm->interface_params[if_id]. 757 as_bus_params[0].mirror_enable_bitmask != 758 tm->interface_params[if_id]. 759 as_bus_params[bus_cnt].mirror_enable_bitmask)) 760 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 761 ("WARNING:Wrong configuration for pup #%d CS mask and CS mirroring for all pups should be the same\n", 762 bus_cnt)); 763 } 764 765 data_value |= tm->interface_params[if_id]. 766 as_bus_params[0].cs_bitmask; 767 data_value |= tm->interface_params[if_id]. 768 as_bus_params[0].mirror_enable_bitmask << 4; 769 770 CHECK_STATUS(ddr3_tip_if_write 771 (dev_num, ACCESS_TYPE_UNICAST, if_id, DDR3_RANK_CTRL_REG, 772 data_value, 0xff)); 773 774 return MV_OK; 775 } 776 777 static int ddr3_tip_rank_control(u32 dev_num, u32 if_id) 778 { 779 if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) == MV_TIP_REV_2) 780 return ddr3_tip_rev2_rank_control(dev_num, if_id); 781 else 782 return ddr3_tip_rev3_rank_control(dev_num, if_id); 783 } 784 785 /* 786 * PAD Inverse Flow 787 */ 788 static int ddr3_tip_pad_inv(u32 dev_num, u32 if_id) 789 { 790 u32 bus_cnt, data_value, ck_swap_pup_ctrl; 791 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 792 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 793 794 for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) { 795 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt); 796 if (tm->interface_params[if_id]. 797 as_bus_params[bus_cnt].is_dqs_swap == 1) { 798 /* dqs swap */ 799 ddr3_tip_bus_read_modify_write(dev_num, ACCESS_TYPE_UNICAST, 800 if_id, bus_cnt, 801 DDR_PHY_DATA, 802 PHY_CTRL_PHY_REG, 0xc0, 803 0xc0); 804 } 805 806 if (tm->interface_params[if_id]. 807 as_bus_params[bus_cnt].is_ck_swap == 1) { 808 if (bus_cnt <= 1) 809 data_value = 0x5 << 2; 810 else 811 data_value = 0xa << 2; 812 813 /* mask equals data */ 814 /* ck swap pup is only control pup #0 ! 
*/ 815 ck_swap_pup_ctrl = 0; 816 ddr3_tip_bus_read_modify_write(dev_num, ACCESS_TYPE_UNICAST, 817 if_id, ck_swap_pup_ctrl, 818 DDR_PHY_CONTROL, 819 PHY_CTRL_PHY_REG, 820 data_value, data_value); 821 } 822 } 823 824 return MV_OK; 825 } 826 827 /* 828 * Algorithm Parameters Validation 829 */ 830 int ddr3_tip_validate_algo_var(u32 value, u32 fail_value, char *var_name) 831 { 832 if (value == fail_value) { 833 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 834 ("Error: %s is not initialized (Algo Components Validation)\n", 835 var_name)); 836 return 0; 837 } 838 839 return 1; 840 } 841 842 int ddr3_tip_validate_algo_ptr(void *ptr, void *fail_value, char *ptr_name) 843 { 844 if (ptr == fail_value) { 845 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 846 ("Error: %s is not initialized (Algo Components Validation)\n", 847 ptr_name)); 848 return 0; 849 } 850 851 return 1; 852 } 853 854 int ddr3_tip_validate_algo_components(u8 dev_num) 855 { 856 int status = 1; 857 858 /* Check DGL parameters*/ 859 status &= ddr3_tip_validate_algo_var(ck_delay, PARAM_UNDEFINED, "ck_delay"); 860 status &= ddr3_tip_validate_algo_var(phy_reg3_val, PARAM_UNDEFINED, "phy_reg3_val"); 861 status &= ddr3_tip_validate_algo_var(g_rtt_nom, PARAM_UNDEFINED, "g_rtt_nom"); 862 status &= ddr3_tip_validate_algo_var(g_dic, PARAM_UNDEFINED, "g_dic"); 863 status &= ddr3_tip_validate_algo_var(odt_config, PARAM_UNDEFINED, "odt_config"); 864 status &= ddr3_tip_validate_algo_var(g_zpri_data, PARAM_UNDEFINED, "g_zpri_data"); 865 status &= ddr3_tip_validate_algo_var(g_znri_data, PARAM_UNDEFINED, "g_znri_data"); 866 status &= ddr3_tip_validate_algo_var(g_zpri_ctrl, PARAM_UNDEFINED, "g_zpri_ctrl"); 867 status &= ddr3_tip_validate_algo_var(g_znri_ctrl, PARAM_UNDEFINED, "g_znri_ctrl"); 868 status &= ddr3_tip_validate_algo_var(g_zpodt_data, PARAM_UNDEFINED, "g_zpodt_data"); 869 status &= ddr3_tip_validate_algo_var(g_znodt_data, PARAM_UNDEFINED, "g_znodt_data"); 870 status &= ddr3_tip_validate_algo_var(g_zpodt_ctrl, PARAM_UNDEFINED, "g_zpodt_ctrl"); 871 status &= ddr3_tip_validate_algo_var(g_znodt_ctrl, PARAM_UNDEFINED, "g_znodt_ctrl"); 872 873 /* Check functions pointers */ 874 status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].tip_dunit_mux_select_func, 875 NULL, "tip_dunit_mux_select_func"); 876 status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].mv_ddr_dunit_write, 877 NULL, "mv_ddr_dunit_write"); 878 status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].mv_ddr_dunit_read, 879 NULL, "mv_ddr_dunit_read"); 880 status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].mv_ddr_phy_write, 881 NULL, "mv_ddr_phy_write"); 882 status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].mv_ddr_phy_read, 883 NULL, "mv_ddr_phy_read"); 884 status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].tip_get_freq_config_info_func, 885 NULL, "tip_get_freq_config_info_func"); 886 status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].tip_set_freq_divider_func, 887 NULL, "tip_set_freq_divider_func"); 888 status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].tip_get_clock_ratio, 889 NULL, "tip_get_clock_ratio"); 890 891 status &= ddr3_tip_validate_algo_ptr(dq_map_table, NULL, "dq_map_table"); 892 status &= ddr3_tip_validate_algo_var(dfs_low_freq, 0, "dfs_low_freq"); 893 894 return (status == 1) ? 
MV_OK : MV_NOT_INITIALIZED; 895 } 896 897 898 int ddr3_pre_algo_config(void) 899 { 900 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 901 902 /* Set Bus3 ECC training mode */ 903 if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask)) { 904 /* Set Bus3 ECC MUX */ 905 CHECK_STATUS(ddr3_tip_if_write 906 (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE, 907 DRAM_PINS_MUX_REG, 0x100, 0x100)); 908 } 909 910 /* Set regular ECC training mode (bus4 and bus 3) */ 911 if ((DDR3_IS_ECC_PUP4_MODE(tm->bus_act_mask)) || 912 (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask)) || 913 (DDR3_IS_ECC_PUP8_MODE(tm->bus_act_mask))) { 914 /* Enable ECC Write MUX */ 915 CHECK_STATUS(ddr3_tip_if_write 916 (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE, 917 TRAINING_SW_2_REG, 0x100, 0x100)); 918 /* General ECC enable */ 919 CHECK_STATUS(ddr3_tip_if_write 920 (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE, 921 SDRAM_CFG_REG, 0x40000, 0x40000)); 922 /* Disable Read Data ECC MUX */ 923 CHECK_STATUS(ddr3_tip_if_write 924 (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE, 925 TRAINING_SW_2_REG, 0x0, 0x2)); 926 } 927 928 return MV_OK; 929 } 930 931 int ddr3_post_algo_config(void) 932 { 933 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 934 int status; 935 936 status = ddr3_post_run_alg(); 937 if (MV_OK != status) { 938 printf("DDR3 Post Run Alg - FAILED 0x%x\n", status); 939 return status; 940 } 941 942 /* Un_set ECC training mode */ 943 if ((DDR3_IS_ECC_PUP4_MODE(tm->bus_act_mask)) || 944 (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask)) || 945 (DDR3_IS_ECC_PUP8_MODE(tm->bus_act_mask))) { 946 /* Disable ECC Write MUX */ 947 CHECK_STATUS(ddr3_tip_if_write 948 (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE, 949 TRAINING_SW_2_REG, 0x0, 0x100)); 950 /* General ECC and Bus3 ECC MUX remains enabled */ 951 } 952 953 return MV_OK; 954 } 955 956 /* 957 * Run Training Flow 958 */ 959 int hws_ddr3_tip_run_alg(u32 dev_num, enum hws_algo_type algo_type) 960 { 961 int status = MV_OK; 962 963 status = ddr3_pre_algo_config(); 964 if (MV_OK != status) { 965 printf("DDR3 Pre Algo Config - FAILED 0x%x\n", status); 966 return status; 967 } 968 969 #ifdef ODT_TEST_SUPPORT 970 if (finger_test == 1) 971 return odt_test(dev_num, algo_type); 972 #endif 973 974 if (algo_type == ALGO_TYPE_DYNAMIC) { 975 status = ddr3_tip_ddr3_auto_tune(dev_num); 976 } 977 978 if (status != MV_OK) { 979 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 980 ("******** DRAM initialization Failed (res 0x%x) ********\n", 981 status)); 982 return status; 983 } 984 985 status = ddr3_post_algo_config(); 986 if (MV_OK != status) { 987 printf("DDR3 Post Algo Config - FAILED 0x%x\n", status); 988 return status; 989 } 990 991 return status; 992 } 993 994 #ifdef ODT_TEST_SUPPORT 995 /* 996 * ODT Test 997 */ 998 static int odt_test(u32 dev_num, enum hws_algo_type algo_type) 999 { 1000 int ret = MV_OK, ret_tune = MV_OK; 1001 int pfinger_val = 0, nfinger_val; 1002 1003 for (pfinger_val = p_finger_start; pfinger_val <= p_finger_end; 1004 pfinger_val += p_finger_step) { 1005 for (nfinger_val = n_finger_start; nfinger_val <= n_finger_end; 1006 nfinger_val += n_finger_step) { 1007 if (finger_test != 0) { 1008 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 1009 ("pfinger_val %d nfinger_val %d\n", 1010 pfinger_val, nfinger_val)); 1011 /* 1012 * TODO: need to check the correctness 1013 * of the following two lines. 
1014 */ 1015 g_zpodt_data = pfinger_val; 1016 g_znodt_data = nfinger_val; 1017 } 1018 1019 if (algo_type == ALGO_TYPE_DYNAMIC) { 1020 ret = ddr3_tip_ddr3_auto_tune(dev_num); 1021 } 1022 } 1023 } 1024 1025 if (ret_tune != MV_OK) { 1026 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 1027 ("Run_alg: tuning failed %d\n", ret_tune)); 1028 ret = (ret == MV_OK) ? ret_tune : ret; 1029 } 1030 1031 return ret; 1032 } 1033 #endif 1034 1035 /* 1036 * Select Controller 1037 */ 1038 int hws_ddr3_tip_select_ddr_controller(u32 dev_num, int enable) 1039 { 1040 return config_func_info[dev_num]. 1041 tip_dunit_mux_select_func((u8)dev_num, enable); 1042 } 1043 1044 /* 1045 * Dunit Register Write 1046 */ 1047 int ddr3_tip_if_write(u32 dev_num, enum hws_access_type interface_access, 1048 u32 if_id, u32 reg_addr, u32 data_value, u32 mask) 1049 { 1050 config_func_info[dev_num].mv_ddr_dunit_write(reg_addr, mask, data_value); 1051 1052 return MV_OK; 1053 } 1054 1055 /* 1056 * Dunit Register Read 1057 */ 1058 int ddr3_tip_if_read(u32 dev_num, enum hws_access_type interface_access, 1059 u32 if_id, u32 reg_addr, u32 *data, u32 mask) 1060 { 1061 config_func_info[dev_num].mv_ddr_dunit_read(reg_addr, mask, data); 1062 1063 return MV_OK; 1064 } 1065 1066 /* 1067 * Dunit Register Polling 1068 */ 1069 int ddr3_tip_if_polling(u32 dev_num, enum hws_access_type access_type, 1070 u32 if_id, u32 exp_value, u32 mask, u32 offset, 1071 u32 poll_tries) 1072 { 1073 u32 poll_cnt = 0, interface_num = 0, start_if, end_if; 1074 u32 read_data[MAX_INTERFACE_NUM]; 1075 int ret; 1076 int is_fail = 0, is_if_fail; 1077 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1078 1079 if (access_type == ACCESS_TYPE_MULTICAST) { 1080 start_if = 0; 1081 end_if = MAX_INTERFACE_NUM - 1; 1082 } else { 1083 start_if = if_id; 1084 end_if = if_id; 1085 } 1086 1087 for (interface_num = start_if; interface_num <= end_if; interface_num++) { 1088 /* polling bit 3 for n times */ 1089 VALIDATE_IF_ACTIVE(tm->if_act_mask, interface_num); 1090 1091 is_if_fail = 0; 1092 for (poll_cnt = 0; poll_cnt < poll_tries; poll_cnt++) { 1093 ret = 1094 ddr3_tip_if_read(dev_num, ACCESS_TYPE_UNICAST, 1095 interface_num, offset, read_data, 1096 mask); 1097 if (ret != MV_OK) 1098 return ret; 1099 1100 if (read_data[interface_num] == exp_value) 1101 break; 1102 } 1103 1104 if (poll_cnt >= poll_tries) { 1105 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 1106 ("max poll IF #%d\n", interface_num)); 1107 is_fail = 1; 1108 is_if_fail = 1; 1109 } 1110 1111 training_result[training_stage][interface_num] = 1112 (is_if_fail == 1) ? TEST_FAILED : TEST_SUCCESS; 1113 } 1114 1115 return (is_fail == 0) ? MV_OK : MV_FAIL; 1116 } 1117 1118 /* 1119 * Bus read access 1120 */ 1121 int ddr3_tip_bus_read(u32 dev_num, u32 if_id, 1122 enum hws_access_type phy_access, u32 phy_id, 1123 enum hws_ddr_phy phy_type, u32 reg_addr, u32 *data) 1124 { 1125 return config_func_info[dev_num]. 1126 mv_ddr_phy_read(phy_access, phy_id, phy_type, reg_addr, data); 1127 } 1128 1129 /* 1130 * Bus write access 1131 */ 1132 int ddr3_tip_bus_write(u32 dev_num, enum hws_access_type interface_access, 1133 u32 if_id, enum hws_access_type phy_access, 1134 u32 phy_id, enum hws_ddr_phy phy_type, u32 reg_addr, 1135 u32 data_value) 1136 { 1137 return config_func_info[dev_num]. 
1138 mv_ddr_phy_write(phy_access, phy_id, phy_type, reg_addr, data_value, OPERATION_WRITE); 1139 } 1140 1141 1142 /* 1143 * Phy read-modify-write 1144 */ 1145 int ddr3_tip_bus_read_modify_write(u32 dev_num, enum hws_access_type access_type, 1146 u32 interface_id, u32 phy_id, 1147 enum hws_ddr_phy phy_type, u32 reg_addr, 1148 u32 data_value, u32 reg_mask) 1149 { 1150 u32 data_val = 0, if_id, start_if, end_if; 1151 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1152 1153 if (access_type == ACCESS_TYPE_MULTICAST) { 1154 start_if = 0; 1155 end_if = MAX_INTERFACE_NUM - 1; 1156 } else { 1157 start_if = interface_id; 1158 end_if = interface_id; 1159 } 1160 1161 for (if_id = start_if; if_id <= end_if; if_id++) { 1162 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1163 CHECK_STATUS(ddr3_tip_bus_read 1164 (dev_num, if_id, ACCESS_TYPE_UNICAST, phy_id, 1165 phy_type, reg_addr, &data_val)); 1166 data_value = (data_val & (~reg_mask)) | (data_value & reg_mask); 1167 CHECK_STATUS(ddr3_tip_bus_write 1168 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1169 ACCESS_TYPE_UNICAST, phy_id, phy_type, reg_addr, 1170 data_value)); 1171 } 1172 1173 return MV_OK; 1174 } 1175 1176 /* 1177 * ADLL Calibration 1178 */ 1179 int adll_calibration(u32 dev_num, enum hws_access_type access_type, 1180 u32 if_id, enum hws_ddr_freq frequency) 1181 { 1182 struct hws_tip_freq_config_info freq_config_info; 1183 u32 bus_cnt = 0; 1184 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 1185 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1186 1187 /* Reset Diver_b assert -> de-assert */ 1188 CHECK_STATUS(ddr3_tip_if_write 1189 (dev_num, access_type, if_id, SDRAM_CFG_REG, 1190 0, 0x10000000)); 1191 mdelay(10); 1192 CHECK_STATUS(ddr3_tip_if_write 1193 (dev_num, access_type, if_id, SDRAM_CFG_REG, 1194 0x10000000, 0x10000000)); 1195 1196 CHECK_STATUS(config_func_info[dev_num]. 
1197 tip_get_freq_config_info_func((u8)dev_num, frequency, 1198 &freq_config_info)); 1199 1200 for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) { 1201 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt); 1202 CHECK_STATUS(ddr3_tip_bus_read_modify_write 1203 (dev_num, access_type, if_id, bus_cnt, 1204 DDR_PHY_DATA, ADLL_CFG0_PHY_REG, 1205 freq_config_info.bw_per_freq << 8, 0x700)); 1206 CHECK_STATUS(ddr3_tip_bus_read_modify_write 1207 (dev_num, access_type, if_id, bus_cnt, 1208 DDR_PHY_DATA, ADLL_CFG2_PHY_REG, 1209 freq_config_info.rate_per_freq, 0x7)); 1210 } 1211 1212 for (bus_cnt = 0; bus_cnt < DDR_IF_CTRL_SUBPHYS_NUM; bus_cnt++) { 1213 CHECK_STATUS(ddr3_tip_bus_read_modify_write 1214 (dev_num, ACCESS_TYPE_UNICAST, if_id, bus_cnt, 1215 DDR_PHY_CONTROL, ADLL_CFG0_PHY_REG, 1216 freq_config_info.bw_per_freq << 8, 0x700)); 1217 CHECK_STATUS(ddr3_tip_bus_read_modify_write 1218 (dev_num, ACCESS_TYPE_UNICAST, if_id, bus_cnt, 1219 DDR_PHY_CONTROL, ADLL_CFG2_PHY_REG, 1220 freq_config_info.rate_per_freq, 0x7)); 1221 } 1222 1223 /* DUnit to Phy drive post edge, ADLL reset assert de-assert */ 1224 CHECK_STATUS(ddr3_tip_if_write 1225 (dev_num, access_type, if_id, DRAM_PHY_CFG_REG, 1226 0, (0x80000000 | 0x40000000))); 1227 mdelay(100 / (freq_val[frequency] / freq_val[DDR_FREQ_LOW_FREQ])); 1228 CHECK_STATUS(ddr3_tip_if_write 1229 (dev_num, access_type, if_id, DRAM_PHY_CFG_REG, 1230 (0x80000000 | 0x40000000), (0x80000000 | 0x40000000))); 1231 1232 /* polling for ADLL Done */ 1233 if (ddr3_tip_if_polling(dev_num, access_type, if_id, 1234 0x3ff03ff, 0x3ff03ff, PHY_LOCK_STATUS_REG, 1235 MAX_POLLING_ITERATIONS) != MV_OK) { 1236 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 1237 ("Freq_set: DDR3 poll failed(1)")); 1238 } 1239 1240 /* pup data_pup reset assert-> deassert */ 1241 CHECK_STATUS(ddr3_tip_if_write 1242 (dev_num, access_type, if_id, SDRAM_CFG_REG, 1243 0, 0x60000000)); 1244 mdelay(10); 1245 CHECK_STATUS(ddr3_tip_if_write 1246 (dev_num, access_type, if_id, SDRAM_CFG_REG, 1247 0x60000000, 0x60000000)); 1248 1249 return MV_OK; 1250 } 1251 1252 int ddr3_tip_freq_set(u32 dev_num, enum hws_access_type access_type, 1253 u32 if_id, enum hws_ddr_freq frequency) 1254 { 1255 u32 cl_value = 0, cwl_value = 0, mem_mask = 0, val = 0, 1256 bus_cnt = 0, t_wr = 0, t_ckclk = 0, 1257 cnt_id; 1258 u32 end_if, start_if; 1259 u32 bus_index = 0; 1260 int is_dll_off = 0; 1261 enum hws_speed_bin speed_bin_index = 0; 1262 struct hws_tip_freq_config_info freq_config_info; 1263 enum hws_result *flow_result = training_result[training_stage]; 1264 u32 adll_tap = 0; 1265 u32 cs_num; 1266 u32 t2t; 1267 u32 cs_mask[MAX_INTERFACE_NUM]; 1268 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 1269 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1270 unsigned int tclk; 1271 1272 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, 1273 ("dev %d access %d IF %d freq %d\n", dev_num, 1274 access_type, if_id, frequency)); 1275 1276 if (frequency == DDR_FREQ_LOW_FREQ) 1277 is_dll_off = 1; 1278 if (access_type == ACCESS_TYPE_MULTICAST) { 1279 start_if = 0; 1280 end_if = MAX_INTERFACE_NUM - 1; 1281 } else { 1282 start_if = if_id; 1283 end_if = if_id; 1284 } 1285 1286 /* calculate interface cs mask - Oferb 4/11 */ 1287 /* speed bin can be different for each interface */ 1288 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1289 /* cs enable is active low */ 1290 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1291 cs_mask[if_id] = CS_BIT_MASK; 1292 training_result[training_stage][if_id] = TEST_SUCCESS; 1293 
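		/* derive the per-interface CS mask for the current effective_cs (CS enable bits are active low);
		 * the result is used below when re-writing MR0/MR2 via MRS commands
		 */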
ddr3_tip_calc_cs_mask(dev_num, if_id, effective_cs, 1294 &cs_mask[if_id]); 1295 } 1296 1297 /* speed bin can be different for each interface */ 1298 /* 1299 * moti b - need to remove the loop for multicas access functions 1300 * and loop the unicast access functions 1301 */ 1302 for (if_id = start_if; if_id <= end_if; if_id++) { 1303 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1304 1305 flow_result[if_id] = TEST_SUCCESS; 1306 speed_bin_index = 1307 tm->interface_params[if_id].speed_bin_index; 1308 if (tm->interface_params[if_id].memory_freq == 1309 frequency) { 1310 cl_value = 1311 tm->interface_params[if_id].cas_l; 1312 cwl_value = 1313 tm->interface_params[if_id].cas_wl; 1314 } else if (tm->cfg_src == MV_DDR_CFG_SPD) { 1315 tclk = 1000000 / freq_val[frequency]; 1316 cl_value = mv_ddr_cl_calc(tm->timing_data[MV_DDR_TAA_MIN], tclk); 1317 if (cl_value == 0) { 1318 printf("mv_ddr: unsupported cas latency value found\n"); 1319 return MV_FAIL; 1320 } 1321 cwl_value = mv_ddr_cwl_calc(tclk); 1322 if (cwl_value == 0) { 1323 printf("mv_ddr: unsupported cas write latency value found\n"); 1324 return MV_FAIL; 1325 } 1326 } else { 1327 cl_value = 1328 cas_latency_table[speed_bin_index].cl_val[frequency]; 1329 cwl_value = 1330 cas_write_latency_table[speed_bin_index]. 1331 cl_val[frequency]; 1332 } 1333 1334 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, 1335 ("Freq_set dev 0x%x access 0x%x if 0x%x freq 0x%x speed %d:\n\t", 1336 dev_num, access_type, if_id, 1337 frequency, speed_bin_index)); 1338 1339 for (cnt_id = 0; cnt_id < DDR_FREQ_LAST; cnt_id++) { 1340 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, 1341 ("%d ", 1342 cas_latency_table[speed_bin_index]. 1343 cl_val[cnt_id])); 1344 } 1345 1346 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, ("\n")); 1347 mem_mask = 0; 1348 for (bus_index = 0; bus_index < octets_per_if_num; 1349 bus_index++) { 1350 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_index); 1351 mem_mask |= 1352 tm->interface_params[if_id]. 
1353 as_bus_params[bus_index].mirror_enable_bitmask; 1354 } 1355 1356 if (mem_mask != 0) { 1357 /* motib redundent in KW28 */ 1358 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, 1359 if_id, 1360 DUAL_DUNIT_CFG_REG, 0, 0x8)); 1361 } 1362 1363 /* dll state after exiting SR */ 1364 if (is_dll_off == 1) { 1365 CHECK_STATUS(ddr3_tip_if_write 1366 (dev_num, access_type, if_id, 1367 DFS_REG, 0x1, 0x1)); 1368 } else { 1369 CHECK_STATUS(ddr3_tip_if_write 1370 (dev_num, access_type, if_id, 1371 DFS_REG, 0, 0x1)); 1372 } 1373 1374 CHECK_STATUS(ddr3_tip_if_write 1375 (dev_num, access_type, if_id, 1376 DUNIT_MMASK_REG, 0, 0x1)); 1377 /* DFS - block transactions */ 1378 CHECK_STATUS(ddr3_tip_if_write 1379 (dev_num, access_type, if_id, 1380 DFS_REG, 0x2, 0x2)); 1381 1382 /* disable ODT in case of dll off */ 1383 if (is_dll_off == 1) { 1384 CHECK_STATUS(ddr3_tip_if_write 1385 (dev_num, access_type, if_id, 1386 0x1874, 0, 0x244)); 1387 CHECK_STATUS(ddr3_tip_if_write 1388 (dev_num, access_type, if_id, 1389 0x1884, 0, 0x244)); 1390 CHECK_STATUS(ddr3_tip_if_write 1391 (dev_num, access_type, if_id, 1392 0x1894, 0, 0x244)); 1393 CHECK_STATUS(ddr3_tip_if_write 1394 (dev_num, access_type, if_id, 1395 0x18a4, 0, 0x244)); 1396 } 1397 1398 /* DFS - Enter Self-Refresh */ 1399 CHECK_STATUS(ddr3_tip_if_write 1400 (dev_num, access_type, if_id, DFS_REG, 0x4, 1401 0x4)); 1402 /* polling on self refresh entry */ 1403 if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST, 1404 if_id, 0x8, 0x8, DFS_REG, 1405 MAX_POLLING_ITERATIONS) != MV_OK) { 1406 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 1407 ("Freq_set: DDR3 poll failed on SR entry\n")); 1408 } 1409 1410 /* Calculate 2T mode */ 1411 if (mode_2t != 0xff) { 1412 t2t = mode_2t; 1413 } else { 1414 /* Calculate number of CS per interface */ 1415 CHECK_STATUS(calc_cs_num(dev_num, if_id, &cs_num)); 1416 t2t = (cs_num == 1) ? 
0 : 1; 1417 } 1418 1419 1420 if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_INTERLEAVE_WA) == 1) { 1421 /* Use 1T mode if 1:1 ratio configured */ 1422 if (config_func_info[dev_num].tip_get_clock_ratio(frequency) == 1) { 1423 /* Low freq*/ 1424 CHECK_STATUS(ddr3_tip_if_write 1425 (dev_num, access_type, if_id, 1426 SDRAM_OPEN_PAGES_CTRL_REG, 0x0, 0x3C0)); 1427 t2t = 0; 1428 } else { 1429 /* Middle or target freq */ 1430 CHECK_STATUS(ddr3_tip_if_write 1431 (dev_num, access_type, if_id, 1432 SDRAM_OPEN_PAGES_CTRL_REG, 0x3C0, 0x3C0)); 1433 } 1434 } 1435 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1436 DUNIT_CTRL_LOW_REG, t2t << 3, 0x3 << 3)); 1437 1438 /* PLL configuration */ 1439 config_func_info[dev_num].tip_set_freq_divider_func(dev_num, if_id, 1440 frequency); 1441 1442 /* DFS - CL/CWL/WR parameters after exiting SR */ 1443 CHECK_STATUS(ddr3_tip_if_write 1444 (dev_num, access_type, if_id, DFS_REG, 1445 (cl_mask_table[cl_value] << 8), 0xf00)); 1446 CHECK_STATUS(ddr3_tip_if_write 1447 (dev_num, access_type, if_id, DFS_REG, 1448 (cwl_mask_table[cwl_value] << 12), 0x7000)); 1449 1450 t_ckclk = (MEGA / freq_val[frequency]); 1451 t_wr = time_to_nclk(speed_bin_table 1452 (speed_bin_index, 1453 SPEED_BIN_TWR), t_ckclk); 1454 1455 CHECK_STATUS(ddr3_tip_if_write 1456 (dev_num, access_type, if_id, DFS_REG, 1457 (twr_mask_table[t_wr] << 16), 0x70000)); 1458 1459 /* Restore original RTT values if returning from DLL OFF mode */ 1460 if (is_dll_off == 1) { 1461 CHECK_STATUS(ddr3_tip_if_write 1462 (dev_num, access_type, if_id, 0x1874, 1463 g_dic | g_rtt_nom, 0x266)); 1464 CHECK_STATUS(ddr3_tip_if_write 1465 (dev_num, access_type, if_id, 0x1884, 1466 g_dic | g_rtt_nom, 0x266)); 1467 CHECK_STATUS(ddr3_tip_if_write 1468 (dev_num, access_type, if_id, 0x1894, 1469 g_dic | g_rtt_nom, 0x266)); 1470 CHECK_STATUS(ddr3_tip_if_write 1471 (dev_num, access_type, if_id, 0x18a4, 1472 g_dic | g_rtt_nom, 0x266)); 1473 } 1474 1475 /* Reset divider_b assert -> de-assert */ 1476 CHECK_STATUS(ddr3_tip_if_write 1477 (dev_num, access_type, if_id, 1478 SDRAM_CFG_REG, 0, 0x10000000)); 1479 mdelay(10); 1480 CHECK_STATUS(ddr3_tip_if_write 1481 (dev_num, access_type, if_id, 1482 SDRAM_CFG_REG, 0x10000000, 0x10000000)); 1483 1484 /* ADLL configuration function of process and frequency */ 1485 CHECK_STATUS(config_func_info[dev_num]. 1486 tip_get_freq_config_info_func(dev_num, frequency, 1487 &freq_config_info)); 1488 1489 /* TBD check milo5 using device ID ? */ 1490 for (bus_cnt = 0; bus_cnt < octets_per_if_num; 1491 bus_cnt++) { 1492 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt); 1493 CHECK_STATUS(ddr3_tip_bus_read_modify_write 1494 (dev_num, ACCESS_TYPE_UNICAST, 1495 if_id, bus_cnt, DDR_PHY_DATA, 1496 0x92, 1497 freq_config_info. 
				      bw_per_freq << 8
				      /*freq_mask[dev_num][frequency] << 8 */
				      , 0x700));
			CHECK_STATUS(ddr3_tip_bus_read_modify_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      bus_cnt, DDR_PHY_DATA, 0x94,
				      freq_config_info.rate_per_freq, 0x7));
		}

		/* Dunit to PHY drive post edge, ADLL reset assert -> de-assert */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      DRAM_PHY_CFG_REG, 0,
			      (0x80000000 | 0x40000000)));
		mdelay(100 / (freq_val[frequency] / freq_val[DDR_FREQ_LOW_FREQ]));
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      DRAM_PHY_CFG_REG, (0x80000000 | 0x40000000),
			      (0x80000000 | 0x40000000)));

		/* polling for ADLL Done */
		if (ddr3_tip_if_polling
		    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x3ff03ff,
		     0x3ff03ff, PHY_LOCK_STATUS_REG,
		     MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("Freq_set: DDR3 poll failed(1)\n"));
		}

		/* pup data_pup reset assert -> de-assert */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      SDRAM_CFG_REG, 0, 0x60000000));
		mdelay(10);
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      SDRAM_CFG_REG, 0x60000000, 0x60000000));

		/* Set proper timing params before exiting Self-Refresh */
		ddr3_tip_set_timing(dev_num, access_type, if_id, frequency);
		if (delay_enable != 0) {
			adll_tap = (is_dll_off == 1) ? 1000 : (MEGA / (freq_val[frequency] * 64));
			ddr3_tip_cmd_addr_init_delay(dev_num, adll_tap);
		}

		/* Exit SR */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id, DFS_REG, 0,
			      0x4));
		if (ddr3_tip_if_polling
		    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x8, DFS_REG,
		     MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("Freq_set: DDR3 poll failed(2)"));
		}

		/* Refresh Command */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      SDRAM_OP_REG, 0x2, 0xf1f));
		if (ddr3_tip_if_polling
		    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1f,
		     SDRAM_OP_REG, MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("Freq_set: DDR3 poll failed(3)"));
		}

		/* Release DFS Block */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id, DFS_REG, 0,
			      0x2));
		/* Controller to MBUS Retry - normal */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id, DUNIT_MMASK_REG,
			      0x1, 0x1));

		/* MR0: Burst Length 8, CL, Auto_precharge 0x16cc */
		val =
			((cl_mask_table[cl_value] & 0x1) << 2) |
			((cl_mask_table[cl_value] & 0xe) << 3);
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id, MR0_REG,
			      val, (0x7 << 4) | (1 << 2)));
		/* MR2: CWL = 10, Auto Self-Refresh - disable */
		val = (cwl_mask_table[cwl_value] << 3) | g_rtt_wr;
		/*
		 * nklein 24.10.13 - should not be here - leave value as set in
		 * the init configuration val |= (1 << 9);
		 * val |= ((tm->interface_params[if_id].
		 * interface_temp == MV_DDR_TEMP_HIGH) ?
(1 << 7) : 0); 1588 */ 1589 /* nklein 24.10.13 - see above comment */ 1590 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, 1591 if_id, MR2_REG, 1592 val, (0x7 << 3) | (0x3 << 9))); 1593 1594 /* ODT TIMING */ 1595 val = ((cl_value - cwl_value + 1) << 4) | 1596 ((cl_value - cwl_value + 6) << 8) | 1597 ((cl_value - 1) << 12) | ((cl_value + 6) << 16); 1598 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, 1599 if_id, DDR_ODT_TIMING_LOW_REG, 1600 val, 0xffff0)); 1601 val = 0x91 | ((cwl_value - 1) << 8) | ((cwl_value + 5) << 12); 1602 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, 1603 if_id, DDR_ODT_TIMING_HIGH_REG, 1604 val, 0xffff)); 1605 1606 /* in case of ddr4 need to set the receiver to odt always 'on' (odt_config = '0') 1607 * in case of ddr3 configure the odt through the timing 1608 */ 1609 if (odt_config != 0) { 1610 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, DUNIT_ODT_CTRL_REG, 0xf, 0xf)); 1611 } 1612 else { 1613 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, DUNIT_ODT_CTRL_REG, 1614 0x30f, 0x30f)); 1615 } 1616 1617 /* re-write CL */ 1618 val = ((cl_mask_table[cl_value] & 0x1) << 2) | 1619 ((cl_mask_table[cl_value] & 0xe) << 3); 1620 1621 CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask, MR_CMD0, 1622 val, (0x7 << 4) | (0x1 << 2))); 1623 1624 /* re-write CWL */ 1625 val = (cwl_mask_table[cwl_value] << 3) | g_rtt_wr; 1626 CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask, MR_CMD2, 1627 val, (0x7 << 3) | (0x3 << 9))); 1628 1629 if (mem_mask != 0) { 1630 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, 1631 if_id, 1632 DUAL_DUNIT_CFG_REG, 1633 1 << 3, 0x8)); 1634 } 1635 } 1636 1637 return MV_OK; 1638 } 1639 1640 /* 1641 * Set ODT values 1642 */ 1643 static int ddr3_tip_write_odt(u32 dev_num, enum hws_access_type access_type, 1644 u32 if_id, u32 cl_value, u32 cwl_value) 1645 { 1646 /* ODT TIMING */ 1647 u32 val = (cl_value - cwl_value + 6); 1648 1649 val = ((cl_value - cwl_value + 1) << 4) | ((val & 0xf) << 8) | 1650 (((cl_value - 1) & 0xf) << 12) | 1651 (((cl_value + 6) & 0xf) << 16) | (((val & 0x10) >> 4) << 21); 1652 val |= (((cl_value - 1) >> 4) << 22) | (((cl_value + 6) >> 4) << 23); 1653 1654 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1655 DDR_ODT_TIMING_LOW_REG, val, 0xffff0)); 1656 val = 0x91 | ((cwl_value - 1) << 8) | ((cwl_value + 5) << 12); 1657 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1658 DDR_ODT_TIMING_HIGH_REG, val, 0xffff)); 1659 if (odt_additional == 1) { 1660 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, 1661 if_id, 1662 SDRAM_ODT_CTRL_HIGH_REG, 1663 0xf, 0xf)); 1664 } 1665 1666 /* ODT Active */ 1667 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1668 DUNIT_ODT_CTRL_REG, 0xf, 0xf)); 1669 1670 return MV_OK; 1671 } 1672 1673 /* 1674 * Set Timing values for training 1675 */ 1676 static int ddr3_tip_set_timing(u32 dev_num, enum hws_access_type access_type, 1677 u32 if_id, enum hws_ddr_freq frequency) 1678 { 1679 u32 t_ckclk = 0, t_ras = 0; 1680 u32 t_rcd = 0, t_rp = 0, t_wr = 0, t_wtr = 0, t_rrd = 0, t_rtp = 0, 1681 t_rfc = 0, t_mod = 0, t_r2r = 0x3, t_r2r_high = 0, 1682 t_r2w_w2r = 0x3, t_r2w_w2r_high = 0x1, t_w2w = 0x3; 1683 u32 refresh_interval_cnt, t_hclk, t_refi, t_faw, t_pd, t_xpdll; 1684 u32 val = 0, page_size = 0, mask = 0; 1685 enum hws_speed_bin speed_bin_index; 1686 enum mv_ddr_die_capacity memory_size = MV_DDR_DIE_CAP_2GBIT; 1687 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1688 struct page_element *page_param = mv_ddr_page_tbl_get(); 1689 
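	/* look up the speed-bin and die-capacity parameters, convert the JEDEC timing
	 * values to DRAM clock cycles, and program the SDRAM timing low/high registers below
	 */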
1690 speed_bin_index = tm->interface_params[if_id].speed_bin_index; 1691 memory_size = tm->interface_params[if_id].memory_size; 1692 page_size = 1693 (tm->interface_params[if_id].bus_width == 1694 MV_DDR_DEV_WIDTH_8BIT) ? page_param[memory_size]. 1695 page_size_8bit : page_param[memory_size].page_size_16bit; 1696 t_ckclk = (MEGA / freq_val[frequency]); 1697 /* HCLK in[ps] */ 1698 t_hclk = MEGA / (freq_val[frequency] / config_func_info[dev_num].tip_get_clock_ratio(frequency)); 1699 1700 t_refi = (tm->interface_params[if_id].interface_temp == MV_DDR_TEMP_HIGH) ? TREFI_HIGH : TREFI_LOW; 1701 t_refi *= 1000; /* psec */ 1702 refresh_interval_cnt = t_refi / t_hclk; /* no units */ 1703 1704 if (page_size == 1) { 1705 t_faw = speed_bin_table(speed_bin_index, SPEED_BIN_TFAW1K); 1706 t_faw = time_to_nclk(t_faw, t_ckclk); 1707 t_faw = GET_MAX_VALUE(20, t_faw); 1708 } else { /* page size =2, we do not support page size 0.5k */ 1709 t_faw = speed_bin_table(speed_bin_index, SPEED_BIN_TFAW2K); 1710 t_faw = time_to_nclk(t_faw, t_ckclk); 1711 t_faw = GET_MAX_VALUE(28, t_faw); 1712 } 1713 1714 t_pd = GET_MAX_VALUE(t_ckclk * 3, speed_bin_table(speed_bin_index, SPEED_BIN_TPD)); 1715 t_pd = time_to_nclk(t_pd, t_ckclk); 1716 1717 t_xpdll = GET_MAX_VALUE(t_ckclk * 10, speed_bin_table(speed_bin_index, SPEED_BIN_TXPDLL)); 1718 t_xpdll = time_to_nclk(t_xpdll, t_ckclk); 1719 1720 t_rrd = (page_size == 1) ? speed_bin_table(speed_bin_index, 1721 SPEED_BIN_TRRD1K) : 1722 speed_bin_table(speed_bin_index, SPEED_BIN_TRRD2K); 1723 t_rrd = GET_MAX_VALUE(t_ckclk * 4, t_rrd); 1724 t_rtp = GET_MAX_VALUE(t_ckclk * 4, speed_bin_table(speed_bin_index, 1725 SPEED_BIN_TRTP)); 1726 t_mod = GET_MAX_VALUE(t_ckclk * 12, 15000); 1727 t_wtr = GET_MAX_VALUE(t_ckclk * 4, speed_bin_table(speed_bin_index, 1728 SPEED_BIN_TWTR)); 1729 t_ras = time_to_nclk(speed_bin_table(speed_bin_index, 1730 SPEED_BIN_TRAS), 1731 t_ckclk); 1732 t_rcd = time_to_nclk(speed_bin_table(speed_bin_index, 1733 SPEED_BIN_TRCD), 1734 t_ckclk); 1735 t_rp = time_to_nclk(speed_bin_table(speed_bin_index, 1736 SPEED_BIN_TRP), 1737 t_ckclk); 1738 t_wr = time_to_nclk(speed_bin_table(speed_bin_index, 1739 SPEED_BIN_TWR), 1740 t_ckclk); 1741 t_wtr = time_to_nclk(t_wtr, t_ckclk); 1742 t_rrd = time_to_nclk(t_rrd, t_ckclk); 1743 t_rtp = time_to_nclk(t_rtp, t_ckclk); 1744 t_rfc = time_to_nclk(rfc_table[memory_size] * 1000, t_ckclk); 1745 t_mod = time_to_nclk(t_mod, t_ckclk); 1746 1747 /* SDRAM Timing Low */ 1748 val = (((t_ras - 1) & SDRAM_TIMING_LOW_TRAS_MASK) << SDRAM_TIMING_LOW_TRAS_OFFS) | 1749 (((t_rcd - 1) & SDRAM_TIMING_LOW_TRCD_MASK) << SDRAM_TIMING_LOW_TRCD_OFFS) | 1750 (((t_rcd - 1) >> SDRAM_TIMING_LOW_TRCD_OFFS & SDRAM_TIMING_HIGH_TRCD_MASK) 1751 << SDRAM_TIMING_HIGH_TRCD_OFFS) | 1752 (((t_rp - 1) & SDRAM_TIMING_LOW_TRP_MASK) << SDRAM_TIMING_LOW_TRP_OFFS) | 1753 (((t_rp - 1) >> SDRAM_TIMING_LOW_TRP_MASK & SDRAM_TIMING_HIGH_TRP_MASK) 1754 << SDRAM_TIMING_HIGH_TRP_OFFS) | 1755 (((t_wr - 1) & SDRAM_TIMING_LOW_TWR_MASK) << SDRAM_TIMING_LOW_TWR_OFFS) | 1756 (((t_wtr - 1) & SDRAM_TIMING_LOW_TWTR_MASK) << SDRAM_TIMING_LOW_TWTR_OFFS) | 1757 ((((t_ras - 1) >> 4) & SDRAM_TIMING_LOW_TRAS_HIGH_MASK) << SDRAM_TIMING_LOW_TRAS_HIGH_OFFS) | 1758 (((t_rrd - 1) & SDRAM_TIMING_LOW_TRRD_MASK) << SDRAM_TIMING_LOW_TRRD_OFFS) | 1759 (((t_rtp - 1) & SDRAM_TIMING_LOW_TRTP_MASK) << SDRAM_TIMING_LOW_TRTP_OFFS); 1760 1761 mask = (SDRAM_TIMING_LOW_TRAS_MASK << SDRAM_TIMING_LOW_TRAS_OFFS) | 1762 (SDRAM_TIMING_LOW_TRCD_MASK << SDRAM_TIMING_LOW_TRCD_OFFS) | 1763 (SDRAM_TIMING_HIGH_TRCD_MASK << 
SDRAM_TIMING_HIGH_TRCD_OFFS) | 1764 (SDRAM_TIMING_LOW_TRP_MASK << SDRAM_TIMING_LOW_TRP_OFFS) | 1765 (SDRAM_TIMING_HIGH_TRP_MASK << SDRAM_TIMING_HIGH_TRP_OFFS) | 1766 (SDRAM_TIMING_LOW_TWR_MASK << SDRAM_TIMING_LOW_TWR_OFFS) | 1767 (SDRAM_TIMING_LOW_TWTR_MASK << SDRAM_TIMING_LOW_TWTR_OFFS) | 1768 (SDRAM_TIMING_LOW_TRAS_HIGH_MASK << SDRAM_TIMING_LOW_TRAS_HIGH_OFFS) | 1769 (SDRAM_TIMING_LOW_TRRD_MASK << SDRAM_TIMING_LOW_TRRD_OFFS) | 1770 (SDRAM_TIMING_LOW_TRTP_MASK << SDRAM_TIMING_LOW_TRTP_OFFS); 1771 1772 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1773 SDRAM_TIMING_LOW_REG, val, mask)); 1774 1775 /* SDRAM Timing High */ 1776 val = 0; 1777 mask = 0; 1778 1779 val = (((t_rfc - 1) & SDRAM_TIMING_HIGH_TRFC_MASK) << SDRAM_TIMING_HIGH_TRFC_OFFS) | 1780 ((t_r2r & SDRAM_TIMING_HIGH_TR2R_MASK) << SDRAM_TIMING_HIGH_TR2R_OFFS) | 1781 ((t_r2w_w2r & SDRAM_TIMING_HIGH_TR2W_W2R_MASK) << SDRAM_TIMING_HIGH_TR2W_W2R_OFFS) | 1782 ((t_w2w & SDRAM_TIMING_HIGH_TW2W_MASK) << SDRAM_TIMING_HIGH_TW2W_OFFS) | 1783 ((((t_rfc - 1) >> 7) & SDRAM_TIMING_HIGH_TRFC_HIGH_MASK) << SDRAM_TIMING_HIGH_TRFC_HIGH_OFFS) | 1784 ((t_r2r_high & SDRAM_TIMING_HIGH_TR2R_HIGH_MASK) << SDRAM_TIMING_HIGH_TR2R_HIGH_OFFS) | 1785 ((t_r2w_w2r_high & SDRAM_TIMING_HIGH_TR2W_W2R_HIGH_MASK) << SDRAM_TIMING_HIGH_TR2W_W2R_HIGH_OFFS) | 1786 (((t_mod - 1) & SDRAM_TIMING_HIGH_TMOD_MASK) << SDRAM_TIMING_HIGH_TMOD_OFFS) | 1787 ((((t_mod - 1) >> 4) & SDRAM_TIMING_HIGH_TMOD_HIGH_MASK) << SDRAM_TIMING_HIGH_TMOD_HIGH_OFFS); 1788 1789 mask = (SDRAM_TIMING_HIGH_TRFC_MASK << SDRAM_TIMING_HIGH_TRFC_OFFS) | 1790 (SDRAM_TIMING_HIGH_TR2R_MASK << SDRAM_TIMING_HIGH_TR2R_OFFS) | 1791 (SDRAM_TIMING_HIGH_TR2W_W2R_MASK << SDRAM_TIMING_HIGH_TR2W_W2R_OFFS) | 1792 (SDRAM_TIMING_HIGH_TW2W_MASK << SDRAM_TIMING_HIGH_TW2W_OFFS) | 1793 (SDRAM_TIMING_HIGH_TRFC_HIGH_MASK << SDRAM_TIMING_HIGH_TRFC_HIGH_OFFS) | 1794 (SDRAM_TIMING_HIGH_TR2R_HIGH_MASK << SDRAM_TIMING_HIGH_TR2R_HIGH_OFFS) | 1795 (SDRAM_TIMING_HIGH_TR2W_W2R_HIGH_MASK << SDRAM_TIMING_HIGH_TR2W_W2R_HIGH_OFFS) | 1796 (SDRAM_TIMING_HIGH_TMOD_MASK << SDRAM_TIMING_HIGH_TMOD_OFFS) | 1797 (SDRAM_TIMING_HIGH_TMOD_HIGH_MASK << SDRAM_TIMING_HIGH_TMOD_HIGH_OFFS); 1798 1799 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1800 SDRAM_TIMING_HIGH_REG, val, mask)); 1801 1802 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1803 SDRAM_CFG_REG, 1804 refresh_interval_cnt << REFRESH_OFFS, 1805 REFRESH_MASK << REFRESH_OFFS)); 1806 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1807 SDRAM_ADDR_CTRL_REG, (t_faw - 1) << T_FAW_OFFS, 1808 T_FAW_MASK << T_FAW_OFFS)); 1809 1810 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, DDR_TIMING_REG, 1811 (t_pd - 1) << DDR_TIMING_TPD_OFFS | 1812 (t_xpdll - 1) << DDR_TIMING_TXPDLL_OFFS, 1813 DDR_TIMING_TPD_MASK << DDR_TIMING_TPD_OFFS | 1814 DDR_TIMING_TXPDLL_MASK << DDR_TIMING_TXPDLL_OFFS)); 1815 1816 1817 return MV_OK; 1818 } 1819 1820 1821 /* 1822 * Mode Read 1823 */ 1824 int hws_ddr3_tip_mode_read(u32 dev_num, struct mode_info *mode_info) 1825 { 1826 u32 ret; 1827 1828 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1829 MR0_REG, mode_info->reg_mr0, MASK_ALL_BITS); 1830 if (ret != MV_OK) 1831 return ret; 1832 1833 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1834 MR1_REG, mode_info->reg_mr1, MASK_ALL_BITS); 1835 if (ret != MV_OK) 1836 return ret; 1837 1838 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1839 MR2_REG, mode_info->reg_mr2, MASK_ALL_BITS); 1840 if (ret != 
MV_OK) 1841 return ret; 1842 1843 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1844 MR3_REG, mode_info->reg_mr2, MASK_ALL_BITS); 1845 if (ret != MV_OK) 1846 return ret; 1847 1848 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1849 RD_DATA_SMPL_DLYS_REG, mode_info->read_data_sample, 1850 MASK_ALL_BITS); 1851 if (ret != MV_OK) 1852 return ret; 1853 1854 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1855 RD_DATA_RDY_DLYS_REG, mode_info->read_data_ready, 1856 MASK_ALL_BITS); 1857 if (ret != MV_OK) 1858 return ret; 1859 1860 return MV_OK; 1861 } 1862 1863 /* 1864 * Get first active IF 1865 */ 1866 int ddr3_tip_get_first_active_if(u8 dev_num, u32 interface_mask, 1867 u32 *interface_id) 1868 { 1869 u32 if_id; 1870 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1871 1872 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1873 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1874 if (interface_mask & (1 << if_id)) { 1875 *interface_id = if_id; 1876 break; 1877 } 1878 } 1879 1880 return MV_OK; 1881 } 1882 1883 /* 1884 * Write CS Result 1885 */ 1886 int ddr3_tip_write_cs_result(u32 dev_num, u32 offset) 1887 { 1888 u32 if_id, bus_num, cs_bitmask, data_val, cs_num; 1889 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 1890 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1891 1892 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1893 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1894 for (bus_num = 0; bus_num < octets_per_if_num; 1895 bus_num++) { 1896 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_num); 1897 cs_bitmask = 1898 tm->interface_params[if_id]. 1899 as_bus_params[bus_num].cs_bitmask; 1900 if (cs_bitmask != effective_cs) { 1901 cs_num = GET_CS_FROM_MASK(cs_bitmask); 1902 ddr3_tip_bus_read(dev_num, if_id, 1903 ACCESS_TYPE_UNICAST, bus_num, 1904 DDR_PHY_DATA, 1905 offset + 1906 (effective_cs * 0x4), 1907 &data_val); 1908 ddr3_tip_bus_write(dev_num, 1909 ACCESS_TYPE_UNICAST, 1910 if_id, 1911 ACCESS_TYPE_UNICAST, 1912 bus_num, DDR_PHY_DATA, 1913 offset + 1914 (cs_num * 0x4), 1915 data_val); 1916 } 1917 } 1918 } 1919 1920 return MV_OK; 1921 } 1922 1923 /* 1924 * Write MRS 1925 */ 1926 int ddr3_tip_write_mrs_cmd(u32 dev_num, u32 *cs_mask_arr, enum mr_number mr_num, u32 data, u32 mask) 1927 { 1928 u32 if_id; 1929 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1930 1931 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 1932 PARAM_NOT_CARE, mr_data[mr_num].reg_addr, data, mask)); 1933 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1934 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1935 CHECK_STATUS(ddr3_tip_if_write 1936 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1937 SDRAM_OP_REG, 1938 (cs_mask_arr[if_id] << 8) | mr_data[mr_num].cmd, 0xf1f)); 1939 } 1940 1941 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1942 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1943 if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 1944 0x1f, SDRAM_OP_REG, 1945 MAX_POLLING_ITERATIONS) != MV_OK) { 1946 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 1947 ("write_mrs_cmd: Poll cmd fail")); 1948 } 1949 } 1950 1951 return MV_OK; 1952 } 1953 1954 /* 1955 * Reset XSB Read FIFO 1956 */ 1957 int ddr3_tip_reset_fifo_ptr(u32 dev_num) 1958 { 1959 u32 if_id = 0; 1960 1961 /* Configure PHY reset value to 0 in order to "clean" the FIFO */ 1962 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 1963 if_id, 0x15c8, 0, 0xff000000)); 1964 /* 1965 
* Move PHY to RL mode (only in RL mode the PHY overrides FIFO values 1966 * during FIFO reset) 1967 */ 1968 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 1969 if_id, TRAINING_SW_2_REG, 1970 0x1, 0x9)); 1971 /* In order that above configuration will influence the PHY */ 1972 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 1973 if_id, 0x15b0, 1974 0x80000000, 0x80000000)); 1975 /* Reset read fifo assertion */ 1976 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 1977 if_id, 0x1400, 0, 0x40000000)); 1978 /* Reset read fifo deassertion */ 1979 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 1980 if_id, 0x1400, 1981 0x40000000, 0x40000000)); 1982 /* Move PHY back to functional mode */ 1983 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 1984 if_id, TRAINING_SW_2_REG, 1985 0x8, 0x9)); 1986 /* Stop training machine */ 1987 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 1988 if_id, 0x15b4, 0x10000, 0x10000)); 1989 1990 return MV_OK; 1991 } 1992 1993 /* 1994 * Reset Phy registers 1995 */ 1996 int ddr3_tip_ddr3_reset_phy_regs(u32 dev_num) 1997 { 1998 u32 if_id, phy_id, cs; 1999 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 2000 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 2001 2002 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 2003 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 2004 for (phy_id = 0; phy_id < octets_per_if_num; 2005 phy_id++) { 2006 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, phy_id); 2007 CHECK_STATUS(ddr3_tip_bus_write 2008 (dev_num, ACCESS_TYPE_UNICAST, 2009 if_id, ACCESS_TYPE_UNICAST, 2010 phy_id, DDR_PHY_DATA, 2011 WL_PHY_REG(effective_cs), 2012 phy_reg0_val)); 2013 CHECK_STATUS(ddr3_tip_bus_write 2014 (dev_num, ACCESS_TYPE_UNICAST, if_id, 2015 ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA, 2016 RL_PHY_REG(effective_cs), 2017 phy_reg2_val)); 2018 CHECK_STATUS(ddr3_tip_bus_write 2019 (dev_num, ACCESS_TYPE_UNICAST, if_id, 2020 ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA, 2021 CRX_PHY_REG(effective_cs), phy_reg3_val)); 2022 CHECK_STATUS(ddr3_tip_bus_write 2023 (dev_num, ACCESS_TYPE_UNICAST, if_id, 2024 ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA, 2025 CTX_PHY_REG(effective_cs), phy_reg1_val)); 2026 CHECK_STATUS(ddr3_tip_bus_write 2027 (dev_num, ACCESS_TYPE_UNICAST, if_id, 2028 ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA, 2029 PBS_TX_BCAST_PHY_REG(effective_cs), 0x0)); 2030 CHECK_STATUS(ddr3_tip_bus_write 2031 (dev_num, ACCESS_TYPE_UNICAST, if_id, 2032 ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA, 2033 PBS_RX_BCAST_PHY_REG(effective_cs), 0)); 2034 CHECK_STATUS(ddr3_tip_bus_write 2035 (dev_num, ACCESS_TYPE_UNICAST, if_id, 2036 ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA, 2037 PBS_TX_PHY_REG(effective_cs, DQSP_PAD), 0)); 2038 CHECK_STATUS(ddr3_tip_bus_write 2039 (dev_num, ACCESS_TYPE_UNICAST, if_id, 2040 ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA, 2041 PBS_RX_PHY_REG(effective_cs, DQSP_PAD), 0)); 2042 CHECK_STATUS(ddr3_tip_bus_write 2043 (dev_num, ACCESS_TYPE_UNICAST, if_id, 2044 ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA, 2045 PBS_TX_PHY_REG(effective_cs, DQSN_PAD), 0)); 2046 CHECK_STATUS(ddr3_tip_bus_write 2047 (dev_num, ACCESS_TYPE_UNICAST, if_id, 2048 ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA, 2049 PBS_RX_PHY_REG(effective_cs, DQSN_PAD), 0)); 2050 } 2051 } 2052 2053 /* Set Receiver Calibration value */ 2054 for (cs = 0; cs < MAX_CS_NUM; cs++) { 2055 /* PHY register 0xdb bits[5:0] - configure to 63 */ 2056 CHECK_STATUS(ddr3_tip_bus_write 2057 
(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 2058 ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 2059 DDR_PHY_DATA, VREF_BCAST_PHY_REG(cs), 63)); 2060 } 2061 2062 return MV_OK; 2063 } 2064 2065 /* 2066 * Restore Dunit registers 2067 */ 2068 int ddr3_tip_restore_dunit_regs(u32 dev_num) 2069 { 2070 u32 index_cnt; 2071 2072 mv_ddr_set_calib_controller(); 2073 2074 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 2075 PARAM_NOT_CARE, MAIN_PADS_CAL_MACH_CTRL_REG, 2076 0x1, 0x1)); 2077 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 2078 PARAM_NOT_CARE, MAIN_PADS_CAL_MACH_CTRL_REG, 2079 calibration_update_control << 3, 2080 0x3 << 3)); 2081 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 2082 PARAM_NOT_CARE, 2083 ODPG_WR_RD_MODE_ENA_REG, 2084 0xffff, MASK_ALL_BITS)); 2085 2086 for (index_cnt = 0; index_cnt < ARRAY_SIZE(odpg_default_value); 2087 index_cnt++) { 2088 CHECK_STATUS(ddr3_tip_if_write 2089 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 2090 odpg_default_value[index_cnt].reg_addr, 2091 odpg_default_value[index_cnt].reg_data, 2092 odpg_default_value[index_cnt].reg_mask)); 2093 } 2094 2095 return MV_OK; 2096 } 2097 2098 int ddr3_tip_adll_regs_bypass(u32 dev_num, u32 reg_val1, u32 reg_val2) 2099 { 2100 u32 if_id, phy_id; 2101 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 2102 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 2103 2104 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 2105 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 2106 for (phy_id = 0; phy_id < octets_per_if_num; phy_id++) { 2107 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, phy_id); 2108 CHECK_STATUS(ddr3_tip_bus_write 2109 (dev_num, ACCESS_TYPE_UNICAST, if_id, 2110 ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA, 2111 CTX_PHY_REG(effective_cs), reg_val1)); 2112 CHECK_STATUS(ddr3_tip_bus_write 2113 (dev_num, ACCESS_TYPE_UNICAST, if_id, 2114 ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA, 2115 PBS_TX_BCAST_PHY_REG(effective_cs), reg_val2)); 2116 } 2117 } 2118 2119 return MV_OK; 2120 } 2121 2122 /* 2123 * Auto tune main flow 2124 */ 2125 static int ddr3_tip_ddr3_training_main_flow(u32 dev_num) 2126 { 2127 /* TODO: enable this functionality for other platforms */ 2128 #if defined(CONFIG_ARMADA_38X) || defined(CONFIG_ARMADA_39X) 2129 struct init_cntr_param init_cntr_prm; 2130 #endif 2131 int ret = MV_OK; 2132 int adll_bypass_flag = 0; 2133 u32 if_id; 2134 u32 max_cs = ddr3_tip_max_cs_get(dev_num); 2135 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 2136 enum hws_ddr_freq freq = tm->interface_params[0].memory_freq; 2137 2138 #ifdef DDR_VIEWER_TOOL 2139 if (debug_training == DEBUG_LEVEL_TRACE) { 2140 CHECK_STATUS(print_device_info((u8)dev_num)); 2141 } 2142 #endif 2143 2144 ddr3_tip_validate_algo_components(dev_num); 2145 2146 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 2147 CHECK_STATUS(ddr3_tip_ddr3_reset_phy_regs(dev_num)); 2148 } 2149 /* Set to 0 after each loop to avoid illegal value may be used */ 2150 effective_cs = 0; 2151 2152 freq_val[DDR_FREQ_LOW_FREQ] = dfs_low_freq; 2153 2154 if (is_pll_before_init != 0) { 2155 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) { 2156 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 2157 config_func_info[dev_num].tip_set_freq_divider_func( 2158 (u8)dev_num, if_id, freq); 2159 } 2160 } 2161 2162 /* TODO: enable this functionality for other platforms */ 2163 #if defined(CONFIG_ARMADA_38X) || defined(CONFIG_ARMADA_39X) 2164 if (is_adll_calib_before_init != 0) { 2165 
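		/*
		 * Optional ADLL calibration pass run before the controller
		 * is initialized (gated by is_adll_calib_before_init);
		 * adll_calibration() is called again unconditionally right
		 * after the INIT_CONTROLLER stage below.
		 */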
DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2166 ("with adll calib before init\n")); 2167 adll_calibration(dev_num, ACCESS_TYPE_MULTICAST, 0, freq); 2168 } 2169 2170 if (is_reg_dump != 0) { 2171 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2172 ("Dump before init controller\n")); 2173 ddr3_tip_reg_dump(dev_num); 2174 } 2175 2176 if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) { 2177 training_stage = INIT_CONTROLLER; 2178 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2179 ("INIT_CONTROLLER_MASK_BIT\n")); 2180 init_cntr_prm.do_mrs_phy = 1; 2181 init_cntr_prm.is_ctrl64_bit = 0; 2182 init_cntr_prm.init_phy = 1; 2183 init_cntr_prm.msys_init = 0; 2184 ret = hws_ddr3_tip_init_controller(dev_num, &init_cntr_prm); 2185 if (is_reg_dump != 0) 2186 ddr3_tip_reg_dump(dev_num); 2187 if (ret != MV_OK) { 2188 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2189 ("hws_ddr3_tip_init_controller failure\n")); 2190 if (debug_mode == 0) 2191 return MV_FAIL; 2192 } 2193 } 2194 #endif 2195 2196 ret = adll_calibration(dev_num, ACCESS_TYPE_MULTICAST, 0, freq); 2197 if (ret != MV_OK) { 2198 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2199 ("adll_calibration failure\n")); 2200 if (debug_mode == 0) 2201 return MV_FAIL; 2202 } 2203 2204 if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) { 2205 training_stage = SET_LOW_FREQ; 2206 2207 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 2208 ddr3_tip_adll_regs_bypass(dev_num, 0, 0x1f); 2209 adll_bypass_flag = 1; 2210 } 2211 effective_cs = 0; 2212 2213 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2214 ("SET_LOW_FREQ_MASK_BIT %d\n", 2215 freq_val[low_freq])); 2216 ret = ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST, 2217 PARAM_NOT_CARE, low_freq); 2218 if (is_reg_dump != 0) 2219 ddr3_tip_reg_dump(dev_num); 2220 if (ret != MV_OK) { 2221 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2222 ("ddr3_tip_freq_set failure\n")); 2223 if (debug_mode == 0) 2224 return MV_FAIL; 2225 } 2226 } 2227 2228 if (mask_tune_func & WRITE_LEVELING_LF_MASK_BIT) { 2229 training_stage = WRITE_LEVELING_LF; 2230 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2231 ("WRITE_LEVELING_LF_MASK_BIT\n")); 2232 ret = ddr3_tip_dynamic_write_leveling(dev_num, 1); 2233 if (is_reg_dump != 0) 2234 ddr3_tip_reg_dump(dev_num); 2235 if (ret != MV_OK) { 2236 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2237 ("ddr3_tip_dynamic_write_leveling LF failure\n")); 2238 if (debug_mode == 0) 2239 return MV_FAIL; 2240 } 2241 } 2242 2243 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 2244 if (mask_tune_func & LOAD_PATTERN_MASK_BIT) { 2245 training_stage = LOAD_PATTERN; 2246 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2247 ("LOAD_PATTERN_MASK_BIT #%d\n", 2248 effective_cs)); 2249 ret = ddr3_tip_load_all_pattern_to_mem(dev_num); 2250 if (is_reg_dump != 0) 2251 ddr3_tip_reg_dump(dev_num); 2252 if (ret != MV_OK) { 2253 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2254 ("ddr3_tip_load_all_pattern_to_mem failure CS #%d\n", 2255 effective_cs)); 2256 if (debug_mode == 0) 2257 return MV_FAIL; 2258 } 2259 } 2260 } 2261 2262 if (adll_bypass_flag == 1) { 2263 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 2264 ddr3_tip_adll_regs_bypass(dev_num, phy_reg1_val, 0); 2265 adll_bypass_flag = 0; 2266 } 2267 } 2268 2269 /* Set to 0 after each loop to avoid illegal value may be used */ 2270 effective_cs = 0; 2271 2272 if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) { 2273 training_stage = SET_MEDIUM_FREQ; 2274 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2275 ("SET_MEDIUM_FREQ_MASK_BIT %d\n", 2276 freq_val[medium_freq])); 2277 ret = 2278 ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST, 2279 PARAM_NOT_CARE, 
medium_freq); 2280 if (is_reg_dump != 0) 2281 ddr3_tip_reg_dump(dev_num); 2282 if (ret != MV_OK) { 2283 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2284 ("ddr3_tip_freq_set failure\n")); 2285 if (debug_mode == 0) 2286 return MV_FAIL; 2287 } 2288 } 2289 2290 if (mask_tune_func & WRITE_LEVELING_MASK_BIT) { 2291 training_stage = WRITE_LEVELING; 2292 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2293 ("WRITE_LEVELING_MASK_BIT\n")); 2294 if ((rl_mid_freq_wa == 0) || (freq_val[medium_freq] == 533)) { 2295 ret = ddr3_tip_dynamic_write_leveling(dev_num, 0); 2296 } else { 2297 /* Use old WL */ 2298 ret = ddr3_tip_legacy_dynamic_write_leveling(dev_num); 2299 } 2300 2301 if (is_reg_dump != 0) 2302 ddr3_tip_reg_dump(dev_num); 2303 if (ret != MV_OK) { 2304 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2305 ("ddr3_tip_dynamic_write_leveling failure\n")); 2306 if (debug_mode == 0) 2307 return MV_FAIL; 2308 } 2309 } 2310 2311 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 2312 if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) { 2313 training_stage = LOAD_PATTERN_2; 2314 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2315 ("LOAD_PATTERN_2_MASK_BIT CS #%d\n", 2316 effective_cs)); 2317 ret = ddr3_tip_load_all_pattern_to_mem(dev_num); 2318 if (is_reg_dump != 0) 2319 ddr3_tip_reg_dump(dev_num); 2320 if (ret != MV_OK) { 2321 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2322 ("ddr3_tip_load_all_pattern_to_mem failure CS #%d\n", 2323 effective_cs)); 2324 if (debug_mode == 0) 2325 return MV_FAIL; 2326 } 2327 } 2328 } 2329 /* Set to 0 after each loop to avoid illegal value may be used */ 2330 effective_cs = 0; 2331 2332 if (mask_tune_func & READ_LEVELING_MASK_BIT) { 2333 training_stage = READ_LEVELING; 2334 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2335 ("READ_LEVELING_MASK_BIT\n")); 2336 if ((rl_mid_freq_wa == 0) || (freq_val[medium_freq] == 533)) { 2337 ret = ddr3_tip_dynamic_read_leveling(dev_num, medium_freq); 2338 } else { 2339 /* Use old RL */ 2340 ret = ddr3_tip_legacy_dynamic_read_leveling(dev_num); 2341 } 2342 2343 if (is_reg_dump != 0) 2344 ddr3_tip_reg_dump(dev_num); 2345 if (ret != MV_OK) { 2346 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2347 ("ddr3_tip_dynamic_read_leveling failure\n")); 2348 if (debug_mode == 0) 2349 return MV_FAIL; 2350 } 2351 } 2352 2353 if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) { 2354 training_stage = WRITE_LEVELING_SUPP; 2355 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2356 ("WRITE_LEVELING_SUPP_MASK_BIT\n")); 2357 ret = ddr3_tip_dynamic_write_leveling_supp(dev_num); 2358 if (is_reg_dump != 0) 2359 ddr3_tip_reg_dump(dev_num); 2360 if (ret != MV_OK) { 2361 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2362 ("ddr3_tip_dynamic_write_leveling_supp failure\n")); 2363 if (debug_mode == 0) 2364 return MV_FAIL; 2365 } 2366 } 2367 2368 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 2369 if (mask_tune_func & PBS_RX_MASK_BIT) { 2370 training_stage = PBS_RX; 2371 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2372 ("PBS_RX_MASK_BIT CS #%d\n", 2373 effective_cs)); 2374 ret = ddr3_tip_pbs_rx(dev_num); 2375 if (is_reg_dump != 0) 2376 ddr3_tip_reg_dump(dev_num); 2377 if (ret != MV_OK) { 2378 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2379 ("ddr3_tip_pbs_rx failure CS #%d\n", 2380 effective_cs)); 2381 if (debug_mode == 0) 2382 return MV_FAIL; 2383 } 2384 } 2385 } 2386 2387 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 2388 if (mask_tune_func & PBS_TX_MASK_BIT) { 2389 training_stage = PBS_TX; 2390 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2391 ("PBS_TX_MASK_BIT CS #%d\n", 2392 effective_cs)); 2393 ret = ddr3_tip_pbs_tx(dev_num); 2394 if 
(is_reg_dump != 0) 2395 ddr3_tip_reg_dump(dev_num); 2396 if (ret != MV_OK) { 2397 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2398 ("ddr3_tip_pbs_tx failure CS #%d\n", 2399 effective_cs)); 2400 if (debug_mode == 0) 2401 return MV_FAIL; 2402 } 2403 } 2404 } 2405 /* Set to 0 after each loop to avoid illegal value may be used */ 2406 effective_cs = 0; 2407 2408 if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) { 2409 training_stage = SET_TARGET_FREQ; 2410 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2411 ("SET_TARGET_FREQ_MASK_BIT %d\n", 2412 freq_val[tm-> 2413 interface_params[first_active_if]. 2414 memory_freq])); 2415 ret = ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST, 2416 PARAM_NOT_CARE, 2417 tm->interface_params[first_active_if]. 2418 memory_freq); 2419 #if defined(A70X0) || defined(A80X0) 2420 if (apn806_rev_id_get() == APN806_REV_ID_A0) { 2421 reg_write(0x6f812c, extension_avs); 2422 reg_write(0x6f8130, nominal_avs); 2423 } 2424 #endif /* #if defined(A70X0) || defined(A80X0) */ 2425 if (is_reg_dump != 0) 2426 ddr3_tip_reg_dump(dev_num); 2427 if (ret != MV_OK) { 2428 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2429 ("ddr3_tip_freq_set failure\n")); 2430 if (debug_mode == 0) 2431 return MV_FAIL; 2432 } 2433 } 2434 2435 if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) { 2436 training_stage = WRITE_LEVELING_TF; 2437 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2438 ("WRITE_LEVELING_TF_MASK_BIT\n")); 2439 ret = ddr3_tip_dynamic_write_leveling(dev_num, 0); 2440 if (is_reg_dump != 0) 2441 ddr3_tip_reg_dump(dev_num); 2442 if (ret != MV_OK) { 2443 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2444 ("ddr3_tip_dynamic_write_leveling TF failure\n")); 2445 if (debug_mode == 0) 2446 return MV_FAIL; 2447 } 2448 } 2449 2450 if (mask_tune_func & LOAD_PATTERN_HIGH_MASK_BIT) { 2451 training_stage = LOAD_PATTERN_HIGH; 2452 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("LOAD_PATTERN_HIGH\n")); 2453 ret = ddr3_tip_load_all_pattern_to_mem(dev_num); 2454 if (is_reg_dump != 0) 2455 ddr3_tip_reg_dump(dev_num); 2456 if (ret != MV_OK) { 2457 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2458 ("ddr3_tip_load_all_pattern_to_mem failure\n")); 2459 if (debug_mode == 0) 2460 return MV_FAIL; 2461 } 2462 } 2463 2464 if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) { 2465 training_stage = READ_LEVELING_TF; 2466 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2467 ("READ_LEVELING_TF_MASK_BIT\n")); 2468 ret = ddr3_tip_dynamic_read_leveling(dev_num, tm-> 2469 interface_params[first_active_if]. 
2470 memory_freq); 2471 if (is_reg_dump != 0) 2472 ddr3_tip_reg_dump(dev_num); 2473 if (ret != MV_OK) { 2474 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2475 ("ddr3_tip_dynamic_read_leveling TF failure\n")); 2476 if (debug_mode == 0) 2477 return MV_FAIL; 2478 } 2479 } 2480 2481 if (mask_tune_func & RL_DQS_BURST_MASK_BIT) { 2482 training_stage = READ_LEVELING_TF; 2483 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2484 ("RL_DQS_BURST_MASK_BIT\n")); 2485 ret = mv_ddr_rl_dqs_burst(0, 0, tm->interface_params[0].memory_freq); 2486 if (is_reg_dump != 0) 2487 ddr3_tip_reg_dump(dev_num); 2488 if (ret != MV_OK) { 2489 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2490 ("mv_ddr_rl_dqs_burst TF failure\n")); 2491 if (debug_mode == 0) 2492 return MV_FAIL; 2493 } 2494 } 2495 2496 if (mask_tune_func & DM_PBS_TX_MASK_BIT) { 2497 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("DM_PBS_TX_MASK_BIT\n")); 2498 } 2499 2500 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 2501 if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) { 2502 training_stage = VREF_CALIBRATION; 2503 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("VREF\n")); 2504 ret = ddr3_tip_vref(dev_num); 2505 if (is_reg_dump != 0) { 2506 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2507 ("VREF Dump\n")); 2508 ddr3_tip_reg_dump(dev_num); 2509 } 2510 if (ret != MV_OK) { 2511 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2512 ("ddr3_tip_vref failure\n")); 2513 if (debug_mode == 0) 2514 return MV_FAIL; 2515 } 2516 } 2517 } 2518 /* Set to 0 after each loop to avoid illegal value may be used */ 2519 effective_cs = 0; 2520 2521 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 2522 if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) { 2523 training_stage = CENTRALIZATION_RX; 2524 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2525 ("CENTRALIZATION_RX_MASK_BIT CS #%d\n", 2526 effective_cs)); 2527 ret = ddr3_tip_centralization_rx(dev_num); 2528 if (is_reg_dump != 0) 2529 ddr3_tip_reg_dump(dev_num); 2530 if (ret != MV_OK) { 2531 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2532 ("ddr3_tip_centralization_rx failure CS #%d\n", 2533 effective_cs)); 2534 if (debug_mode == 0) 2535 return MV_FAIL; 2536 } 2537 } 2538 } 2539 /* Set to 0 after each loop to avoid illegal value may be used */ 2540 effective_cs = 0; 2541 2542 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 2543 if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) { 2544 training_stage = WRITE_LEVELING_SUPP_TF; 2545 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2546 ("WRITE_LEVELING_SUPP_TF_MASK_BIT CS #%d\n", 2547 effective_cs)); 2548 ret = ddr3_tip_dynamic_write_leveling_supp(dev_num); 2549 if (is_reg_dump != 0) 2550 ddr3_tip_reg_dump(dev_num); 2551 if (ret != MV_OK) { 2552 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2553 ("ddr3_tip_dynamic_write_leveling_supp TF failure CS #%d\n", 2554 effective_cs)); 2555 if (debug_mode == 0) 2556 return MV_FAIL; 2557 } 2558 } 2559 } 2560 /* Set to 0 after each loop to avoid illegal value may be used */ 2561 effective_cs = 0; 2562 2563 2564 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 2565 if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) { 2566 training_stage = CENTRALIZATION_TX; 2567 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2568 ("CENTRALIZATION_TX_MASK_BIT CS #%d\n", 2569 effective_cs)); 2570 ret = ddr3_tip_centralization_tx(dev_num); 2571 if (is_reg_dump != 0) 2572 ddr3_tip_reg_dump(dev_num); 2573 if (ret != MV_OK) { 2574 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2575 ("ddr3_tip_centralization_tx failure CS #%d\n", 2576 effective_cs)); 2577 if (debug_mode == 0) 2578 return MV_FAIL; 2579 } 2580 } 2581 } 2582 /* Set 
to 0 after each loop to avoid illegal value may be used */ 2583 effective_cs = 0; 2584 2585 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("restore registers to default\n")); 2586 /* restore register values */ 2587 CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num)); 2588 2589 if (is_reg_dump != 0) 2590 ddr3_tip_reg_dump(dev_num); 2591 2592 return MV_OK; 2593 } 2594 2595 /* 2596 * DDR3 Dynamic training flow 2597 */ 2598 static int ddr3_tip_ddr3_auto_tune(u32 dev_num) 2599 { 2600 int status; 2601 u32 if_id, stage; 2602 int is_if_fail = 0, is_auto_tune_fail = 0; 2603 2604 training_stage = INIT_CONTROLLER; 2605 2606 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 2607 for (stage = 0; stage < MAX_STAGE_LIMIT; stage++) 2608 training_result[stage][if_id] = NO_TEST_DONE; 2609 } 2610 2611 status = ddr3_tip_ddr3_training_main_flow(dev_num); 2612 2613 /* activate XSB test */ 2614 if (xsb_validate_type != 0) { 2615 run_xsb_test(dev_num, xsb_validation_base_address, 1, 1, 2616 0x1024); 2617 } 2618 2619 if (is_reg_dump != 0) 2620 ddr3_tip_reg_dump(dev_num); 2621 2622 /* print log */ 2623 CHECK_STATUS(ddr3_tip_print_log(dev_num, window_mem_addr)); 2624 2625 #ifndef EXCLUDE_DEBUG_PRINTS 2626 if (status != MV_OK) { 2627 CHECK_STATUS(ddr3_tip_print_stability_log(dev_num)); 2628 } 2629 #endif /* EXCLUDE_DEBUG_PRINTS */ 2630 2631 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 2632 is_if_fail = 0; 2633 for (stage = 0; stage < MAX_STAGE_LIMIT; stage++) { 2634 if (training_result[stage][if_id] == TEST_FAILED) 2635 is_if_fail = 1; 2636 } 2637 if (is_if_fail == 1) { 2638 is_auto_tune_fail = 1; 2639 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2640 ("Auto Tune failed for IF %d\n", 2641 if_id)); 2642 } 2643 } 2644 2645 if (((status == MV_FAIL) && (is_auto_tune_fail == 0)) || 2646 ((status == MV_OK) && (is_auto_tune_fail == 1))) { 2647 /* 2648 * If MainFlow result and trainingResult DB not in sync, 2649 * issue warning (caused by no update of trainingResult DB 2650 * when failed) 2651 */ 2652 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2653 ("Warning: Algorithm return value and Result DB" 2654 "are not synced (status 0x%x result DB %d)\n", 2655 status, is_auto_tune_fail)); 2656 } 2657 2658 if ((status != MV_OK) || (is_auto_tune_fail == 1)) 2659 return MV_FAIL; 2660 else 2661 return MV_OK; 2662 } 2663 2664 /* 2665 * Enable init sequence 2666 */ 2667 int ddr3_tip_enable_init_sequence(u32 dev_num) 2668 { 2669 int is_fail = 0; 2670 u32 if_id = 0, mem_mask = 0, bus_index = 0; 2671 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 2672 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 2673 2674 /* Enable init sequence */ 2675 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 0, 2676 SDRAM_INIT_CTRL_REG, 0x1, 0x1)); 2677 2678 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 2679 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 2680 2681 if (ddr3_tip_if_polling 2682 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1, 2683 SDRAM_INIT_CTRL_REG, 2684 MAX_POLLING_ITERATIONS) != MV_OK) { 2685 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2686 ("polling failed IF %d\n", 2687 if_id)); 2688 is_fail = 1; 2689 continue; 2690 } 2691 2692 mem_mask = 0; 2693 for (bus_index = 0; bus_index < octets_per_if_num; 2694 bus_index++) { 2695 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_index); 2696 mem_mask |= 2697 tm->interface_params[if_id]. 
2698 as_bus_params[bus_index].mirror_enable_bitmask; 2699 } 2700 2701 if (mem_mask != 0) { 2702 /* Disable Multi CS */ 2703 CHECK_STATUS(ddr3_tip_if_write 2704 (dev_num, ACCESS_TYPE_MULTICAST, 2705 if_id, DUAL_DUNIT_CFG_REG, 1 << 3, 2706 1 << 3)); 2707 } 2708 } 2709 2710 return (is_fail == 0) ? MV_OK : MV_FAIL; 2711 } 2712 2713 int ddr3_tip_register_dq_table(u32 dev_num, u32 *table) 2714 { 2715 dq_map_table = table; 2716 2717 return MV_OK; 2718 } 2719 2720 /* 2721 * Check if pup search is locked 2722 */ 2723 int ddr3_tip_is_pup_lock(u32 *pup_buf, enum hws_training_result read_mode) 2724 { 2725 u32 bit_start = 0, bit_end = 0, bit_id; 2726 2727 if (read_mode == RESULT_PER_BIT) { 2728 bit_start = 0; 2729 bit_end = BUS_WIDTH_IN_BITS - 1; 2730 } else { 2731 bit_start = 0; 2732 bit_end = 0; 2733 } 2734 2735 for (bit_id = bit_start; bit_id <= bit_end; bit_id++) { 2736 if (GET_LOCK_RESULT(pup_buf[bit_id]) == 0) 2737 return 0; 2738 } 2739 2740 return 1; 2741 } 2742 2743 /* 2744 * Get minimum buffer value 2745 */ 2746 u8 ddr3_tip_get_buf_min(u8 *buf_ptr) 2747 { 2748 u8 min_val = 0xff; 2749 u8 cnt = 0; 2750 2751 for (cnt = 0; cnt < BUS_WIDTH_IN_BITS; cnt++) { 2752 if (buf_ptr[cnt] < min_val) 2753 min_val = buf_ptr[cnt]; 2754 } 2755 2756 return min_val; 2757 } 2758 2759 /* 2760 * Get maximum buffer value 2761 */ 2762 u8 ddr3_tip_get_buf_max(u8 *buf_ptr) 2763 { 2764 u8 max_val = 0; 2765 u8 cnt = 0; 2766 2767 for (cnt = 0; cnt < BUS_WIDTH_IN_BITS; cnt++) { 2768 if (buf_ptr[cnt] > max_val) 2769 max_val = buf_ptr[cnt]; 2770 } 2771 2772 return max_val; 2773 } 2774 2775 /* 2776 * The following functions return memory parameters: 2777 * bus and device width, device size 2778 */ 2779 2780 u32 hws_ddr3_get_bus_width(void) 2781 { 2782 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 2783 2784 return (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask) == 2785 1) ? 16 : 32; 2786 } 2787 2788 u32 hws_ddr3_get_device_width(u32 if_id) 2789 { 2790 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 2791 2792 return (tm->interface_params[if_id].bus_width == 2793 MV_DDR_DEV_WIDTH_8BIT) ? 
8 : 16; 2794 } 2795 2796 u32 hws_ddr3_get_device_size(u32 if_id) 2797 { 2798 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 2799 2800 if (tm->interface_params[if_id].memory_size >= 2801 MV_DDR_DIE_CAP_LAST) { 2802 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2803 ("Error: Wrong device size of Cs: %d", 2804 tm->interface_params[if_id].memory_size)); 2805 return 0; 2806 } else { 2807 return 1 << tm->interface_params[if_id].memory_size; 2808 } 2809 } 2810 2811 int hws_ddr3_calc_mem_cs_size(u32 if_id, u32 cs, u32 *cs_size) 2812 { 2813 u32 cs_mem_size, dev_size; 2814 2815 dev_size = hws_ddr3_get_device_size(if_id); 2816 if (dev_size != 0) { 2817 cs_mem_size = ((hws_ddr3_get_bus_width() / 2818 hws_ddr3_get_device_width(if_id)) * dev_size); 2819 2820 /* the calculated result in Gbytex16 to avoid float using */ 2821 2822 if (cs_mem_size == 2) { 2823 *cs_size = _128M; 2824 } else if (cs_mem_size == 4) { 2825 *cs_size = _256M; 2826 } else if (cs_mem_size == 8) { 2827 *cs_size = _512M; 2828 } else if (cs_mem_size == 16) { 2829 *cs_size = _1G; 2830 } else if (cs_mem_size == 32) { 2831 *cs_size = _2G; 2832 } else { 2833 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2834 ("Error: Wrong Memory size of Cs: %d", cs)); 2835 return MV_FAIL; 2836 } 2837 return MV_OK; 2838 } else { 2839 return MV_FAIL; 2840 } 2841 } 2842 2843 int hws_ddr3_cs_base_adr_calc(u32 if_id, u32 cs, u32 *cs_base_addr) 2844 { 2845 u32 cs_mem_size = 0; 2846 #ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE 2847 u32 physical_mem_size; 2848 u32 max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE; 2849 #endif 2850 2851 if (hws_ddr3_calc_mem_cs_size(if_id, cs, &cs_mem_size) != MV_OK) 2852 return MV_FAIL; 2853 2854 #ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE 2855 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 2856 /* 2857 * if number of address pins doesn't allow to use max mem size that 2858 * is defined in topology mem size is defined by 2859 * DEVICE_MAX_DRAM_ADDRESS_SIZE 2860 */ 2861 physical_mem_size = mem_size[tm->interface_params[0].memory_size]; 2862 2863 if (hws_ddr3_get_device_width(cs) == 16) { 2864 /* 2865 * 16bit mem device can be twice more - no need in less 2866 * significant pin 2867 */ 2868 max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE * 2; 2869 } 2870 2871 if (physical_mem_size > max_mem_size) { 2872 cs_mem_size = max_mem_size * 2873 (hws_ddr3_get_bus_width() / 2874 hws_ddr3_get_device_width(if_id)); 2875 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2876 ("Updated Physical Mem size is from 0x%x to %x\n", 2877 physical_mem_size, 2878 DEVICE_MAX_DRAM_ADDRESS_SIZE)); 2879 } 2880 #endif 2881 2882 /* calculate CS base addr */ 2883 *cs_base_addr = ((cs_mem_size) * cs) & 0xffff0000; 2884 2885 return MV_OK; 2886 } 2887
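/*
 * Worked example of the chip-select size/base calculation above (the values
 * are illustrative assumptions only): with a 32-bit bus built from 8-bit
 * devices and 4Gbit dies, hws_ddr3_get_device_size() returns
 * 1 << MV_DDR_DIE_CAP_4GBIT = 8, so cs_mem_size = (32 / 8) * 8 = 32
 * sixteenths of a GByte, which hws_ddr3_calc_mem_cs_size() maps to
 * *cs_size = _2G (four 512MByte devices per rank). hws_ddr3_cs_base_adr_calc()
 * then stacks the ranks, e.g. CS0 at 0x0 and CS1 at (_2G * 1) & 0xffff0000.
 *
 * A minimal usage sketch follows; it is kept out of the build and the
 * interface/chip-select numbers are made up for illustration:
 */
#if 0
static void mv_ddr_example_print_cs_layout(void)
{
	u32 size = 0, base = 0;

	/* interface 0, chip-select 1 - purely an example */
	if (hws_ddr3_calc_mem_cs_size(0, 1, &size) == MV_OK &&
	    hws_ddr3_cs_base_adr_calc(0, 1, &base) == MV_OK)
		printf("cs1: size 0x%x base 0x%x\n", size, base);
}
#endif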