Lines Matching +full:0 +full:x00880000

15  * 1. The L2 filter should be set in the binary header to 0xD0000000,
17 * 2. U-Boot modifies the internal registers base to 0xf1000000,
18 * and then should update the L2 filter accordingly to 0xf0000000 (3.75 GB)
20 #define L2_FILTER_FOR_MAX_MEMORY_SIZE 0xC0000000 /* temporarily limit L2 filter to 3 GB (LSP issue) */
21 #define ADDRESS_FILTERING_END_REGISTER 0x8c04
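A minimal sketch of the update the comment describes, assuming a reg_write() helper that performs a 32-bit write to an internal register; this is one plausible use of the two defines above, and the real call sites are in the window-setup functions later in this file:

    #include <stdint.h>

    /* Assumed register-access helper; the real one comes from the SoC code. */
    extern void reg_write(uint32_t addr, uint32_t val);

    #define L2_FILTER_FOR_MAX_MEMORY_SIZE  0xC0000000 /* 3 GB limit */
    #define ADDRESS_FILTERING_END_REGISTER 0x8c04

    /* Re-program the L2 address filter end once U-Boot has moved the
     * internal register base, as described in the comment above. */
    static void l2_filter_update(void)
    {
        reg_write(ADDRESS_FILTERING_END_REGISTER,
                  L2_FILTER_FOR_MAX_MEMORY_SIZE);
    }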
27 #define TSEN_CONTROL_LSB_REG 0xE4070
28 #define TSEN_CONTROL_LSB_TC_TRIM_OFFSET 0
29 #define TSEN_CONTROL_LSB_TC_TRIM_MASK (0x7 << TSEN_CONTROL_LSB_TC_TRIM_OFFSET)
30 #define TSEN_CONTROL_MSB_REG 0xE4074
32 #define TSEN_CONTROL_MSB_RST_MASK (0x1 << TSEN_CONTROL_MSB_RST_OFFSET)
33 #define TSEN_STATUS_REG 0xe4078
35 #define TSEN_STATUS_READOUT_VALID_MASK (0x1 << \
37 #define TSEN_STATUS_TEMP_OUT_OFFSET 0
38 #define TSEN_STATUS_TEMP_OUT_MASK (0x3ff << TSEN_STATUS_TEMP_OUT_OFFSET)
41 {DLB_CTRL_REG, 0x2000005c},
42 {DLB_BUS_OPT_WT_REG, 0x00880000},
43 {DLB_AGING_REG, 0x0f7f007f},
44 {DLB_EVICTION_CTRL_REG, 0x0000129f},
45 {DLB_EVICTION_TIMERS_REG, 0x00ff0000},
46 {DLB_WTS_DIFF_CS_REG, 0x04030802},
47 {DLB_WTS_DIFF_BG_REG, 0x00000a02},
48 {DLB_WTS_SAME_BG_REG, 0x09000a01},
49 {DLB_WTS_CMDS_REG, 0x00020005},
50 {DLB_WTS_ATTR_PRIO_REG, 0x00060f10},
51 {DLB_QUEUE_MAP_REG, 0x00000543},
52 {DLB_SPLIT_REG, 0x00000000},
53 {DLB_USER_CMD_REG, 0x00000000},
54 {0x0, 0x0}
59 return &ddr3_dlb_config_table[0]; in sys_env_dlb_config_ptr_get()
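The {0x0, 0x0} entry above terminates the table, so consumers can walk it without a separate length field; ddr3_new_tip_dlb_config() further down does exactly this with its reg_addr != 0 loop. A compact sketch of the pattern (struct and helper names are stand-ins for the real ones):

    #include <stdint.h>

    /* Stand-in for the real table entry type used by the DLB code. */
    struct reg_data {
        uint32_t reg_addr;
        uint32_t reg_data;
    };

    extern void reg_write(uint32_t addr, uint32_t val);

    /* Apply every entry until the all-zero terminator. */
    static void apply_reg_table(const struct reg_data *tbl)
    {
        for (int i = 0; tbl[i].reg_addr != 0; i++)
            reg_write(tbl[i].reg_addr, tbl[i].reg_data);
    }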
63 0x3, /* MV_DDR_FREQ_100 */
64 0x4, /* MV_DDR_FREQ_400 */
65 0x4, /* MV_DDR_FREQ_533 */
66 0x5, /* MV_DDR_FREQ_667 */
67 0x5, /* MV_DDR_FREQ_800 */
68 0x5, /* MV_DDR_FREQ_933 */
69 0x5, /* MV_DDR_FREQ_1066 */
70 0x3, /* MV_DDR_FREQ_311 */
71 0x3, /* MV_DDR_FREQ_333 */
72 0x4, /* MV_DDR_FREQ_467 */
73 0x5, /* MV_DDR_FREQ_850 */
74 0x5, /* MV_DDR_FREQ_600 */
75 0x3, /* MV_DDR_FREQ_300 */
76 0x5, /* MV_DDR_FREQ_900 */
77 0x3, /* MV_DDR_FREQ_360 */
78 0x5 /* MV_DDR_FREQ_1000 */
82 0x1, /* MV_DDR_FREQ_100 */
83 0x2, /* MV_DDR_FREQ_400 */
84 0x2, /* MV_DDR_FREQ_533 */
85 0x2, /* MV_DDR_FREQ_667 */
86 0x2, /* MV_DDR_FREQ_800 */
87 0x3, /* MV_DDR_FREQ_933 */
88 0x3, /* MV_DDR_FREQ_1066 */
89 0x1, /* MV_DDR_FREQ_311 */
90 0x1, /* MV_DDR_FREQ_333 */
91 0x2, /* MV_DDR_FREQ_467 */
92 0x2, /* MV_DDR_FREQ_850 */
93 0x2, /* MV_DDR_FREQ_600 */
94 0x1, /* MV_DDR_FREQ_300 */
95 0x2, /* MV_DDR_FREQ_900 */
96 0x1, /* MV_DDR_FREQ_360 */
97 0x2 /* MV_DDR_FREQ_1000 */
101 666, /* 0 */
135 666, /* 0 */
138 800, /* 0x3 */
140 1066, /* 0x5 */
146 1600, /* 0xB */
150 1560, /* 0xF */
165 1800 /* 30 - 0x1E */
172 1, 0, 2, 6, 9, 8, 3, 7, /* 0 */
173 8, 9, 1, 7, 2, 6, 3, 0, /* 1 */
174 3, 9, 7, 8, 1, 0, 2, 6, /* 2 */
175 1, 0, 6, 2, 8, 3, 7, 9, /* 3 */
176 0, 1, 2, 9, 7, 8, 3, 6, /* 4 */
192 int reg = 0; in ddr3_ctrl_get_junc_temp()
195 if ((reg_read(TSEN_CONTROL_MSB_REG) & TSEN_CONTROL_MSB_RST_MASK) == 0) { in ddr3_ctrl_get_junc_temp()
200 reg |= 0x3 << TSEN_CONTROL_LSB_TC_TRIM_OFFSET; in ddr3_ctrl_get_junc_temp()
206 if ((reg_read(TSEN_STATUS_REG) & TSEN_STATUS_READOUT_VALID_MASK) == 0) { in ddr3_ctrl_get_junc_temp()
208 return 0; in ddr3_ctrl_get_junc_temp()
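Putting the TSEN defines together: ddr3_ctrl_get_junc_temp() bails out with 0 when the readout-valid bit is clear, otherwise it extracts the 10-bit temperature field. A sketch under the assumption that reg_read() returns the raw 32-bit register, and with an assumed bit position for the valid flag (its offset is not shown in the matches above):

    #include <stdint.h>

    extern uint32_t reg_read(uint32_t addr);

    #define TSEN_STATUS_REG                  0xe4078
    #define TSEN_STATUS_READOUT_VALID_OFFSET 10   /* assumed bit position */
    #define TSEN_STATUS_READOUT_VALID_MASK   (0x1 << TSEN_STATUS_READOUT_VALID_OFFSET)
    #define TSEN_STATUS_TEMP_OUT_OFFSET      0
    #define TSEN_STATUS_TEMP_OUT_MASK        (0x3ff << TSEN_STATUS_TEMP_OUT_OFFSET)

    /* Return the raw 10-bit readout, or 0 if the sensor is not ready yet. */
    static uint32_t tsen_raw_readout(void)
    {
        uint32_t status = reg_read(TSEN_STATUS_REG);

        if ((status & TSEN_STATUS_READOUT_VALID_MASK) == 0)
            return 0;
        return (status & TSEN_STATUS_TEMP_OUT_MASK) >> TSEN_STATUS_TEMP_OUT_OFFSET;
    }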
228 if (a38x_bw_per_freq[freq] == 0xff) in ddr3_tip_a38x_get_freq_config()
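The 0xff test above treats that value as a "no valid configuration" sentinel in the per-frequency tables, so an unsupported frequency fails fast instead of yielding a bogus setting. The shape of the lookup (table contents here are illustrative, not the real values):

    #include <stdint.h>

    #define FREQ_TABLE_SIZE 16   /* illustrative; the real enum is larger */

    /* A real table fills every slot; 0xff marks unsupported frequencies. */
    static const uint8_t bw_per_freq[FREQ_TABLE_SIZE] = {
        0x3, 0x4, 0x4, 0x5, 0x5, 0x5, 0x5, 0x3,
        0x3, 0x4, 0x5, 0x5, 0x3, 0x5, 0x3, 0xff,
    };

    static int get_bw_for_freq(uint32_t freq, uint8_t *bw)
    {
        if (freq >= FREQ_TABLE_SIZE || bw_per_freq[freq] == 0xff)
            return -1;   /* unsupported frequency */
        *bw = bw_per_freq[freq];
        return 0;
    }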
259 #define ODPG_ENABLE_REG 0x186d4
260 #define ODPG_EN_OFFS 0
261 #define ODPG_EN_MASK 0x1
263 #define ODPG_EN_DONE 0
265 #define ODPG_DIS_MASK 0x1
290 for (i = 0; i < count; i++) { in mv_ddr_is_odpg_done()
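mv_ddr_is_odpg_done() above polls the ODPG enable bit for at most count iterations. A sketch of the bounded poll, assuming reg_read() plus a millisecond delay helper:

    #include <stdint.h>

    extern uint32_t reg_read(uint32_t addr);
    extern void mdelay(uint32_t ms);   /* assumed delay helper */

    #define ODPG_ENABLE_REG 0x186d4
    #define ODPG_EN_MASK    0x1
    #define ODPG_EN_DONE    0

    /* Poll until the ODPG reports done, or give up after 'count' tries. */
    static int odpg_wait_done(uint32_t count)
    {
        for (uint32_t i = 0; i < count; i++) {
            if ((reg_read(ODPG_ENABLE_REG) & ODPG_EN_MASK) == ODPG_EN_DONE)
                return 0;   /* done */
            mdelay(1);
        }
        return -1;          /* timed out */
    }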
312 #define DRAM_INIT_CTRL_STATUS_REG 0x18488
313 #define TRAINING_TRIGGER_OFFS 0
314 #define TRAINING_TRIGGER_MASK 0x1
317 #define TRAINING_DONE_MASK 0x1
319 #define TRAINING_DONE_NOT_DONE 0
321 #define TRAINING_RESULT_MASK 0x1
322 #define TRAINING_RESULT_PASS 0
333 for (i = 0; i < count; i++) { in mv_ddr_is_training_done()
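Once TRAINING_DONE flips, a separate result bit in the same status register distinguishes pass from fail (TRAINING_RESULT_PASS is 0). A decode sketch; the two bit offsets are assumptions, since only the masks appear in the matches above:

    #include <stdint.h>

    extern uint32_t reg_read(uint32_t addr);

    #define DRAM_INIT_CTRL_STATUS_REG 0x18488
    #define TRAINING_DONE_OFFS        1   /* assumed */
    #define TRAINING_DONE_MASK        0x1
    #define TRAINING_RESULT_OFFS      2   /* assumed */
    #define TRAINING_RESULT_MASK      0x1
    #define TRAINING_RESULT_PASS      0

    /* Returns 1 on pass, 0 on fail, -1 if training has not finished. */
    static int training_result(void)
    {
        uint32_t reg = reg_read(DRAM_INIT_CTRL_STATUS_REG);

        if (((reg >> TRAINING_DONE_OFFS) & TRAINING_DONE_MASK) == 0)
            return -1;
        return ((reg >> TRAINING_RESULT_OFFS) & TRAINING_RESULT_MASK) ==
               TRAINING_RESULT_PASS;
    }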
398 if (((ref_clk_satr >> DEVICE_SAMPLE_AT_RESET2_REG_REFCLK_OFFSET) & 0x1) == in mv_ddr_sar_freq_get()
401 case 0x1: in mv_ddr_sar_freq_get()
406 case 0x0: in mv_ddr_sar_freq_get()
409 case 0x3: in mv_ddr_sar_freq_get()
414 case 0x2: in mv_ddr_sar_freq_get()
417 case 0xd: in mv_ddr_sar_freq_get()
422 case 0x4: in mv_ddr_sar_freq_get()
425 case 0x6: in mv_ddr_sar_freq_get()
428 case 0x11: in mv_ddr_sar_freq_get()
429 case 0x14: in mv_ddr_sar_freq_get()
434 case 0x8: in mv_ddr_sar_freq_get()
437 case 0x15: in mv_ddr_sar_freq_get()
438 case 0x1b: in mv_ddr_sar_freq_get()
443 case 0xc: in mv_ddr_sar_freq_get()
446 case 0x10: in mv_ddr_sar_freq_get()
449 case 0x12: in mv_ddr_sar_freq_get()
452 case 0x13: in mv_ddr_sar_freq_get()
456 *freq = 0; in mv_ddr_sar_freq_get()
461 case 0x3: in mv_ddr_sar_freq_get()
464 case 0x5: in mv_ddr_sar_freq_get()
467 case 0xb: in mv_ddr_sar_freq_get()
470 case 0x1e: in mv_ddr_sar_freq_get()
474 *freq = 0; in mv_ddr_sar_freq_get()
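mv_ddr_sar_freq_get() selects one of two decode tables on a single sample-at-reset reference-clock bit (25 MHz vs 40 MHz), then switches on the SAR frequency field; unknown encodings fall through to *freq = 0 and an error. The control flow in miniature (field width and the case-to-frequency mappings are illustrative, not the real table):

    #include <stdint.h>

    #define SAR_FREQ_FIELD_MASK 0x1f   /* assumed field width */

    static int sar_freq_decode(uint32_t sar, uint32_t refclk_offset,
                               uint32_t *freq)
    {
        uint32_t field = sar & SAR_FREQ_FIELD_MASK;

        if (((sar >> refclk_offset) & 0x1) == 0) {
            switch (field) {       /* 25 MHz reference decode */
            case 0x1:
                *freq = 333;       /* illustrative mapping */
                return 0;
            default:
                break;
            }
        } else {
            switch (field) {       /* 40 MHz reference decode */
            case 0x3:
                *freq = 400;       /* illustrative mapping */
                return 0;
            default:
                break;
            }
        }
        *freq = 0;                 /* unknown encoding */
        return -1;
    }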
492 if (((ref_clk_satr >> DEVICE_SAMPLE_AT_RESET2_REG_REFCLK_OFFSET) & 0x1) == in ddr3_tip_a38x_get_medium_freq()
495 case 0x0: in ddr3_tip_a38x_get_medium_freq()
496 case 0x1: in ddr3_tip_a38x_get_medium_freq()
500 case 0x2: in ddr3_tip_a38x_get_medium_freq()
501 case 0x3: in ddr3_tip_a38x_get_medium_freq()
505 case 0x4: in ddr3_tip_a38x_get_medium_freq()
506 case 0xd: in ddr3_tip_a38x_get_medium_freq()
510 case 0x8: in ddr3_tip_a38x_get_medium_freq()
511 case 0x10: in ddr3_tip_a38x_get_medium_freq()
512 case 0x11: in ddr3_tip_a38x_get_medium_freq()
513 case 0x14: in ddr3_tip_a38x_get_medium_freq()
516 case 0xc: in ddr3_tip_a38x_get_medium_freq()
517 case 0x15: in ddr3_tip_a38x_get_medium_freq()
518 case 0x1b: in ddr3_tip_a38x_get_medium_freq()
521 case 0x6: in ddr3_tip_a38x_get_medium_freq()
524 case 0x12: in ddr3_tip_a38x_get_medium_freq()
527 case 0x13: in ddr3_tip_a38x_get_medium_freq()
531 *freq = 0; in ddr3_tip_a38x_get_medium_freq()
536 case 0x3: in ddr3_tip_a38x_get_medium_freq()
540 case 0x5: in ddr3_tip_a38x_get_medium_freq()
544 case 0xb: in ddr3_tip_a38x_get_medium_freq()
547 case 0x1e: in ddr3_tip_a38x_get_medium_freq()
551 *freq = 0; in ddr3_tip_a38x_get_medium_freq()
562 info_ptr->device_id = 0x6900; in ddr3_tip_a38x_get_device_info()
564 info_ptr->device_id = 0x6800; in ddr3_tip_a38x_get_device_info()
575 u32 iter = 0; in is_prfa_done()
618 u32 max_phy = ddr3_tip_dev_attr_get(0, MV_ATTR_OCTET_PER_INTERFACE); in prfa_read()
622 for (i = 0; i < max_phy; i++) { in prfa_read()
624 if (prfa_write(ACCESS_TYPE_UNICAST, i, phy_type, addr, 0, OPERATION_READ) != MV_OK) in prfa_read()
630 if (prfa_write(phy_access, phy, phy_type, addr, 0, OPERATION_READ) != MV_OK) in prfa_read()
671 ddr3_tip_dev_attr_set(dev_num, MV_ATTR_INTERLEAVE_WA, 0); in mv_ddr_sw_db_init()
674 ca_delay = 0; in mv_ddr_sw_db_init()
687 enum mv_ddr_freq ddr_freq = tm->interface_params[0].memory_freq; in mv_ddr_training_mask_set()
713 rl_mid_freq_wa = 0; /* workaround not needed if 333/400 is the target frequency */ in mv_ddr_training_mask_set()
741 u32 divider = 0; in ddr3_tip_a38x_set_divider()
746 if (if_id != 0) { in ddr3_tip_a38x_set_divider()
748 ("A38x does not support interface 0x%x\n", in ddr3_tip_a38x_set_divider()
759 if (((ref_clk_satr >> DEVICE_SAMPLE_AT_RESET2_REG_REFCLK_OFFSET) & 0x1) == in ddr3_tip_a38x_set_divider()
767 dunit_write(0x20220, 0x1000, 0x1000); in ddr3_tip_a38x_set_divider()
768 dunit_write(0xe42f4, 0x200, 0x200); in ddr3_tip_a38x_set_divider()
776 async_val = 0x806f012; in ddr3_tip_a38x_set_divider()
779 async_val = 0x807f012; in ddr3_tip_a38x_set_divider()
782 async_val = 0x805f00a; in ddr3_tip_a38x_set_divider()
785 async_val = 0x809f012; in ddr3_tip_a38x_set_divider()
788 async_val = 0x807f00a; in ddr3_tip_a38x_set_divider()
791 async_val = 0x80cb012; in ddr3_tip_a38x_set_divider()
794 async_val = 0x80d7012; in ddr3_tip_a38x_set_divider()
797 async_val = 0x80df012; in ddr3_tip_a38x_set_divider()
800 async_val = 0x80ef012; in ddr3_tip_a38x_set_divider()
803 async_val = 0x80ff012; in ddr3_tip_a38x_set_divider()
807 async_val = 0x809f012; in ddr3_tip_a38x_set_divider()
809 dunit_write(0xe42f0, 0xffffffff, async_val); in ddr3_tip_a38x_set_divider()
812 dunit_write(0x20220, 0x1000, 0x0); in ddr3_tip_a38x_set_divider()
813 dunit_write(0xe42f4, 0x200, 0x0); in ddr3_tip_a38x_set_divider()
816 dunit_write(0xe4264, 0xff, 0x1f); in ddr3_tip_a38x_set_divider()
819 dunit_write(0xe4260, (0xff << 8), (0x2 << 8)); in ddr3_tip_a38x_set_divider()
822 dunit_write(0xe4260, (0xff << 24), (0x2 << 24)); in ddr3_tip_a38x_set_divider()
825 dunit_write(0xe4268, (0x3f << 8), (divider << 8)); in ddr3_tip_a38x_set_divider()
828 dunit_write(0xe4264, (1 << 8), (1 << 8)); in ddr3_tip_a38x_set_divider()
831 dunit_write(0xe4264, (1 << 8), 0x0); in ddr3_tip_a38x_set_divider()
834 dunit_write(0xe4260, (0xff << 8), 0x0); in ddr3_tip_a38x_set_divider()
837 dunit_write(0xe4260, (0xff << 24), 0x0); in ddr3_tip_a38x_set_divider()
840 dunit_write(0xe4264, 0xff, 0x0); in ddr3_tip_a38x_set_divider()
844 dunit_write(0x18488, (1 << 16), ((ddr3_tip_clock_mode(frequency) & 0x1) << 16)); in ddr3_tip_a38x_set_divider()
845 dunit_write(0x1524, (1 << 15), ((ddr3_tip_clock_mode(frequency) - 1) << 15)); in ddr3_tip_a38x_set_divider()
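Every dunit_write() in this sequence takes (address, mask, value): only the bits selected by the mask are modified, and a mask of 0xffffffff (as in the async_val write) degenerates to a plain store. A sketch of that read-modify-write contract over assumed 32-bit primitives:

    #include <stdint.h>

    extern uint32_t reg_read(uint32_t addr);
    extern void reg_write(uint32_t addr, uint32_t val);

    /* Update only the bits selected by 'mask'; leave the rest untouched. */
    static void dunit_write_sketch(uint32_t addr, uint32_t mask, uint32_t val)
    {
        uint32_t reg = reg_read(addr);

        reg &= ~mask;
        reg |= (val & mask);
        reg_write(addr, reg);
    }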
858 for (burst_num = 0; burst_num < num_of_bursts * 8; burst_num++) in ddr3_tip_ext_read()
871 for (burst_num = 0; burst_num < num_of_bursts * 8; burst_num++) in ddr3_tip_ext_write()
885 * '0' in ddr3 in mv_ddr_early_init()
890 mv_ddr_sw_db_init(0, 0); in mv_ddr_early_init()
892 if (tm->interface_params[0].memory_freq != MV_DDR_FREQ_SAR) in mv_ddr_early_init()
907 return 0; in mv_ddr_pre_training_fixup()
912 return 0; in mv_ddr_post_training_fixup()
927 (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE, in ddr3_silicon_post_init()
928 SDRAM_CFG_REG, 0x0, 0x8000)); in ddr3_silicon_post_init()
938 mv_ddr_sar_freq_get(0, &freq); in mv_ddr_init_freq_get()
947 bus_width = (reg_read(SDRAM_CFG_REG) & 0x8000) >> in ddr3_get_bus_width()
950 return (bus_width == 0) ? 16 : 32; in ddr3_get_bus_width()
961 return (device_width == 0) ? 8 : 16; in ddr3_get_device_width()
973 device_size_low = (data >> cs_low_offset) & 0x3; in ddr3_get_device_size()
974 device_size_high = (data >> cs_high_offset) & 0x1; in ddr3_get_device_size()
979 case 0: in ddr3_get_device_size()
993 return 0; in ddr3_get_device_size()
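ddr3_get_device_size() reassembles a selector that the SDRAM address-control register splits into a 2-bit low field and a 1-bit high field per chip select, then maps it to a device size. A sketch with hypothetical offsets and an illustrative size mapping:

    #include <stdint.h>

    /* Combine the split fields into a 3-bit selector and map it to a
     * device size. Offsets and the size mapping are illustrative. */
    static uint32_t device_size_decode(uint32_t data, uint32_t low_off,
                                       uint32_t high_off)
    {
        uint32_t sel = ((data >> low_off) & 0x3) |
                       (((data >> high_off) & 0x1) << 2);

        switch (sel) {
        case 0:
            return 2048;   /* e.g. 2 Gbit */
        case 1:
            return 4096;
        case 2:
            return 8192;
        default:
            return 0;      /* reserved encoding */
        }
    }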
1008 * so bit 15 in 0x1400 indicates whether the whole bus is used or only half, in ddr3_calc_mem_cs_size()
1026 uint64_t mem_total_size = 0; in ddr3_fast_path_dynamic_cs_size_config()
1027 uint64_t cs_mem_size = 0; in ddr3_fast_path_dynamic_cs_size_config()
1037 for (cs = 0; cs < MAX_CS_NUM; cs++) { in ddr3_fast_path_dynamic_cs_size_config()
1050 [tm->interface_params[0].memory_size]; in ddr3_fast_path_dynamic_cs_size_config()
1064 printf("Updated Physical Mem size is from 0x%x to %x\n", in ddr3_fast_path_dynamic_cs_size_config()
1071 reg = 0xffffe1; in ddr3_fast_path_dynamic_cs_size_config()
1073 reg |= (cs_mem_size - 1) & 0xffff0000; in ddr3_fast_path_dynamic_cs_size_config()
1078 reg = ((cs_mem_size) * cs) & 0xffff0000; in ddr3_fast_path_dynamic_cs_size_config()
1086 * cs_mem_size by 0x10000 (which is equivalent to >> 16) in ddr3_fast_path_dynamic_cs_size_config()
1088 mem_total_size_c = (mem_total_size >> 16) & 0xffffffffffff; in ddr3_fast_path_dynamic_cs_size_config()
1089 cs_mem_size_c = (cs_mem_size >> 16) & 0xffffffffffff; in ddr3_fast_path_dynamic_cs_size_config()
1091 if (mem_total_size_c + cs_mem_size_c < 0x10000) in ddr3_fast_path_dynamic_cs_size_config()
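Both sizes are shifted right by 16 before the comparison, i.e. expressed in 64 KiB units, so the 4 GiB boundary check cannot overflow; in those units 4 GiB is exactly 0x10000. Condensed:

    #include <stdint.h>

    /* Return 1 if adding cs_mem_size keeps the running total under 4 GiB.
     * Working in 64 KiB units (>> 16) matches the 0x10000 bound above,
     * since 0x10000 * 64 KiB == 4 GiB. */
    static int fits_under_4g(uint64_t mem_total_size, uint64_t cs_mem_size)
    {
        uint64_t total_64k = (mem_total_size >> 16) & 0xffffffffffffULL;
        uint64_t cs_64k = (cs_mem_size >> 16) & 0xffffffffffffULL;

        return (total_64k + cs_64k) < 0x10000;
    }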
1114 for (ui = 0; ui < num_of_win_regs; ui++) in ddr3_restore_and_set_final_windows()
1115 reg_write((win_ctrl_reg + 0x4 * ui), win[ui]); in ddr3_restore_and_set_final_windows()
1125 reg = 0x1fffffe1; in ddr3_restore_and_set_final_windows()
1126 for (cs = 0; cs < MAX_CS_NUM; cs++) { in ddr3_restore_and_set_final_windows()
1133 reg_write(REG_FASTPATH_WIN_CTRL_ADDR(0), reg); in ddr3_restore_and_set_final_windows()
1148 win_jump_index = 0x10; in ddr3_save_and_set_training_windows()
1157 reg_write(ADDRESS_FILTERING_END_REGISTER, 0); in ddr3_save_and_set_training_windows()
1160 cs_ena = tm->interface_params[0].as_bus_params[0].cs_bitmask; in ddr3_save_and_set_training_windows()
1163 /* {0x000200e8} - Open Mbus Window - 2G */ in ddr3_save_and_set_training_windows()
1164 reg_write(REG_XBAR_WIN_19_CTRL_ADDR, 0); in ddr3_save_and_set_training_windows()
1167 for (ui = 0; ui < num_of_win_regs; ui++) in ddr3_save_and_set_training_windows()
1168 win[ui] = reg_read(win_ctrl_reg + 0x4 * ui); in ddr3_save_and_set_training_windows()
1171 reg = 0; in ddr3_save_and_set_training_windows()
1172 tmp_count = 0; in ddr3_save_and_set_training_windows()
1173 for (cs = 0; cs < MAX_CS_NUM; cs++) { in ddr3_save_and_set_training_windows()
1176 case 0: in ddr3_save_and_set_training_windows()
1177 reg = 0x0e00; in ddr3_save_and_set_training_windows()
1180 reg = 0x0d00; in ddr3_save_and_set_training_windows()
1183 reg = 0x0b00; in ddr3_save_and_set_training_windows()
1186 reg = 0x0700; in ddr3_save_and_set_training_windows()
1189 reg |= (1 << 0); in ddr3_save_and_set_training_windows()
1190 reg |= (SDRAM_CS_SIZE & 0xffff0000); in ddr3_save_and_set_training_windows()
1195 0xffff0000); in ddr3_save_and_set_training_windows()
1201 win_jump_index * tmp_count, 0); in ddr3_save_and_set_training_windows()
1221 case 0x3: in mv_ddr_pre_training_soc_config()
1225 case 0x1: in mv_ddr_pre_training_soc_config()
1228 case 0x0: in mv_ddr_pre_training_soc_config()
1229 reg_bit_set(CPU_CONFIGURATION_REG(0), CPU_MRVL_ID_OFFSET); in mv_ddr_pre_training_soc_config()
1253 /* Fix read ready phases for all SOC in reg 0x15c8 */ in mv_ddr_pre_training_soc_config()
1256 reg_val &= ~(TRN_DBG_RDY_INC_PH_2TO1_MASK << TRN_DBG_RDY_INC_PH_2TO1_OFFS(0)); in mv_ddr_pre_training_soc_config()
1257 reg_val |= (0x4 << TRN_DBG_RDY_INC_PH_2TO1_OFFS(0)); /* phase 0 */ in mv_ddr_pre_training_soc_config()
1260 reg_val |= (0x4 << TRN_DBG_RDY_INC_PH_2TO1_OFFS(1)); /* phase 1 */ in mv_ddr_pre_training_soc_config()
1263 reg_val |= (0x6 << TRN_DBG_RDY_INC_PH_2TO1_OFFS(3)); /* phase 3 */ in mv_ddr_pre_training_soc_config()
1266 reg_val |= (0x6 << TRN_DBG_RDY_INC_PH_2TO1_OFFS(4)); /* phase 4 */ in mv_ddr_pre_training_soc_config()
1269 reg_val |= (0x6 << TRN_DBG_RDY_INC_PH_2TO1_OFFS(5)); /* phase 5 */ in mv_ddr_pre_training_soc_config()
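Each phase update above is the same two-step edit: clear the 2-to-1 ready field with its mask at the per-phase offset, then OR in the new phase value. Condensed, with an assumed field width and a hypothetical offset macro:

    #include <stdint.h>

    #define PH_FIELD_MASK    0x7         /* assumed field width */
    #define PH_FIELD_OFFS(n) ((n) * 4)   /* hypothetical layout */

    /* Replace one phase field inside the accumulated register value. */
    static uint32_t set_phase(uint32_t reg_val, unsigned int phase, uint32_t val)
    {
        reg_val &= ~((uint32_t)PH_FIELD_MASK << PH_FIELD_OFFS(phase));
        reg_val |= (val & PH_FIELD_MASK) << PH_FIELD_OFFS(phase);
        return reg_val;
    }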
1276 * Axi_data_bus_width[0] = 128bit in mv_ddr_pre_training_soc_config()
1278 /* 0x14a8 - AXI Control Register */ in mv_ddr_pre_training_soc_config()
1279 reg_write(AXI_CTRL_REG, 0); in mv_ddr_pre_training_soc_config()
1292 u32 reg, i = 0; in ddr3_new_tip_dlb_config()
1296 while (config_table_ptr[i].reg_addr != 0) { in ddr3_new_tip_dlb_config()
1347 init_param.is_ctrl64_bit = 0; in mv_ddr_mc_config()
1350 status = hws_ddr3_tip_init_controller(0, &init_param); in mv_ddr_mc_config()
1352 printf("DDR3 init controller - FAILED 0x%x\n", status); in mv_ddr_mc_config()
1356 printf("DDR3 init_sequence - FAILED 0x%x\n", status); in mv_ddr_mc_config()
1363 CHECK_STATUS(ddr3_tip_enable_init_sequence(0)); in mv_ddr_mc_init()
1381 ((0x7f & g_zpri_data) << 7 | (0x7f & g_znri_data)))); in ddr3_tip_configure_phy()
1386 ((0x7f & g_zpri_ctrl) << 7 | (0x7f & g_znri_ctrl)))); in ddr3_tip_configure_phy()
1391 ((0x3f & g_zpodt_data) << 6 | (0x3f & g_znodt_data)))); in ddr3_tip_configure_phy()
1396 ((0x3f & g_zpodt_ctrl) << 6 | (0x3f & g_znodt_ctrl)))); in ddr3_tip_configure_phy()
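The four PHY writes above pack the P and N legs of each setting into one value: 7-bit fields for the drive-strength pairs ((p << 7) | n) and 6-bit fields for the ODT pairs ((p << 6) | n). The packing generalized, with the field width as a parameter:

    #include <stdint.h>

    /* Pack a P/N pair of 'width'-bit fields: P in the high field, N low. */
    static uint32_t pack_zpn(uint32_t p, uint32_t n, unsigned int width)
    {
        uint32_t mask = (1u << width) - 1;

        return ((p & mask) << width) | (n & mask);
    }

    /* e.g. pack_zpn(g_zpri_data, g_znri_data, 7) reproduces the first value. */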
1401 PAD_PRE_DISABLE_PHY_REG, 0)); in ddr3_tip_configure_phy()
1405 CMOS_CONFIG_PHY_REG, 0)); in ddr3_tip_configure_phy()
1409 CMOS_CONFIG_PHY_REG, 0)); in ddr3_tip_configure_phy()
1411 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { in ddr3_tip_configure_phy()
1415 for (phy_id = 0; in ddr3_tip_configure_phy()
1425 ((0x7 << 4) | 0x7))); in ddr3_tip_configure_phy()
1430 PAD_CFG_PHY_REG, 0x4, 0x7)); in ddr3_tip_configure_phy()
1439 DDR_PHY_DATA, 0x90, 0x6002)); in ddr3_tip_configure_phy()
1448 return 0; in mv_ddr_manual_cal_do()