// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include "ddr3_init.h"

#define PATTERN_1	0x55555555
#define PATTERN_2	0xaaaaaaaa

/* a window wider than 33 taps that starts below tap 67 is considered valid */
#define VALIDATE_TRAINING_LIMIT(e1, e2) \
	((((e2) - (e1) + 1) > 33) && ((e1) < 67))

u32 phy_reg_bk[MAX_INTERFACE_NUM][MAX_BUS_NUM][BUS_WIDTH_IN_BITS];

u32 training_res[MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS *
		 HWS_SEARCH_DIR_LIMIT];
u8 byte_status[MAX_INTERFACE_NUM][MAX_BUS_NUM];	/* holds the bit status of each byte in the wrapper function */

u16 mask_results_dq_reg_map[] = {
	RESULT_CONTROL_PUP_0_BIT_0_REG, RESULT_CONTROL_PUP_0_BIT_1_REG,
	RESULT_CONTROL_PUP_0_BIT_2_REG, RESULT_CONTROL_PUP_0_BIT_3_REG,
	RESULT_CONTROL_PUP_0_BIT_4_REG, RESULT_CONTROL_PUP_0_BIT_5_REG,
	RESULT_CONTROL_PUP_0_BIT_6_REG, RESULT_CONTROL_PUP_0_BIT_7_REG,
	RESULT_CONTROL_PUP_1_BIT_0_REG, RESULT_CONTROL_PUP_1_BIT_1_REG,
	RESULT_CONTROL_PUP_1_BIT_2_REG, RESULT_CONTROL_PUP_1_BIT_3_REG,
	RESULT_CONTROL_PUP_1_BIT_4_REG, RESULT_CONTROL_PUP_1_BIT_5_REG,
	RESULT_CONTROL_PUP_1_BIT_6_REG, RESULT_CONTROL_PUP_1_BIT_7_REG,
	RESULT_CONTROL_PUP_2_BIT_0_REG, RESULT_CONTROL_PUP_2_BIT_1_REG,
	RESULT_CONTROL_PUP_2_BIT_2_REG, RESULT_CONTROL_PUP_2_BIT_3_REG,
	RESULT_CONTROL_PUP_2_BIT_4_REG, RESULT_CONTROL_PUP_2_BIT_5_REG,
	RESULT_CONTROL_PUP_2_BIT_6_REG, RESULT_CONTROL_PUP_2_BIT_7_REG,
	RESULT_CONTROL_PUP_3_BIT_0_REG, RESULT_CONTROL_PUP_3_BIT_1_REG,
	RESULT_CONTROL_PUP_3_BIT_2_REG, RESULT_CONTROL_PUP_3_BIT_3_REG,
	RESULT_CONTROL_PUP_3_BIT_4_REG, RESULT_CONTROL_PUP_3_BIT_5_REG,
	RESULT_CONTROL_PUP_3_BIT_6_REG, RESULT_CONTROL_PUP_3_BIT_7_REG,
	RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
	RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
	RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
	RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
#if MAX_BUS_NUM == 9
	RESULT_CONTROL_PUP_5_BIT_0_REG, RESULT_CONTROL_PUP_5_BIT_1_REG,
	RESULT_CONTROL_PUP_5_BIT_2_REG, RESULT_CONTROL_PUP_5_BIT_3_REG,
	RESULT_CONTROL_PUP_5_BIT_4_REG, RESULT_CONTROL_PUP_5_BIT_5_REG,
	RESULT_CONTROL_PUP_5_BIT_6_REG, RESULT_CONTROL_PUP_5_BIT_7_REG,
	RESULT_CONTROL_PUP_6_BIT_0_REG, RESULT_CONTROL_PUP_6_BIT_1_REG,
	RESULT_CONTROL_PUP_6_BIT_2_REG, RESULT_CONTROL_PUP_6_BIT_3_REG,
	RESULT_CONTROL_PUP_6_BIT_4_REG, RESULT_CONTROL_PUP_6_BIT_5_REG,
	RESULT_CONTROL_PUP_6_BIT_6_REG, RESULT_CONTROL_PUP_6_BIT_7_REG,
	RESULT_CONTROL_PUP_7_BIT_0_REG, RESULT_CONTROL_PUP_7_BIT_1_REG,
	RESULT_CONTROL_PUP_7_BIT_2_REG, RESULT_CONTROL_PUP_7_BIT_3_REG,
	RESULT_CONTROL_PUP_7_BIT_4_REG, RESULT_CONTROL_PUP_7_BIT_5_REG,
	RESULT_CONTROL_PUP_7_BIT_6_REG, RESULT_CONTROL_PUP_7_BIT_7_REG,
	RESULT_CONTROL_PUP_8_BIT_0_REG, RESULT_CONTROL_PUP_8_BIT_1_REG,
	RESULT_CONTROL_PUP_8_BIT_2_REG, RESULT_CONTROL_PUP_8_BIT_3_REG,
	RESULT_CONTROL_PUP_8_BIT_4_REG, RESULT_CONTROL_PUP_8_BIT_5_REG,
	RESULT_CONTROL_PUP_8_BIT_6_REG, RESULT_CONTROL_PUP_8_BIT_7_REG,
#endif
	0xffff
};

u16 mask_results_pup_reg_map[] = {
	RESULT_CONTROL_BYTE_PUP_0_REG, RESULT_CONTROL_BYTE_PUP_1_REG,
	RESULT_CONTROL_BYTE_PUP_2_REG, RESULT_CONTROL_BYTE_PUP_3_REG,
	RESULT_CONTROL_BYTE_PUP_4_REG,
#if MAX_BUS_NUM == 9
	RESULT_CONTROL_BYTE_PUP_5_REG, RESULT_CONTROL_BYTE_PUP_6_REG,
	RESULT_CONTROL_BYTE_PUP_7_REG, RESULT_CONTROL_BYTE_PUP_8_REG,
#endif
	0xffff
};

#if MAX_BUS_NUM == 5
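/*
 * ECC-on-PUP3 variant of the DQ result map: note that the PUP_4
 * bit-result registers come before the PUP_3 ones below, i.e. byte 3
 * reads PUP_4's bit results and byte 4 reads PUP_3's, which appears to
 * reflect how the ECC byte is muxed in this mode (hedged reading of the
 * table ordering, not a datasheet statement).
 */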
u16 mask_results_dq_reg_map_pup3_ecc[] = {
	RESULT_CONTROL_PUP_0_BIT_0_REG, RESULT_CONTROL_PUP_0_BIT_1_REG,
	RESULT_CONTROL_PUP_0_BIT_2_REG, RESULT_CONTROL_PUP_0_BIT_3_REG,
	RESULT_CONTROL_PUP_0_BIT_4_REG, RESULT_CONTROL_PUP_0_BIT_5_REG,
	RESULT_CONTROL_PUP_0_BIT_6_REG, RESULT_CONTROL_PUP_0_BIT_7_REG,
	RESULT_CONTROL_PUP_1_BIT_0_REG, RESULT_CONTROL_PUP_1_BIT_1_REG,
	RESULT_CONTROL_PUP_1_BIT_2_REG, RESULT_CONTROL_PUP_1_BIT_3_REG,
	RESULT_CONTROL_PUP_1_BIT_4_REG, RESULT_CONTROL_PUP_1_BIT_5_REG,
	RESULT_CONTROL_PUP_1_BIT_6_REG, RESULT_CONTROL_PUP_1_BIT_7_REG,
	RESULT_CONTROL_PUP_2_BIT_0_REG, RESULT_CONTROL_PUP_2_BIT_1_REG,
	RESULT_CONTROL_PUP_2_BIT_2_REG, RESULT_CONTROL_PUP_2_BIT_3_REG,
	RESULT_CONTROL_PUP_2_BIT_4_REG, RESULT_CONTROL_PUP_2_BIT_5_REG,
	RESULT_CONTROL_PUP_2_BIT_6_REG, RESULT_CONTROL_PUP_2_BIT_7_REG,
	RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
	RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
	RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
	RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
	RESULT_CONTROL_PUP_3_BIT_0_REG, RESULT_CONTROL_PUP_3_BIT_1_REG,
	RESULT_CONTROL_PUP_3_BIT_2_REG, RESULT_CONTROL_PUP_3_BIT_3_REG,
	RESULT_CONTROL_PUP_3_BIT_4_REG, RESULT_CONTROL_PUP_3_BIT_5_REG,
	RESULT_CONTROL_PUP_3_BIT_6_REG, RESULT_CONTROL_PUP_3_BIT_7_REG
};
#endif

#if MAX_BUS_NUM == 5
u16 mask_results_pup_reg_map_pup3_ecc[] = {
	RESULT_CONTROL_BYTE_PUP_0_REG, RESULT_CONTROL_BYTE_PUP_1_REG,
	RESULT_CONTROL_BYTE_PUP_2_REG, RESULT_CONTROL_BYTE_PUP_4_REG,
	RESULT_CONTROL_BYTE_PUP_4_REG
};
#endif

struct pattern_info pattern_table_64[] = {
	/*
	 * num_of_phases_tx, tx_burst_size,
	 * delay_between_bursts, num_of_phases_rx,
	 * start_addr, pattern_len
	 */
	{0x7, 0x7, 2, 0x7, 0x00000, 8},		/* PATTERN_PBS1 */
	{0x7, 0x7, 2, 0x7, 0x00080, 8},		/* PATTERN_PBS2 */
	{0x7, 0x7, 2, 0x7, 0x00100, 8},		/* PATTERN_PBS3 */
	{0x7, 0x7, 2, 0x7, 0x00030, 8},		/* PATTERN_TEST */
	{0x7, 0x7, 2, 0x7, 0x00100, 8},		/* PATTERN_RL */
	{0x7, 0x7, 2, 0x7, 0x00100, 8},		/* PATTERN_RL2 */
	{0x1f, 0xf, 2, 0xf, 0x00680, 32},	/* PATTERN_STATIC_PBS */
	{0x1f, 0xf, 2, 0xf, 0x00a80, 32},	/* PATTERN_KILLER_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x01280, 32},	/* PATTERN_KILLER_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x01a80, 32},	/* PATTERN_KILLER_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x02280, 32},	/* PATTERN_KILLER_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x02a80, 32},	/* PATTERN_KILLER_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x03280, 32},	/* PATTERN_KILLER_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x03a80, 32},	/* PATTERN_KILLER_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x04280, 32},	/* PATTERN_KILLER_DQ7 */
	{0x1f, 0xf, 2, 0xf, 0x00e80, 32},	/* PATTERN_KILLER_DQ0_64 */
	{0x1f, 0xf, 2, 0xf, 0x01680, 32},	/* PATTERN_KILLER_DQ1_64 */
	{0x1f, 0xf, 2, 0xf, 0x01e80, 32},	/* PATTERN_KILLER_DQ2_64 */
	{0x1f, 0xf, 2, 0xf, 0x02680, 32},	/* PATTERN_KILLER_DQ3_64 */
	{0x1f, 0xf, 2, 0xf, 0x02e80, 32},	/* PATTERN_KILLER_DQ4_64 */
	{0x1f, 0xf, 2, 0xf, 0x03680, 32},	/* PATTERN_KILLER_DQ5_64 */
	{0x1f, 0xf, 2, 0xf, 0x03e80, 32},	/* PATTERN_KILLER_DQ6_64 */
	{0x1f, 0xf, 2, 0xf, 0x04680, 32},	/* PATTERN_KILLER_DQ7_64 */
	{0x1f, 0xf, 2, 0xf, 0x04a80, 32},	/* PATTERN_KILLER_DQ0_INV */
	{0x1f, 0xf, 2, 0xf, 0x05280, 32},	/* PATTERN_KILLER_DQ1_INV */
	{0x1f, 0xf, 2, 0xf, 0x05a80, 32},	/* PATTERN_KILLER_DQ2_INV */
	{0x1f, 0xf, 2, 0xf, 0x06280, 32},	/* PATTERN_KILLER_DQ3_INV */
	{0x1f, 0xf, 2, 0xf, 0x06a80, 32},	/* PATTERN_KILLER_DQ4_INV */
	{0x1f, 0xf, 2, 0xf, 0x07280, 32},	/* PATTERN_KILLER_DQ5_INV */
	{0x1f, 0xf, 2, 0xf, 0x07a80, 32},	/* PATTERN_KILLER_DQ6_INV */
	{0x1f, 0xf, 2, 0xf, 0x08280, 32},	/* PATTERN_KILLER_DQ7_INV */
	{0x1f, 0xf, 2, 0xf, 0x04e80, 32},	/* PATTERN_KILLER_DQ0_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x05680, 32},	/* PATTERN_KILLER_DQ1_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x05e80, 32},	/* PATTERN_KILLER_DQ2_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x06680, 32},	/* PATTERN_KILLER_DQ3_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x06e80, 32},	/* PATTERN_KILLER_DQ4_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x07680, 32},	/* PATTERN_KILLER_DQ5_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x07e80, 32},	/* PATTERN_KILLER_DQ6_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x08680, 32},	/* PATTERN_KILLER_DQ7_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x08a80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x09280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x09a80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x0a280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x0aa80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x0b280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x0ba80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x0c280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ7 */
	{0x1f, 0xf, 2, 0xf, 0x08e80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ0_64 */
	{0x1f, 0xf, 2, 0xf, 0x09680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ1_64 */
	{0x1f, 0xf, 2, 0xf, 0x09e80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ2_64 */
	{0x1f, 0xf, 2, 0xf, 0x0a680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ3_64 */
	{0x1f, 0xf, 2, 0xf, 0x0ae80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ4_64 */
	{0x1f, 0xf, 2, 0xf, 0x0b680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ5_64 */
	{0x1f, 0xf, 2, 0xf, 0x0be80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ6_64 */
	{0x1f, 0xf, 2, 0xf, 0x0c680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ7_64 */
	{0x1f, 0xf, 2, 0xf, 0x0ca80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x0d280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x0da80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x0e280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x0ea80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x0f280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x0fa80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x10280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ7 */
	{0x1f, 0xf, 2, 0xf, 0x0ce80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ0_64 */
	{0x1f, 0xf, 2, 0xf, 0x0d680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ1_64 */
	{0x1f, 0xf, 2, 0xf, 0x0de80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ2_64 */
	{0x1f, 0xf, 2, 0xf, 0x0e680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ3_64 */
	{0x1f, 0xf, 2, 0xf, 0x0ee80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ4_64 */
	{0x1f, 0xf, 2, 0xf, 0x0f680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ5_64 */
	{0x1f, 0xf, 2, 0xf, 0x0fe80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ6_64 */
	{0x1f, 0xf, 2, 0xf, 0x10680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ7_64 */
	{0x1f, 0xf, 2, 0xf, 0x10a80, 32},	/* PATTERN_ISI_XTALK_FREE */
	{0x1f, 0xf, 2, 0xf, 0x10e80, 32},	/* PATTERN_ISI_XTALK_FREE_64 */
	{0x1f, 0xf, 2, 0xf, 0x11280, 32},	/* PATTERN_VREF */
	{0x1f, 0xf, 2, 0xf, 0x11680, 32},	/* PATTERN_VREF_64 */
	{0x1f, 0xf, 2, 0xf, 0x11a80, 32},	/* PATTERN_VREF_INV */
	{0x1f, 0xf, 2, 0xf, 0x11e80, 32},	/* PATTERN_FULL_SSO_0T */
	{0x1f, 0xf, 2, 0xf, 0x12280, 32},	/* PATTERN_FULL_SSO_1T */
	{0x1f, 0xf, 2, 0xf, 0x12680, 32},	/* PATTERN_FULL_SSO_2T */
	{0x1f, 0xf, 2, 0xf, 0x12a80, 32},	/* PATTERN_FULL_SSO_3T */
	{0x1f, 0xf, 2, 0xf, 0x12e80, 32},	/* PATTERN_RESONANCE_1T */
	{0x1f, 0xf, 2, 0xf, 0x13280, 32},	/* PATTERN_RESONANCE_2T */
	{0x1f, 0xf, 2, 0xf, 0x13680, 32},	/* PATTERN_RESONANCE_3T */
	{0x1f, 0xf, 2, 0xf, 0x13a80, 32},	/* PATTERN_RESONANCE_4T */
	{0x1f, 0xf, 2, 0xf, 0x13e80, 32},	/* PATTERN_RESONANCE_5T */
	{0x1f, 0xf, 2, 0xf, 0x14280, 32},	/* PATTERN_RESONANCE_6T */
	{0x1f, 0xf, 2, 0xf, 0x14680, 32},	/* PATTERN_RESONANCE_7T */
	{0x1f, 0xf, 2, 0xf, 0x14a80, 32},	/* PATTERN_RESONANCE_8T */
	{0x1f, 0xf, 2, 0xf, 0x14e80, 32},	/* PATTERN_RESONANCE_9T */
	{0x1f, 0xf, 2, 0xf, 0x15280, 32},	/* PATTERN_ZERO */
	{0x1f, 0xf, 2, 0xf, 0x15680, 32}	/* PATTERN_ONE */
	/* Note: actual start_address is "<< 3" of defined address */
};

struct pattern_info pattern_table_16[] = {
	/*
	 * num tx phases, tx burst, delay between, rx pattern,
	 * start_address, pattern_len
	 */
	{1, 1, 2, 1, 0x0080, 2},	/* PATTERN_PBS1 */
	{1, 1, 2, 1, 0x00c0, 2},	/* PATTERN_PBS2 */
	{1, 1, 2, 1, 0x0380, 2},	/* PATTERN_PBS3 */
	{1, 1, 2, 1, 0x0040, 2},	/* PATTERN_TEST */
	{1, 1, 2, 1, 0x0100, 2},	/* PATTERN_RL */
	{1, 1, 2, 1, 0x0000, 2},	/* PATTERN_RL2 */
	{0xf, 0x7, 2, 0x7, 0x0140, 16},	/* PATTERN_STATIC_PBS */
	{0xf, 0x7, 2, 0x7, 0x0190, 16},	/* PATTERN_KILLER_DQ0 */
	{0xf, 0x7, 2, 0x7, 0x01d0, 16},	/* PATTERN_KILLER_DQ1 */
	{0xf, 0x7, 2, 0x7, 0x0210, 16},	/* PATTERN_KILLER_DQ2 */
	{0xf, 0x7, 2, 0x7, 0x0250, 16},	/* PATTERN_KILLER_DQ3 */
	{0xf, 0x7, 2, 0x7, 0x0290, 16},	/* PATTERN_KILLER_DQ4 */
	{0xf, 0x7, 2, 0x7, 0x02d0, 16},	/* PATTERN_KILLER_DQ5 */
	{0xf, 0x7, 2, 0x7, 0x0310, 16},	/* PATTERN_KILLER_DQ6 */
	{0xf, 0x7, 2, 0x7, 0x0350, 16},	/* PATTERN_KILLER_DQ7 */
	{0xf, 0x7, 2, 0x7, 0x04c0, 16},	/* PATTERN_VREF */
	{0xf, 0x7, 2, 0x7, 0x03c0, 16},	/* PATTERN_FULL_SSO_1T */
	{0xf, 0x7, 2, 0x7, 0x0400, 16},	/* PATTERN_FULL_SSO_2T */
	{0xf, 0x7, 2, 0x7, 0x0440, 16},	/* PATTERN_FULL_SSO_3T */
	{0xf, 0x7, 2, 0x7, 0x0480, 16},	/* PATTERN_FULL_SSO_4T */
	{0xf, 7, 2, 7, 0x6280, 16},	/* PATTERN_SSO_FULL_XTALK_DQ0 */
	{0xf, 7, 2, 7, 0x6680, 16},	/* PATTERN_SSO_FULL_XTALK_DQ1 */
	{0xf, 7, 2, 7, 0x6a80, 16},	/* PATTERN_SSO_FULL_XTALK_DQ2 */
	{0xf, 7, 2, 7, 0x6e80, 16},	/* PATTERN_SSO_FULL_XTALK_DQ3 */
	{0xf, 7, 2, 7, 0x7280, 16},	/* PATTERN_SSO_FULL_XTALK_DQ4 */
	{0xf, 7, 2, 7, 0x7680, 16},	/* PATTERN_SSO_FULL_XTALK_DQ5 */
	{0xf, 7, 2, 7, 0x7a80, 16},	/* PATTERN_SSO_FULL_XTALK_DQ6 */
	{0xf, 7, 2, 7, 0x7e80, 16},	/* PATTERN_SSO_FULL_XTALK_DQ7 */
	{0xf, 7, 2, 7, 0x8280, 16},	/* PATTERN_SSO_XTALK_FREE_DQ0 */
	{0xf, 7, 2, 7, 0x8680, 16},	/* PATTERN_SSO_XTALK_FREE_DQ1 */
	{0xf, 7, 2, 7, 0x8a80, 16},	/* PATTERN_SSO_XTALK_FREE_DQ2 */
	{0xf, 7, 2, 7, 0x8e80, 16},	/* PATTERN_SSO_XTALK_FREE_DQ3 */
	{0xf, 7, 2, 7, 0x9280, 16},	/* PATTERN_SSO_XTALK_FREE_DQ4 */
	{0xf, 7, 2, 7, 0x9680, 16},	/* PATTERN_SSO_XTALK_FREE_DQ5 */
	{0xf, 7, 2, 7, 0x9a80, 16},	/* PATTERN_SSO_XTALK_FREE_DQ6 */
	{0xf, 7, 2, 7, 0x9e80, 16},	/* PATTERN_SSO_XTALK_FREE_DQ7 */
	{0xf, 7, 2, 7, 0xa280, 16}	/* PATTERN_ISI_XTALK_FREE */
	/* Note: actual start_address is "<< 3" of defined address */
};
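/*
 * Illustrative helper (not part of the original flow; the name is made
 * up for this sketch): per the notes in the pattern tables, the stored
 * start_addr is in 8-byte units, so the effective DRAM byte address is
 * start_addr << 3.
 */
static inline u32 mv_ddr_pattern_byte_addr(const struct pattern_info *p)
{
	return p->start_addr << 3;	/* "<< 3" per the table notes */
}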
struct pattern_info pattern_table_32[] = {
	/*
	 * num tx phases, tx burst, delay between, rx pattern,
	 * start_address, pattern_len
	 */
	{3, 3, 2, 3, 0x0080, 4},	/* PATTERN_PBS1 */
	{3, 3, 2, 3, 0x00c0, 4},	/* PATTERN_PBS2 */
	{3, 3, 2, 3, 0x0380, 4},	/* PATTERN_PBS3 */
	{3, 3, 2, 3, 0x0040, 4},	/* PATTERN_TEST */
	{3, 3, 2, 3, 0x0100, 4},	/* PATTERN_RL */
	{3, 3, 2, 3, 0x0000, 4},	/* PATTERN_RL2 */
	{0x1f, 0xf, 2, 0xf, 0x0140, 32},	/* PATTERN_STATIC_PBS */
	{0x1f, 0xf, 2, 0xf, 0x0190, 32},	/* PATTERN_KILLER_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x01d0, 32},	/* PATTERN_KILLER_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x0210, 32},	/* PATTERN_KILLER_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x0250, 32},	/* PATTERN_KILLER_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x0290, 32},	/* PATTERN_KILLER_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x02d0, 32},	/* PATTERN_KILLER_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x0310, 32},	/* PATTERN_KILLER_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x0350, 32},	/* PATTERN_KILLER_DQ7 */
	{0x1f, 0xf, 2, 0xf, 0x04c0, 32},	/* PATTERN_VREF */
	{0x1f, 0xf, 2, 0xf, 0x03c0, 32},	/* PATTERN_FULL_SSO_1T */
	{0x1f, 0xf, 2, 0xf, 0x0400, 32},	/* PATTERN_FULL_SSO_2T */
	{0x1f, 0xf, 2, 0xf, 0x0440, 32},	/* PATTERN_FULL_SSO_3T */
	{0x1f, 0xf, 2, 0xf, 0x0480, 32},	/* PATTERN_FULL_SSO_4T */
	{0x1f, 0xf, 2, 0xf, 0x6280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x6680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x6a80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x6e80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x7280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x7680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x7a80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x7e80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ7 */
	{0x1f, 0xf, 2, 0xf, 0x8280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x8680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x8a80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x8e80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x9280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x9680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x9a80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x9e80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ7 */
	{0x1f, 0xf, 2, 0xf, 0xa280, 32}	/* PATTERN_ISI_XTALK_FREE */
	/* Note: actual start_address is "<< 3" of defined address */
};

u32 train_dev_num;
enum hws_ddr_cs traintrain_cs_type;
u32 train_pup_num;
enum hws_training_result train_result_type;
enum hws_control_element train_control_element;
enum hws_search_dir traine_search_dir;
enum hws_dir train_direction;
u32 train_if_select;
u32 train_init_value;
u32 train_number_iterations;
enum hws_pattern train_pattern;
enum hws_edge_compare train_edge_compare;
u32 train_cs_num;
u32 train_if_acess, train_if_id, train_pup_access;
u32 max_polling_for_done = 1000000;

u32 *ddr3_tip_get_buf_ptr(u32 dev_num, enum hws_search_dir search,
			  enum hws_training_result result_type,
			  u32 interface_num)
{
	u32 *buf_ptr = NULL;

	buf_ptr = &training_res
		[MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS * search +
		 interface_num * MAX_BUS_NUM * BUS_WIDTH_IN_BITS];

	return buf_ptr;
}

enum {
	PASS,
	FAIL
};

/*
 * IP Training search
 * Note: for a one-edge search, search only from fail to pass;
 * otherwise jitter can be entered into the solution.
 */
int ddr3_tip_ip_training(u32 dev_num, enum hws_access_type access_type,
			 u32 interface_num,
			 enum hws_access_type pup_access_type,
			 u32 pup_num, enum hws_training_result result_type,
			 enum hws_control_element control_element,
			 enum hws_search_dir search_dir, enum hws_dir direction,
			 u32 interface_mask, u32 init_value, u32 num_iter,
			 enum hws_pattern pattern,
			 enum hws_edge_compare edge_comp,
			 enum hws_ddr_cs cs_type, u32 cs_num,
			 enum hws_training_ip_stat *train_status)
{
	u32 mask_dq_num_of_regs, mask_pup_num_of_regs, index_cnt,
		reg_data, pup_id;
	u32 tx_burst_size;
	u32 delay_between_burst;
	u32 rd_mode;
	u32 data;
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (pup_num >= octets_per_if_num) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("pup_num %d not valid\n", pup_num));
	}
	if (interface_num >= MAX_INTERFACE_NUM) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("if_id %d not valid\n",
					  interface_num));
	}
	if (train_status == NULL) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("error param 4\n"));
		return MV_BAD_PARAM;
	}

	/* load pattern */
	if (cs_type == CS_SINGLE) {
		/* all CSs to CS0 */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      DUAL_DUNIT_CFG_REG, 1 << 3, 1 << 3));
		/* all CSs to CS0 */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      ODPG_DATA_CTRL_REG,
			      (0x3 | (effective_cs << 26)), 0xc000003));
	} else {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      DUAL_DUNIT_CFG_REG, 0, 1 << 3));
		/* CS select */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      ODPG_DATA_CTRL_REG, 0x3 | cs_num << 26,
			      0x3 | 3 << 26));
	}

	/* load pattern to ODPG */
	ddr3_tip_load_pattern_to_odpg(dev_num, access_type, interface_num,
				      pattern,
				      pattern_table[pattern].start_addr);
	tx_burst_size = (direction == OPER_WRITE) ?
		pattern_table[pattern].tx_burst_size : 0;
	delay_between_burst = (direction == OPER_WRITE) ? 2 : 0;
	rd_mode = (direction == OPER_WRITE) ? 1 : 0;
	CHECK_STATUS(ddr3_tip_configure_odpg
		     (dev_num, access_type, interface_num, direction,
		      pattern_table[pattern].num_of_phases_tx, tx_burst_size,
		      pattern_table[pattern].num_of_phases_rx,
		      delay_between_burst, rd_mode, effective_cs, STRESS_NONE,
		      DURATION_SINGLE));
	reg_data = (direction == OPER_READ) ? 0 : (0x3 << 30);
	reg_data |= (direction == OPER_READ) ? 0x60 : 0xfa;
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num,
		      ODPG_WR_RD_MODE_ENA_REG, reg_data,
		      MASK_ALL_BITS));
	reg_data = (edge_comp == EDGE_PF || edge_comp == EDGE_FP) ? 0 : 1 << 6;
	reg_data |= (edge_comp == EDGE_PF || edge_comp == EDGE_PFP) ?
		(1 << 7) : 0;
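	/*
	 * Readback of the two ternaries above: bit 6 is cleared for
	 * EDGE_PF/EDGE_FP and set otherwise, while bit 7 is set for
	 * EDGE_PF/EDGE_PFP; this describes the code, not a register spec.
	 */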
	/* change from Pass to Fail will lock the result */
	if (pup_access_type == ACCESS_TYPE_MULTICAST)
		reg_data |= 0xe << 14;
	else
		reg_data |= pup_num << 14;

	if (edge_comp == EDGE_FP) {
		/* don't search for read edge change, only the state */
		reg_data |= (0 << 20);
	} else if (edge_comp == EDGE_FPF) {
		reg_data |= (0 << 20);
	} else {
		reg_data |= (3 << 20);
	}

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num,
		      GENERAL_TRAINING_OPCODE_REG,
		      reg_data | (0x7 << 8) | (0x7 << 11),
		      (0x3 | (0x3 << 2) | (0x3 << 6) | (1 << 5) | (0x7 << 8) |
		       (0x7 << 11) | (0xf << 14) | (0x3 << 18) | (3 << 20))));
	reg_data = (search_dir == HWS_LOW2HIGH) ? 0 : (1 << 8);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num, OPCODE_REG0_REG(1),
		      1 | reg_data | init_value << 9 | (1 << 25) | (1 << 26),
		      0xff | (1 << 8) | (0xffff << 9) | (1 << 25) | (1 << 26)));

	/*
	 * Write2_dunit(0x10b4, Number_iteration, [15:0])
	 * Max number of iterations
	 */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, interface_num,
				       OPCODE_REG1_REG(1), num_iter,
				       0xffff));
	if (control_element == HWS_CONTROL_ELEMENT_DQ_SKEW &&
	    direction == OPER_READ) {
		/*
		 * Write2_dunit(0x10c0, 0x5f, [7:0])
		 * MC PBS Reg Address at DDR PHY
		 */
		reg_data = PBS_RX_BCAST_PHY_REG(effective_cs);
	} else if (control_element == HWS_CONTROL_ELEMENT_DQ_SKEW &&
		   direction == OPER_WRITE) {
		reg_data = PBS_TX_BCAST_PHY_REG(effective_cs);
	} else if (control_element == HWS_CONTROL_ELEMENT_ADLL &&
		   direction == OPER_WRITE) {
		/*
		 * LOOP 0x00000001 + 4*n:
		 * where n (0-3) represents M_CS number
		 */
		/*
		 * Write2_dunit(0x10c0, 0x1, [7:0])
		 * ADLL WR Reg Address at DDR PHY
		 */
		reg_data = CTX_PHY_REG(effective_cs);
	} else if (control_element == HWS_CONTROL_ELEMENT_ADLL &&
		   direction == OPER_READ) {
		/* ADLL RD Reg Address at DDR PHY */
		reg_data = CRX_PHY_REG(effective_cs);
	} else if (control_element == HWS_CONTROL_ELEMENT_DQS_SKEW &&
		   direction == OPER_WRITE) {
		/* TBD not defined in 0.5.0 requirement */
	} else if (control_element == HWS_CONTROL_ELEMENT_DQS_SKEW &&
		   direction == OPER_READ) {
		/* TBD not defined in 0.5.0 requirement */
	}

	reg_data |= (0x6 << 28);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num, CAL_PHY_REG(1),
		      reg_data | (init_value << 8),
		      0xff | (0xffff << 8) | (0xf << 24) | (u32)(0xf << 28)));

	mask_dq_num_of_regs = octets_per_if_num * BUS_WIDTH_IN_BITS;
	mask_pup_num_of_regs = octets_per_if_num;

	if (result_type == RESULT_PER_BIT) {
		for (index_cnt = 0; index_cnt < mask_dq_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_dq_reg_map[index_cnt], 0,
				      1 << 24));
		}

		/* mask disabled buses */
		for (pup_id = 0; pup_id < octets_per_if_num; pup_id++) {
			if (IS_BUS_ACTIVE(tm->bus_act_mask, pup_id) == 1)
				continue;

			for (index_cnt = (pup_id * 8); index_cnt < (pup_id + 1) * 8; index_cnt++) {
				CHECK_STATUS(ddr3_tip_if_write
					     (dev_num, access_type,
					      interface_num,
					      mask_results_dq_reg_map
					      [index_cnt], (1 << 24), 1 << 24));
			}
		}

		for (index_cnt = 0; index_cnt < mask_pup_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_pup_reg_map[index_cnt],
				      (1 << 24), 1 << 24));
		}
	} else if (result_type == RESULT_PER_BYTE) {
		/* write to adll */
		for (index_cnt = 0; index_cnt < mask_pup_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_pup_reg_map[index_cnt], 0,
				      1 << 24));
		}
		for (index_cnt = 0; index_cnt < mask_dq_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_dq_reg_map[index_cnt],
				      (1 << 24), (1 << 24)));
		}
	}

	/* trigger training */
	mv_ddr_training_enable();

	/* workaround for 16-bit mode: wait for all rfu tests to finish or timeout */
	mdelay(1);

	/* check for training done */
	if (mv_ddr_is_training_done(MAX_POLLING_ITERATIONS, &data) != MV_OK) {
		train_status[0] = HWS_TRAINING_IP_STATUS_TIMEOUT;
	} else { /* training done; check for pass */
		if (data == PASS)
			train_status[0] = HWS_TRAINING_IP_STATUS_SUCCESS;
		else
			train_status[0] = HWS_TRAINING_IP_STATUS_FAIL;
	}

	ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			  ODPG_DATA_CTRL_REG, 0, MASK_ALL_BITS);

	return MV_OK;
}

/*
 * Load expected Pattern to ODPG
 */
int ddr3_tip_load_pattern_to_odpg(u32 dev_num, enum hws_access_type access_type,
				  u32 if_id, enum hws_pattern pattern,
				  u32 load_addr)
{
	u32 pattern_length_cnt = 0;
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (pattern_length_cnt = 0;
	     pattern_length_cnt < pattern_table[pattern].pattern_len;
	     pattern_length_cnt++) { /* FIXME: the ecc patch below is only for a7040 A0 */
		if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask)/* || tm->bus_act_mask == MV_DDR_32BIT_ECC_PUP8_BUS_MASK*/) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      ODPG_DATA_WR_DATA_LOW_REG,
				      pattern_table_get_word(dev_num, pattern,
							     (u8)(pattern_length_cnt)),
				      MASK_ALL_BITS));
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      ODPG_DATA_WR_DATA_HIGH_REG,
				      pattern_table_get_word(dev_num, pattern,
							     (u8)(pattern_length_cnt)),
				      MASK_ALL_BITS));
		} else {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      ODPG_DATA_WR_DATA_LOW_REG,
				      pattern_table_get_word(dev_num, pattern,
							     (u8)(pattern_length_cnt * 2)),
				      MASK_ALL_BITS));
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      ODPG_DATA_WR_DATA_HIGH_REG,
				      pattern_table_get_word(dev_num, pattern,
							     (u8)(pattern_length_cnt * 2 + 1)),
				      MASK_ALL_BITS));
		}
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      ODPG_DATA_WR_ADDR_REG, pattern_length_cnt,
			      MASK_ALL_BITS));
	}

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, if_id,
		      ODPG_DATA_BUFFER_OFFS_REG, load_addr, MASK_ALL_BITS));

	return MV_OK;
}
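/*
 * Note on ddr3_tip_load_pattern_to_odpg(): in 64-bit DRAM mode the same
 * 32-bit pattern word is written to both ODPG data registers, while in
 * the narrower modes words 2n and 2n + 1 fill the low and high registers
 * respectively, so the narrow modes consume twice as many pattern words
 * per ODPG line.
 */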
/*
 * Configure ODPG
 */
int ddr3_tip_configure_odpg(u32 dev_num, enum hws_access_type access_type,
			    u32 if_id, enum hws_dir direction, u32 tx_phases,
			    u32 tx_burst_size, u32 rx_phases,
			    u32 delay_between_burst, u32 rd_mode, u32 cs_num,
			    u32 addr_stress_jump, u32 single_pattern)
{
	u32 data_value = 0;
	int ret;

	data_value = ((single_pattern << 2) | (tx_phases << 5) |
		      (tx_burst_size << 11) | (delay_between_burst << 15) |
		      (rx_phases << 21) | (rd_mode << 25) | (cs_num << 26) |
		      (addr_stress_jump << 29));
	ret = ddr3_tip_if_write(dev_num, access_type, if_id,
				ODPG_DATA_CTRL_REG, data_value, 0xaffffffc);
	if (ret != MV_OK)
		return ret;

	return MV_OK;
}

int ddr3_tip_process_result(u32 *ar_result, enum hws_edge e_edge,
			    enum hws_edge_search e_edge_search,
			    u32 *edge_result)
{
	u32 i, res;
	int tap_val, max_val = -10000, min_val = 10000;
	int lock_success = 1;

	for (i = 0; i < BUS_WIDTH_IN_BITS; i++) {
		res = GET_LOCK_RESULT(ar_result[i]);
		if (res == 0) {
			lock_success = 0;
			DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
						 ("lock failed for bit %d\n",
						  i));
			break;
		}
	}

	if (lock_success == 1) {
		for (i = 0; i < BUS_WIDTH_IN_BITS; i++) {
			tap_val = GET_TAP_RESULT(ar_result[i], e_edge);
			if (tap_val > max_val)
				max_val = tap_val;
			if (tap_val < min_val)
				min_val = tap_val;
			if (e_edge_search == TRAINING_EDGE_MAX)
				*edge_result = (u32)max_val;
			else
				*edge_result = (u32)min_val;

			DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
						 ("i %d ar_result[i] 0x%x tap_val %d max_val %d min_val %d Edge_result %d\n",
						  i, ar_result[i], tap_val,
						  max_val, min_val,
						  *edge_result));
		}
	} else {
		return MV_FAIL;
	}

	return MV_OK;
}
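/*
 * Usage sketch (illustrative only, kept out of the build): derive a
 * byte's window edges with ddr3_tip_process_result() and check them
 * against VALIDATE_TRAINING_LIMIT(). TRAINING_EDGE_MIN is assumed here
 * to be the counterpart of the TRAINING_EDGE_MAX value used above.
 */
#if 0
static int example_validate_byte_window(u32 *ar_result)
{
	u32 e1, e2;

	if (ddr3_tip_process_result(ar_result, EDGE_1,
				    TRAINING_EDGE_MIN, &e1) != MV_OK)
		return MV_FAIL;
	if (ddr3_tip_process_result(ar_result, EDGE_1,
				    TRAINING_EDGE_MAX, &e2) != MV_OK)
		return MV_FAIL;

	return VALIDATE_TRAINING_LIMIT(e1, e2) ? MV_OK : MV_FAIL;
}
#endif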
/*
 * Read training search result
 */
int ddr3_tip_read_training_result(u32 dev_num, u32 if_id,
				  enum hws_access_type pup_access_type,
				  u32 pup_num, u32 bit_num,
				  enum hws_search_dir search,
				  enum hws_dir direction,
				  enum hws_training_result result_type,
				  enum hws_training_load_op operation,
				  u32 cs_num_type, u32 **load_res,
				  int is_read_from_db, u8 cons_tap,
				  int is_check_result_validity)
{
	u32 reg_offset, pup_cnt, start_pup, end_pup, start_reg, end_reg;
	u32 *interface_train_res = NULL;
	u16 *reg_addr = NULL;
	u32 read_data[MAX_INTERFACE_NUM];
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * Agreed assumption: all CS masks contain the same number of bits,
	 * i.e. in multi-CS, the number of CSs per memory is the same for
	 * all pups
	 */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id, DUAL_DUNIT_CFG_REG,
		      (cs_num_type == 0) ? 1 << 3 : 0, (1 << 3)));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id,
		      ODPG_DATA_CTRL_REG, (cs_num_type << 26), (3 << 26)));
	DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE,
				 ("Read_from_d_b %d cs_type %d oper %d result_type %d direction %d search %d pup_num %d if_id %d pup_access_type %d\n",
				  is_read_from_db, cs_num_type, operation,
				  result_type, direction, search, pup_num,
				  if_id, pup_access_type));

	if ((load_res == NULL) && (is_read_from_db == 1)) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("ddr3_tip_read_training_result load_res = NULL\n"));
		return MV_FAIL;
	}
	if (pup_num >= octets_per_if_num) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("pup_num %d not valid\n", pup_num));
	}
	if (if_id >= MAX_INTERFACE_NUM) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("if_id %d not valid\n", if_id));
	}
	if (result_type == RESULT_PER_BIT)
		reg_addr = mask_results_dq_reg_map;
	else
		reg_addr = mask_results_pup_reg_map;
	if (pup_access_type == ACCESS_TYPE_UNICAST) {
		start_pup = pup_num;
		end_pup = pup_num;
	} else { /* pup_access_type == ACCESS_TYPE_MULTICAST */
		start_pup = 0;
		end_pup = octets_per_if_num - 1;
	}

	for (pup_cnt = start_pup; pup_cnt <= end_pup; pup_cnt++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup_cnt);
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_TRACE,
			("if_id %d start_pup %d end_pup %d pup_cnt %d\n",
			 if_id, start_pup, end_pup, pup_cnt));
		if (result_type == RESULT_PER_BIT) {
			if (bit_num == ALL_BITS_PER_PUP) {
				start_reg = pup_cnt * BUS_WIDTH_IN_BITS;
				end_reg = (pup_cnt + 1) * BUS_WIDTH_IN_BITS - 1;
			} else {
				start_reg =
					pup_cnt * BUS_WIDTH_IN_BITS + bit_num;
				end_reg = pup_cnt * BUS_WIDTH_IN_BITS + bit_num;
			}
		} else {
			start_reg = pup_cnt;
			end_reg = pup_cnt;
		}

		interface_train_res =
			ddr3_tip_get_buf_ptr(dev_num, search, result_type,
					     if_id);
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_TRACE,
			("start_reg %d end_reg %d interface %p\n",
			 start_reg, end_reg, interface_train_res));
		if (interface_train_res == NULL) {
			DEBUG_TRAINING_IP_ENGINE(
				DEBUG_LEVEL_ERROR,
				("interface_train_res is NULL\n"));
			return MV_FAIL;
		}

		for (reg_offset = start_reg; reg_offset <= end_reg;
		     reg_offset++) {
			if (operation == TRAINING_LOAD_OPERATION_UNLOAD) {
				if (is_read_from_db == 0) {
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      reg_addr[reg_offset],
						      read_data,
						      MASK_ALL_BITS));
					if (is_check_result_validity == 1) {
						if ((read_data[if_id] &
						     TIP_ENG_LOCK) == 0) {
							interface_train_res
								[reg_offset] =
								TIP_ENG_LOCK +
								TIP_TX_DLL_RANGE_MAX;
						} else {
							interface_train_res
								[reg_offset] =
								read_data
								[if_id] +
								cons_tap;
						}
					} else {
						interface_train_res[reg_offset]
							= read_data[if_id] +
							cons_tap;
					}
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_TRACE,
						 ("reg_offset %d value 0x%x addr %p\n",
						  reg_offset,
						  interface_train_res
						  [reg_offset],
						  &interface_train_res
						  [reg_offset]));
				} else {
					*load_res =
						&interface_train_res[start_reg];
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_TRACE,
						 ("*load_res %p\n", *load_res));
				}
			} else {
				DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE,
							 ("not supported\n"));
			}
		}
	}

	return MV_OK;
}
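/*
 * Usage sketch (illustrative only, kept out of the build; it mirrors
 * the calls made by the training wrappers below): fetch a pointer to
 * subphy 0's per-bit low-to-high results on interface 0 from the
 * result database.
 */
#if 0
{
	u32 *res = NULL;

	CHECK_STATUS(ddr3_tip_read_training_result
		     (0, 0, ACCESS_TYPE_UNICAST, 0, ALL_BITS_PER_PUP,
		      HWS_LOW2HIGH, OPER_READ, RESULT_PER_BIT,
		      TRAINING_LOAD_OPERATION_UNLOAD, CS_SINGLE,
		      &res, 1, 0, 0));
}
#endif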
/*
 * Load all patterns to memory using ODPG
 */
int ddr3_tip_load_all_pattern_to_mem(u32 dev_num)
{
	u32 pattern = 0, if_id;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		training_result[training_stage][if_id] = TEST_SUCCESS;
	}

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		/* enable single cs */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      DUAL_DUNIT_CFG_REG, (1 << 3), (1 << 3)));
	}

	for (pattern = 0; pattern < PATTERN_LAST; pattern++)
		ddr3_tip_load_pattern_to_mem(dev_num, pattern);

	return MV_OK;
}

/*
 * Load specific pattern to memory using ODPG
 */
int ddr3_tip_load_pattern_to_mem(u32 dev_num, enum hws_pattern pattern)
{
	u32 reg_data, if_id;
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* load pattern to memory */
	/*
	 * Write Tx mode, CS0, phases, Tx burst size, delay between burst,
	 * rx pattern phases
	 */
	reg_data =
		0x1 | (pattern_table[pattern].num_of_phases_tx << 5) |
		(pattern_table[pattern].tx_burst_size << 11) |
		(pattern_table[pattern].delay_between_bursts << 15) |
		(pattern_table[pattern].num_of_phases_rx << 21) | (0x1 << 25) |
		(effective_cs << 26);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CTRL_REG, reg_data, MASK_ALL_BITS));
	/* ODPG write enable from BIST */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CTRL_REG, (0x1 | (effective_cs << 26)),
		      0xc000003));
	/* disable error injection */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_WR_DATA_ERR_REG, 0, 0x1));
	/* load pattern to ODPG */
	ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST,
				      PARAM_NOT_CARE, pattern,
				      pattern_table[pattern].start_addr);

	if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) >= MV_TIP_REV_3) {
		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      SDRAM_ODT_CTRL_HIGH_REG,
				      0x3, 0xf));
		}

		mv_ddr_odpg_enable();
	} else {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_DATA_CTRL_REG, (u32)(0x1 << 31),
			      (u32)(0x1 << 31)));
	}
	mdelay(1);

	if (mv_ddr_is_odpg_done(MAX_POLLING_ITERATIONS) != MV_OK)
		return MV_FAIL;

	/* disable ODPG and stop write to memory */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CTRL_REG, (0x1 << 30), (u32)(0x3 << 30)));

	/* return to default */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CTRL_REG, 0, MASK_ALL_BITS));

	if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) >= MV_TIP_REV_3) {
		/* disable odt0 for CS0 training - need to adjust for multi-CS */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      SDRAM_ODT_CTRL_HIGH_REG, 0x0, 0xf));
	}
	/* temporarily added */
	mdelay(1);

	return MV_OK;
}

/*
 * Training search routine
 */
int ddr3_tip_ip_training_wrapper_int(u32 dev_num,
				     enum hws_access_type access_type,
				     u32 if_id,
				     enum hws_access_type pup_access_type,
				     u32 pup_num, u32 bit_num,
				     enum hws_training_result result_type,
				     enum hws_control_element control_element,
				     enum hws_search_dir search_dir,
				     enum hws_dir direction,
				     u32 interface_mask, u32 init_value_l2h,
				     u32 init_value_h2l, u32 num_iter,
				     enum hws_pattern pattern,
				     enum hws_edge_compare edge_comp,
				     enum hws_ddr_cs train_cs_type, u32 cs_num,
				     enum hws_training_ip_stat *train_status)
{
	u32 interface_num = 0, start_if, end_if, init_value_used;
	enum hws_search_dir search_dir_id, start_search, end_search;
	enum hws_edge_compare edge_comp_used;
	u8 cons_tap = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (train_status == NULL) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("train_status is NULL\n"));
		return MV_FAIL;
	}

	if ((train_cs_type > CS_NON_SINGLE) ||
	    (edge_comp >= EDGE_PFP) ||
	    (pattern >= PATTERN_LAST) ||
	    (direction > OPER_WRITE_AND_READ) ||
	    (search_dir > HWS_HIGH2LOW) ||
	    (control_element > HWS_CONTROL_ELEMENT_DQS_SKEW) ||
	    (result_type > RESULT_PER_BYTE) ||
	    (pup_num >= octets_per_if_num) ||
	    (pup_access_type > ACCESS_TYPE_MULTICAST) ||
	    (if_id > 11) || (access_type > ACCESS_TYPE_MULTICAST)) {
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_ERROR,
			("wrong parameter train_cs_type %d edge_comp %d pattern %d direction %d search_dir %d control_element %d result_type %d pup_num %d pup_access_type %d if_id %d access_type %d\n",
			 train_cs_type, edge_comp, pattern, direction,
			 search_dir, control_element, result_type, pup_num,
			 pup_access_type, if_id, access_type));
		return MV_FAIL;
	}

	if (edge_comp == EDGE_FPF) {
		start_search = HWS_LOW2HIGH;
		end_search = HWS_HIGH2LOW;
		edge_comp_used = EDGE_FP;
	} else {
		start_search = search_dir;
		end_search = search_dir;
		edge_comp_used = edge_comp;
	}
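	/*
	 * EDGE_FPF is thus realized as two fail-to-pass (EDGE_FP) scans,
	 * one low-to-high and one high-to-low; any other edge_comp runs
	 * a single scan in the requested direction.
	 */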
	for (search_dir_id = start_search; search_dir_id <= end_search;
	     search_dir_id++) {
		init_value_used = (search_dir_id == HWS_LOW2HIGH) ?
			init_value_l2h : init_value_h2l;
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_TRACE,
			("dev_num %d, access_type %d, if_id %d, pup_access_type %d, pup_num %d, result_type %d, control_element %d, search_dir_id %d, direction %d, interface_mask %d, init_value_used %d, num_iter %d, pattern %d, edge_comp_used %d, train_cs_type %d, cs_num %d\n",
			 dev_num, access_type, if_id, pup_access_type, pup_num,
			 result_type, control_element, search_dir_id,
			 direction, interface_mask, init_value_used, num_iter,
			 pattern, edge_comp_used, train_cs_type, cs_num));

		ddr3_tip_ip_training(dev_num, access_type, if_id,
				     pup_access_type, pup_num, result_type,
				     control_element, search_dir_id, direction,
				     interface_mask, init_value_used, num_iter,
				     pattern, edge_comp_used, train_cs_type,
				     cs_num, train_status);
		if (access_type == ACCESS_TYPE_MULTICAST) {
			start_if = 0;
			end_if = MAX_INTERFACE_NUM - 1;
		} else {
			start_if = if_id;
			end_if = if_id;
		}

		for (interface_num = start_if; interface_num <= end_if;
		     interface_num++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, interface_num);
			cs_num = 0;
			CHECK_STATUS(ddr3_tip_read_training_result
				     (dev_num, interface_num, pup_access_type,
				      pup_num, bit_num, search_dir_id,
				      direction, result_type,
				      TRAINING_LOAD_OPERATION_UNLOAD,
				      train_cs_type, NULL, 0, cons_tap,
				      0));
		}
	}

	return MV_OK;
}
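/*
 * Worked example of the bit classification described below (numbers
 * are illustrative): with edges e1 (l2h) and e2 (h2l) in adll taps,
 * e1 = 10 / e2 = 25 is BIT_LOW_UI, e1 = 40 / e2 = 55 is BIT_HIGH_UI,
 * e1 = 20 / e2 = 45 is BIT_SPLIT_IN, and a write result with
 * e2 - e1 > 32 is handled as BIT_SPLIT_OUT.
 */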
/*
 * Training search & read result routine
 * This function implements the search algorithm.
 * It first calls ddr3_tip_ip_training_wrapper_int, which triggers the
 * search from l2h and h2l.
 * This function handles the rx and tx search cases.
 * In case of rx it only triggers the search (l2h and h2l).
 * In case of tx there are three optional algorithm phases:
 * phase 1:
 *	it first triggers the search and handles the results as follows:
 *	each bit, whose search defined two edges (e1 or VW_L and e2 or VW_H),
 *	matches one of the cases:
 *	1. BIT_LOW_UI		0 <= VW <= 31	in case of jitter use: VW_L <= 31, VW_H <= 31
 *	2. BIT_HIGH_UI		32 <= VW <= 63	in case of jitter use: VW_L >= 32, VW_H >= 32
 *	3. BIT_SPLIT_IN		VW_L <= 31 & VW_H >= 32
 *	4. BIT_SPLIT_OUT*	VW_H < 32 & VW_L > 32
 *	note: the VW units are adll taps
 * phase 2:
 *	only the BIT_SPLIT_OUT case requires another search (phase 2) from the
 *	middle range in two directions, h2l and l2h, because only this case is
 *	not locked by the search engine in the first search trigger (phase 1).
 * phase 3:
 *	each subphy is categorized according to its bits' definitions.
 *	the subphy cases are as follows:
 *	1. BYTE_NOT_DEFINED		the byte has not yet been categorized
 *	2. BYTE_HOMOGENEOUS_LOW		0 <= VW <= 31
 *	3. BYTE_HOMOGENEOUS_HIGH	32 <= VW <= 63
 *	4. BYTE_HOMOGENEOUS_SPLIT_IN	VW_L <= 31 & VW_H >= 32,
 *					or the center of all bits in the byte <= 31
 *	5. BYTE_HOMOGENEOUS_SPLIT_OUT	VW_H < 32 & VW_L > 32
 *	6. BYTE_SPLIT_OUT_MIX		at least one bit is in the split-out state
 *					and one bit is in another,
 *					or the center of all bits in the byte >= 32
 *	after the two phases above, a center valid window is calculated for
 *	each subphy:
 *	center valid window = maximum center of all bits in the subphy -
 *	minimum center of all bits in the subphy.
 *	now decisions are made in each subphy as follows:
 *	all subphys which are homogeneous remain as is;
 *	all subphys which are homogeneous low or homogeneous high, and whose
 *	center valid window is less than 32, are marked homogeneous split-in;
 *	the bits in the bytes which are BYTE_SPLIT_OUT_MIX are then
 *	reorganized and handled as follows:
 *	all bits which are BIT_LOW_UI get 64 adll taps added; this hopefully
 *	ensures that all the bits in the subphy can be sampled by the dqs.
 */
int ddr3_tip_ip_training_wrapper(u32 dev_num, enum hws_access_type access_type,
				 u32 if_id,
				 enum hws_access_type pup_access_type,
				 u32 pup_num,
				 enum hws_training_result result_type,
				 enum hws_control_element control_element,
				 enum hws_search_dir search_dir,
				 enum hws_dir direction, u32 interface_mask,
				 u32 init_value_l2h, u32 init_value_h2l,
				 u32 num_iter, enum hws_pattern pattern,
				 enum hws_edge_compare edge_comp,
				 enum hws_ddr_cs train_cs_type, u32 cs_num,
				 enum hws_training_ip_stat *train_status)
{
	u8 e1, e2;
	u32 bit_id, start_if, end_if, bit_end = 0;
	u32 *result[HWS_SEARCH_DIR_LIMIT] = { 0 };
	u8 cons_tap = (direction == OPER_WRITE) ? (64) : (0);
	u8 bit_bit_mask[MAX_BUS_NUM] = { 0 }, bit_bit_mask_active = 0;
	u8 bit_state[MAX_BUS_NUM * BUS_WIDTH_IN_BITS] = {0};
	u8 h2l_adll_value[MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
	u8 l2h_adll_value[MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
	u8 center_subphy_adll_window[MAX_BUS_NUM];
	u8 min_center_subphy_adll[MAX_BUS_NUM];
	u8 max_center_subphy_adll[MAX_BUS_NUM];
	u32 *l2h_if_train_res = NULL;
	u32 *h2l_if_train_res = NULL;
	enum hws_search_dir search_dir_id;
	int status;
	u32 bit_lock_result;

	u8 sybphy_id;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (pup_num >= octets_per_if_num) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("pup_num %d not valid\n", pup_num));
	}

	if (if_id >= MAX_INTERFACE_NUM) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("if_id %d not valid\n", if_id));
	}

	status = ddr3_tip_ip_training_wrapper_int
		(dev_num, access_type, if_id, pup_access_type, pup_num,
		 ALL_BITS_PER_PUP, result_type, control_element,
		 search_dir, direction, interface_mask, init_value_l2h,
		 init_value_h2l, num_iter, pattern, edge_comp,
		 train_cs_type, cs_num, train_status);

	if (MV_OK != status)
		return status;

	if (access_type == ACCESS_TYPE_MULTICAST) {
		start_if = 0;
		end_if = MAX_INTERFACE_NUM - 1;
	} else {
		start_if = if_id;
		end_if = if_id;
	}

	for (if_id = start_if; if_id <= end_if; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		/* zero the database */
		bit_bit_mask_active = 0; /* clear the flag for level 2 search */
		memset(bit_state, 0, sizeof(bit_state));
		/* phase 1 */
		for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
			if (result_type == RESULT_PER_BIT)
				bit_end = BUS_WIDTH_IN_BITS;
			else
				bit_end = 0;

			/* zero the database */
			bit_bit_mask[sybphy_id] = 0;
			byte_status[if_id][sybphy_id] = BYTE_NOT_DEFINED;
			for (bit_id = 0; bit_id < bit_end; bit_id++) {
				h2l_adll_value[sybphy_id][bit_id] = 64;
				l2h_adll_value[sybphy_id][bit_id] = 0;
				for (search_dir_id = HWS_LOW2HIGH; search_dir_id <= HWS_HIGH2LOW;
				     search_dir_id++) {
					status = ddr3_tip_read_training_result
						(dev_num, if_id,
						 ACCESS_TYPE_UNICAST, sybphy_id, bit_id,
						 search_dir_id, direction, result_type,
						 TRAINING_LOAD_OPERATION_UNLOAD, CS_SINGLE,
						 &result[search_dir_id], 1, 0, 0);

					if (MV_OK != status)
						return status;
				}

				e1 = GET_TAP_RESULT(result[HWS_LOW2HIGH][0], EDGE_1);
				e2 = GET_TAP_RESULT(result[HWS_HIGH2LOW][0], EDGE_1);
				DEBUG_TRAINING_IP_ENGINE
					(DEBUG_LEVEL_INFO,
					 ("if_id %d sybphy_id %d bit %d l2h 0x%x (e1 0x%x) h2l 0x%x (e2 0x%x)\n",
					  if_id, sybphy_id, bit_id, result[HWS_LOW2HIGH][0], e1,
					  result[HWS_HIGH2LOW][0], e2));
				bit_lock_result =
					(GET_LOCK_RESULT(result[HWS_LOW2HIGH][0]) &&
					 GET_LOCK_RESULT(result[HWS_HIGH2LOW][0]));

				if (bit_lock_result) {
					/* in case of read operation set the byte status as homogeneous low */
					if (direction == OPER_READ) {
						byte_status[if_id][sybphy_id] |= BYTE_HOMOGENEOUS_LOW;
					} else if ((e2 - e1) > 32) { /* oper_write */
						/* split out */
						bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] =
							BIT_SPLIT_OUT;
						byte_status[if_id][sybphy_id] |= BYTE_HOMOGENEOUS_SPLIT_OUT;
						/* mark problem bits */
						bit_bit_mask[sybphy_id] |= (1 << bit_id);
						bit_bit_mask_active = 1;
						DEBUG_TRAINING_IP_ENGINE
							(DEBUG_LEVEL_TRACE,
							 ("if_id %d sybphy_id %d bit %d BIT_SPLIT_OUT\n",
							  if_id, sybphy_id, bit_id));
					} else {
						/* low ui */
						if (e1 <= 31 && e2 <= 31) {
							bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] =
								BIT_LOW_UI;
							byte_status[if_id][sybphy_id] |= BYTE_HOMOGENEOUS_LOW;
							l2h_adll_value[sybphy_id][bit_id] = e1;
							h2l_adll_value[sybphy_id][bit_id] = e2;
							DEBUG_TRAINING_IP_ENGINE
								(DEBUG_LEVEL_TRACE,
								 ("if_id %d sybphy_id %d bit %d BIT_LOW_UI\n",
								  if_id, sybphy_id, bit_id));
						}
						/* high ui */
						if (e1 >= 32 && e2 >= 32) {
							bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] =
								BIT_HIGH_UI;
							byte_status[if_id][sybphy_id] |= BYTE_HOMOGENEOUS_HIGH;
							l2h_adll_value[sybphy_id][bit_id] = e1;
							h2l_adll_value[sybphy_id][bit_id] = e2;
							DEBUG_TRAINING_IP_ENGINE
								(DEBUG_LEVEL_TRACE,
								 ("if_id %d sybphy_id %d bit %d BIT_HIGH_UI\n",
								  if_id, sybphy_id, bit_id));
						}
						/* split in */
						if (e1 <= 31 && e2 >= 32) {
							bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] =
								BIT_SPLIT_IN;
							byte_status[if_id][sybphy_id] |=
								BYTE_HOMOGENEOUS_SPLIT_IN;
							l2h_adll_value[sybphy_id][bit_id] = e1;
							h2l_adll_value[sybphy_id][bit_id] = e2;
							DEBUG_TRAINING_IP_ENGINE
								(DEBUG_LEVEL_TRACE,
								 ("if_id %d sybphy_id %d bit %d BIT_SPLIT_IN\n",
								  if_id, sybphy_id, bit_id));
						}
					}
				} else {
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_INFO,
						 ("if_id %d sybphy_id %d bit %d l2h 0x%x (e1 0x%x) "
						  "h2l 0x%x (e2 0x%x): bit cannot be categorized\n",
						  if_id, sybphy_id, bit_id, result[HWS_LOW2HIGH][0], e1,
						  result[HWS_HIGH2LOW][0], e2));
					/* mark the byte as not defined */
					byte_status[if_id][sybphy_id] = BYTE_NOT_DEFINED;
					break; /* continue to next pup - no reason to analyze this byte */
				}
			} /* for all bits */
		} /* for all PUPs */
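		/*
		 * The two phase 2 searches below start from the middle of
		 * the range (init value num_iter / 2 in the calls) in both
		 * directions, matching the phase 2 description in the
		 * comment above the wrapper.
		 */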
		/* phase 2 will occur only in write operation */
		if (bit_bit_mask_active != 0) {
			l2h_if_train_res = ddr3_tip_get_buf_ptr(dev_num, HWS_LOW2HIGH,
								result_type, if_id);
			h2l_if_train_res = ddr3_tip_get_buf_ptr(dev_num, HWS_HIGH2LOW,
								result_type, if_id);
			/* search from middle to end */
			ddr3_tip_ip_training
				(dev_num, ACCESS_TYPE_UNICAST,
				 if_id, ACCESS_TYPE_MULTICAST,
				 PARAM_NOT_CARE, result_type,
				 control_element, HWS_LOW2HIGH,
				 direction, interface_mask,
				 num_iter / 2, num_iter / 2,
				 pattern, EDGE_FP, train_cs_type,
				 cs_num, train_status);

			for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
				if (byte_status[if_id][sybphy_id] != BYTE_NOT_DEFINED) {
					if (bit_bit_mask[sybphy_id] == 0)
						continue;	/* this byte's bits have no split-out state */

					for (bit_id = 0; bit_id < bit_end; bit_id++) {
						if ((bit_bit_mask[sybphy_id] & (1 << bit_id)) == 0)
							continue;	/* this bit is not split; go to the next bit */

						/* enter the result into the database */
						status = ddr3_tip_read_training_result
							(dev_num, if_id, ACCESS_TYPE_UNICAST, sybphy_id,
							 bit_id, HWS_LOW2HIGH, direction, result_type,
							 TRAINING_LOAD_OPERATION_UNLOAD, CS_SINGLE,
							 &l2h_if_train_res, 0, 0, 1);

						if (MV_OK != status)
							return status;

						l2h_adll_value[sybphy_id][bit_id] =
							l2h_if_train_res[sybphy_id *
									 BUS_WIDTH_IN_BITS + bit_id] & PUP_RESULT_EDGE_1_MASK;
					}
				}
			}
			/* search from middle to start */
			ddr3_tip_ip_training
				(dev_num, ACCESS_TYPE_UNICAST,
				 if_id, ACCESS_TYPE_MULTICAST,
				 PARAM_NOT_CARE, result_type,
				 control_element, HWS_HIGH2LOW,
				 direction, interface_mask,
				 num_iter / 2, num_iter / 2,
				 pattern, EDGE_FP, train_cs_type,
				 cs_num, train_status);

			for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
				if (byte_status[if_id][sybphy_id] != BYTE_NOT_DEFINED) {
					if (bit_bit_mask[sybphy_id] == 0)
						continue;

					for (bit_id = 0; bit_id < bit_end; bit_id++) {
						if ((bit_bit_mask[sybphy_id] & (1 << bit_id)) == 0)
							continue;

						status = ddr3_tip_read_training_result
							(dev_num, if_id, ACCESS_TYPE_UNICAST, sybphy_id,
							 bit_id, HWS_HIGH2LOW, direction, result_type,
							 TRAINING_LOAD_OPERATION_UNLOAD, CS_SINGLE,
							 &h2l_if_train_res, 0, cons_tap, 1);

						if (MV_OK != status)
							return status;

						h2l_adll_value[sybphy_id][bit_id] =
							h2l_if_train_res[sybphy_id *
									 BUS_WIDTH_IN_BITS + bit_id] & PUP_RESULT_EDGE_1_MASK;
					}
				}
			}
		} /* end if bit_bit_mask_active */
		/*
		 * phase 3 will occur only in write operation;
		 * find the maximum and the minimum center of each subphy
		 */
		for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);

			if ((byte_status[if_id][sybphy_id] != BYTE_NOT_DEFINED) && (direction == OPER_WRITE)) {
				/* clear the arrays and parameters */
				center_subphy_adll_window[sybphy_id] = 0;
				max_center_subphy_adll[sybphy_id] = 0;
				min_center_subphy_adll[sybphy_id] = 64;
				/* find the max and min center adll value in the current subphy */
				for (bit_id = 0; bit_id < bit_end; bit_id++) {
					/* debug print all the bit edges after alignment */
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_TRACE,
						 ("if_id %d sybphy_id %d bit %d l2h %d h2l %d\n",
						  if_id, sybphy_id, bit_id, l2h_adll_value[sybphy_id][bit_id],
						  h2l_adll_value[sybphy_id][bit_id]));

					if (((l2h_adll_value[sybphy_id][bit_id] +
					      h2l_adll_value[sybphy_id][bit_id]) / 2) >
					    max_center_subphy_adll[sybphy_id])
						max_center_subphy_adll[sybphy_id] =
							(l2h_adll_value[sybphy_id][bit_id] +
							 h2l_adll_value[sybphy_id][bit_id]) / 2;
					if (((l2h_adll_value[sybphy_id][bit_id] +
					      h2l_adll_value[sybphy_id][bit_id]) / 2) <
					    min_center_subphy_adll[sybphy_id])
						min_center_subphy_adll[sybphy_id] =
							(l2h_adll_value[sybphy_id][bit_id] +
							 h2l_adll_value[sybphy_id][bit_id]) / 2;
				}

				/* calculate the center of the current subphy */
				center_subphy_adll_window[sybphy_id] =
					max_center_subphy_adll[sybphy_id] -
					min_center_subphy_adll[sybphy_id];
				DEBUG_TRAINING_IP_ENGINE
					(DEBUG_LEVEL_TRACE,
					 ("if_id %d sybphy_id %d min center %d max center %d center %d\n",
					  if_id, sybphy_id, min_center_subphy_adll[sybphy_id],
					  max_center_subphy_adll[sybphy_id],
					  center_subphy_adll_window[sybphy_id]));
			}
		}
		/*
		 * check byte state and fix bit states if needed;
		 * in case the level 1 and 2 subphy results above are
		 * homogeneous, continue to the next subphy
		 */
		for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
			if ((byte_status[if_id][sybphy_id] == BYTE_HOMOGENEOUS_LOW) ||
			    (byte_status[if_id][sybphy_id] == BYTE_HOMOGENEOUS_HIGH) ||
			    (byte_status[if_id][sybphy_id] == BYTE_HOMOGENEOUS_SPLIT_IN) ||
			    (byte_status[if_id][sybphy_id] == BYTE_HOMOGENEOUS_SPLIT_OUT) ||
			    (byte_status[if_id][sybphy_id] == BYTE_NOT_DEFINED))
				continue;

			/*
			 * in case all of the bits in the current subphy are
			 * less than 32, which allows alignment of the subphy bits,
			 * mark this subphy as homogeneous split-in
			 */
			if (center_subphy_adll_window[sybphy_id] <= 31)
				byte_status[if_id][sybphy_id] = BYTE_HOMOGENEOUS_SPLIT_IN;
			/*
			 * in case the current byte is split-out and the center
			 * is bigger than 31, the byte can be aligned; in this
			 * case add 64 to the low-ui bits, aligning them to the
			 * other ui bits
			 */
			if (center_subphy_adll_window[sybphy_id] >= 32) {
				byte_status[if_id][sybphy_id] = BYTE_SPLIT_OUT_MIX;

				DEBUG_TRAINING_IP_ENGINE
					(DEBUG_LEVEL_TRACE,
					 ("if_id %d sybphy_id %d byte state 0x%x\n",
					  if_id, sybphy_id, byte_status[if_id][sybphy_id]));
				for (bit_id = 0; bit_id < bit_end; bit_id++) {
					if (bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] == BIT_LOW_UI) {
						l2h_if_train_res[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] += 64;
						h2l_if_train_res[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] += 64;
					}
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_TRACE,
						 ("if_id %d sybphy_id %d bit_id %d added 64 adlls\n",
						  if_id, sybphy_id, bit_id));
				}
			}
		}
	} /* for all interfaces */

	return MV_OK;
}

u8 mv_ddr_tip_sub_phy_byte_status_get(u32 if_id, u32 subphy_id)
{
	return byte_status[if_id][subphy_id];
}

void mv_ddr_tip_sub_phy_byte_status_set(u32 if_id, u32 subphy_id, u8 byte_status_data)
{
	byte_status[if_id][subphy_id] = byte_status_data;
}

/*
 * Load (b_load == 1) or restore PHY register values
 */
int ddr3_tip_load_phy_values(int b_load)
{
	u32 bus_cnt = 0, if_id, dev_num = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
			if (b_load == 1) {
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_cnt,
					      DDR_PHY_DATA,
					      CTX_PHY_REG(effective_cs),
					      &phy_reg_bk[if_id][bus_cnt][0]));
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_cnt,
					      DDR_PHY_DATA,
					      RL_PHY_REG(effective_cs),
					      &phy_reg_bk[if_id][bus_cnt][1]));
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_cnt,
					      DDR_PHY_DATA,
					      CRX_PHY_REG(effective_cs),
					      &phy_reg_bk[if_id][bus_cnt][2]));
			} else {
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      bus_cnt, DDR_PHY_DATA,
					      CTX_PHY_REG(effective_cs),
					      phy_reg_bk[if_id][bus_cnt][0]));
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      bus_cnt, DDR_PHY_DATA,
					      RL_PHY_REG(effective_cs),
					      phy_reg_bk[if_id][bus_cnt][1]));
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      bus_cnt, DDR_PHY_DATA,
					      CRX_PHY_REG(effective_cs),
					      phy_reg_bk[if_id][bus_cnt][2]));
			}
		}
	}

	return MV_OK;
}
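/*
 * Note: ddr3_tip_training_ip_test() below brackets its sweep with
 * ddr3_tip_load_phy_values(1) and ddr3_tip_load_phy_values(0), i.e. the
 * CTX/RL/CRX PHY registers are backed up into phy_reg_bk before the
 * search and restored afterwards.
 */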
int ddr3_tip_training_ip_test(u32 dev_num, enum hws_training_result result_type,
			      enum hws_search_dir search_dir,
			      enum hws_dir direction,
			      enum hws_edge_compare edge,
			      u32 init_val1, u32 init_val2,
			      u32 num_of_iterations,
			      u32 start_pattern, u32 end_pattern)
{
	u32 pattern, if_id, pup_id;
	enum hws_training_ip_stat train_status[MAX_INTERFACE_NUM];
	u32 *res = NULL;
	u32 search_state = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* back up the phy registers before the destructive search */
	ddr3_tip_load_phy_values(1);

	for (pattern = start_pattern; pattern <= end_pattern; pattern++) {
		for (search_state = 0; search_state < HWS_SEARCH_DIR_LIMIT;
		     search_state++) {
			ddr3_tip_ip_training_wrapper(dev_num,
						     ACCESS_TYPE_MULTICAST, 0,
						     ACCESS_TYPE_MULTICAST, 0,
						     result_type,
						     HWS_CONTROL_ELEMENT_ADLL,
						     search_dir, direction,
						     0xfff, init_val1,
						     init_val2,
						     num_of_iterations, pattern,
						     edge, CS_SINGLE,
						     PARAM_NOT_CARE,
						     train_status);

			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup_id = 0; pup_id < octets_per_if_num;
				     pup_id++) {
					VALIDATE_BUS_ACTIVE(tm->bus_act_mask,
							    pup_id);
					CHECK_STATUS
						(ddr3_tip_read_training_result
						 (dev_num, if_id,
						  ACCESS_TYPE_UNICAST, pup_id,
						  ALL_BITS_PER_PUP,
						  search_state,
						  direction, result_type,
						  TRAINING_LOAD_OPERATION_UNLOAD,
						  CS_SINGLE, &res, 1, 0, 0));
					if (result_type == RESULT_PER_BYTE) {
						DEBUG_TRAINING_IP_ENGINE
							(DEBUG_LEVEL_INFO,
							 ("search_state %d if_id %d pup_id %d 0x%x\n",
							  search_state, if_id,
							  pup_id, res[0]));
					} else {
						DEBUG_TRAINING_IP_ENGINE
							(DEBUG_LEVEL_INFO,
							 ("search_state %d if_id %d pup_id %d 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
							  search_state, if_id,
							  pup_id, res[0],
							  res[1], res[2],
							  res[3], res[4],
							  res[5], res[6],
							  res[7]));
					}
				}
			} /* interface */
		} /* search */
	} /* pattern */

	/* restore the phy registers */
	ddr3_tip_load_phy_values(0);

	return MV_OK;
}

int mv_ddr_pattern_start_addr_set(struct pattern_info *pattern_tbl, enum hws_pattern pattern, u32 addr)
{
	pattern_tbl[pattern].start_addr = addr;

	return 0;
}

struct pattern_info *ddr3_tip_get_pattern_table(void)
{
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask))
		return pattern_table_64;
	else if (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask) == 0)
		return pattern_table_32;
	else
		return pattern_table_16;
}

u16 *ddr3_tip_get_mask_results_dq_reg(void)
{
#if MAX_BUS_NUM == 5
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
		return mask_results_dq_reg_map_pup3_ecc;
	else
#endif
		return mask_results_dq_reg_map;
}

u16 *ddr3_tip_get_mask_results_pup_reg_map(void)
{
#if MAX_BUS_NUM == 5
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
		return mask_results_pup_reg_map_pup3_ecc;
	else
#endif
		return mask_results_pup_reg_map;
}
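/*
 * A minimal usage sketch (illustrative only; the address below is a
 * hypothetical value, not a platform requirement): pick the pattern table
 * that matches the active bus width, then relocate one pattern's source
 * address before programming the odpg:
 *
 *	struct pattern_info *tbl = ddr3_tip_get_pattern_table();
 *
 *	mv_ddr_pattern_start_addr_set(tbl, PATTERN_TEST, 0x10000);
 */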
/* load expected dm pattern to odpg */
#define LOW_NIBBLE_BYTE_MASK	0xf
#define HIGH_NIBBLE_BYTE_MASK	0xf0
int mv_ddr_load_dm_pattern_to_odpg(enum hws_access_type access_type, enum hws_pattern pattern,
				   enum dm_direction dm_dir)
{
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	u32 pattern_len = 0;
	u32 data_low, data_high;
	u8 dm_data;

	for (pattern_len = 0;
	     pattern_len < pattern_table[pattern].pattern_len;
	     pattern_len++) {
		if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask)) {
			data_low = pattern_table_get_word(0, pattern, (u8)pattern_len);
			data_high = data_low;
		} else {
			data_low = pattern_table_get_word(0, pattern, (u8)(pattern_len * 2));
			data_high = pattern_table_get_word(0, pattern, (u8)(pattern_len * 2 + 1));
		}

		/* odpg mbus dm definition is opposite to ddr4 protocol */
		if (dm_dir == DM_DIR_INVERSE)
			dm_data = ~((data_low & LOW_NIBBLE_BYTE_MASK) |
				    (data_high & HIGH_NIBBLE_BYTE_MASK));
		else
			dm_data = (data_low & LOW_NIBBLE_BYTE_MASK) |
				  (data_high & HIGH_NIBBLE_BYTE_MASK);

		ddr3_tip_if_write(0, access_type, 0, ODPG_DATA_WR_DATA_LOW_REG, data_low, MASK_ALL_BITS);
		ddr3_tip_if_write(0, access_type, 0, ODPG_DATA_WR_DATA_HIGH_REG, data_high, MASK_ALL_BITS);
		ddr3_tip_if_write(0, access_type, 0, ODPG_DATA_WR_ADDR_REG,
				  pattern_len | ((dm_data & ODPG_DATA_WR_DATA_MASK) <<
						 ODPG_DATA_WR_DATA_OFFS),
				  MASK_ALL_BITS);
	}

	return MV_OK;
}
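/*
 * Worked example for mv_ddr_load_dm_pattern_to_odpg() (values chosen for
 * illustration only): with data_low = 0x55555555 and data_high = 0xaaaaaaaa,
 *
 *	data_low  & LOW_NIBBLE_BYTE_MASK  = 0x05
 *	data_high & HIGH_NIBBLE_BYTE_MASK = 0xa0
 *	dm_data (non-inverse branch)      = 0x05 | 0xa0 = 0xa5
 *	dm_data (DM_DIR_INVERSE)          = (u8)~0xa5 = 0x5a
 */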