1 /* SPDX-License-Identifier: (GPL-2.0+ OR MIT) */ 2 /* 3 * Copyright (c) 2018 Microsemi Corporation 4 */ 5 6 #ifndef __ASM_MACH_DDR_H 7 #define __ASM_MACH_DDR_H 8 9 #include <asm/cacheops.h> 10 #include <asm/io.h> 11 #include <asm/reboot.h> 12 #include <mach/common.h> 13 14 #define MIPS_VCOREIII_MEMORY_DDR3 15 #define MIPS_VCOREIII_DDR_SIZE CONFIG_SYS_SDRAM_SIZE 16 17 #if defined(CONFIG_DDRTYPE_H5TQ1G63BFA) /* Serval1 Refboard */ 18 19 /* Hynix H5TQ1G63BFA (1Gbit DDR3, x16) @ 3.20ns */ 20 #define VC3_MPAR_bank_addr_cnt 3 21 #define VC3_MPAR_row_addr_cnt 13 22 #define VC3_MPAR_col_addr_cnt 10 23 #define VC3_MPAR_tREFI 2437 24 #define VC3_MPAR_tRAS_min 12 25 #define VC3_MPAR_CL 6 26 #define VC3_MPAR_tWTR 4 27 #define VC3_MPAR_tRC 16 28 #define VC3_MPAR_tFAW 16 29 #define VC3_MPAR_tRP 5 30 #define VC3_MPAR_tRRD 4 31 #define VC3_MPAR_tRCD 5 32 #define VC3_MPAR_tMRD 4 33 #define VC3_MPAR_tRFC 35 34 #define VC3_MPAR_CWL 5 35 #define VC3_MPAR_tXPR 38 36 #define VC3_MPAR_tMOD 12 37 #define VC3_MPAR_tDLLK 512 38 #define VC3_MPAR_tWR 5 39 40 #elif defined(CONFIG_DDRTYPE_MT41J128M16HA) /* Validation board */ 41 42 /* Micron MT41J128M16HA-15E:D (2Gbit DDR3, x16) @ 3.20ns */ 43 #define VC3_MPAR_bank_addr_cnt 3 44 #define VC3_MPAR_row_addr_cnt 14 45 #define VC3_MPAR_col_addr_cnt 10 46 #define VC3_MPAR_tREFI 2437 47 #define VC3_MPAR_tRAS_min 12 48 #define VC3_MPAR_CL 5 49 #define VC3_MPAR_tWTR 4 50 #define VC3_MPAR_tRC 16 51 #define VC3_MPAR_tFAW 16 52 #define VC3_MPAR_tRP 5 53 #define VC3_MPAR_tRRD 4 54 #define VC3_MPAR_tRCD 5 55 #define VC3_MPAR_tMRD 4 56 #define VC3_MPAR_tRFC 50 57 #define VC3_MPAR_CWL 5 58 #define VC3_MPAR_tXPR 54 59 #define VC3_MPAR_tMOD 12 60 #define VC3_MPAR_tDLLK 512 61 #define VC3_MPAR_tWR 5 62 63 #elif defined(CONFIG_DDRTYPE_MT41K256M16) /* JR2 Validation board */ 64 65 /* Micron MT41K256M16 (4Gbit, DDR3L-800, 256Mbitx16) @ 3.20ns */ 66 #define VC3_MPAR_bank_addr_cnt 3 67 #define VC3_MPAR_row_addr_cnt 15 68 #define VC3_MPAR_col_addr_cnt 10 69 #define 
VC3_MPAR_tREFI 2437 70 #define VC3_MPAR_tRAS_min 12 71 #define VC3_MPAR_CL 5 72 #define VC3_MPAR_tWTR 4 73 #define VC3_MPAR_tRC 16 74 #define VC3_MPAR_tFAW 16 75 #define VC3_MPAR_tRP 5 76 #define VC3_MPAR_tRRD 4 77 #define VC3_MPAR_tRCD 5 78 #define VC3_MPAR_tMRD 4 79 #define VC3_MPAR_tRFC 82 80 #define VC3_MPAR_CWL 5 81 #define VC3_MPAR_tXPR 85 82 #define VC3_MPAR_tMOD 12 83 #define VC3_MPAR_tDLLK 512 84 #define VC3_MPAR_tWR 5 85 86 #elif defined(CONFIG_DDRTYPE_H5TQ4G63MFR) /* JR2 Reference board */ 87 88 /* Hynix H5TQ4G63MFR-PBC (4Gbit, DDR3-800, 256Mbitx16) - 2kb pages @ 3.20ns */ 89 #define VC3_MPAR_bank_addr_cnt 3 90 #define VC3_MPAR_row_addr_cnt 15 91 #define VC3_MPAR_col_addr_cnt 10 92 #define VC3_MPAR_tREFI 2437 93 #define VC3_MPAR_tRAS_min 12 94 #define VC3_MPAR_CL 6 95 #define VC3_MPAR_tWTR 4 96 #define VC3_MPAR_tRC 17 97 #define VC3_MPAR_tFAW 16 98 #define VC3_MPAR_tRP 5 99 #define VC3_MPAR_tRRD 4 100 #define VC3_MPAR_tRCD 5 101 #define VC3_MPAR_tMRD 4 102 #define VC3_MPAR_tRFC 82 103 #define VC3_MPAR_CWL 5 104 #define VC3_MPAR_tXPR 85 105 #define VC3_MPAR_tMOD 12 106 #define VC3_MPAR_tDLLK 512 107 #define VC3_MPAR_tWR 5 108 109 #elif defined(CONFIG_DDRTYPE_MT41K128M16JT) 110 111 /* Micron Micron MT41K128M16JT-125 (2Gbit DDR3L, 128Mbitx16) @ 3.20ns */ 112 #define VC3_MPAR_bank_addr_cnt 3 113 #define VC3_MPAR_row_addr_cnt 14 114 #define VC3_MPAR_col_addr_cnt 10 115 #define VC3_MPAR_tREFI 2437 116 #define VC3_MPAR_tRAS_min 12 117 #define VC3_MPAR_CL 6 118 #define VC3_MPAR_tWTR 4 119 #define VC3_MPAR_tRC 16 120 #define VC3_MPAR_tFAW 16 121 #define VC3_MPAR_tRP 5 122 #define VC3_MPAR_tRRD 4 123 #define VC3_MPAR_tRCD 5 124 #define VC3_MPAR_tMRD 4 125 #define VC3_MPAR_tRFC 82 126 #define VC3_MPAR_CWL 5 127 #define VC3_MPAR_tXPR 85 128 #define VC3_MPAR_tMOD 12 129 #define VC3_MPAR_tDLLK 512 130 #define VC3_MPAR_tWR 5 131 132 #elif defined(CONFIG_DDRTYPE_MT47H128M8HQ) /* Luton10/26 Refboards */ 133 134 /* Micron 1Gb MT47H128M8-3 16Meg x 8 x 8 banks, DDR-533@CL4 
@ 4.80ns */ 135 #define VC3_MPAR_bank_addr_cnt 3 136 #define VC3_MPAR_row_addr_cnt 14 137 #define VC3_MPAR_col_addr_cnt 10 138 #define VC3_MPAR_tREFI 1625 139 #define VC3_MPAR_tRAS_min 9 140 #define VC3_MPAR_CL 4 141 #define VC3_MPAR_tWTR 2 142 #define VC3_MPAR_tRC 12 143 #define VC3_MPAR_tFAW 8 144 #define VC3_MPAR_tRP 4 145 #define VC3_MPAR_tRRD 2 146 #define VC3_MPAR_tRCD 4 147 148 #define VC3_MPAR_tRPA 4 149 #define VC3_MPAR_tRP 4 150 151 #define VC3_MPAR_tMRD 2 152 #define VC3_MPAR_tRFC 27 153 154 #define VC3_MPAR__400_ns_dly 84 155 156 #define VC3_MPAR_tWR 4 157 #undef MIPS_VCOREIII_MEMORY_DDR3 158 #else 159 160 #error Unknown DDR system configuration - please add! 161 162 #endif 163 164 #if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \ 165 defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL) 166 #define MIPS_VCOREIII_MEMORY_16BIT 1 167 #endif 168 169 #define MIPS_VCOREIII_MEMORY_SSTL_ODT 7 170 #define MIPS_VCOREIII_MEMORY_SSTL_DRIVE 7 171 #define VCOREIII_DDR_DQS_MODE_CALIBRATE 172 173 #ifdef MIPS_VCOREIII_MEMORY_16BIT 174 #define VC3_MPAR_16BIT 1 175 #else 176 #define VC3_MPAR_16BIT 0 177 #endif 178 179 #ifdef MIPS_VCOREIII_MEMORY_DDR3 180 #define VC3_MPAR_DDR3_MODE 1 /* DDR3 */ 181 #define VC3_MPAR_BURST_LENGTH 8 /* Always 8 (1) for DDR3 */ 182 #ifdef MIPS_VCOREIII_MEMORY_16BIT 183 #define VC3_MPAR_BURST_SIZE 1 /* Always 1 for DDR3/16bit */ 184 #else 185 #define VC3_MPAR_BURST_SIZE 0 186 #endif 187 #else 188 #define VC3_MPAR_DDR3_MODE 0 /* DDR2 */ 189 #ifdef MIPS_VCOREIII_MEMORY_16BIT 190 #define VC3_MPAR_BURST_LENGTH 4 /* in DDR2 16-bit mode, use burstlen 4 */ 191 #else 192 #define VC3_MPAR_BURST_LENGTH 8 /* For 8-bit IF we must run burst-8 */ 193 #endif 194 #define VC3_MPAR_BURST_SIZE 0 /* Always 0 for DDR2 */ 195 #endif 196 197 #define VC3_MPAR_RL VC3_MPAR_CL 198 #if !defined(MIPS_VCOREIII_MEMORY_DDR3) 199 #define VC3_MPAR_WL (VC3_MPAR_RL - 1) 200 #define VC3_MPAR_MD VC3_MPAR_tMRD 201 #define VC3_MPAR_ID VC3_MPAR__400_ns_dly 202 
#define VC3_MPAR_SD VC3_MPAR_tXSRD
#define VC3_MPAR_OW (VC3_MPAR_WL - 2)
#define VC3_MPAR_OR (VC3_MPAR_WL - 3)
#define VC3_MPAR_RP (VC3_MPAR_bank_addr_cnt < 3 ? VC3_MPAR_tRP : VC3_MPAR_tRPA)
#define VC3_MPAR_FAW (VC3_MPAR_bank_addr_cnt < 3 ? 1 : VC3_MPAR_tFAW)
#define VC3_MPAR_BL (VC3_MPAR_BURST_LENGTH == 4 ? 2 : 4)
#define MSCC_MEMPARM_MR0 \
	(VC3_MPAR_BURST_LENGTH == 8 ? 3 : 2) | (VC3_MPAR_CL << 4) | \
	((VC3_MPAR_tWR - 1) << 9)
/* DLL-on, Full-OD, AL=0, RTT=off, nDQS-on, RDQS-off, out-en */
#define MSCC_MEMPARM_MR1 0x382
#define MSCC_MEMPARM_MR2 0
#define MSCC_MEMPARM_MR3 0
#else
/* DDR3 variants of the derived parameters and mode registers */
#define VC3_MPAR_WL VC3_MPAR_CWL
#define VC3_MPAR_MD VC3_MPAR_tMOD
#define VC3_MPAR_ID VC3_MPAR_tXPR
#define VC3_MPAR_SD VC3_MPAR_tDLLK
#define VC3_MPAR_OW 2
#define VC3_MPAR_OR 2
#define VC3_MPAR_RP VC3_MPAR_tRP
#define VC3_MPAR_FAW VC3_MPAR_tFAW
#define VC3_MPAR_BL 4
#define MSCC_MEMPARM_MR0 ((VC3_MPAR_RL - 4) << 4) | ((VC3_MPAR_tWR - 4) << 9)
/* ODT_RTT: "0x0040" for 120ohm, and "0x0004" for 60ohm. */
#define MSCC_MEMPARM_MR1 0x0040
#define MSCC_MEMPARM_MR2 ((VC3_MPAR_WL - 5) << 3)
#define MSCC_MEMPARM_MR3 0
#endif				/* MIPS_VCOREIII_MEMORY_DDR3 */

/* Memory controller configuration word: size, width, mode, geometry */
#define MSCC_MEMPARM_MEMCFG						\
	((MIPS_VCOREIII_DDR_SIZE > SZ_512M) ?				\
	 ICPU_MEMCTRL_CFG_DDR_512MBYTE_PLUS : 0) |			\
	(VC3_MPAR_16BIT ? ICPU_MEMCTRL_CFG_DDR_WIDTH : 0) |		\
	(VC3_MPAR_DDR3_MODE ? ICPU_MEMCTRL_CFG_DDR_MODE : 0) |		\
	(VC3_MPAR_BURST_SIZE ? ICPU_MEMCTRL_CFG_BURST_SIZE : 0) |	\
	(VC3_MPAR_BURST_LENGTH == 8 ? ICPU_MEMCTRL_CFG_BURST_LEN : 0) | \
	(VC3_MPAR_bank_addr_cnt == 3 ? ICPU_MEMCTRL_CFG_BANK_CNT : 0) | \
	ICPU_MEMCTRL_CFG_MSB_ROW_ADDR(VC3_MPAR_row_addr_cnt - 1) |	\
	ICPU_MEMCTRL_CFG_MSB_COL_ADDR(VC3_MPAR_col_addr_cnt - 1)

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
#define MSCC_MEMPARM_PERIOD					\
	ICPU_MEMCTRL_REF_PERIOD_MAX_PEND_REF(8) |		\
	ICPU_MEMCTRL_REF_PERIOD_REF_PERIOD(VC3_MPAR_tREFI)

#define MSCC_MEMPARM_TIMING0						\
	ICPU_MEMCTRL_TIMING0_RD_TO_WR_DLY(VC3_MPAR_RL + VC3_MPAR_BL + 1 - \
					  VC3_MPAR_WL) |		\
	ICPU_MEMCTRL_TIMING0_WR_CS_CHANGE_DLY(VC3_MPAR_BL - 1) |	\
	ICPU_MEMCTRL_TIMING0_RD_CS_CHANGE_DLY(VC3_MPAR_BL) |		\
	ICPU_MEMCTRL_TIMING0_RAS_TO_PRECH_DLY(VC3_MPAR_tRAS_min - 1) |	\
	ICPU_MEMCTRL_TIMING0_WR_TO_PRECH_DLY(VC3_MPAR_WL +		\
					     VC3_MPAR_BL +		\
					     VC3_MPAR_tWR - 1) |	\
	ICPU_MEMCTRL_TIMING0_RD_TO_PRECH_DLY(VC3_MPAR_BL - 1) |		\
	ICPU_MEMCTRL_TIMING0_WR_DATA_XFR_DLY(VC3_MPAR_WL - 1) |		\
	ICPU_MEMCTRL_TIMING0_RD_DATA_XFR_DLY(VC3_MPAR_RL - 3)

#define MSCC_MEMPARM_TIMING1						\
	ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_SAME_BANK_DLY(VC3_MPAR_tRC - 1) | \
	ICPU_MEMCTRL_TIMING1_BANK8_FAW_DLY(VC3_MPAR_FAW - 1) |		\
	ICPU_MEMCTRL_TIMING1_PRECH_TO_RAS_DLY(VC3_MPAR_RP - 1) |	\
	ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_DLY(VC3_MPAR_tRRD - 1) |	\
	ICPU_MEMCTRL_TIMING1_RAS_TO_CAS_DLY(VC3_MPAR_tRCD - 1) |	\
	ICPU_MEMCTRL_TIMING1_WR_TO_RD_DLY(VC3_MPAR_WL +			\
					  VC3_MPAR_BL +			\
					  VC3_MPAR_tWTR - 1)

#define MSCC_MEMPARM_TIMING2					\
	ICPU_MEMCTRL_TIMING2_PRECH_ALL_DLY(VC3_MPAR_RP - 1) |	\
	ICPU_MEMCTRL_TIMING2_MDSET_DLY(VC3_MPAR_MD - 1) |	\
	ICPU_MEMCTRL_TIMING2_REF_DLY(VC3_MPAR_tRFC - 1) |	\
	ICPU_MEMCTRL_TIMING2_INIT_DLY(VC3_MPAR_ID - 1)

#define MSCC_MEMPARM_TIMING3						\
	ICPU_MEMCTRL_TIMING3_WR_TO_RD_CS_CHANGE_DLY(VC3_MPAR_WL +	\
						    VC3_MPAR_tWTR - 1) |\
	ICPU_MEMCTRL_TIMING3_ODT_RD_DLY(VC3_MPAR_OR - 1) |		\
	ICPU_MEMCTRL_TIMING3_ODT_WR_DLY(VC3_MPAR_OW - 1) |		\
	ICPU_MEMCTRL_TIMING3_LOCAL_ODT_RD_DLY(VC3_MPAR_RL - 3)

#else
/* Luton (older controller): different refresh/timing register layout */
#define MSCC_MEMPARM_PERIOD					\
	ICPU_MEMCTRL_REF_PERIOD_MAX_PEND_REF(1) |		\
	ICPU_MEMCTRL_REF_PERIOD_REF_PERIOD(VC3_MPAR_tREFI)

#define MSCC_MEMPARM_TIMING0						\
	ICPU_MEMCTRL_TIMING0_RAS_TO_PRECH_DLY(VC3_MPAR_tRAS_min - 1) |	\
	ICPU_MEMCTRL_TIMING0_WR_TO_PRECH_DLY(VC3_MPAR_CL +		\
					     (VC3_MPAR_BURST_LENGTH == 8 ? 2 : 0) + \
					     VC3_MPAR_tWR) |		\
	ICPU_MEMCTRL_TIMING0_RD_TO_PRECH_DLY(VC3_MPAR_BURST_LENGTH == 8 ? 3 : 1) | \
	ICPU_MEMCTRL_TIMING0_WR_DATA_XFR_DLY(VC3_MPAR_CL - 3) |		\
	ICPU_MEMCTRL_TIMING0_RD_DATA_XFR_DLY(VC3_MPAR_CL - 3)

#define MSCC_MEMPARM_TIMING1						\
	ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_SAME_BANK_DLY(VC3_MPAR_tRC - 1) | \
	ICPU_MEMCTRL_TIMING1_BANK8_FAW_DLY(VC3_MPAR_tFAW - 1) |		\
	ICPU_MEMCTRL_TIMING1_PRECH_TO_RAS_DLY(VC3_MPAR_tRP - 1) |	\
	ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_DLY(VC3_MPAR_tRRD - 1) |	\
	ICPU_MEMCTRL_TIMING1_RAS_TO_CAS_DLY(VC3_MPAR_tRCD - 1) |	\
	ICPU_MEMCTRL_TIMING1_WR_TO_RD_DLY(VC3_MPAR_CL +			\
					  (VC3_MPAR_BURST_LENGTH == 8 ? 2 : 0) + \
					  VC3_MPAR_tWTR)
#define MSCC_MEMPARM_TIMING2						\
	ICPU_MEMCTRL_TIMING2_PRECH_ALL_DLY(VC3_MPAR_tRPA - 1) |		\
	ICPU_MEMCTRL_TIMING2_MDSET_DLY(VC3_MPAR_tMRD - 1) |		\
	ICPU_MEMCTRL_TIMING2_REF_DLY(VC3_MPAR_tRFC - 1) |		\
	ICPU_MEMCTRL_TIMING2_FOUR_HUNDRED_NS_DLY(VC3_MPAR__400_ns_dly)

#define MSCC_MEMPARM_TIMING3						\
	ICPU_MEMCTRL_TIMING3_WR_TO_RD_CS_CHANGE_DLY(VC3_MPAR_CL - 1) |	\
	ICPU_MEMCTRL_TIMING3_ODT_WR_DLY(VC3_MPAR_CL - 1) |		\
	ICPU_MEMCTRL_TIMING3_LOCAL_ODT_RD_DLY(VC3_MPAR_CL - 1)

#endif

/* DQS training step results */
enum {
	DDR_TRAIN_OK,
	DDR_TRAIN_CONTINUE,
	DDR_TRAIN_ERROR,
};

/*
 * We actually have very few 'pause' possibilities apart from
 * these assembly nops (at this very early stage).
 */
#define PAUSE() asm volatile("nop; nop; nop; nop; nop; nop; nop; nop")

/*
 * Set the DQS delay for one byte lane to an absolute value.
 *
 * NB: Assumes inlining as no stack is available!
 */
static inline void set_dly(u32 bytelane, u32 dly)
{
	register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));

	r &= ~ICPU_MEMCTRL_DQS_DLY_DQS_DLY_M;
	r |= ICPU_MEMCTRL_DQS_DLY_DQS_DLY(dly);
	writel(r, BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
}

/*
 * Increment the DQS delay of a byte lane by one step.
 * Returns false when the delay is already at its maximum (31).
 */
static inline bool incr_dly(u32 bytelane)
{
	register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));

	if (ICPU_MEMCTRL_DQS_DLY_DQS_DLY(r) < 31) {
		writel(r + 1, BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
		return true;
	}

	return false;
}

/*
 * Adjust the DQS delay of byte lane 0 by a signed amount (used by the
 * Luton training path). Returns false when already at maximum.
 */
static inline bool adjust_dly(int adjust)
{
	register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(0));

	if (ICPU_MEMCTRL_DQS_DLY_DQS_DLY(r) < 31) {
		writel(r + adjust, BASE_CFG + ICPU_MEMCTRL_DQS_DLY(0));
		return true;
	}

	return false;
}

/*
 * Set the byte lane's DQS delay to the midpoint of [start; current].
 *
 * NB: Assumes inlining as no stack is available!
 */
static inline void center_dly(u32 bytelane, u32 start)
{
	register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane)) - start;

	writel(start + (r >> 1), BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
}

/* Pulse the memory PHY FIFO reset (assert, pause, deassert, pause) */
static inline void memphy_soft_reset(void)
{
	setbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_FIFO_RST);
	PAUSE();
	clrbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_FIFO_RST);
	PAUSE();
}

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
/* Pattern written to DDR and read back during DQS training */
static u8 training_data[] = { 0xfe, 0x11, 0x33, 0x55, 0x77, 0x99, 0xbb, 0xdd };

/* Busy-wait for 'val' ticks of 100 ns each, using HW timer 0 one-shot */
static inline void sleep_100ns(u32 val)
{
	/* Set the timer tick generator to 100 ns */
	writel(VCOREIII_TIMER_DIVIDER - 1, BASE_CFG + ICPU_TIMER_TICK_DIV);

	/* Set the timer value */
	writel(val, BASE_CFG + ICPU_TIMER_VALUE(0));

	/* Enable timer 0 for one-shot */
	writel(ICPU_TIMER_CTRL_ONE_SHOT_ENA | ICPU_TIMER_CTRL_TIMER_ENA,
	       BASE_CFG + ICPU_TIMER_CTRL(0));

	/* Wait for timer 0 to reach 0 */
	while (readl(BASE_CFG + ICPU_TIMER_VALUE(0)) != 0)
		;
}

#if defined(CONFIG_SOC_OCELOT)
static inline void hal_vcoreiii_ddr_reset_assert(void)
{
	/* DDR has reset pin on GPIO 19 toggle Low-High to release */
	setbits_le32(BASE_DEVCPU_GCB + PERF_GPIO_OE, BIT(19));
	writel(BIT(19), BASE_DEVCPU_GCB + PERF_GPIO_OUT_CLR);
	sleep_100ns(10000);
}

static inline void hal_vcoreiii_ddr_reset_release(void)
{
	/* DDR has reset pin on GPIO 19 toggle Low-High to release */
	setbits_le32(BASE_DEVCPU_GCB + PERF_GPIO_OE, BIT(19));
	writel(BIT(19), BASE_DEVCPU_GCB + PERF_GPIO_OUT_SET);
	sleep_100ns(10000);
}

/*
 * DDR memory sanity checking failed, tally and do hard reset
 *
 * NB: Assumes inlining as no stack is available!
 */
static inline void hal_vcoreiii_ddr_failed(void)
{
	register u32 reset;

	/* Count the failed attempt in sticky GPR(6) across the reset */
	writel(readl(BASE_CFG + ICPU_GPR(6)) + 1, BASE_CFG + ICPU_GPR(6));

	clrbits_le32(BASE_DEVCPU_GCB + PERF_GPIO_OE, BIT(19));

	/* We have to execute the reset function from cache. Indeed,
	 * the reboot workaround in _machine_restart() will change the
	 * SPI NOR into SW bitbang.
	 *
	 * This will render the CPU unable to execute directly from
	 * the NOR, which is why the reset instructions are prefetched
	 * into the I-cache.
	 *
	 * When failing the DDR initialization we are executing from
	 * NOR.
	 *
	 * The last instruction in _machine_restart() will reset the
	 * MIPS CPU (and the cache), and the CPU will start executing
	 * from the reset vector.
	 */
	reset = KSEG0ADDR(_machine_restart);
	icache_lock((void *)reset, 128);
	asm volatile ("jr %0"::"r" (reset));

	/* Not reached: the jump above never returns */
	panic("DDR init failed\n");
}
#else				/* JR2 || ServalT || Serval */
static inline void hal_vcoreiii_ddr_reset_assert(void)
{
	/* Ensure the memory controller physical iface is forced reset */
	writel(readl(BASE_CFG + ICPU_MEMPHY_CFG) |
	       ICPU_MEMPHY_CFG_PHY_RST, BASE_CFG + ICPU_MEMPHY_CFG);

	/* Ensure the memory controller is forced reset */
	writel(readl(BASE_CFG + ICPU_RESET) |
	       ICPU_RESET_MEM_RST_FORCE, BASE_CFG + ICPU_RESET);
}

/* DDR sanity checking failed: soft-reset the whole chip */
static inline void hal_vcoreiii_ddr_failed(void)
{
	writel(0, BASE_CFG + ICPU_RESET);
	writel(PERF_SOFT_RST_SOFT_CHIP_RST, BASE_CFG + PERF_SOFT_RST);

	panic("DDR init failed\n");
}
#endif				/* JR2 || ServalT || Serval */

/*
 * DDR memory sanity checking done, possibly enable ECC.
 *
 * NB: Assumes inlining as no stack is available!
 */
static inline void hal_vcoreiii_ddr_verified(void)
{
#ifdef MIPS_VCOREIII_MEMORY_ECC
	/* Finally, enable ECC */
	register u32 val = readl(BASE_CFG + ICPU_MEMCTRL_CFG);

	val |= ICPU_MEMCTRL_CFG_DDR_ECC_ERR_ENA;
	val &= ~ICPU_MEMCTRL_CFG_BURST_SIZE;

	writel(val, BASE_CFG + ICPU_MEMCTRL_CFG);
#endif

	/* Reset Status register - sticky bits */
	writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT), BASE_CFG + ICPU_MEMCTRL_STAT);
}

/*
 * Training: read back the training pattern on one byte lane; on any
 * mismatch or read error, bump the DQS delay and ask the caller to retry.
 *
 * NB: Assumes inlining as no stack is available!
 */
static inline int look_for(u32 bytelane)
{
	register u32 i;

	/* Reset FIFO in case any previous access failed */
	for (i = 0; i < sizeof(training_data); i++) {
		register u32 byte;

		memphy_soft_reset();
		/* Reset sticky bits */
		writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT),
		       BASE_CFG + ICPU_MEMCTRL_STAT);
		/* Read data */
		byte = __raw_readb((void __iomem *)MSCC_DDR_TO + bytelane +
				   (i * 4));

		/*
		 * Prevent the compiler reordering the instruction so
		 * the read of RAM happens after the check of the
		 * errors.
		 */
		rmb();
		if (readl(BASE_CFG + ICPU_MEMCTRL_STAT) &
		    (ICPU_MEMCTRL_STAT_RDATA_MASKED |
		     ICPU_MEMCTRL_STAT_RDATA_DUMMY)) {
			/* Noise on the line */
			goto read_error;
		}
		/* If mismatch, increment DQS - if possible */
		if (byte != training_data[i]) {
read_error:
			if (!incr_dly(bytelane))
				return DDR_TRAIN_ERROR;
			return DDR_TRAIN_CONTINUE;
		}
	}
	return DDR_TRAIN_OK;
}

/*
 * Training: keep increasing DQS while the pattern still reads back OK;
 * the first mismatch marks the far edge of the data-valid window.
 *
 * NB: Assumes inlining as no stack is available!
 */
static inline int look_past(u32 bytelane)
{
	register u32 i;

	/* Reset FIFO in case any previous access failed */
	for (i = 0; i < sizeof(training_data); i++) {
		register u32 byte;

		memphy_soft_reset();
		/* Ack sticky bits */
		writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT),
		       BASE_CFG + ICPU_MEMCTRL_STAT);
		byte = __raw_readb((void __iomem *)MSCC_DDR_TO + bytelane +
				   (i * 4));
		/*
		 * Prevent the compiler reordering the instruction so
		 * the read of RAM happens after the check of the
		 * errors.
		 */
		rmb();
		if (readl(BASE_CFG + ICPU_MEMCTRL_STAT) &
		    (ICPU_MEMCTRL_STAT_RDATA_MASKED |
		     ICPU_MEMCTRL_STAT_RDATA_DUMMY)) {
			/* Noise on the line */
			goto read_error;
		}
		/* Bail out when we see first mismatch */
		if (byte != training_data[i]) {
read_error:
			return DDR_TRAIN_OK;
		}
	}
	/* All data compares OK, increase DQS and retry */
	if (!incr_dly(bytelane))
		return DDR_TRAIN_ERROR;

	return DDR_TRAIN_CONTINUE;
}

/*
 * Find the start and end of the DQS data-valid window for a byte lane,
 * then set the delay to its center.
 */
static inline int hal_vcoreiii_train_bytelane(u32 bytelane)
{
	register int res;
	register u32 dqs_s;

	set_dly(bytelane, 0);	/* Start training at DQS=0 */
	while ((res = look_for(bytelane)) == DDR_TRAIN_CONTINUE)
		;
	if (res != DDR_TRAIN_OK)
		return res;

	dqs_s = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
	while ((res = look_past(bytelane)) == DDR_TRAIN_CONTINUE)
		;
	if (res != DDR_TRAIN_OK)
		return res;
	/* Reset FIFO - for good measure */
	memphy_soft_reset();
	/* Adjust to center [dqs_s;cur] */
	center_dly(bytelane, dqs_s);
	return DDR_TRAIN_OK;
}

/* This algorithm is converted from the TCL training algorithm used
 * during silicon simulation.
 * NB: Assumes inlining as no stack is available!
 */
static inline int hal_vcoreiii_init_dqs(void)
{
#define MAX_DQS 32
	register u32 i, j;

	/* Scan all (lane0, lane1) delay pairs for one without read errors */
	for (i = 0; i < MAX_DQS; i++) {
		set_dly(0, i);	/* Byte-lane 0 */
		for (j = 0; j < MAX_DQS; j++) {
			__maybe_unused register u32 byte;

			set_dly(1, j);	/* Byte-lane 1 */
			/* Reset FIFO in case any previous access failed */
			memphy_soft_reset();
			writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT),
			       BASE_CFG + ICPU_MEMCTRL_STAT);
			byte = __raw_readb((void __iomem *)MSCC_DDR_TO);
			byte = __raw_readb((void __iomem *)(MSCC_DDR_TO + 1));
			if (!(readl(BASE_CFG + ICPU_MEMCTRL_STAT) &
			      (ICPU_MEMCTRL_STAT_RDATA_MASKED |
			       ICPU_MEMCTRL_STAT_RDATA_DUMMY)))
				return 0;
		}
	}
	/* No working delay pair found */
	return -1;
}

/* Write/read-back smoke test on the first 8 words; 0 = OK, 1 = failure */
static inline int dram_check(void)
{
	register u32 i;

	for (i = 0; i < 8; i++) {
		__raw_writel(~i, (void __iomem *)(MSCC_DDR_TO + (i * 4)));
		if (__raw_readl((void __iomem *)(MSCC_DDR_TO + (i * 4))) != ~i)
			return 1;
	}
	return 0;
}
#else				/* Luton */

/* Luton has no suitable HW timer here; delay is a no-op on this path */
static inline void sleep_100ns(u32 val)
{
}

static inline void hal_vcoreiii_ddr_reset_assert(void)
{
	setbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_RST);
	setbits_le32(BASE_CFG + ICPU_RESET, ICPU_RESET_MEM_RST_FORCE);
}

/* No dedicated DDR reset pin on Luton; nothing to release */
static inline void hal_vcoreiii_ddr_reset_release(void)
{
}

static inline void hal_vcoreiii_ddr_failed(void)
{
	register u32 memphy_cfg = readl(BASE_CFG + ICPU_MEMPHY_CFG);

	/* Do a fifo reset and start over */
	writel(memphy_cfg | ICPU_MEMPHY_CFG_PHY_FIFO_RST,
	       BASE_CFG + ICPU_MEMPHY_CFG);
	writel(memphy_cfg & ~ICPU_MEMPHY_CFG_PHY_FIFO_RST,
	       BASE_CFG + ICPU_MEMPHY_CFG);
	writel(memphy_cfg | ICPU_MEMPHY_CFG_PHY_FIFO_RST,
	       BASE_CFG + ICPU_MEMPHY_CFG);
}

/* No ECC / status handling needed on Luton after verification */
static inline void hal_vcoreiii_ddr_verified(void)
{
}

/*
 * Training (Luton): compare one byte read from DDR against 'data';
 * on mismatch bump the lane-0 DQS delay and retry.
 */
static inline int look_for(u32 data)
{
	register u32 byte = __raw_readb((void __iomem *)MSCC_DDR_TO);

	if (data != byte) {
		if (!incr_dly(0))
			return DDR_TRAIN_ERROR;
		return DDR_TRAIN_CONTINUE;
	}

	return DDR_TRAIN_OK;
}

/* This algorithm is converted from the TCL training algorithm used
 * during silicon simulation.
 * NB: Assumes inlining as no stack is available!
 */
static inline int hal_vcoreiii_train_bytelane(u32 bytelane)
{
	register int res;

	set_dly(bytelane, 0);	/* Start training at DQS=0 */
	while ((res = look_for(0xff)) == DDR_TRAIN_CONTINUE)
		;
	if (res != DDR_TRAIN_OK)
		return res;

	set_dly(bytelane, 0);	/* Start training at DQS=0 */
	while ((res = look_for(0x00)) == DDR_TRAIN_CONTINUE)
		;

	if (res != DDR_TRAIN_OK)
		return res;

	/* Back off from the found edge */
	adjust_dly(-3);

	return DDR_TRAIN_OK;
}

/* No DQS window scan needed on Luton */
static inline int hal_vcoreiii_init_dqs(void)
{
	return 0;
}

/* Write/read-back smoke test on the first 8 words; 0 = OK, 1 = failure */
static inline int dram_check(void)
{
	register u32 i;

	for (i = 0; i < 8; i++) {
		__raw_writel(~i, (void __iomem *)(MSCC_DDR_TO + (i * 4)));

		if (__raw_readl((void __iomem *)(MSCC_DDR_TO + (i * 4))) != ~i)
			return 1;
	}

	return 0;
}
#endif

/*
 * NB: Called *early* to init memory controller - assumes inlining as
 * no stack is available!
 */
static inline void hal_vcoreiii_init_memctl(void)
{
	/* Ensure DDR is in reset */
	hal_vcoreiii_ddr_reset_assert();

	/* Wait maybe not needed, but ... */
	PAUSE();

	/* Drop sys ctl memory controller forced reset */
	clrbits_le32(BASE_CFG + ICPU_RESET, ICPU_RESET_MEM_RST_FORCE);

	PAUSE();

	/* Drop Reset, enable SSTL */
	writel(ICPU_MEMPHY_CFG_PHY_SSTL_ENA, BASE_CFG + ICPU_MEMPHY_CFG);
	PAUSE();

	/* Start the automatic SSTL output and ODT drive-strength calibration */
	writel(ICPU_MEMPHY_ZCAL_ZCAL_PROG_ODT(MIPS_VCOREIII_MEMORY_SSTL_ODT) |
	       /* drive strength */
	       ICPU_MEMPHY_ZCAL_ZCAL_PROG(MIPS_VCOREIII_MEMORY_SSTL_DRIVE) |
	       /* Start calibration process */
	       ICPU_MEMPHY_ZCAL_ZCAL_ENA, BASE_CFG + ICPU_MEMPHY_ZCAL);

	/* Wait for ZCAL to clear */
	while (readl(BASE_CFG + ICPU_MEMPHY_ZCAL) & ICPU_MEMPHY_ZCAL_ZCAL_ENA)
		;
#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT)
	/* Check no ZCAL_ERR */
	if (readl(BASE_CFG + ICPU_MEMPHY_ZCAL_STAT)
	    & ICPU_MEMPHY_ZCAL_STAT_ZCAL_ERR)
		hal_vcoreiii_ddr_failed();
#endif
	/* Drive CL, CK, ODT */
	setbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_ODT_OE |
		     ICPU_MEMPHY_CFG_PHY_CK_OE | ICPU_MEMPHY_CFG_PHY_CL_OE);

	/* Initialize memory controller */
	writel(MSCC_MEMPARM_MEMCFG, BASE_CFG + ICPU_MEMCTRL_CFG);
	writel(MSCC_MEMPARM_PERIOD, BASE_CFG + ICPU_MEMCTRL_REF_PERIOD);

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
	writel(MSCC_MEMPARM_TIMING0, BASE_CFG + ICPU_MEMCTRL_TIMING0);
#else				/* Luton */
	/* Luton: only the low 20 bits of TIMING0 are rewritten */
	clrbits_le32(BASE_CFG + ICPU_MEMCTRL_TIMING0, ((1 << 20) - 1));
	setbits_le32(BASE_CFG + ICPU_MEMCTRL_TIMING0, MSCC_MEMPARM_TIMING0);
#endif

	writel(MSCC_MEMPARM_TIMING1, BASE_CFG + ICPU_MEMCTRL_TIMING1);
	writel(MSCC_MEMPARM_TIMING2, BASE_CFG + ICPU_MEMCTRL_TIMING2);
	writel(MSCC_MEMPARM_TIMING3, BASE_CFG + ICPU_MEMCTRL_TIMING3);
	writel(MSCC_MEMPARM_MR0, BASE_CFG + ICPU_MEMCTRL_MR0_VAL);
	writel(MSCC_MEMPARM_MR1, BASE_CFG + ICPU_MEMCTRL_MR1_VAL);
	writel(MSCC_MEMPARM_MR2, BASE_CFG + ICPU_MEMCTRL_MR2_VAL);
	writel(MSCC_MEMPARM_MR3, BASE_CFG + ICPU_MEMCTRL_MR3_VAL);

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_SERVAL)
	/* Termination setup - enable ODT */
	writel(ICPU_MEMCTRL_TERMRES_CTRL_LOCAL_ODT_RD_ENA |
	       /* Assert ODT0 for any write */
	       ICPU_MEMCTRL_TERMRES_CTRL_ODT_WR_ENA(3),
	       BASE_CFG + ICPU_MEMCTRL_TERMRES_CTRL);

	/* Release Reset from DDR */
#if defined(CONFIG_SOC_OCELOT)
	hal_vcoreiii_ddr_reset_release();
#endif

	/* Count init attempts in sticky GPR(7) */
	writel(readl(BASE_CFG + ICPU_GPR(7)) + 1, BASE_CFG + ICPU_GPR(7));
#elif defined(CONFIG_SOC_JR2) || defined(CONFIG_SOC_SERVALT)
	writel(ICPU_MEMCTRL_TERMRES_CTRL_ODT_WR_ENA(3),
	       BASE_CFG + ICPU_MEMCTRL_TERMRES_CTRL);
#else				/* Luton */
	/* Termination setup - disable ODT */
	writel(0, BASE_CFG + ICPU_MEMCTRL_TERMRES_CTRL);

#endif
}

/*
 * Kick the controller's init sequence, wait for completion, then seed
 * DDR with the pattern the training code expects to read back.
 */
static inline void hal_vcoreiii_wait_memctl(void)
{
	/* Now, rip it! */
	writel(ICPU_MEMCTRL_CTRL_INITIALIZE, BASE_CFG + ICPU_MEMCTRL_CTRL);

	while (!(readl(BASE_CFG + ICPU_MEMCTRL_STAT)
		 & ICPU_MEMCTRL_STAT_INIT_DONE))
		;

	/* Settle...? */
	sleep_100ns(10000);
#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
	/* Establish data contents in DDR RAM for training */

	__raw_writel(0xcacafefe, ((void __iomem *)MSCC_DDR_TO));
	__raw_writel(0x22221111, ((void __iomem *)MSCC_DDR_TO + 0x4));
	__raw_writel(0x44443333, ((void __iomem *)MSCC_DDR_TO + 0x8));
	__raw_writel(0x66665555, ((void __iomem *)MSCC_DDR_TO + 0xC));
	__raw_writel(0x88887777, ((void __iomem *)MSCC_DDR_TO + 0x10));
	__raw_writel(0xaaaa9999, ((void __iomem *)MSCC_DDR_TO + 0x14));
	__raw_writel(0xccccbbbb, ((void __iomem *)MSCC_DDR_TO + 0x18));
	__raw_writel(0xeeeedddd, ((void __iomem *)MSCC_DDR_TO + 0x1C));
#else
	__raw_writel(0xff, ((void __iomem *)MSCC_DDR_TO));
#endif
}
#endif				/* __ASM_MACH_DDR_H */