/*
 * (C) Copyright 2016-2017 Rockchip Inc.
 *
 * SPDX-License-Identifier:	GPL-2.0
 *
 * Adapted from coreboot.
 */

#include <common.h>
#include <clk.h>
#include <dm.h>
#include <dt-structs.h>
#include <ram.h>
#include <regmap.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/sdram_common.h>
#include <asm/arch/sdram_rk3399.h>
#include <asm/arch/cru_rk3399.h>
#include <asm/arch/grf_rk3399.h>
#include <asm/arch/hardware.h>
#include <linux/err.h>
#include <time.h>

DECLARE_GLOBAL_DATA_PTR;

struct chan_info {
	struct rk3399_ddr_pctl_regs *pctl;
	struct rk3399_ddr_pi_regs *pi;
	struct rk3399_ddr_publ_regs *publ;
	struct rk3399_msch_regs *msch;
};

struct dram_info {
#ifdef CONFIG_SPL_BUILD
	struct chan_info chan[2];
	struct clk ddr_clk;
	struct rk3399_cru *cru;
	struct rk3399_pmucru *pmucru;
	struct rk3399_pmusgrf_regs *pmusgrf;
	struct rk3399_ddr_cic_regs *cic;
#endif
	struct ram_info info;
	struct rk3399_pmugrf_regs *pmugrf;
};

#define PRESET_SGRF_HOLD(n)	((0x1 << (6 + 16)) | ((n) << 6))
#define PRESET_GPIO0_HOLD(n)	((0x1 << (7 + 16)) | ((n) << 7))
#define PRESET_GPIO1_HOLD(n)	((0x1 << (8 + 16)) | ((n) << 8))

#define PHY_DRV_ODT_Hi_Z	0x0
#define PHY_DRV_ODT_240		0x1
#define PHY_DRV_ODT_120		0x8
#define PHY_DRV_ODT_80		0x9
#define PHY_DRV_ODT_60		0xc
#define PHY_DRV_ODT_48		0xd
#define PHY_DRV_ODT_40		0xe
#define PHY_DRV_ODT_34_3	0xf

#ifdef CONFIG_SPL_BUILD

struct rockchip_dmc_plat {
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct dtd_rockchip_rk3399_dmc dtplat;
#else
	struct rk3399_sdram_params sdram_params;
#endif
	struct regmap *map;
};

/* Copy n bytes (n / 4 words) from src into the memory-mapped registers at dest */
static void copy_to_reg(u32 *dest, const u32 *src, u32 n)
{
	int i;

	for (i = 0; i < n / sizeof(u32); i++) {
		writel(*src, dest);
		src++;
		dest++;
	}
}

static void phy_dll_bypass_set(struct rk3399_ddr_publ_regs *ddr_publ_regs,
			       u32 freq)
{
	u32 *denali_phy = ddr_publ_regs->denali_phy;

	/* From the IP spec, only frequencies of 125 MHz or less can enter DLL bypass mode */
	if (freq <= 125) {
		/* phy_sw_master_mode_X PHY_86/214/342/470 4bits offset_8 */
		setbits_le32(&denali_phy[86], (0x3 << 2) << 8);
		setbits_le32(&denali_phy[214], (0x3 << 2) << 8);
		setbits_le32(&denali_phy[342], (0x3 << 2) << 8);
		setbits_le32(&denali_phy[470], (0x3 << 2) << 8);

		/* phy_adrctl_sw_master_mode PHY_547/675/803 4bits offset_16 */
		setbits_le32(&denali_phy[547], (0x3 << 2) << 16);
		setbits_le32(&denali_phy[675], (0x3 << 2) << 16);
		setbits_le32(&denali_phy[803], (0x3 << 2) << 16);
	} else {
		/* phy_sw_master_mode_X PHY_86/214/342/470 4bits offset_8 */
		clrbits_le32(&denali_phy[86], (0x3 << 2) << 8);
		clrbits_le32(&denali_phy[214], (0x3 << 2) << 8);
		clrbits_le32(&denali_phy[342], (0x3 << 2) << 8);
		clrbits_le32(&denali_phy[470], (0x3 << 2) << 8);

		/* phy_adrctl_sw_master_mode PHY_547/675/803 4bits offset_16 */
		clrbits_le32(&denali_phy[547], (0x3 << 2) << 16);
		clrbits_le32(&denali_phy[675], (0x3 << 2) << 16);
		clrbits_le32(&denali_phy[803], (0x3 << 2) << 16);
	}
}

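/*
 * Program the DRAM geometry (column/bank/row address widths, chip-select
 * map and half-bus-width flag) from the per-channel settings in
 * sdram_params into both the controller (CTL) and PHY-independent (PI)
 * register blocks.
 */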
static void set_memory_map(const struct chan_info *chan, u32 channel,
			   const struct rk3399_sdram_params *sdram_params)
{
	const struct rk3399_sdram_channel *sdram_ch =
		&sdram_params->ch[channel];
	u32 *denali_ctl = chan->pctl->denali_ctl;
	u32 *denali_pi = chan->pi->denali_pi;
	u32 cs_map;
	u32 reduc;
	u32 row;

	/* Get row number from ddrconfig setting */
	if (sdram_ch->ddrconfig < 2 || sdram_ch->ddrconfig == 4)
		row = 16;
	else if (sdram_ch->ddrconfig == 3)
		row = 14;
	else
		row = 15;

	cs_map = (sdram_ch->rank > 1) ? 3 : 1;
	reduc = (sdram_ch->bw == 2) ? 0 : 1;

	/* Set the dram configuration to ctrl */
	clrsetbits_le32(&denali_ctl[191], 0xF, (12 - sdram_ch->col));
	clrsetbits_le32(&denali_ctl[190], (0x3 << 16) | (0x7 << 24),
			((3 - sdram_ch->bk) << 16) |
			((16 - row) << 24));

	clrsetbits_le32(&denali_ctl[196], 0x3 | (1 << 16),
			cs_map | (reduc << 16));

	/* PI_199 PI_COL_DIFF:RW:0:4 */
	clrsetbits_le32(&denali_pi[199], 0xF, (12 - sdram_ch->col));

	/* PI_155 PI_ROW_DIFF:RW:24:3 PI_BANK_DIFF:RW:16:2 */
	clrsetbits_le32(&denali_pi[155], (0x3 << 16) | (0x7 << 24),
			((3 - sdram_ch->bk) << 16) |
			((16 - row) << 24));
	/* PI_41 PI_CS_MAP:RW:24:4 */
	clrsetbits_le32(&denali_pi[41], 0xf << 24, cs_map << 24);
	if ((sdram_ch->rank == 1) && (sdram_params->base.dramtype == DDR3))
		writel(0x2EC7FFFF, &denali_pi[34]);
}

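/*
 * Select pad drive strength and on-die termination (ODT) values for the
 * DQ/DM, DQS and CA groups according to the DRAM type, and enable read
 * termination only when sdram_params requests ODT.
 */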
static void set_ds_odt(const struct chan_info *chan,
		       const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_phy = chan->publ->denali_phy;

	u32 tsel_idle_en, tsel_wr_en, tsel_rd_en;
	u32 tsel_idle_select_p, tsel_wr_select_p, tsel_rd_select_p;
	u32 ca_tsel_wr_select_p, ca_tsel_wr_select_n;
	u32 tsel_idle_select_n, tsel_wr_select_n, tsel_rd_select_n;
	u32 reg_value;

	if (sdram_params->base.dramtype == LPDDR4) {
		tsel_rd_select_p = PHY_DRV_ODT_Hi_Z;
		tsel_wr_select_p = PHY_DRV_ODT_40;
		ca_tsel_wr_select_p = PHY_DRV_ODT_40;
		tsel_idle_select_p = PHY_DRV_ODT_Hi_Z;

		tsel_rd_select_n = PHY_DRV_ODT_240;
		tsel_wr_select_n = PHY_DRV_ODT_40;
		ca_tsel_wr_select_n = PHY_DRV_ODT_40;
		tsel_idle_select_n = PHY_DRV_ODT_240;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		tsel_rd_select_p = PHY_DRV_ODT_240;
		tsel_wr_select_p = PHY_DRV_ODT_34_3;
		ca_tsel_wr_select_p = PHY_DRV_ODT_48;
		tsel_idle_select_p = PHY_DRV_ODT_240;

		tsel_rd_select_n = PHY_DRV_ODT_Hi_Z;
		tsel_wr_select_n = PHY_DRV_ODT_34_3;
		ca_tsel_wr_select_n = PHY_DRV_ODT_48;
		tsel_idle_select_n = PHY_DRV_ODT_Hi_Z;
	} else {
		tsel_rd_select_p = PHY_DRV_ODT_240;
		tsel_wr_select_p = PHY_DRV_ODT_34_3;
		ca_tsel_wr_select_p = PHY_DRV_ODT_34_3;
		tsel_idle_select_p = PHY_DRV_ODT_240;

		tsel_rd_select_n = PHY_DRV_ODT_240;
		tsel_wr_select_n = PHY_DRV_ODT_34_3;
		ca_tsel_wr_select_n = PHY_DRV_ODT_34_3;
		tsel_idle_select_n = PHY_DRV_ODT_240;
	}

	if (sdram_params->base.odt == 1)
		tsel_rd_en = 1;
	else
		tsel_rd_en = 0;

	tsel_wr_en = 0;
	tsel_idle_en = 0;

	/*
	 * phy_dq_tsel_select_X 24bits DENALI_PHY_6/134/262/390 offset_0
	 * sets termination values for read/idle cycles and drive strength
	 * for write cycles for DQ/DM
	 */
	reg_value = tsel_rd_select_n | (tsel_rd_select_p << 0x4) |
		    (tsel_wr_select_n << 8) | (tsel_wr_select_p << 12) |
		    (tsel_idle_select_n << 16) | (tsel_idle_select_p << 20);
	clrsetbits_le32(&denali_phy[6], 0xffffff, reg_value);
	clrsetbits_le32(&denali_phy[134], 0xffffff, reg_value);
	clrsetbits_le32(&denali_phy[262], 0xffffff, reg_value);
	clrsetbits_le32(&denali_phy[390], 0xffffff, reg_value);

	/*
	 * phy_dqs_tsel_select_X 24bits DENALI_PHY_7/135/263/391 offset_0
	 * sets termination values for read/idle cycles and drive strength
	 * for write cycles for DQS
	 */
	clrsetbits_le32(&denali_phy[7], 0xffffff, reg_value);
	clrsetbits_le32(&denali_phy[135], 0xffffff, reg_value);
	clrsetbits_le32(&denali_phy[263], 0xffffff, reg_value);
	clrsetbits_le32(&denali_phy[391], 0xffffff, reg_value);

	/* phy_adr_tsel_select_ 8bits DENALI_PHY_544/672/800 offset_0 */
	reg_value = ca_tsel_wr_select_n | (ca_tsel_wr_select_p << 0x4);
	clrsetbits_le32(&denali_phy[544], 0xff, reg_value);
	clrsetbits_le32(&denali_phy[672], 0xff, reg_value);
	clrsetbits_le32(&denali_phy[800], 0xff, reg_value);

	/* phy_pad_addr_drive 8bits DENALI_PHY_928 offset_0 */
	clrsetbits_le32(&denali_phy[928], 0xff, reg_value);

	/* phy_pad_rst_drive 8bits DENALI_PHY_937 offset_0 */
	clrsetbits_le32(&denali_phy[937], 0xff, reg_value);

	/* phy_pad_cke_drive 8bits DENALI_PHY_935 offset_0 */
	clrsetbits_le32(&denali_phy[935], 0xff, reg_value);

	/* phy_pad_cs_drive 8bits DENALI_PHY_939 offset_0 */
	clrsetbits_le32(&denali_phy[939], 0xff, reg_value);

	/* phy_pad_clk_drive 8bits DENALI_PHY_929 offset_0 */
	clrsetbits_le32(&denali_phy[929], 0xff, reg_value);

	/* phy_pad_fdbk_drive 23bit DENALI_PHY_924/925 */
	clrsetbits_le32(&denali_phy[924], 0xff,
			tsel_wr_select_n | (tsel_wr_select_p << 4));
	clrsetbits_le32(&denali_phy[925], 0xff,
			tsel_rd_select_n | (tsel_rd_select_p << 4));

	/* phy_dq_tsel_enable_X 3bits DENALI_PHY_5/133/261/389 offset_16 */
	reg_value = (tsel_rd_en | (tsel_wr_en << 1) | (tsel_idle_en << 2))
		<< 16;
	clrsetbits_le32(&denali_phy[5], 0x7 << 16, reg_value);
	clrsetbits_le32(&denali_phy[133], 0x7 << 16, reg_value);
	clrsetbits_le32(&denali_phy[261], 0x7 << 16, reg_value);
	clrsetbits_le32(&denali_phy[389], 0x7 << 16, reg_value);

	/* phy_dqs_tsel_enable_X 3bits DENALI_PHY_6/134/262/390 offset_24 */
	reg_value = (tsel_rd_en | (tsel_wr_en << 1) | (tsel_idle_en << 2))
		<< 24;
	clrsetbits_le32(&denali_phy[6], 0x7 << 24, reg_value);
	clrsetbits_le32(&denali_phy[134], 0x7 << 24, reg_value);
	clrsetbits_le32(&denali_phy[262], 0x7 << 24, reg_value);
	clrsetbits_le32(&denali_phy[390], 0x7 << 24, reg_value);

	/* phy_adr_tsel_enable_ 1bit DENALI_PHY_518/646/774 offset_8 */
	reg_value = tsel_wr_en << 8;
	clrsetbits_le32(&denali_phy[518], 0x1 << 8, reg_value);
	clrsetbits_le32(&denali_phy[646], 0x1 << 8, reg_value);
	clrsetbits_le32(&denali_phy[774], 0x1 << 8, reg_value);

	/* phy_pad_addr_term tsel 1bit DENALI_PHY_933 offset_17 */
	reg_value = tsel_wr_en << 17;
	clrsetbits_le32(&denali_phy[933], 0x1 << 17, reg_value);
	/*
	 * pad_rst/cke/cs/clk_term tsel 1bits
	 * DENALI_PHY_938/936/940/934 offset_17
	 */
	clrsetbits_le32(&denali_phy[938], 0x1 << 17, reg_value);
	clrsetbits_le32(&denali_phy[936], 0x1 << 17, reg_value);
	clrsetbits_le32(&denali_phy[940], 0x1 << 17, reg_value);
	clrsetbits_le32(&denali_phy[934], 0x1 << 17, reg_value);

	/* phy_pad_fdbk_term 1bit DENALI_PHY_930 offset_17 */
	clrsetbits_le32(&denali_phy[930], 0x1 << 17, reg_value);
}

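/*
 * Configure the PHY pad vref mode/level and the pad I/O mode and speed
 * selects for the chosen DRAM type and frequency. Returns -EINVAL for
 * unsupported drive/ODT combinations or DRAM types.
 */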
static int phy_io_config(const struct chan_info *chan,
			 const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_phy = chan->publ->denali_phy;
	u32 vref_mode_dq, vref_value_dq, vref_mode_ac, vref_value_ac;
	u32 mode_sel;
	u32 reg_value;
	u32 drv_value, odt_value;
	u32 speed;

	/* vref setting */
	if (sdram_params->base.dramtype == LPDDR4) {
		/* LPDDR4 */
		vref_mode_dq = 0x6;
		vref_value_dq = 0x1f;
		vref_mode_ac = 0x6;
		vref_value_ac = 0x1f;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		if (sdram_params->base.odt == 1) {
			vref_mode_dq = 0x5;	/* LPDDR3 ODT */
			drv_value = (readl(&denali_phy[6]) >> 12) & 0xf;
			odt_value = (readl(&denali_phy[6]) >> 4) & 0xf;
			if (drv_value == PHY_DRV_ODT_48) {
				switch (odt_value) {
				case PHY_DRV_ODT_240:
					vref_value_dq = 0x16;
					break;
				case PHY_DRV_ODT_120:
					vref_value_dq = 0x26;
					break;
				case PHY_DRV_ODT_60:
					vref_value_dq = 0x36;
					break;
				default:
					debug("Invalid ODT value.\n");
					return -EINVAL;
				}
			} else if (drv_value == PHY_DRV_ODT_40) {
				switch (odt_value) {
				case PHY_DRV_ODT_240:
					vref_value_dq = 0x19;
					break;
				case PHY_DRV_ODT_120:
					vref_value_dq = 0x23;
					break;
				case PHY_DRV_ODT_60:
					vref_value_dq = 0x31;
					break;
				default:
					debug("Invalid ODT value.\n");
					return -EINVAL;
				}
			} else if (drv_value == PHY_DRV_ODT_34_3) {
				switch (odt_value) {
				case PHY_DRV_ODT_240:
					vref_value_dq = 0x17;
					break;
				case PHY_DRV_ODT_120:
					vref_value_dq = 0x20;
					break;
				case PHY_DRV_ODT_60:
					vref_value_dq = 0x2e;
					break;
				default:
					debug("Invalid ODT value.\n");
					return -EINVAL;
				}
			} else {
				debug("Invalid DRV value.\n");
				return -EINVAL;
			}
		} else {
			vref_mode_dq = 0x2;	/* LPDDR3 */
			vref_value_dq = 0x1f;
		}
		vref_mode_ac = 0x2;
		vref_value_ac = 0x1f;
	} else if (sdram_params->base.dramtype == DDR3) {
		/* DDR3L */
		vref_mode_dq = 0x1;
		vref_value_dq = 0x1f;
		vref_mode_ac = 0x1;
		vref_value_ac = 0x1f;
	} else {
		debug("Unknown DRAM type.\n");
		return -EINVAL;
	}

	reg_value = (vref_mode_dq << 9) | (0x1 << 8) | vref_value_dq;

	/* PHY_913 PHY_PAD_VREF_CTRL_DQ_0 12bits offset_8 */
	clrsetbits_le32(&denali_phy[913], 0xfff << 8, reg_value << 8);
	/* PHY_914 PHY_PAD_VREF_CTRL_DQ_1 12bits offset_0 */
	clrsetbits_le32(&denali_phy[914], 0xfff, reg_value);
	/* PHY_914 PHY_PAD_VREF_CTRL_DQ_2 12bits offset_16 */
	clrsetbits_le32(&denali_phy[914], 0xfff << 16, reg_value << 16);
	/* PHY_915 PHY_PAD_VREF_CTRL_DQ_3 12bits offset_0 */
	clrsetbits_le32(&denali_phy[915], 0xfff, reg_value);

	reg_value = (vref_mode_ac << 9) | (0x1 << 8) | vref_value_ac;

	/* PHY_915 PHY_PAD_VREF_CTRL_AC 12bits offset_16 */
	clrsetbits_le32(&denali_phy[915], 0xfff << 16, reg_value << 16);

	if (sdram_params->base.dramtype == LPDDR4)
		mode_sel = 0x6;
	else if (sdram_params->base.dramtype == LPDDR3)
		mode_sel = 0x0;
	else if (sdram_params->base.dramtype == DDR3)
		mode_sel = 0x1;
	else
		return -EINVAL;

	/* PHY_924 PHY_PAD_FDBK_DRIVE */
	clrsetbits_le32(&denali_phy[924], 0x7 << 15, mode_sel << 15);
	/* PHY_926 PHY_PAD_DATA_DRIVE */
	clrsetbits_le32(&denali_phy[926], 0x7 << 6, mode_sel << 6);
	/* PHY_927 PHY_PAD_DQS_DRIVE */
	clrsetbits_le32(&denali_phy[927], 0x7 << 6, mode_sel << 6);
	/* PHY_928 PHY_PAD_ADDR_DRIVE */
	clrsetbits_le32(&denali_phy[928], 0x7 << 14, mode_sel << 14);
	/* PHY_929 PHY_PAD_CLK_DRIVE */
	clrsetbits_le32(&denali_phy[929], 0x7 << 14, mode_sel << 14);
	/* PHY_935 PHY_PAD_CKE_DRIVE */
	clrsetbits_le32(&denali_phy[935], 0x7 << 14, mode_sel << 14);
	/* PHY_937 PHY_PAD_RST_DRIVE */
	clrsetbits_le32(&denali_phy[937], 0x7 << 14, mode_sel << 14);
	/* PHY_939 PHY_PAD_CS_DRIVE */
	clrsetbits_le32(&denali_phy[939], 0x7 << 14, mode_sel << 14);

	/* speed setting */
	if (sdram_params->base.ddr_freq < 400)
		speed = 0x0;
	else if (sdram_params->base.ddr_freq < 800)
		speed = 0x1;
	else if (sdram_params->base.ddr_freq < 1200)
		speed = 0x2;
	else
		speed = 0x3;

	/* PHY_924 PHY_PAD_FDBK_DRIVE */
	clrsetbits_le32(&denali_phy[924], 0x3 << 21, speed << 21);
	/* PHY_926 PHY_PAD_DATA_DRIVE */
	clrsetbits_le32(&denali_phy[926], 0x3 << 9, speed << 9);
	/* PHY_927 PHY_PAD_DQS_DRIVE */
	clrsetbits_le32(&denali_phy[927], 0x3 << 9, speed << 9);
	/* PHY_928 PHY_PAD_ADDR_DRIVE */
	clrsetbits_le32(&denali_phy[928], 0x3 << 17, speed << 17);
	/* PHY_929 PHY_PAD_CLK_DRIVE */
	clrsetbits_le32(&denali_phy[929], 0x3 << 17, speed << 17);
	/* PHY_935 PHY_PAD_CKE_DRIVE */
	clrsetbits_le32(&denali_phy[935], 0x3 << 17, speed << 17);
	/* PHY_937 PHY_PAD_RST_DRIVE */
	clrsetbits_le32(&denali_phy[937], 0x3 << 17, speed << 17);
	/* PHY_939 PHY_PAD_CS_DRIVE */
	clrsetbits_le32(&denali_phy[939], 0x3 << 17, speed << 17);

	return 0;
}

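/*
 * Load the CTL, PI and PHY register images from sdram_params, start the
 * controller and PI, wait for the PHY DLLs to lock, apply the drive/ODT
 * and pad I/O settings, then wait (up to timeout_ms) for PHY and DRAM
 * initialization to complete.
 */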
static int pctl_cfg(const struct chan_info *chan, u32 channel,
		    const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_ctl = chan->pctl->denali_ctl;
	u32 *denali_pi = chan->pi->denali_pi;
	u32 *denali_phy = chan->publ->denali_phy;
	const u32 *params_ctl = sdram_params->pctl_regs.denali_ctl;
	const u32 *params_phy = sdram_params->phy_regs.denali_phy;
	u32 tmp, tmp1, tmp2;
	u32 pwrup_srefresh_exit;
	int ret;
	const ulong timeout_ms = 200;

	/*
	 * work around controller bug:
	 * Do not program DRAM_CLASS until NO_PHY_IND_TRAIN_INT is programmed
	 */
	copy_to_reg(&denali_ctl[1], &params_ctl[1],
		    sizeof(struct rk3399_ddr_pctl_regs) - 4);
	writel(params_ctl[0], &denali_ctl[0]);
	copy_to_reg(denali_pi, &sdram_params->pi_regs.denali_pi[0],
		    sizeof(struct rk3399_ddr_pi_regs));
	/* rank count needs to be set for init */
	set_memory_map(chan, channel, sdram_params);

	writel(sdram_params->phy_regs.denali_phy[910], &denali_phy[910]);
	writel(sdram_params->phy_regs.denali_phy[911], &denali_phy[911]);
	writel(sdram_params->phy_regs.denali_phy[912], &denali_phy[912]);

	pwrup_srefresh_exit = readl(&denali_ctl[68]) & PWRUP_SREFRESH_EXIT;
	clrbits_le32(&denali_ctl[68], PWRUP_SREFRESH_EXIT);

	/* PHY_DLL_RST_EN */
	clrsetbits_le32(&denali_phy[957], 0x3 << 24, 1 << 24);

	setbits_le32(&denali_pi[0], START);
	setbits_le32(&denali_ctl[0], START);

	/* Waiting for phy DLL lock */
	while (1) {
		tmp = readl(&denali_phy[920]);
		tmp1 = readl(&denali_phy[921]);
		tmp2 = readl(&denali_phy[922]);
		if ((((tmp >> 16) & 0x1) == 0x1) &&
		    (((tmp1 >> 16) & 0x1) == 0x1) &&
		    (((tmp1 >> 0) & 0x1) == 0x1) &&
		    (((tmp2 >> 0) & 0x1) == 0x1))
			break;
	}

	copy_to_reg(&denali_phy[896], &params_phy[896], (958 - 895) * 4);
	copy_to_reg(&denali_phy[0], &params_phy[0], (90 - 0 + 1) * 4);
	copy_to_reg(&denali_phy[128], &params_phy[128], (218 - 128 + 1) * 4);
	copy_to_reg(&denali_phy[256], &params_phy[256], (346 - 256 + 1) * 4);
	copy_to_reg(&denali_phy[384], &params_phy[384], (474 - 384 + 1) * 4);
	copy_to_reg(&denali_phy[512], &params_phy[512], (549 - 512 + 1) * 4);
	copy_to_reg(&denali_phy[640], &params_phy[640], (677 - 640 + 1) * 4);
	copy_to_reg(&denali_phy[768], &params_phy[768], (805 - 768 + 1) * 4);
	set_ds_odt(chan, sdram_params);

	/*
	 * phy_dqs_tsel_wr_timing_X 8bits DENALI_PHY_84/212/340/468 offset_8
	 * dqs_tsel_wr_end[7:4] add Half cycle
	 */
	tmp = (readl(&denali_phy[84]) >> 8) & 0xff;
	clrsetbits_le32(&denali_phy[84], 0xff << 8, (tmp + 0x10) << 8);
	tmp = (readl(&denali_phy[212]) >> 8) & 0xff;
	clrsetbits_le32(&denali_phy[212], 0xff << 8, (tmp + 0x10) << 8);
	tmp = (readl(&denali_phy[340]) >> 8) & 0xff;
	clrsetbits_le32(&denali_phy[340], 0xff << 8, (tmp + 0x10) << 8);
	tmp = (readl(&denali_phy[468]) >> 8) & 0xff;
	clrsetbits_le32(&denali_phy[468], 0xff << 8, (tmp + 0x10) << 8);

	/*
	 * phy_dqs_tsel_wr_timing_X 8bits DENALI_PHY_83/211/339/467 offset_8
	 * dq_tsel_wr_end[7:4] add Half cycle
	 */
	tmp = (readl(&denali_phy[83]) >> 16) & 0xff;
	clrsetbits_le32(&denali_phy[83], 0xff << 16, (tmp + 0x10) << 16);
	tmp = (readl(&denali_phy[211]) >> 16) & 0xff;
	clrsetbits_le32(&denali_phy[211], 0xff << 16, (tmp + 0x10) << 16);
	tmp = (readl(&denali_phy[339]) >> 16) & 0xff;
	clrsetbits_le32(&denali_phy[339], 0xff << 16, (tmp + 0x10) << 16);
	tmp = (readl(&denali_phy[467]) >> 16) & 0xff;
	clrsetbits_le32(&denali_phy[467], 0xff << 16, (tmp + 0x10) << 16);

	ret = phy_io_config(chan, sdram_params);
	if (ret)
		return ret;

	/* PHY_DLL_RST_EN */
	clrsetbits_le32(&denali_phy[957], 0x3 << 24, 0x2 << 24);

	/* Waiting for PHY and DRAM init complete */
	tmp = get_timer(0);
	do {
		if (get_timer(tmp) > timeout_ms) {
			pr_err("DRAM (%s): phy failed to lock within %ld ms\n",
			       __func__, timeout_ms);
			return -ETIME;
		}
	} while (!(readl(&denali_ctl[203]) & (1 << 3)));
	debug("DRAM (%s): phy locked after %ld ms\n", __func__, get_timer(tmp));

	clrsetbits_le32(&denali_ctl[68], PWRUP_SREFRESH_EXIT,
			pwrup_srefresh_exit);
	return 0;
}

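/*
 * If per-CS training is enabled (PHY_84 bit 16), point the per-CS
 * training index of each data slice at the given rank.
 */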
static void select_per_cs_training_index(const struct chan_info *chan,
					 u32 rank)
{
	u32 *denali_phy = chan->publ->denali_phy;

	/* PHY_84 PHY_PER_CS_TRAINING_EN_0 1bit offset_16 */
	if ((readl(&denali_phy[84]) >> 16) & 1) {
		/*
		 * PHY_8/136/264/392
		 * phy_per_cs_training_index_X 1bit offset_24
		 */
		clrsetbits_le32(&denali_phy[8], 0x1 << 24, rank << 24);
		clrsetbits_le32(&denali_phy[136], 0x1 << 24, rank << 24);
		clrsetbits_le32(&denali_phy[264], 0x1 << 24, rank << 24);
		clrsetbits_le32(&denali_phy[392], 0x1 << 24, rank << 24);
	}
}

static void override_write_leveling_value(const struct chan_info *chan)
{
	u32 *denali_ctl = chan->pctl->denali_ctl;
	u32 *denali_phy = chan->publ->denali_phy;
	u32 byte;

	/* PHY_896 PHY_FREQ_SEL_MULTICAST_EN 1bit offset_0 */
	setbits_le32(&denali_phy[896], 1);

	/*
	 * PHY_8/136/264/392
	 * phy_per_cs_training_multicast_en_X 1bit offset_16
	 */
	clrsetbits_le32(&denali_phy[8], 0x1 << 16, 1 << 16);
	clrsetbits_le32(&denali_phy[136], 0x1 << 16, 1 << 16);
	clrsetbits_le32(&denali_phy[264], 0x1 << 16, 1 << 16);
	clrsetbits_le32(&denali_phy[392], 0x1 << 16, 1 << 16);

	for (byte = 0; byte < 4; byte++)
		clrsetbits_le32(&denali_phy[63 + (128 * byte)], 0xffff << 16,
				0x200 << 16);

	/* PHY_896 PHY_FREQ_SEL_MULTICAST_EN 1bit offset_0 */
	clrbits_le32(&denali_phy[896], 1);

	/* CTL_200 ctrlupd_req 1bit offset_8 */
	clrsetbits_le32(&denali_ctl[200], 0x1 << 8, 0x1 << 8);
}

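/*
 * The data_training_* helpers below run the PI training engines one rank
 * at a time: CA training, write leveling, read gate training, read
 * leveling and write DQ leveling. Each polls PI_INT_STATUS (and, where
 * available, the PHY observation registers) and returns -EIO if the
 * engine reports an error.
 */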
static int data_training_ca(const struct chan_info *chan, u32 channel,
			    const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_pi = chan->pi->denali_pi;
	u32 *denali_phy = chan->publ->denali_phy;
	u32 i, tmp;
	u32 obs_0, obs_1, obs_2, obs_err = 0;
	u32 rank = sdram_params->ch[channel].rank;

	for (i = 0; i < rank; i++) {
		select_per_cs_training_index(chan, i);
		/* PI_100 PI_CALVL_EN:RW:8:2 */
		clrsetbits_le32(&denali_pi[100], 0x3 << 8, 0x2 << 8);
		/* PI_92 PI_CALVL_REQ:WR:16:1,PI_CALVL_CS:RW:24:2 */
		clrsetbits_le32(&denali_pi[92],
				(0x1 << 16) | (0x3 << 24),
				(0x1 << 16) | (i << 24));

		/* Waiting for training complete */
		while (1) {
			/* PI_174 PI_INT_STATUS:RD:8:18 */
			tmp = readl(&denali_pi[174]) >> 8;
			/*
			 * check status obs
			 * PHY_532/660/789 phy_adr_calvl_obs1_:0:32
			 */
			obs_0 = readl(&denali_phy[532]);
			obs_1 = readl(&denali_phy[660]);
			obs_2 = readl(&denali_phy[788]);
			if (((obs_0 >> 30) & 0x3) ||
			    ((obs_1 >> 30) & 0x3) ||
			    ((obs_2 >> 30) & 0x3))
				obs_err = 1;
			if ((((tmp >> 11) & 0x1) == 0x1) &&
			    (((tmp >> 13) & 0x1) == 0x1) &&
			    (((tmp >> 5) & 0x1) == 0x0) &&
			    (obs_err == 0))
				break;
			else if ((((tmp >> 5) & 0x1) == 0x1) ||
				 (obs_err == 1))
				return -EIO;
		}
		/* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
		writel(0x00003f7c, (&denali_pi[175]));
	}
	clrbits_le32(&denali_pi[100], 0x3 << 8);

	return 0;
}

static int data_training_wl(const struct chan_info *chan, u32 channel,
			    const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_pi = chan->pi->denali_pi;
	u32 *denali_phy = chan->publ->denali_phy;
	u32 i, tmp;
	u32 obs_0, obs_1, obs_2, obs_3, obs_err = 0;
	u32 rank = sdram_params->ch[channel].rank;

	for (i = 0; i < rank; i++) {
		select_per_cs_training_index(chan, i);
		/* PI_60 PI_WRLVL_EN:RW:8:2 */
		clrsetbits_le32(&denali_pi[60], 0x3 << 8, 0x2 << 8);
		/* PI_59 PI_WRLVL_REQ:WR:8:1,PI_WRLVL_CS:RW:16:2 */
		clrsetbits_le32(&denali_pi[59],
				(0x1 << 8) | (0x3 << 16),
				(0x1 << 8) | (i << 16));

		/* Waiting for training complete */
		while (1) {
			/* PI_174 PI_INT_STATUS:RD:8:18 */
			tmp = readl(&denali_pi[174]) >> 8;

			/*
			 * check status obs; if it reports an error,
			 * leveling may not complete
			 * PHY_40/168/296/424 phy_wrlvl_status_obs_X:0:13
			 */
			obs_0 = readl(&denali_phy[40]);
			obs_1 = readl(&denali_phy[168]);
			obs_2 = readl(&denali_phy[296]);
			obs_3 = readl(&denali_phy[424]);
			if (((obs_0 >> 12) & 0x1) ||
			    ((obs_1 >> 12) & 0x1) ||
			    ((obs_2 >> 12) & 0x1) ||
			    ((obs_3 >> 12) & 0x1))
				obs_err = 1;
			if ((((tmp >> 10) & 0x1) == 0x1) &&
			    (((tmp >> 13) & 0x1) == 0x1) &&
			    (((tmp >> 4) & 0x1) == 0x0) &&
			    (obs_err == 0))
				break;
			else if ((((tmp >> 4) & 0x1) == 0x1) ||
				 (obs_err == 1))
				return -EIO;
		}
		/* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
		writel(0x00003f7c, (&denali_pi[175]));
	}

	override_write_leveling_value(chan);
	clrbits_le32(&denali_pi[60], 0x3 << 8);

	return 0;
}

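/* Read gate training; checks PHY_GTLVL_STATUS_OBS for per-slice errors */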
static int data_training_rg(const struct chan_info *chan, u32 channel,
			    const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_pi = chan->pi->denali_pi;
	u32 *denali_phy = chan->publ->denali_phy;
	u32 i, tmp;
	u32 obs_0, obs_1, obs_2, obs_3, obs_err = 0;
	u32 rank = sdram_params->ch[channel].rank;

	for (i = 0; i < rank; i++) {
		select_per_cs_training_index(chan, i);
		/* PI_80 PI_RDLVL_GATE_EN:RW:24:2 */
		clrsetbits_le32(&denali_pi[80], 0x3 << 24, 0x2 << 24);
		/*
		 * PI_74 PI_RDLVL_GATE_REQ:WR:16:1
		 * PI_RDLVL_CS:RW:24:2
		 */
		clrsetbits_le32(&denali_pi[74],
				(0x1 << 16) | (0x3 << 24),
				(0x1 << 16) | (i << 24));

		/* Waiting for training complete */
		while (1) {
			/* PI_174 PI_INT_STATUS:RD:8:18 */
			tmp = readl(&denali_pi[174]) >> 8;

			/*
			 * check status obs
			 * PHY_43/171/299/427
			 * PHY_GTLVL_STATUS_OBS_x:16:8
			 */
			obs_0 = readl(&denali_phy[43]);
			obs_1 = readl(&denali_phy[171]);
			obs_2 = readl(&denali_phy[299]);
			obs_3 = readl(&denali_phy[427]);
			if (((obs_0 >> (16 + 6)) & 0x3) ||
			    ((obs_1 >> (16 + 6)) & 0x3) ||
			    ((obs_2 >> (16 + 6)) & 0x3) ||
			    ((obs_3 >> (16 + 6)) & 0x3))
				obs_err = 1;
			if ((((tmp >> 9) & 0x1) == 0x1) &&
			    (((tmp >> 13) & 0x1) == 0x1) &&
			    (((tmp >> 3) & 0x1) == 0x0) &&
			    (obs_err == 0))
				break;
			else if ((((tmp >> 3) & 0x1) == 0x1) ||
				 (obs_err == 1))
				return -EIO;
		}
		/* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
		writel(0x00003f7c, (&denali_pi[175]));
	}
	clrbits_le32(&denali_pi[80], 0x3 << 24);

	return 0;
}

static int data_training_rl(const struct chan_info *chan, u32 channel,
			    const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_pi = chan->pi->denali_pi;
	u32 i, tmp;
	u32 rank = sdram_params->ch[channel].rank;

	for (i = 0; i < rank; i++) {
		select_per_cs_training_index(chan, i);
		/* PI_80 PI_RDLVL_EN:RW:16:2 */
		clrsetbits_le32(&denali_pi[80], 0x3 << 16, 0x2 << 16);
		/* PI_74 PI_RDLVL_REQ:WR:8:1,PI_RDLVL_CS:RW:24:2 */
		clrsetbits_le32(&denali_pi[74],
				(0x1 << 8) | (0x3 << 24),
				(0x1 << 8) | (i << 24));

		/* Waiting for training complete */
		while (1) {
			/* PI_174 PI_INT_STATUS:RD:8:18 */
			tmp = readl(&denali_pi[174]) >> 8;

			/*
			 * make sure status obs does not report an error bit
			 * PHY_46/174/302/430
			 * phy_rdlvl_status_obs_X:16:8
			 */
			if ((((tmp >> 8) & 0x1) == 0x1) &&
			    (((tmp >> 13) & 0x1) == 0x1) &&
			    (((tmp >> 2) & 0x1) == 0x0))
				break;
			else if (((tmp >> 2) & 0x1) == 0x1)
				return -EIO;
		}
		/* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
		writel(0x00003f7c, (&denali_pi[175]));
	}
	clrbits_le32(&denali_pi[80], 0x3 << 16);

	return 0;
}

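/* Write DQ leveling (used for LPDDR4 only); relies on PI_INT_STATUS alone */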
static int data_training_wdql(const struct chan_info *chan, u32 channel,
			      const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_pi = chan->pi->denali_pi;
	u32 i, tmp;
	u32 rank = sdram_params->ch[channel].rank;

	for (i = 0; i < rank; i++) {
		select_per_cs_training_index(chan, i);
		/*
		 * disable PI_WDQLVL_VREF_EN before wdq leveling?
		 * PI_181 PI_WDQLVL_VREF_EN:RW:8:1
		 */
		clrbits_le32(&denali_pi[181], 0x1 << 8);
		/* PI_124 PI_WDQLVL_EN:RW:16:2 */
		clrsetbits_le32(&denali_pi[124], 0x3 << 16, 0x2 << 16);
		/* PI_121 PI_WDQLVL_REQ:WR:8:1,PI_WDQLVL_CS:RW:16:2 */
		clrsetbits_le32(&denali_pi[121],
				(0x1 << 8) | (0x3 << 16),
				(0x1 << 8) | (i << 16));

		/* Waiting for training complete */
		while (1) {
			/* PI_174 PI_INT_STATUS:RD:8:18 */
			tmp = readl(&denali_pi[174]) >> 8;
			if ((((tmp >> 12) & 0x1) == 0x1) &&
			    (((tmp >> 13) & 0x1) == 0x1) &&
			    (((tmp >> 6) & 0x1) == 0x0))
				break;
			else if (((tmp >> 6) & 0x1) == 0x1)
				return -EIO;
		}
		/* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
		writel(0x00003f7c, (&denali_pi[175]));
	}
	clrbits_le32(&denali_pi[124], 0x3 << 16);

	return 0;
}

static int data_training(const struct chan_info *chan, u32 channel,
			 const struct rk3399_sdram_params *sdram_params,
			 u32 training_flag)
{
	u32 *denali_phy = chan->publ->denali_phy;

	/* PHY_927 PHY_PAD_DQS_DRIVE RPULL offset_22 */
	setbits_le32(&denali_phy[927], (1 << 22));

	if (training_flag == PI_FULL_TRAINING) {
		if (sdram_params->base.dramtype == LPDDR4) {
			training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING |
					PI_READ_LEVELING | PI_WDQ_LEVELING;
		} else if (sdram_params->base.dramtype == LPDDR3) {
			training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING;
		} else if (sdram_params->base.dramtype == DDR3) {
			training_flag = PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING |
					PI_READ_LEVELING;
		}
	}

	/* ca training(LPDDR4,LPDDR3 support) */
	if ((training_flag & PI_CA_TRAINING) == PI_CA_TRAINING)
		data_training_ca(chan, channel, sdram_params);

	/* write leveling(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_WRITE_LEVELING) == PI_WRITE_LEVELING)
		data_training_wl(chan, channel, sdram_params);

	/* read gate training(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_READ_GATE_TRAINING) == PI_READ_GATE_TRAINING)
		data_training_rg(chan, channel, sdram_params);

	/* read leveling(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_READ_LEVELING) == PI_READ_LEVELING)
		data_training_rl(chan, channel, sdram_params);

	/* wdq leveling(LPDDR4 support) */
	if ((training_flag & PI_WDQ_LEVELING) == PI_WDQ_LEVELING)
		data_training_wdql(chan, channel, sdram_params);

	/* PHY_927 PHY_PAD_DQS_DRIVE RPULL offset_22 */
	clrbits_le32(&denali_phy[927], (1 << 22));

	return 0;
}

static void set_ddrconfig(const struct chan_info *chan,
			  const struct rk3399_sdram_params *sdram_params,
			  unsigned char channel, u32 ddrconfig)
{
	/* only need to set ddrconfig */
	struct rk3399_msch_regs *ddr_msch_regs = chan->msch;
	unsigned int cs0_cap = 0;
	unsigned int cs1_cap = 0;

	cs0_cap = (1 << (sdram_params->ch[channel].cs0_row
			+ sdram_params->ch[channel].col
			+ sdram_params->ch[channel].bk
			+ sdram_params->ch[channel].bw - 20));
	if (sdram_params->ch[channel].rank > 1)
		cs1_cap = cs0_cap >> (sdram_params->ch[channel].cs0_row
				- sdram_params->ch[channel].cs1_row);
	if (sdram_params->ch[channel].row_3_4) {
		cs0_cap = cs0_cap * 3 / 4;
		cs1_cap = cs1_cap * 3 / 4;
	}

	writel(ddrconfig | (ddrconfig << 8), &ddr_msch_regs->ddrconf);
	writel(((cs0_cap / 32) & 0xff) | (((cs1_cap / 32) & 0xff) << 8),
	       &ddr_msch_regs->ddrsize);
}

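/*
 * Publish the final DRAM configuration: encode the per-channel geometry
 * into PMUGRF os_reg2 (read back later to compute the SDRAM size),
 * program the MSCH/NoC timings, set the address stride in PMUSGRF and
 * the reboot hold bits in PMUCRU.
 */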
static void dram_all_config(struct dram_info *dram,
			    const struct rk3399_sdram_params *sdram_params)
{
	u32 sys_reg = 0;
	unsigned int channel, idx;

	sys_reg |= sdram_params->base.dramtype << SYS_REG_DDRTYPE_SHIFT;
	sys_reg |= (sdram_params->base.num_channels - 1)
		   << SYS_REG_NUM_CH_SHIFT;
	for (channel = 0, idx = 0;
	     (idx < sdram_params->base.num_channels) && (channel < 2);
	     channel++) {
		const struct rk3399_sdram_channel *info =
			&sdram_params->ch[channel];
		struct rk3399_msch_regs *ddr_msch_regs;
		const struct rk3399_msch_timings *noc_timing;

		if (sdram_params->ch[channel].col == 0)
			continue;
		idx++;
		sys_reg |= info->row_3_4 << SYS_REG_ROW_3_4_SHIFT(channel);
		sys_reg |= 1 << SYS_REG_CHINFO_SHIFT(channel);
		sys_reg |= (info->rank - 1) << SYS_REG_RANK_SHIFT(channel);
		sys_reg |= (info->col - 9) << SYS_REG_COL_SHIFT(channel);
		sys_reg |= info->bk == 3 ? 0 : 1 << SYS_REG_BK_SHIFT(channel);
		sys_reg |= (info->cs0_row - 13) << SYS_REG_CS0_ROW_SHIFT(channel);
		sys_reg |= (info->cs1_row - 13) << SYS_REG_CS1_ROW_SHIFT(channel);
		sys_reg |= (2 >> info->bw) << SYS_REG_BW_SHIFT(channel);
		sys_reg |= (2 >> info->dbw) << SYS_REG_DBW_SHIFT(channel);

		ddr_msch_regs = dram->chan[channel].msch;
		noc_timing = &sdram_params->ch[channel].noc_timings;
		writel(noc_timing->ddrtiminga0,
		       &ddr_msch_regs->ddrtiminga0);
		writel(noc_timing->ddrtimingb0,
		       &ddr_msch_regs->ddrtimingb0);
		writel(noc_timing->ddrtimingc0,
		       &ddr_msch_regs->ddrtimingc0);
		writel(noc_timing->devtodev0,
		       &ddr_msch_regs->devtodev0);
		writel(noc_timing->ddrmode,
		       &ddr_msch_regs->ddrmode);

		/* rank 1 memory clock disable (dfi_dram_clk_disable = 1) */
		if (sdram_params->ch[channel].rank == 1)
			setbits_le32(&dram->chan[channel].pctl->denali_ctl[276],
				     1 << 17);
	}

	writel(sys_reg, &dram->pmugrf->os_reg2);
	rk_clrsetreg(&dram->pmusgrf->soc_con4, 0x1f << 10,
		     sdram_params->base.stride << 10);

	/* reboot hold register set */
	writel(PRESET_SGRF_HOLD(0) | PRESET_GPIO0_HOLD(1) |
	       PRESET_GPIO1_HOLD(1),
	       &dram->pmucru->pmucru_rstnhold_con[1]);
	clrsetbits_le32(&dram->cru->glb_rst_con, 0x3, 0x3);
}

static int switch_to_phy_index1(struct dram_info *dram,
				const struct rk3399_sdram_params *sdram_params)
{
	u32 channel;
	u32 *denali_phy;
	u32 ch_count = sdram_params->base.num_channels;
	int ret;
	int i = 0;

	writel(RK_CLRSETBITS(0x03 << 4 | 1 << 2 | 1,
			     1 << 4 | 1 << 2 | 1),
	       &dram->cic->cic_ctrl0);
	while (!(readl(&dram->cic->cic_status0) & (1 << 2))) {
		mdelay(10);
		i++;
		if (i > 10) {
			debug("index1 frequency change timed out\n");
			return -ETIME;
		}
	}

	i = 0;
	writel(RK_CLRSETBITS(1 << 1, 1 << 1), &dram->cic->cic_ctrl0);
	while (!(readl(&dram->cic->cic_status0) & (1 << 0))) {
		mdelay(10);
		i++;
		if (i > 10) {
			debug("index1 frequency done timed out\n");
			return -ETIME;
		}
	}

	for (channel = 0; channel < ch_count; channel++) {
		denali_phy = dram->chan[channel].publ->denali_phy;
		clrsetbits_le32(&denali_phy[896], (0x3 << 8) | 1, 1 << 8);
		ret = data_training(&dram->chan[channel], channel,
				    sdram_params, PI_FULL_TRAINING);
		if (ret) {
			debug("index1 training failed\n");
			return ret;
		}
	}

	return 0;
}

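/*
 * Bring up both channels: set DLL bypass for low frequencies, configure
 * the controller/PI/PHY, run full data training and program the NoC
 * ddrconfig, then apply the global configuration and switch to the
 * index1 PHY settings.
 */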
static int sdram_init(struct dram_info *dram,
		      const struct rk3399_sdram_params *sdram_params)
{
	unsigned char dramtype = sdram_params->base.dramtype;
	unsigned int ddr_freq = sdram_params->base.ddr_freq;
	int channel;

	debug("Starting SDRAM initialization...\n");

	if ((dramtype == DDR3 && ddr_freq > 933) ||
	    (dramtype == LPDDR3 && ddr_freq > 933) ||
	    (dramtype == LPDDR4 && ddr_freq > 800)) {
		debug("SDRAM frequency is too high!\n");
		return -E2BIG;
	}

	for (channel = 0; channel < 2; channel++) {
		const struct chan_info *chan = &dram->chan[channel];
		struct rk3399_ddr_publ_regs *publ = chan->publ;

		phy_dll_bypass_set(publ, ddr_freq);

		if (channel >= sdram_params->base.num_channels)
			continue;

		if (pctl_cfg(chan, channel, sdram_params) != 0) {
			printf("pctl_cfg fail, reset\n");
			return -EIO;
		}

		/* LPDDR2/LPDDR3 need to wait for DAI to complete, max 10us */
		if (dramtype == LPDDR3)
			udelay(10);

		if (data_training(chan, channel,
				  sdram_params, PI_FULL_TRAINING)) {
			printf("SDRAM initialization failed, reset\n");
			return -EIO;
		}

		set_ddrconfig(chan, sdram_params, channel,
			      sdram_params->ch[channel].ddrconfig);
	}
	dram_all_config(dram, sdram_params);
	switch_to_phy_index1(dram, sdram_params);

	debug("Finished SDRAM initialization\n");
	return 0;
}

static int rk3399_dmc_ofdata_to_platdata(struct udevice *dev)
{
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
	int ret;

	ret = dev_read_u32_array(dev, "rockchip,sdram-params",
				 (u32 *)&plat->sdram_params,
				 sizeof(plat->sdram_params) / sizeof(u32));
	if (ret) {
		printf("%s: Cannot read rockchip,sdram-params %d\n",
		       __func__, ret);
		return ret;
	}
	ret = regmap_init_mem(dev, &plat->map);
	if (ret)
		printf("%s: regmap failed %d\n", __func__, ret);

#endif
	return 0;
}

#if CONFIG_IS_ENABLED(OF_PLATDATA)
static int conv_of_platdata(struct udevice *dev)
{
	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
	struct dtd_rockchip_rk3399_dmc *dtplat = &plat->dtplat;
	int ret;

	ret = regmap_init_mem_platdata(dev, dtplat->reg,
				       ARRAY_SIZE(dtplat->reg) / 2,
				       &plat->map);
	if (ret)
		return ret;

	return 0;
}
#endif

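/*
 * SPL-only init path: resolve the syscon/CRU handles and the per-channel
 * register ranges from the regmap, set the DDR clock to the requested
 * rate and run sdram_init().
 */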
static int rk3399_dmc_init(struct udevice *dev)
{
	struct dram_info *priv = dev_get_priv(dev);
	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
	int ret;
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3399_sdram_params *params = &plat->sdram_params;
#else
	struct dtd_rockchip_rk3399_dmc *dtplat = &plat->dtplat;
	struct rk3399_sdram_params *params =
					(void *)dtplat->rockchip_sdram_params;

	ret = conv_of_platdata(dev);
	if (ret)
		return ret;
#endif

	priv->cic = syscon_get_first_range(ROCKCHIP_SYSCON_CIC);
	priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
	priv->pmusgrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUSGRF);
	priv->pmucru = rockchip_get_pmucru();
	priv->cru = rockchip_get_cru();
	priv->chan[0].pctl = regmap_get_range(plat->map, 0);
	priv->chan[0].pi = regmap_get_range(plat->map, 1);
	priv->chan[0].publ = regmap_get_range(plat->map, 2);
	priv->chan[0].msch = regmap_get_range(plat->map, 3);
	priv->chan[1].pctl = regmap_get_range(plat->map, 4);
	priv->chan[1].pi = regmap_get_range(plat->map, 5);
	priv->chan[1].publ = regmap_get_range(plat->map, 6);
	priv->chan[1].msch = regmap_get_range(plat->map, 7);

	debug("con reg %p %p %p %p %p %p %p %p\n",
	      priv->chan[0].pctl, priv->chan[0].pi,
	      priv->chan[0].publ, priv->chan[0].msch,
	      priv->chan[1].pctl, priv->chan[1].pi,
	      priv->chan[1].publ, priv->chan[1].msch);
	debug("cru %p, cic %p, grf %p, sgrf %p, pmucru %p\n", priv->cru,
	      priv->cic, priv->pmugrf, priv->pmusgrf, priv->pmucru);
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	ret = clk_get_by_index_platdata(dev, 0, dtplat->clocks, &priv->ddr_clk);
#else
	ret = clk_get_by_index(dev, 0, &priv->ddr_clk);
#endif
	if (ret) {
		printf("%s clk get failed %d\n", __func__, ret);
		return ret;
	}
	ret = clk_set_rate(&priv->ddr_clk, params->base.ddr_freq * MHz);
	if (ret < 0) {
		printf("%s clk set failed %d\n", __func__, ret);
		return ret;
	}
	ret = sdram_init(priv, params);
	if (ret < 0) {
		printf("%s DRAM init failed %d\n", __func__, ret);
		return ret;
	}

	return 0;
}
#endif

static int rk3399_dmc_probe(struct udevice *dev)
{
#ifdef CONFIG_SPL_BUILD
	if (rk3399_dmc_init(dev))
		return 0;
#else
	struct dram_info *priv = dev_get_priv(dev);

	priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
	debug("%s: pmugrf=%p\n", __func__, priv->pmugrf);
	priv->info.base = CONFIG_SYS_SDRAM_BASE;
	priv->info.size = rockchip_sdram_size(
			(phys_addr_t)&priv->pmugrf->os_reg2);
#endif
	return 0;
}

static int rk3399_dmc_get_info(struct udevice *dev, struct ram_info *info)
{
	struct dram_info *priv = dev_get_priv(dev);

	*info = priv->info;

	return 0;
}

static struct ram_ops rk3399_dmc_ops = {
	.get_info = rk3399_dmc_get_info,
};

static const struct udevice_id rk3399_dmc_ids[] = {
	{ .compatible = "rockchip,rk3399-dmc" },
	{ }
};

U_BOOT_DRIVER(dmc_rk3399) = {
	.name = "rockchip_rk3399_dmc",
	.id = UCLASS_RAM,
	.of_match = rk3399_dmc_ids,
	.ops = &rk3399_dmc_ops,
#ifdef CONFIG_SPL_BUILD
	.ofdata_to_platdata = rk3399_dmc_ofdata_to_platdata,
#endif
	.probe = rk3399_dmc_probe,
	.priv_auto_alloc_size = sizeof(struct dram_info),
#ifdef CONFIG_SPL_BUILD
	.platdata_auto_alloc_size = sizeof(struct rockchip_dmc_plat),
#endif
};