Lines Matching +full:pctl +full:- +full:regmap

1 // SPDX-License-Identifier: GPL-2.0
9 #include <dt-bindings/memory/rk3368-dmc.h>
10 #include <dt-structs.h>
12 #include <regmap.h>
27 struct rk3368_ddr_pctl *pctl; member
40 struct regmap *map;
123 ((n <= 8) ? ((n - 4) << 9) : (((n >> 1) & 0x7) << 9))
125 ((((n - 4) & 0x7) << 4) | (((n - 4) & 0x8) >> 2))
133 (((n - 5) & 0x7) << 3)
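The three bit-twiddling bodies above appear to belong to the DDR3 mode-register field encoders that memory_init() uses further down (DDR3_MR0_WR, DDR3_MR0_CL and DDR3_MR2_TWL). A minimal stand-alone sketch, assuming those are indeed the macro names and that n is the write-recovery/latency value in clock cycles, shows how the JEDEC DDR3 field layouts fall out of the arithmetic:

/*
 * Hypothetical stand-alone illustration of the encoders above; the macro
 * names are taken from their later use in memory_init() and are assumed
 * to wrap exactly these expressions.
 */
#include <stdio.h>

#define DDR3_MR0_WR(n) \
	((n <= 8) ? ((n - 4) << 9) : (((n >> 1) & 0x7) << 9))
#define DDR3_MR0_CL(n) \
	((((n - 4) & 0x7) << 4) | (((n - 4) & 0x8) >> 2))
#define DDR3_MR2_TWL(n) \
	(((n - 5) & 0x7) << 3)

int main(void)
{
	/* DDR3-1600 example values: WR=12, CL=11, CWL=8 */
	printf("MR0.WR  = 0x%03x\n", DDR3_MR0_WR(12)); /* 0xc00: A11..A9 = 110 */
	printf("MR0.CL  = 0x%03x\n", DDR3_MR0_CL(11)); /* 0x070: A6..A4 = 111, A2 = 0 */
	printf("MR2.CWL = 0x%03x\n", DDR3_MR2_TWL(8)); /* 0x018: A5..A3 = 011 */
	return 0;
}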
141 rk_setreg(&grf->ddrc0_con0, NOC_RSP_ERR_STALL); in ddr_set_noc_spr_err_stall()
143 rk_clrreg(&grf->ddrc0_con0, NOC_RSP_ERR_STALL); in ddr_set_noc_spr_err_stall()
149 rk_setreg(&grf->ddrc0_con0, MSCH0_MAINDDR3_DDR3); in ddr_set_ddr3_mode()
151 rk_clrreg(&grf->ddrc0_con0, MSCH0_MAINDDR3_DDR3); in ddr_set_ddr3_mode()
160 clrsetbits_le32(&phy->reg[1], 0x3, 0x0); in ddrphy_config()
163 clrsetbits_le32(&phy->reg[0xb], 0xff, tcl << 4 | tal); in ddrphy_config()
165 clrsetbits_le32(&phy->reg[0xc], 0x0f, tcwl); in ddrphy_config()
167 /* Update drive-strength */ in ddrphy_config()
168 writel(0xcc, &phy->reg[0x11]); in ddrphy_config()
169 writel(0xaa, &phy->reg[0x16]); in ddrphy_config()
176 writel(0xcc, &phy->reg[0x20 + i * 0x10]); in ddrphy_config()
177 writel(0x44, &phy->reg[0x21 + i * 0x10]); in ddrphy_config()
180 /* Enable write-leveling calibration bypass */ in ddrphy_config()
181 setbits_le32(&phy->reg[2], BIT(3)); in ddrphy_config()
192 static void send_command(struct rk3368_ddr_pctl *pctl, u32 rank, u32 cmd) in send_command() argument
197 writel(mcmd, &pctl->mcmd); in send_command()
198 while (readl(&pctl->mcmd) & START_CMD) in send_command()
202 static void send_mrs(struct rk3368_ddr_pctl *pctl, in send_mrs() argument
208 writel(mcmd, &pctl->mcmd); in send_mrs()
209 while (readl(&pctl->mcmd) & START_CMD) in send_mrs()
213 static int memory_init(struct rk3368_ddr_pctl *pctl, in memory_init() argument
221 * Power up DRAM by DDR_PCTL_POWCTL[0] register of PCTL and in memory_init()
223 * of PCTL. in memory_init()
225 writel(POWER_UP_START, &pctl->powctl); in memory_init()
232 return -ETIME; in memory_init()
234 } while (!(readl(&pctl->powstat) & POWER_UP_DONE)); in memory_init()
237 mr[0] = DDR3_MR0_WR(params->pctl_timing.twr) | in memory_init()
238 DDR3_MR0_CL(params->pctl_timing.tcl) | in memory_init()
241 mr[2] = DDR3_MR2_TWL(params->pctl_timing.tcwl); in memory_init()
248 send_command(pctl, MCMD_RANK0 | MCMD_RANK1, DESELECT_CMD); in memory_init()
250 send_command(pctl, MCMD_RANK0 | MCMD_RANK1, PREA_CMD); in memory_init()
251 send_mrs(pctl, MCMD_RANK0 | MCMD_RANK1, 2, mr[2]); in memory_init()
252 send_mrs(pctl, MCMD_RANK0 | MCMD_RANK1, 3, mr[3]); in memory_init()
253 send_mrs(pctl, MCMD_RANK0 | MCMD_RANK1, 1, mr[1]); in memory_init()
254 send_mrs(pctl, MCMD_RANK0 | MCMD_RANK1, 0, mr[0]); in memory_init()
255 send_command(pctl, MCMD_RANK0 | MCMD_RANK1, ZQCL_CMD); in memory_init()
260 static void move_to_config_state(struct rk3368_ddr_pctl *pctl) in move_to_config_state() argument
264 * "16.6.1 State transition of PCTL (Moving to Config State)" in move_to_config_state()
266 u32 state = readl(&pctl->stat) & PCTL_STAT_MSK; in move_to_config_state()
270 writel(WAKEUP_STATE, &pctl->sctl); in move_to_config_state()
271 while ((readl(&pctl->stat) & PCTL_STAT_MSK) != ACCESS) in move_to_config_state()
274 /* fall-through */ in move_to_config_state()
277 writel(CFG_STATE, &pctl->sctl); in move_to_config_state()
278 while ((readl(&pctl->stat) & PCTL_STAT_MSK) != CONFIG) in move_to_config_state()
290 static void move_to_access_state(struct rk3368_ddr_pctl *pctl) in move_to_access_state() argument
294 * "16.6.1 State transition of PCTL (Moving to Access State)" in move_to_access_state()
296 u32 state = readl(&pctl->stat) & PCTL_STAT_MSK; in move_to_access_state()
300 if (((readl(&pctl->stat) >> LP_TRIG_SHIFT) & in move_to_access_state()
304 writel(WAKEUP_STATE, &pctl->sctl); in move_to_access_state()
305 while ((readl(&pctl->stat) & PCTL_STAT_MSK) != ACCESS) in move_to_access_state()
308 /* fall-through */ in move_to_access_state()
310 writel(CFG_STATE, &pctl->sctl); in move_to_access_state()
311 while ((readl(&pctl->stat) & PCTL_STAT_MSK) != CONFIG) in move_to_access_state()
314 /* fall-through */ in move_to_access_state()
316 writel(GO_STATE, &pctl->sctl); in move_to_access_state()
317 while ((readl(&pctl->stat) & PCTL_STAT_MSK) == CONFIG) in move_to_access_state()
335 * The PHY reset should be released before the PCTL reset. in ddrctl_reset()
338 * us to delay between releasing the PHY and PCTL reset) has in ddrctl_reset()
342 rk_setreg(&cru->softrst_con[10], ctl_reset | phy_reset); in ddrctl_reset()
344 rk_clrreg(&cru->softrst_con[10], phy_reset); in ddrctl_reset()
346 rk_clrreg(&cru->softrst_con[10], ctl_reset); in ddrctl_reset()
356 clrbits_le32(&ddrphy->reg[0], BIT(3) | BIT(2)); in ddrphy_reset()
358 setbits_le32(&ddrphy->reg[0], BIT(2)); in ddrphy_reset()
360 setbits_le32(&ddrphy->reg[0], BIT(3)); in ddrphy_reset()
367 setbits_le32(&ddrphy->reg[0x13], BIT(4)); in ddrphy_config_delays()
368 clrbits_le32(&ddrphy->reg[0x14], BIT(3)); in ddrphy_config_delays()
370 setbits_le32(&ddrphy->reg[0x26], BIT(4)); in ddrphy_config_delays()
371 clrbits_le32(&ddrphy->reg[0x27], BIT(3)); in ddrphy_config_delays()
373 setbits_le32(&ddrphy->reg[0x36], BIT(4)); in ddrphy_config_delays()
374 clrbits_le32(&ddrphy->reg[0x37], BIT(3)); in ddrphy_config_delays()
376 setbits_le32(&ddrphy->reg[0x46], BIT(4)); in ddrphy_config_delays()
377 clrbits_le32(&ddrphy->reg[0x47], BIT(3)); in ddrphy_config_delays()
379 setbits_le32(&ddrphy->reg[0x56], BIT(4)); in ddrphy_config_delays()
380 clrbits_le32(&ddrphy->reg[0x57], BIT(3)); in ddrphy_config_delays()
383 setbits_le32(&ddrphy->reg[0xa4], 0x1f); in ddrphy_config_delays()
385 clrbits_le32(&ddrphy->reg[0xa4], 0x1f); in ddrphy_config_delays()
392 writel(dqs_dll_delay, &ddrphy->reg[0x28]); in ddrphy_config_delays()
393 writel(dqs_dll_delay, &ddrphy->reg[0x38]); in ddrphy_config_delays()
394 writel(dqs_dll_delay, &ddrphy->reg[0x48]); in ddrphy_config_delays()
395 writel(dqs_dll_delay, &ddrphy->reg[0x58]); in ddrphy_config_delays()
398 static int dfi_cfg(struct rk3368_ddr_pctl *pctl) in dfi_cfg() argument
403 writel(DFI_DATA_BYTE_DISABLE_EN, &pctl->dfistcfg0); in dfi_cfg()
406 &pctl->dfistcfg1); in dfi_cfg()
407 writel(DFI_PARITY_INTR_EN | DFI_PARITY_EN, &pctl->dfistcfg2); in dfi_cfg()
409 &pctl->dfilpcfg0); in dfi_cfg()
411 writel(1, &pctl->dfitphyupdtype0); in dfi_cfg()
413 writel(0x1f, &pctl->dfitphyrdlat); in dfi_cfg()
414 writel(0, &pctl->dfitphywrdata); in dfi_cfg()
415 writel(0, &pctl->dfiupdcfg); /* phyupd and ctrlupd disabled */ in dfi_cfg()
417 setbits_le32(&pctl->dfistcfg0, DFI_INIT_START); in dfi_cfg()
424 return -ETIME; in dfi_cfg()
426 } while ((readl(&pctl->dfiststat0) & 1) == 0); in dfi_cfg()
451 struct rk3288_sdram_pctl_timing *pctl_timing = &params->pctl_timing; in pctl_calc_timings()
456 if (params->ddr_speed_bin != DDR3_1600K) { in pctl_calc_timings()
458 __func__, params->ddr_speed_bin); in pctl_calc_timings()
459 return -1; in pctl_calc_timings()
462 /* PCTL is clocked at 1/2 the DRAM clock; err on the side of caution */ in pctl_calc_timings()
463 pctl_timing->togcnt1u = DIV_ROUND_UP(freq, 2 * MHz); in pctl_calc_timings()
464 pctl_timing->togcnt100n = DIV_ROUND_UP(freq / 10, 2 * MHz); in pctl_calc_timings()
466 pctl_timing->tinit = 200; /* 200 usec */ in pctl_calc_timings()
467 pctl_timing->trsth = 500; /* 500 usec */ in pctl_calc_timings()
468 pctl_timing->trefi = 78; /* 7.8usec = 78 * 100ns */ in pctl_calc_timings()
469 params->trefi_mem_ddr3 = ns_to_tCK(pctl_timing->trefi * 100, freq); in pctl_calc_timings()
472 pctl_timing->tcl = 6; in pctl_calc_timings()
473 pctl_timing->tcwl = 10; in pctl_calc_timings()
475 pctl_timing->tcl = 8; in pctl_calc_timings()
476 pctl_timing->tcwl = 6; in pctl_calc_timings()
478 pctl_timing->tcl = 10; in pctl_calc_timings()
479 pctl_timing->tcwl = 7; in pctl_calc_timings()
481 pctl_timing->tcl = 11; in pctl_calc_timings()
482 pctl_timing->tcwl = 8; in pctl_calc_timings()
485 pctl_timing->tmrd = 4; /* 4 tCK (all speed bins) */ in pctl_calc_timings()
486 pctl_timing->trfc = ns_to_tCK(350, freq); /* tRFC: 350 (max) @ 8GBit */ in pctl_calc_timings()
487 pctl_timing->trp = max(4u, ps_to_tCK(13750, freq)); in pctl_calc_timings()
489 * JESD-79: in pctl_calc_timings()
490 * READ to WRITE Command Delay = RL + tCCD / 2 + 2tCK - WL in pctl_calc_timings()
493 pctl_timing->trtw = pctl_timing->tcl + tccd/2 + 2 - pctl_timing->tcwl; in pctl_calc_timings()
494 pctl_timing->tal = 0; in pctl_calc_timings()
495 pctl_timing->tras = ps_to_tCK(35000, freq); in pctl_calc_timings()
496 pctl_timing->trc = ps_to_tCK(48750, freq); in pctl_calc_timings()
497 pctl_timing->trcd = ps_to_tCK(13750, freq); in pctl_calc_timings()
498 pctl_timing->trrd = max(4u, ps_to_tCK(7500, freq)); in pctl_calc_timings()
499 pctl_timing->trtp = max(4u, ps_to_tCK(7500, freq)); in pctl_calc_timings()
500 pctl_timing->twr = ps_to_tCK(15000, freq); in pctl_calc_timings()
501 /* The DDR3 mode-register does only support even values for tWR > 8. */ in pctl_calc_timings()
502 if (pctl_timing->twr > 8) in pctl_calc_timings()
503 pctl_timing->twr = (pctl_timing->twr + 1) & ~1; in pctl_calc_timings()
504 pctl_timing->twtr = max(4u, ps_to_tCK(7500, freq)); in pctl_calc_timings()
505 pctl_timing->texsr = 512; /* tEXSR(max) is tDLLK */ in pctl_calc_timings()
506 pctl_timing->txp = max(3u, ps_to_tCK(6000, freq)); in pctl_calc_timings()
507 pctl_timing->txpdll = max(10u, ps_to_tCK(24000, freq)); in pctl_calc_timings()
508 pctl_timing->tzqcs = max(64u, ps_to_tCK(80000, freq)); in pctl_calc_timings()
509 pctl_timing->tzqcsi = 10000; /* as used by Rockchip */ in pctl_calc_timings()
510 pctl_timing->tdqs = 1; /* fixed for DDR3 */ in pctl_calc_timings()
511 pctl_timing->tcksre = max(5u, ps_to_tCK(10000, freq)); in pctl_calc_timings()
512 pctl_timing->tcksrx = max(5u, ps_to_tCK(10000, freq)); in pctl_calc_timings()
513 pctl_timing->tcke = max(3u, ps_to_tCK(5000, freq)); in pctl_calc_timings()
514 pctl_timing->tmod = max(12u, ps_to_tCK(15000, freq)); in pctl_calc_timings()
515 pctl_timing->trstl = ns_to_tCK(100, freq); in pctl_calc_timings()
516 pctl_timing->tzqcl = max(256u, ps_to_tCK(320000, freq)); /* tZQoper */ in pctl_calc_timings()
517 pctl_timing->tmrr = 0; in pctl_calc_timings()
518 pctl_timing->tckesr = pctl_timing->tcke + 1; /* JESD-79: tCKE + 1tCK */ in pctl_calc_timings()
519 pctl_timing->tdpd = 0; /* RK3368 TRM: "allowed values for DDR3: 0" */ in pctl_calc_timings()
525 * requirements of the given speed-bin. If necessary, we stretch out in pctl_calc_timings()
528 tfaw_as_ps = 40000; /* 40ns: tFAW for DDR3-1600K, 2KB page-size */ in pctl_calc_timings()
529 if (tCK_to_ps(pctl_timing->trrd * 6, freq) < tfaw_as_ps) { in pctl_calc_timings()
531 pctl_timing->trrd = ps_to_tCK(DIV_ROUND_UP(40000, 6), freq); in pctl_calc_timings()
532 params->tfaw_mult = TFAW_TRRD_MULT6; in pctl_calc_timings()
533 } else if (tCK_to_ps(pctl_timing->trrd * 5, freq) < tfaw_as_ps) { in pctl_calc_timings()
534 params->tfaw_mult = TFAW_TRRD_MULT6; in pctl_calc_timings()
535 } else if (tCK_to_ps(pctl_timing->trrd * 4, freq) < tfaw_as_ps) { in pctl_calc_timings()
536 params->tfaw_mult = TFAW_TRRD_MULT5; in pctl_calc_timings()
538 params->tfaw_mult = TFAW_TRRD_MULT4; in pctl_calc_timings()
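To make the timing arithmetic above concrete, here is a back-of-the-envelope check at a nominal 800 MHz DRAM clock (tCK = 1250 ps). This assumes the elided CL/CWL selection picks the tcl = 11 / tcwl = 8 branch, that tccd is 4 (DDR3 BL8), and that ps_to_tCK() rounds up; none of those helpers are part of this match list, so the sketch recomputes the values with plain integer math:

/*
 * Worked example for pctl_calc_timings() at 800 MHz (tCK = 1250 ps);
 * assumptions as stated in the lead-in above.
 */
#include <stdio.h>

#define DIV_ROUND_UP(n, d)	(((n) + (d) - 1) / (d))

int main(void)
{
	const unsigned int tck_ps = 1250, tcl = 11, tcwl = 8, tccd = 4;
	unsigned int trp  = DIV_ROUND_UP(13750, tck_ps);   /* 11 tCK */
	unsigned int trrd = DIV_ROUND_UP(7500, tck_ps);    /* 6 tCK (>= 4 tCK floor) */
	unsigned int trtw = tcl + tccd / 2 + 2 - tcwl;     /* 11 + 2 + 2 - 8 = 7 tCK */

	printf("trp=%u trrd=%u trtw=%u\n", trp, trrd, trtw);
	/*
	 * tFAW stretching: 6 * trrd = 45 ns >= 40 ns, but 5 * trrd = 37.5 ns
	 * is still below 40 ns, so the code settles on TFAW_TRRD_MULT6
	 * (tFAW programmed as 6 x tRRD = 45 ns).
	 */
	return 0;
}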
544 static void pctl_cfg(struct rk3368_ddr_pctl *pctl, in pctl_cfg() argument
548 /* Configure PCTL timing registers */ in pctl_cfg()
549 params->pctl_timing.trefi |= BIT(31); /* see PCTL_TREFI */ in pctl_cfg()
550 copy_to_reg(&pctl->togcnt1u, &params->pctl_timing.togcnt1u, in pctl_cfg()
551 sizeof(params->pctl_timing)); in pctl_cfg()
552 writel(params->trefi_mem_ddr3, &pctl->trefi_mem_ddr3); in pctl_cfg()
555 writel((RANK0_ODT_WRITE_SEL | RANK1_ODT_WRITE_SEL), &pctl->dfiodtcfg); in pctl_cfg()
556 writel(7 << ODT_LEN_BL8_W_SHIFT, &pctl->dfiodtcfg1); in pctl_cfg()
558 /* Set up the CL/CWL-dependent timings of DFI */ in pctl_cfg()
559 writel((params->pctl_timing.tcl - 1) / 2 - 1, &pctl->dfitrddataen); in pctl_cfg()
560 writel((params->pctl_timing.tcwl - 1) / 2 - 1, &pctl->dfitphywrlat); in pctl_cfg()
563 writel(params->tfaw_mult | DDR3_EN | DDR2_DDR3_BL_8, &pctl->mcfg); in pctl_cfg()
564 writel(0x001c0004, &grf->ddrc0_con0); in pctl_cfg()
566 setbits_le32(&pctl->scfg, HW_LOW_POWER_EN); in pctl_cfg()
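The DFI read-data-enable and write-latency values written just above are expressed in PCTL clocks, and the PCTL runs at half the DRAM rate, which is where the divide-by-two comes from. A quick check against the CL = 11 / CWL = 8 case assumed earlier (integer division throughout):

/* Sanity check of the half-rate DFI latencies, assuming tcl = 11, tcwl = 8. */
#include <stdio.h>

int main(void)
{
	unsigned int tcl = 11, tcwl = 8;

	printf("dfitrddataen = %u\n", (tcl - 1) / 2 - 1);  /* 4 half-rate clocks */
	printf("dfitphywrlat = %u\n", (tcwl - 1) / 2 - 1); /* 2 half-rate clocks */
	return 0;
}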
569 static int ddrphy_data_training(struct rk3368_ddr_pctl *pctl, in ddrphy_data_training() argument
572 const u32 trefi = readl(&pctl->trefi); in ddrphy_data_training()
576 /* disable auto-refresh */ in ddrphy_data_training()
577 writel(0 | BIT(31), &pctl->trefi); in ddrphy_data_training()
579 clrsetbits_le32(&ddrphy->reg[2], 0x33, 0x20); in ddrphy_data_training()
580 clrsetbits_le32(&ddrphy->reg[2], 0x33, 0x21); in ddrphy_data_training()
587 return -ETIME; in ddrphy_data_training()
589 } while ((readl(&ddrphy->reg[0xff]) & 0xf) != 0xf); in ddrphy_data_training()
591 send_command(pctl, MCMD_RANK0 | MCMD_RANK1, PREA_CMD); in ddrphy_data_training()
592 clrsetbits_le32(&ddrphy->reg[2], 0x33, 0x20); in ddrphy_data_training()
593 /* resume auto-refresh */ in ddrphy_data_training()
594 writel(trefi | BIT(31), &pctl->trefi); in ddrphy_data_training()
603 struct rk3368_ddr_pctl *pctl = priv->pctl; in sdram_col_row_detect() local
604 struct rk3368_msch *msch = priv->msch; in sdram_col_row_detect()
609 move_to_config_state(pctl); in sdram_col_row_detect()
610 writel(6, &msch->ddrconf); in sdram_col_row_detect()
611 move_to_access_state(pctl); in sdram_col_row_detect()
614 for (col = 11; col >= 9; col--) { in sdram_col_row_detect()
617 (1 << (col + params->chan.bw - 1)); in sdram_col_row_detect()
626 return -EINVAL; in sdram_col_row_detect()
629 move_to_config_state(pctl); in sdram_col_row_detect()
630 writel(15, &msch->ddrconf); in sdram_col_row_detect()
631 move_to_access_state(pctl); in sdram_col_row_detect()
634 for (row = 16; row >= 12; row--) { in sdram_col_row_detect()
636 addr = CONFIG_SYS_SDRAM_BASE + (1 << (row + 15 - 1)); in sdram_col_row_detect()
645 return -EINVAL; in sdram_col_row_detect()
650 params->chan.col = col; in sdram_col_row_detect()
651 params->chan.cs0_row = row; in sdram_col_row_detect()
652 params->chan.cs1_row = row; in sdram_col_row_detect()
653 params->chan.row_3_4 = 0; in sdram_col_row_detect()
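The actual memory probe between the visible lines is not part of this match list. The usual technique for this kind of detection (and presumably what the elided lines do) is an address-aliasing test: with the NIU temporarily switched to its widest column mapping (ddrconf 6) or deepest row mapping (ddrconf 15), a write to base + (1 << bit) either lands in distinct storage or wraps back onto the base address, revealing whether that address bit is actually decoded. A hypothetical sketch of such a check, with invented names and patterns:

/*
 * Hypothetical aliasing probe; the real driver's test between the
 * writel(6, &msch->ddrconf) and writel(15, &msch->ddrconf) steps is not
 * shown in this listing, so the helper name and patterns are illustrative.
 */
#include <asm/io.h>
#include <linux/types.h>
#include <stdbool.h>

static bool addr_bit_wired(unsigned long base, unsigned int bit)
{
	const u32 pat0 = 0xdeadbeef, pat1 = 0xa5a55a5a;

	writel(pat0, base);
	writel(pat1, base + (1ul << bit));

	/* If the bit is not decoded, the second write aliases the first. */
	return readl(base) == pat0;
}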
662 const u8 cols = params->chan.col - ((params->chan.bw == 2) ? 0 : 1); in msch_niu_config()
663 const u8 rows = params->chan.cs0_row; in msch_niu_config()
666 * The DDR address-translation table always assumes a 32bit in msch_niu_config()
668 * a 16bit bus (i.e. one column-address is consumed). in msch_niu_config()
676 * C-B-R-D patterns are first. For these we require an in msch_niu_config()
692 * 11 through 13 are C-R-B-D patterns. These are in msch_niu_config()
696 * there are no gaps up until we hit the device/chip-select; in msch_niu_config()
698 * as the row-address continues right after the CS in msch_niu_config()
705 * 14 and 15 are catch-all variants using a C-B-D-R in msch_niu_config()
706 * scheme (i.e. alternating the chip-select every time in msch_niu_config()
707 * C-B overflows) and stuffing the remaining C-bits in msch_niu_config()
710 * can use less than the maximum number of rows) -or- in msch_niu_config()
713 * remaining C-bits will be stuffed onto the top after in msch_niu_config()
714 * the device/chip-select switches). in msch_niu_config()
721 * For C-B-R-D, we need an exact match (i.e. both for the number of in msch_niu_config()
722 * columns and rows), while for C-B-D-R, only the number of in msch_niu_config()
729 if (ddrconf_table[i].type != params->memory_schedule) in msch_niu_config()
733 * Match according to the rules (exact/inexact/at-least) in msch_niu_config()
736 switch (params->memory_schedule) { in msch_niu_config()
759 writel(i, &msch->ddrconf); in msch_niu_config()
765 return -EINVAL; in msch_niu_config()
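Restating the matching rules from the comment block above in code form: with a 32-bit bus (bw == 2) nothing is subtracted from the column count, so a part with col = 10 and cs0_row = 15 first looks for a C-B-R-D entry with exactly 10 columns and 15 rows, and otherwise falls back to a C-B-D-R entry with 10 columns and at least 15 rows. The element type, field names and schedule constants below are invented for the sketch; the real ddrconf_table is not part of this listing:

/* Illustrative-only restatement of the C-B-R-D vs. C-B-D-R matching rules. */
#include <stdbool.h>

enum sched { SCHED_CBRD, SCHED_CBDR };		/* hypothetical */

struct ddrconf {				/* hypothetical */
	enum sched type;
	unsigned char cols, rows;
};

static bool ddrconf_matches(const struct ddrconf *e,
			    unsigned char cols, unsigned char rows)
{
	/* C-B-R-D: exact match; C-B-D-R: exact cols, at least 'rows' rows. */
	if (e->type == SCHED_CBRD)
		return e->cols == cols && e->rows == rows;
	return e->cols == cols && e->rows >= rows;
}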
771 struct rk3368_pmu_grf *pmugrf = priv->pmugrf; in dram_all_config()
773 const struct rk3288_sdram_channel *info = &params->chan; in dram_all_config()
780 sys_reg |= info->row_3_4 << SYS_REG_ROW_3_4_SHIFT(chan); in dram_all_config()
782 sys_reg |= (info->rank - 1) << SYS_REG_RANK_SHIFT(chan); in dram_all_config()
783 sys_reg |= (info->col - 9) << SYS_REG_COL_SHIFT(chan); in dram_all_config()
784 sys_reg |= info->bk == 3 ? 0 : 1 << SYS_REG_BK_SHIFT(chan); in dram_all_config()
785 sys_reg |= (info->cs0_row - 13) << SYS_REG_CS0_ROW_SHIFT(chan); in dram_all_config()
786 sys_reg |= (info->cs1_row - 13) << SYS_REG_CS1_ROW_SHIFT(chan); in dram_all_config()
787 sys_reg |= (2 >> info->bw) << SYS_REG_BW_SHIFT(chan); in dram_all_config()
788 sys_reg |= (2 >> info->dbw) << SYS_REG_DBW_SHIFT(chan); in dram_all_config()
790 writel(sys_reg, &pmugrf->os_reg[2]); in dram_all_config()
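The os_reg[2] encoding written here is what rockchip_sdram_size() later turns back into a byte count in the probe function below. As a worked example under one plausible geometry (rank = 2, col = 10, bk = 3 bank bits, cs0_row = 15, 32-bit bus), the capacity comes out to 1 GiB per rank, 2 GiB total, which the probe then caps at 0xfe000000. The sketch below only mirrors the arithmetic, not the exact field extraction:

/* Worked size calculation for the geometry assumed in the lead-in above. */
#include <stdio.h>

int main(void)
{
	unsigned int rank = 2, col = 10, bk = 3, row = 15, bw_bytes = 4;
	unsigned long long size = (unsigned long long)rank * bw_bytes
				  << (col + bk + row);

	printf("%llu MiB\n", size >> 20);	/* 2048 MiB */
	return 0;
}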
798 struct rk3368_ddr_pctl *pctl = priv->pctl; in setup_sdram() local
799 struct rk3368_ddrphy *ddrphy = priv->phy; in setup_sdram()
800 struct rk3368_cru *cru = priv->cru; in setup_sdram()
801 struct rk3368_grf *grf = priv->grf; in setup_sdram()
802 struct rk3368_msch *msch = priv->msch; in setup_sdram()
807 ret = clk_set_rate(&priv->ddr_clk, 2 * params->ddr_freq); in setup_sdram()
813 /* Update the read-latency for the RK3368 */ in setup_sdram()
814 writel(0x32, &msch->readlatency); in setup_sdram()
816 /* Initialise the DDR PCTL and DDR PHY */ in setup_sdram()
819 ddrphy_config_delays(ddrphy, params->ddr_freq); in setup_sdram()
820 dfi_cfg(pctl); in setup_sdram()
825 pctl_calc_timings(params, params->ddr_freq); in setup_sdram()
827 pctl_cfg(pctl, params, grf); in setup_sdram()
830 params->pctl_timing.tcl, in setup_sdram()
831 params->pctl_timing.tal, in setup_sdram()
832 params->pctl_timing.tcwl); in setup_sdram()
834 /* Initialize DRAM and configure with mode-register values */ in setup_sdram()
835 ret = memory_init(pctl, params); in setup_sdram()
839 move_to_config_state(pctl); in setup_sdram()
840 /* Perform data-training */ in setup_sdram()
841 ddrphy_data_training(pctl, ddrphy); in setup_sdram()
842 move_to_access_state(pctl); in setup_sdram()
845 params->chan.rank = 2; in setup_sdram()
846 /* TODO(prt): bus width is not auto-detected (yet)... */ in setup_sdram()
847 params->chan.bw = 2; /* 32bit wide bus */ in setup_sdram()
848 params->chan.dbw = params->chan.dbw; /* 32bit wide bus */ in setup_sdram()
851 params->chan.bk = 3; in setup_sdram()
880 ret = regmap_init_mem(dev_ofnode(dev), &plat->map); in rk3368_dmc_ofdata_to_platdata()
892 struct dtd_rockchip_rk3368_dmc *of_plat = &plat->of_plat; in conv_of_platdata()
894 plat->ddr_freq = of_plat->rockchip_ddr_frequency; in conv_of_platdata()
895 plat->ddr_speed_bin = of_plat->rockchip_ddr_speed_bin; in conv_of_platdata()
896 plat->memory_schedule = of_plat->rockchip_memory_schedule; in conv_of_platdata()
906 struct rk3368_ddr_pctl *pctl; in rk3368_dmc_probe() local
922 priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF); in rk3368_dmc_probe()
923 debug("%s: pmugrf=%p\n", __func__, priv->pmugrf); in rk3368_dmc_probe()
926 pctl = (struct rk3368_ddr_pctl *)plat->of_plat.reg[0]; in rk3368_dmc_probe()
927 ddrphy = (struct rk3368_ddrphy *)plat->of_plat.reg[2]; in rk3368_dmc_probe()
931 priv->pctl = pctl; in rk3368_dmc_probe()
932 priv->phy = ddrphy; in rk3368_dmc_probe()
933 priv->msch = msch; in rk3368_dmc_probe()
934 priv->grf = grf; in rk3368_dmc_probe()
939 priv->ddr_clk.id = CLK_DDR; in rk3368_dmc_probe()
940 ret = clk_request(dev_clk, &priv->ddr_clk); in rk3368_dmc_probe()
945 priv->cru = cru; in rk3368_dmc_probe()
946 if (IS_ERR(priv->cru)) in rk3368_dmc_probe()
947 return PTR_ERR(priv->cru); in rk3368_dmc_probe()
954 priv->info.base = 0; in rk3368_dmc_probe()
955 priv->info.size = in rk3368_dmc_probe()
956 rockchip_sdram_size((phys_addr_t)&priv->pmugrf->os_reg[2]); in rk3368_dmc_probe()
963 priv->info.size = min(priv->info.size, (size_t)0xfe000000); in rk3368_dmc_probe()
972 *info = priv->info; in rk3368_dmc_get_info()
982 { .compatible = "rockchip,rk3368-dmc" },