/*
 * (C) Copyright 2017 Rockchip Electronics Co., Ltd
 *
 * SPDX-License-Identifier:	GPL-2.0
 */

#include <common.h>
#include <clk-uclass.h>
#include <dm.h>
#include <errno.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/cru_rk322x.h>
#include <asm/arch/hardware.h>
#include <dm/lists.h>
#include <dt-bindings/clock/rk3228-cru.h>
#include <linux/log2.h>

enum {
	VCO_MAX_HZ	= 3200U * 1000000,
	VCO_MIN_HZ	= 800 * 1000000,
	OUTPUT_MAX_HZ	= 3200U * 1000000,
	OUTPUT_MIN_HZ	= 24 * 1000000,
};

#define DIV_TO_RATE(input_rate, div)	((input_rate) / ((div) + 1))

#define PLL_DIVISORS(hz, _refdiv, _postdiv1, _postdiv2) {\
	.refdiv = _refdiv,\
	.fbdiv = (u32)((u64)hz * _refdiv * _postdiv1 * _postdiv2 / OSC_HZ), \
	.postdiv1 = _postdiv1, .postdiv2 = _postdiv2};\
	_Static_assert(((u64)hz * _refdiv * _postdiv1 * _postdiv2 / OSC_HZ) * \
		       OSC_HZ / (_refdiv * _postdiv1 * _postdiv2) == hz, \
		       #hz "Hz cannot be hit with PLL "\
		       "divisors on line " __stringify(__LINE__));

/* use integer mode */
static const struct pll_div apll_init_cfg = PLL_DIVISORS(APLL_HZ, 1, 3, 1);
static const struct pll_div gpll_init_cfg = PLL_DIVISORS(GPLL_HZ, 2, 2, 1);

static int rkclk_set_pll(struct rk322x_cru *cru, enum rk_clk_id clk_id,
			 const struct pll_div *div)
{
	int pll_id = rk_pll_id(clk_id);
	struct rk322x_pll *pll = &cru->pll[pll_id];

	/* All PLLs have same VCO and output frequency range restrictions. */
	uint vco_hz = OSC_HZ / 1000 * div->fbdiv / div->refdiv * 1000;
	uint output_hz = vco_hz / div->postdiv1 / div->postdiv2;

	debug("PLL at %p: fb=%d, ref=%d, pst1=%d, pst2=%d, vco=%u Hz, output=%u Hz\n",
	      pll, div->fbdiv, div->refdiv, div->postdiv1,
	      div->postdiv2, vco_hz, output_hz);
	assert(vco_hz >= VCO_MIN_HZ && vco_hz <= VCO_MAX_HZ &&
	       output_hz >= OUTPUT_MIN_HZ && output_hz <= OUTPUT_MAX_HZ);

	/* use integer mode */
	rk_setreg(&pll->con1, 1 << PLL_DSMPD_SHIFT);
	/* Power down */
	rk_setreg(&pll->con1, 1 << PLL_PD_SHIFT);

	rk_clrsetreg(&pll->con0,
		     PLL_POSTDIV1_MASK | PLL_FBDIV_MASK,
		     (div->postdiv1 << PLL_POSTDIV1_SHIFT) | div->fbdiv);
	rk_clrsetreg(&pll->con1, PLL_POSTDIV2_MASK | PLL_REFDIV_MASK,
		     (div->postdiv2 << PLL_POSTDIV2_SHIFT |
		     div->refdiv << PLL_REFDIV_SHIFT));

	/* Power Up */
	rk_clrreg(&pll->con1, 1 << PLL_PD_SHIFT);

	/* waiting for pll lock */
	while (readl(&pll->con1) & (1 << PLL_LOCK_STATUS_SHIFT))
		udelay(1);

	return 0;
}

static void rkclk_init(struct rk322x_cru *cru)
{
	u32 aclk_div;
	u32 hclk_div;
	u32 pclk_div;

	/* pll enter slow-mode */
	rk_clrsetreg(&cru->cru_mode_con,
		     GPLL_MODE_MASK | APLL_MODE_MASK,
		     GPLL_MODE_SLOW << GPLL_MODE_SHIFT |
		     APLL_MODE_SLOW << APLL_MODE_SHIFT);

	/* init pll */
	rkclk_set_pll(cru, CLK_ARM, &apll_init_cfg);
	rkclk_set_pll(cru, CLK_GENERAL, &gpll_init_cfg);

	/*
	 * select apll as cpu/core clock pll source and
	 * set up dependent divisors for PERI and ACLK clocks.
	 * core hz : apll = 1:1
	 */
	aclk_div = APLL_HZ / CORE_ACLK_HZ - 1;
	assert((aclk_div + 1) * CORE_ACLK_HZ == APLL_HZ && aclk_div < 0x7);

	pclk_div = APLL_HZ / CORE_PERI_HZ - 1;
	assert((pclk_div + 1) * CORE_PERI_HZ == APLL_HZ && pclk_div < 0xf);

	rk_clrsetreg(&cru->cru_clksel_con[0],
		     CORE_CLK_PLL_SEL_MASK | CORE_DIV_CON_MASK,
		     CORE_CLK_PLL_SEL_APLL << CORE_CLK_PLL_SEL_SHIFT |
		     0 << CORE_DIV_CON_SHIFT);

	rk_clrsetreg(&cru->cru_clksel_con[1],
		     CORE_ACLK_DIV_MASK | CORE_PERI_DIV_MASK,
		     aclk_div << CORE_ACLK_DIV_SHIFT |
		     pclk_div << CORE_PERI_DIV_SHIFT);

	/*
	 * select gpll as pd_bus bus clock source and
	 * set up dependent divisors for PCLK/HCLK and ACLK clocks.
	 */
	aclk_div = GPLL_HZ / BUS_ACLK_HZ - 1;
	assert((aclk_div + 1) * BUS_ACLK_HZ == GPLL_HZ && aclk_div <= 0x1f);

	pclk_div = BUS_ACLK_HZ / BUS_PCLK_HZ - 1;
	assert((pclk_div + 1) * BUS_PCLK_HZ == GPLL_HZ && pclk_div <= 0x7);

	hclk_div = BUS_ACLK_HZ / BUS_HCLK_HZ - 1;
	assert((hclk_div + 1) * BUS_HCLK_HZ == GPLL_HZ && hclk_div <= 0x3);

	rk_clrsetreg(&cru->cru_clksel_con[0],
		     BUS_ACLK_PLL_SEL_MASK | BUS_ACLK_DIV_MASK,
		     BUS_ACLK_PLL_SEL_GPLL << BUS_ACLK_PLL_SEL_SHIFT |
		     aclk_div << BUS_ACLK_DIV_SHIFT);

	rk_clrsetreg(&cru->cru_clksel_con[1],
		     BUS_PCLK_DIV_MASK | BUS_HCLK_DIV_MASK,
		     pclk_div << BUS_PCLK_DIV_SHIFT |
		     hclk_div << BUS_HCLK_DIV_SHIFT);

	/*
	 * select gpll as pd_peri bus clock source and
	 * set up dependent divisors for PCLK/HCLK and ACLK clocks.
	 */
	aclk_div = GPLL_HZ / PERI_ACLK_HZ - 1;
	assert((aclk_div + 1) * PERI_ACLK_HZ == GPLL_HZ && aclk_div < 0x1f);

	hclk_div = ilog2(PERI_ACLK_HZ / PERI_HCLK_HZ);
	assert((1 << hclk_div) * PERI_HCLK_HZ ==
	       PERI_ACLK_HZ && (hclk_div < 0x4));

	pclk_div = ilog2(PERI_ACLK_HZ / PERI_PCLK_HZ);
	assert((1 << pclk_div) * PERI_PCLK_HZ ==
	       PERI_ACLK_HZ && pclk_div < 0x8);

	rk_clrsetreg(&cru->cru_clksel_con[10],
		     PERI_PLL_SEL_MASK | PERI_PCLK_DIV_MASK |
		     PERI_HCLK_DIV_MASK | PERI_ACLK_DIV_MASK,
		     PERI_PLL_GPLL << PERI_PLL_SEL_SHIFT |
		     pclk_div << PERI_PCLK_DIV_SHIFT |
		     hclk_div << PERI_HCLK_DIV_SHIFT |
		     aclk_div << PERI_ACLK_DIV_SHIFT);

	/* PLL enter normal-mode */
	rk_clrsetreg(&cru->cru_mode_con,
		     GPLL_MODE_MASK | APLL_MODE_MASK,
		     GPLL_MODE_NORM << GPLL_MODE_SHIFT |
		     APLL_MODE_NORM << APLL_MODE_SHIFT);
}

/* Get pll rate by id */
static uint32_t rkclk_pll_get_rate(struct rk322x_cru *cru,
				   enum rk_clk_id clk_id)
{
	uint32_t refdiv, fbdiv, postdiv1, postdiv2;
	uint32_t con;
	int pll_id = rk_pll_id(clk_id);
	struct rk322x_pll *pll = &cru->pll[pll_id];
	static u8 clk_shift[CLK_COUNT] = {
		0xff, APLL_MODE_SHIFT, DPLL_MODE_SHIFT, 0xff,
		GPLL_MODE_SHIFT, 0xff
	};
	static u32 clk_mask[CLK_COUNT] = {
		0xff, APLL_MODE_MASK, DPLL_MODE_MASK, 0xff,
		GPLL_MODE_MASK, 0xff
	};
	uint shift;
	uint mask;

	con = readl(&cru->cru_mode_con);
	shift = clk_shift[clk_id];
	mask = clk_mask[clk_id];

	switch ((con & mask) >> shift) {
	case GPLL_MODE_SLOW:
		return OSC_HZ;
	case GPLL_MODE_NORM:
		/* normal mode */
		con = readl(&pll->con0);
		postdiv1 = (con & PLL_POSTDIV1_MASK) >> PLL_POSTDIV1_SHIFT;
		fbdiv = (con & PLL_FBDIV_MASK) >> PLL_FBDIV_SHIFT;
		con = readl(&pll->con1);
		postdiv2 = (con & PLL_POSTDIV2_MASK) >> PLL_POSTDIV2_SHIFT;
		refdiv = (con & PLL_REFDIV_MASK) >> PLL_REFDIV_SHIFT;
		return (24 * fbdiv / (refdiv * postdiv1 * postdiv2)) * 1000000;
	default:
		return 32768;
	}
}

static ulong rockchip_mmc_get_clk(struct rk322x_cru *cru, uint clk_general_rate,
				  int periph)
{
	uint src_rate;
	uint div, mux;
	u32 con;

	switch (periph) {
	case HCLK_EMMC:
	case SCLK_EMMC:
		con = readl(&cru->cru_clksel_con[11]);
		mux = (con & EMMC_PLL_MASK) >> EMMC_PLL_SHIFT;
		con = readl(&cru->cru_clksel_con[12]);
		div = (con & EMMC_DIV_MASK) >> EMMC_DIV_SHIFT;
		break;
	case HCLK_SDMMC:
	case SCLK_SDMMC:
		con = readl(&cru->cru_clksel_con[11]);
		mux = (con & MMC0_PLL_MASK) >> MMC0_PLL_SHIFT;
		div = (con & MMC0_DIV_MASK) >> MMC0_DIV_SHIFT;
		break;
	default:
		return -EINVAL;
	}

	src_rate = mux == EMMC_SEL_24M ? OSC_HZ : clk_general_rate;
	return DIV_TO_RATE(src_rate, div) / 2;
}

static ulong rk322x_mac_set_clk(struct rk322x_cru *cru, uint freq)
{
	ulong ret;

	/*
	 * The gmac clock can either be derived from an external clock or
	 * be generated internally by dividing a PLL clock down to SCLK_MAC.
	 */
	if (readl(&cru->cru_clksel_con[5]) & BIT(5)) {
		/* An external clock will always generate the right rate... */
		ret = freq;
	} else {
		u32 con = readl(&cru->cru_clksel_con[5]);
		ulong pll_rate;
		u8 div;

		if ((con >> MAC_PLL_SEL_SHIFT) & MAC_PLL_SEL_MASK)
			pll_rate = GPLL_HZ;
		else
			/* CPLL is not set */
			return -EPERM;

		div = DIV_ROUND_UP(pll_rate, freq) - 1;
		if (div <= 0x1f)
			rk_clrsetreg(&cru->cru_clksel_con[5], CLK_MAC_DIV_MASK,
				     div << CLK_MAC_DIV_SHIFT);
		else
			debug("Unsupported div for gmac:%d\n", div);

		return DIV_TO_RATE(pll_rate, div);
	}

	return ret;
}

static ulong rockchip_mmc_set_clk(struct rk322x_cru *cru, uint clk_general_rate,
				  int periph, uint freq)
{
	int src_clk_div;
	int mux;

	debug("%s: clk_general_rate=%u\n", __func__, clk_general_rate);

	/*
	 * The mmc controller divides its clock by 2 internally, so the cru
	 * must be programmed to provide twice the requested rate.
	 */
	src_clk_div = DIV_ROUND_UP(clk_general_rate / 2, freq);

	if (src_clk_div > 128) {
		src_clk_div = DIV_ROUND_UP(OSC_HZ / 2, freq);
		assert(src_clk_div - 1 < 128);
		mux = EMMC_SEL_24M;
	} else {
		mux = EMMC_SEL_GPLL;
	}

	switch (periph) {
	case HCLK_EMMC:
	case SCLK_EMMC:
		rk_clrsetreg(&cru->cru_clksel_con[11],
			     EMMC_PLL_MASK,
			     mux << EMMC_PLL_SHIFT);
		rk_clrsetreg(&cru->cru_clksel_con[12],
			     EMMC_DIV_MASK,
			     (src_clk_div - 1) << EMMC_DIV_SHIFT);
		break;
	case HCLK_SDMMC:
	case SCLK_SDMMC:
		rk_clrsetreg(&cru->cru_clksel_con[11],
			     MMC0_PLL_MASK | MMC0_DIV_MASK,
			     mux << MMC0_PLL_SHIFT |
			     (src_clk_div - 1) << MMC0_DIV_SHIFT);
		break;
	default:
		return -EINVAL;
	}

	return rockchip_mmc_get_clk(cru, clk_general_rate, periph);
}

static int rk322x_ddr_set_clk(struct rk322x_cru *cru, unsigned int set_rate)
{
	struct pll_div dpll_cfg;

	/* clk_ddrc == DPLL = 24MHz / refdiv * fbdiv / postdiv1 / postdiv2 */
	switch (set_rate) {
	case 400*MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 1, .fbdiv = 50, .postdiv1 = 3, .postdiv2 = 1};
		break;
	case 600*MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 1, .fbdiv = 75, .postdiv1 = 3, .postdiv2 = 1};
		break;
	case 800*MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 1, .fbdiv = 100, .postdiv1 = 3, .postdiv2 = 1};
		break;
	}

	/* pll enter slow-mode */
	rk_clrsetreg(&cru->cru_mode_con, DPLL_MODE_MASK,
		     DPLL_MODE_SLOW << DPLL_MODE_SHIFT);
	rkclk_set_pll(cru, CLK_DDR, &dpll_cfg);
	/* PLL enter normal-mode */
	rk_clrsetreg(&cru->cru_mode_con, DPLL_MODE_MASK,
		     DPLL_MODE_NORM << DPLL_MODE_SHIFT);

	return set_rate;
}

static ulong rk322x_clk_get_rate(struct clk *clk)
{
	struct rk322x_clk_priv *priv = dev_get_priv(clk->dev);
	ulong rate, gclk_rate;

	gclk_rate = rkclk_pll_get_rate(priv->cru, CLK_GENERAL);
	switch (clk->id) {
	case 0 ... 63:
		rate = rkclk_pll_get_rate(priv->cru, clk->id);
		break;
	case HCLK_EMMC:
	case SCLK_EMMC:
	case HCLK_SDMMC:
	case SCLK_SDMMC:
		rate = rockchip_mmc_get_clk(priv->cru, gclk_rate, clk->id);
		break;
	default:
		return -ENOENT;
	}

	return rate;
}

static ulong rk322x_clk_set_rate(struct clk *clk, ulong rate)
{
	struct rk322x_clk_priv *priv = dev_get_priv(clk->dev);
	ulong new_rate, gclk_rate;

	gclk_rate = rkclk_pll_get_rate(priv->cru, CLK_GENERAL);
	switch (clk->id) {
	case HCLK_EMMC:
	case SCLK_EMMC:
	case HCLK_SDMMC:
	case SCLK_SDMMC:
		new_rate = rockchip_mmc_set_clk(priv->cru, gclk_rate,
						clk->id, rate);
		break;
	case CLK_DDR:
		new_rate = rk322x_ddr_set_clk(priv->cru, rate);
		break;
	case SCLK_MAC:
		new_rate = rk322x_mac_set_clk(priv->cru, rate);
		break;
	case PLL_GPLL:
		return 0;
	default:
		return -ENOENT;
	}

	return new_rate;
}

static int rk322x_gmac_set_parent(struct clk *clk, struct clk *parent)
{
	struct rk322x_clk_priv *priv = dev_get_priv(clk->dev);
	struct rk322x_cru *cru = priv->cru;

	/*
	 * If the requested parent is in the same clock-controller and the id
	 * is SCLK_MAC_SRC ("sclk_gmac_src"), switch to the internal clock.
	 */
	if ((parent->dev == clk->dev) && (parent->id == SCLK_MAC_SRC)) {
		debug("%s: switching RGMII to SCLK_MAC_SRC\n", __func__);
		rk_clrsetreg(&cru->cru_clksel_con[5], BIT(5), 0);
		return 0;
	}

	/*
	 * If the requested parent is in the same clock-controller and the id
	 * is SCLK_MAC_EXTCLK (sclk_mac_extclk), switch to the external clock.
	 */
	if ((parent->dev == clk->dev) && (parent->id == SCLK_MAC_EXTCLK)) {
		debug("%s: switching RGMII to SCLK_MAC_EXTCLK\n", __func__);
		rk_clrsetreg(&cru->cru_clksel_con[5], BIT(5), BIT(5));
		return 0;
	}

	return -EINVAL;
}

static int rk322x_gmac_extclk_set_parent(struct clk *clk, struct clk *parent)
{
	struct rk322x_clk_priv *priv = dev_get_priv(clk->dev);
	const char *clock_output_name;
	struct rk322x_cru *cru = priv->cru;
	int ret;

	ret = dev_read_string_index(parent->dev, "clock-output-names",
				    parent->id, &clock_output_name);
	if (ret < 0)
		return -ENODATA;

	if (!strcmp(clock_output_name, "ext_gmac")) {
		debug("%s: switching gmac extclk to ext_gmac\n", __func__);
		rk_clrsetreg(&cru->cru_clksel_con[29], BIT(10), 0);
		return 0;
	} else if (!strcmp(clock_output_name, "phy_50m_out")) {
		debug("%s: switching gmac extclk to phy_50m_out\n", __func__);
		rk_clrsetreg(&cru->cru_clksel_con[29], BIT(10), BIT(10));
		return 0;
	}

	return -EINVAL;
}

static int rk322x_clk_set_parent(struct clk *clk, struct clk *parent)
{
	switch (clk->id) {
	case SCLK_MAC:
		return rk322x_gmac_set_parent(clk, parent);
	case SCLK_MAC_EXTCLK:
		return rk322x_gmac_extclk_set_parent(clk, parent);
	}

	debug("%s: unsupported clk %ld\n", __func__, clk->id);
	return -ENOENT;
}

static struct clk_ops rk322x_clk_ops = {
	.get_rate	= rk322x_clk_get_rate,
	.set_rate	= rk322x_clk_set_rate,
	.set_parent	= rk322x_clk_set_parent,
};

static int rk322x_clk_ofdata_to_platdata(struct udevice *dev)
{
	struct rk322x_clk_priv *priv = dev_get_priv(dev);

	priv->cru = dev_read_addr_ptr(dev);

	return 0;
}

static int rk322x_clk_probe(struct udevice *dev)
{
	struct rk322x_clk_priv *priv = dev_get_priv(dev);

	rkclk_init(priv->cru);

	return 0;
}

static int rk322x_clk_bind(struct udevice *dev)
{
	int ret;
	struct udevice *sys_child;
	struct sysreset_reg *priv;

	/* The reset driver does not have a device node, so bind it here */
	ret = device_bind_driver(dev, "rockchip_sysreset", "sysreset",
				 &sys_child);
	if (ret) {
		debug("Warning: No sysreset driver: ret=%d\n", ret);
	} else {
		priv = malloc(sizeof(struct sysreset_reg));
		priv->glb_srst_fst_value = offsetof(struct rk322x_cru,
						    cru_glb_srst_fst_value);
		priv->glb_srst_snd_value = offsetof(struct rk322x_cru,
						    cru_glb_srst_snd_value);
		sys_child->priv = priv;
	}

#if CONFIG_IS_ENABLED(RESET_ROCKCHIP)
	ret = offsetof(struct rk322x_cru, cru_softrst_con[0]);
	ret = rockchip_reset_bind(dev, ret, 9);
	if (ret)
		debug("Warning: software reset driver bind failed\n");
#endif

	return 0;
}

static const struct udevice_id rk322x_clk_ids[] = {
	{ .compatible = "rockchip,rk3228-cru" },
	{ }
};

U_BOOT_DRIVER(rockchip_rk322x_cru) = {
	.name		= "clk_rk322x",
	.id		= UCLASS_CLK,
	.of_match	= rk322x_clk_ids,
	.priv_auto_alloc_size	= sizeof(struct rk322x_clk_priv),
	.ofdata_to_platdata	= rk322x_clk_ofdata_to_platdata,
	.ops		= &rk322x_clk_ops,
	.bind		= rk322x_clk_bind,
	.probe		= rk322x_clk_probe,
};
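
/*
 * Usage sketch (illustrative only, kept out of the build with "#if 0"):
 * a consumer driver does not call rk322x_clk_set_rate()/_get_rate()
 * directly, but reaches them through the generic clk uclass API. The
 * device pointer, the clock index of 0 and the 50 MHz target below are
 * assumptions invented for this example; which clock id is actually
 * selected depends on the consumer's "clocks" property in the device tree.
 */
#if 0
#include <clk.h>
#include <linux/err.h>

static int example_configure_sdmmc_clk(struct udevice *mmc_dev)
{
	struct clk clk;
	ulong rate;
	int ret;

	/* Resolve the first clock phandle of the consumer device */
	ret = clk_get_by_index(mmc_dev, 0, &clk);
	if (ret)
		return ret;

	/*
	 * Ask for 50 MHz; if the referenced clock id is SCLK_SDMMC, the
	 * request ends up in rockchip_mmc_set_clk() above.
	 */
	rate = clk_set_rate(&clk, 50 * 1000000);
	if (IS_ERR_VALUE(rate))
		return (int)rate;

	debug("sdmmc clock now runs at %lu Hz\n", clk_get_rate(&clk));

	return 0;
}
#endif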