// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright 2017-2018 NXP.
 */

#define pr_fmt(fmt) "pll14xx: " fmt

#include <linux/bitfield.h>
#include <linux/bits.h>
#include <linux/clk-provider.h>
#include <linux/err.h>
#include <linux/export.h>
#include <linux/io.h>
#include <linux/iopoll.h>
#include <linux/slab.h>
#include <linux/jiffies.h>

#include "clk.h"

#define GNRL_CTL	0x0
#define DIV_CTL0	0x4
#define DIV_CTL1	0x8
#define LOCK_STATUS	BIT(31)
#define LOCK_SEL_MASK	BIT(29)
#define CLKE_MASK	BIT(11)
#define RST_MASK	BIT(9)
#define BYPASS_MASK	BIT(4)
#define MDIV_MASK	GENMASK(21, 12)
#define PDIV_MASK	GENMASK(9, 4)
#define SDIV_MASK	GENMASK(2, 0)
#define KDIV_MASK	GENMASK(15, 0)
#define KDIV_MIN	SHRT_MIN
#define KDIV_MAX	SHRT_MAX

#define LOCK_TIMEOUT_US		10000

struct clk_pll14xx {
	struct clk_hw			hw;
	void __iomem			*base;
	enum imx_pll14xx_type		type;
	const struct imx_pll14xx_rate_table *rate_table;
	int rate_count;
};

#define to_clk_pll14xx(_hw) container_of(_hw, struct clk_pll14xx, hw)

static const struct imx_pll14xx_rate_table imx_pll1416x_tbl[] = {
	PLL_1416X_RATE(1800000000U, 225, 3, 0),
	PLL_1416X_RATE(1600000000U, 200, 3, 0),
	PLL_1416X_RATE(1500000000U, 375, 3, 1),
	PLL_1416X_RATE(1400000000U, 350, 3, 1),
	PLL_1416X_RATE(1200000000U, 300, 3, 1),
	PLL_1416X_RATE(1000000000U, 250, 3, 1),
	PLL_1416X_RATE(800000000U, 200, 3, 1),
	PLL_1416X_RATE(750000000U, 250, 2, 2),
	PLL_1416X_RATE(700000000U, 350, 3, 2),
	PLL_1416X_RATE(640000000U, 320, 3, 2),
	PLL_1416X_RATE(600000000U, 300, 3, 2),
	PLL_1416X_RATE(320000000U, 160, 3, 2),
};

static const struct imx_pll14xx_rate_table imx_pll1443x_tbl[] = {
	PLL_1443X_RATE(1039500000U, 173, 2, 1, 16384),
	PLL_1443X_RATE(650000000U, 325, 3, 2, 0),
	PLL_1443X_RATE(594000000U, 198, 2, 2, 0),
	PLL_1443X_RATE(519750000U, 173, 2, 2, 16384),
};

struct imx_pll14xx_clk imx_1443x_pll = {
	.type = PLL_1443X,
	.rate_table = imx_pll1443x_tbl,
	.rate_count = ARRAY_SIZE(imx_pll1443x_tbl),
};
EXPORT_SYMBOL_GPL(imx_1443x_pll);

struct imx_pll14xx_clk imx_1443x_dram_pll = {
	.type = PLL_1443X,
	.rate_table = imx_pll1443x_tbl,
	.rate_count = ARRAY_SIZE(imx_pll1443x_tbl),
	.flags = CLK_GET_RATE_NOCACHE,
};
EXPORT_SYMBOL_GPL(imx_1443x_dram_pll);

struct imx_pll14xx_clk imx_1416x_pll = {
	.type = PLL_1416X,
	.rate_table = imx_pll1416x_tbl,
	.rate_count = ARRAY_SIZE(imx_pll1416x_tbl),
};
EXPORT_SYMBOL_GPL(imx_1416x_pll);

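/*
 * Look up @rate in the PLL's static rate table; returns the matching
 * entry or NULL if the rate is not listed.
 */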
static const struct imx_pll14xx_rate_table *imx_get_pll_settings(
		struct clk_pll14xx *pll, unsigned long rate)
{
	const struct imx_pll14xx_rate_table *rate_table = pll->rate_table;
	int i;

	for (i = 0; i < pll->rate_count; i++)
		if (rate == rate_table[i].rate)
			return &rate_table[i];

	return NULL;
}

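/*
 * Compute the PLL output frequency from the divider values:
 * fout = (mdiv * 65536 + kdiv) * prate / (pdiv * 65536 * 2^sdiv).
 */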
static long pll14xx_calc_rate(struct clk_pll14xx *pll, int mdiv, int pdiv,
			      int sdiv, int kdiv, unsigned long prate)
{
	u64 fvco = prate;

	/* fvco = (m * 65536 + k) * Fin / (p * 65536) */
	fvco *= (mdiv * 65536 + kdiv);
	pdiv *= 65536;

	do_div(fvco, pdiv << sdiv);

	return fvco;
}

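/*
 * Solve the rate equation for kdiv with mdiv/pdiv/sdiv fixed, rounding to
 * the nearest value and clamping the result to the 16-bit signed range.
 */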
static long pll1443x_calc_kdiv(int mdiv, int pdiv, int sdiv,
			       unsigned long rate, unsigned long prate)
{
	long kdiv;

	/* calc kdiv = round(rate * pdiv * 65536 * 2^sdiv / prate) - (mdiv * 65536) */
	kdiv = ((rate * ((pdiv * 65536) << sdiv) + prate / 2) / prate) - (mdiv * 65536);

	return clamp_t(short, kdiv, KDIV_MIN, KDIV_MAX);
}

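/*
 * Fill @t with divider settings for @rate: prefer a static table entry,
 * then a glitch-free kdiv-only adjustment of the current configuration,
 * and finally a brute-force search over pdiv/sdiv for the closest match.
 */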
static void imx_pll14xx_calc_settings(struct clk_pll14xx *pll, unsigned long rate,
				      unsigned long prate, struct imx_pll14xx_rate_table *t)
{
	u32 pll_div_ctl0, pll_div_ctl1;
	int mdiv, pdiv, sdiv, kdiv;
	long fvco, rate_min, rate_max, dist, best = LONG_MAX;
	const struct imx_pll14xx_rate_table *tt;

	/*
	 * Fractional PLL constraints:
	 *
	 * a) 1 <= p <= 63
	 * b) 64 <= m <= 1023
	 * c) 0 <= s <= 6
	 * d) -32768 <= k <= 32767
	 *
	 * fvco = (m * 65536 + k) * prate / (p * 65536)
	 */

	/* First try if we can get the desired rate from one of the static entries */
	tt = imx_get_pll_settings(pll, rate);
	if (tt) {
		pr_debug("%s: in=%ld, want=%ld, Using PLL setting from table\n",
			 clk_hw_get_name(&pll->hw), prate, rate);
		t->rate = tt->rate;
		t->mdiv = tt->mdiv;
		t->pdiv = tt->pdiv;
		t->sdiv = tt->sdiv;
		t->kdiv = tt->kdiv;
		return;
	}

	pll_div_ctl0 = readl_relaxed(pll->base + DIV_CTL0);
	mdiv = FIELD_GET(MDIV_MASK, pll_div_ctl0);
	pdiv = FIELD_GET(PDIV_MASK, pll_div_ctl0);
	sdiv = FIELD_GET(SDIV_MASK, pll_div_ctl0);
	pll_div_ctl1 = readl_relaxed(pll->base + DIV_CTL1);

	/* Then see if we can get the desired rate by only adjusting kdiv (glitch free) */
	rate_min = pll14xx_calc_rate(pll, mdiv, pdiv, sdiv, KDIV_MIN, prate);
	rate_max = pll14xx_calc_rate(pll, mdiv, pdiv, sdiv, KDIV_MAX, prate);

	if (rate >= rate_min && rate <= rate_max) {
		kdiv = pll1443x_calc_kdiv(mdiv, pdiv, sdiv, rate, prate);
		pr_debug("%s: in=%ld, want=%ld Only adjust kdiv %ld -> %d\n",
			 clk_hw_get_name(&pll->hw), prate, rate,
			 FIELD_GET(KDIV_MASK, pll_div_ctl1), kdiv);
		fvco = pll14xx_calc_rate(pll, mdiv, pdiv, sdiv, kdiv, prate);
		t->rate = (unsigned int)fvco;
		t->mdiv = mdiv;
		t->pdiv = pdiv;
		t->sdiv = sdiv;
		t->kdiv = kdiv;
		return;
	}

	/* Finally calculate best values */
	for (pdiv = 1; pdiv <= 63; pdiv++) {
		for (sdiv = 0; sdiv <= 6; sdiv++) {
			/* calc mdiv = round(rate * pdiv * 2^sdiv / prate) */
			mdiv = DIV_ROUND_CLOSEST(rate * (pdiv << sdiv), prate);
			mdiv = clamp(mdiv, 64, 1023);

			kdiv = pll1443x_calc_kdiv(mdiv, pdiv, sdiv, rate, prate);
			fvco = pll14xx_calc_rate(pll, mdiv, pdiv, sdiv, kdiv, prate);

			/* best match */
			dist = abs((long)rate - (long)fvco);
			if (dist < best) {
				best = dist;
				t->rate = (unsigned int)fvco;
				t->mdiv = mdiv;
				t->pdiv = pdiv;
				t->sdiv = sdiv;
				t->kdiv = kdiv;

				if (!dist)
					goto found;
			}
		}
	}
found:
	pr_debug("%s: in=%ld, want=%ld got=%d (pdiv=%d sdiv=%d mdiv=%d kdiv=%d)\n",
		 clk_hw_get_name(&pll->hw), prate, rate, t->rate, t->pdiv, t->sdiv,
		 t->mdiv, t->kdiv);
}

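/*
 * Round to the closest rate-table entry at or below the requested rate;
 * requests below the table fall back to the smallest supported rate.
 */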
static long clk_pll1416x_round_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long *prate)
{
	struct clk_pll14xx *pll = to_clk_pll14xx(hw);
	const struct imx_pll14xx_rate_table *rate_table = pll->rate_table;
	int i;

	/* Assuming rate_table is in descending order */
	for (i = 0; i < pll->rate_count; i++)
		if (rate >= rate_table[i].rate)
			return rate_table[i].rate;

	/* return minimum supported value */
	return rate_table[pll->rate_count - 1].rate;
}

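/*
 * For the fractional PLL, report the rate that the divider calculation
 * would actually produce for the requested rate.
 */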
static long clk_pll1443x_round_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long *prate)
{
	struct clk_pll14xx *pll = to_clk_pll14xx(hw);
	struct imx_pll14xx_rate_table t;

	imx_pll14xx_calc_settings(pll, rate, *prate, &t);

	return t.rate;
}

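/*
 * Read back mdiv/pdiv/sdiv (and kdiv on the 1443x) from the divider
 * registers and convert them into the current output rate.
 */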
static unsigned long clk_pll14xx_recalc_rate(struct clk_hw *hw,
					     unsigned long parent_rate)
{
	struct clk_pll14xx *pll = to_clk_pll14xx(hw);
	u32 mdiv, pdiv, sdiv, kdiv, pll_div_ctl0, pll_div_ctl1;

	pll_div_ctl0 = readl_relaxed(pll->base + DIV_CTL0);
	mdiv = FIELD_GET(MDIV_MASK, pll_div_ctl0);
	pdiv = FIELD_GET(PDIV_MASK, pll_div_ctl0);
	sdiv = FIELD_GET(SDIV_MASK, pll_div_ctl0);

	if (pll->type == PLL_1443X) {
		pll_div_ctl1 = readl_relaxed(pll->base + DIV_CTL1);
		kdiv = (s16)FIELD_GET(KDIV_MASK, pll_div_ctl1);
	} else {
		kdiv = 0;
	}

	return pll14xx_calc_rate(pll, mdiv, pdiv, sdiv, kdiv, parent_rate);
}

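/*
 * Return true if the requested settings differ from the programmed mdiv
 * or pdiv; changing either requires the full reset sequence in set_rate.
 */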
static inline bool clk_pll14xx_mp_change(const struct imx_pll14xx_rate_table *rate,
					 u32 pll_div)
{
	u32 old_mdiv, old_pdiv;

	old_mdiv = FIELD_GET(MDIV_MASK, pll_div);
	old_pdiv = FIELD_GET(PDIV_MASK, pll_div);

	return rate->mdiv != old_mdiv || rate->pdiv != old_pdiv;
}

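/* Poll GNRL_CTL until the lock bit is set, or time out after 10 ms. */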
static int clk_pll14xx_wait_lock(struct clk_pll14xx *pll)
{
	u32 val;

	return readl_poll_timeout(pll->base + GNRL_CTL, val, val & LOCK_STATUS, 0,
			LOCK_TIMEOUT_US);
}

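/*
 * Program a new rate on the integer PLL. Rates must come from the static
 * table; if only sdiv changes the divider is updated in place, otherwise
 * the PLL is reprogrammed through the reset/bypass sequence and we wait
 * for it to relock.
 */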
static int clk_pll1416x_set_rate(struct clk_hw *hw, unsigned long drate,
				 unsigned long prate)
{
	struct clk_pll14xx *pll = to_clk_pll14xx(hw);
	const struct imx_pll14xx_rate_table *rate;
	u32 tmp, div_val;
	int ret;

	rate = imx_get_pll_settings(pll, drate);
	if (!rate) {
		pr_err("Invalid rate %lu for pll clk %s\n", drate,
		       clk_hw_get_name(hw));
		return -EINVAL;
	}

	tmp = readl_relaxed(pll->base + DIV_CTL0);

	if (!clk_pll14xx_mp_change(rate, tmp)) {
		tmp &= ~SDIV_MASK;
		tmp |= FIELD_PREP(SDIV_MASK, rate->sdiv);
		writel_relaxed(tmp, pll->base + DIV_CTL0);

		return 0;
	}

	/* Bypass clock and set lock to pll output lock */
	tmp = readl_relaxed(pll->base + GNRL_CTL);
	tmp |= LOCK_SEL_MASK;
	writel_relaxed(tmp, pll->base + GNRL_CTL);

	/* Enable RST */
	tmp &= ~RST_MASK;
	writel_relaxed(tmp, pll->base + GNRL_CTL);

	/* Enable BYPASS */
	tmp |= BYPASS_MASK;
	writel(tmp, pll->base + GNRL_CTL);

	div_val = FIELD_PREP(MDIV_MASK, rate->mdiv) | FIELD_PREP(PDIV_MASK, rate->pdiv) |
		FIELD_PREP(SDIV_MASK, rate->sdiv);
	writel_relaxed(div_val, pll->base + DIV_CTL0);

	/*
	 * According to the spec, t3 - t2 needs to be greater than
	 * both 1us and 1/FREF.
	 * FREF is FIN / Prediv; with prediv in [1, 63], 3us covers
	 * the worst case.
	 */
	udelay(3);

	/* Disable RST */
	tmp |= RST_MASK;
	writel_relaxed(tmp, pll->base + GNRL_CTL);

	/* Wait Lock */
	ret = clk_pll14xx_wait_lock(pll);
	if (ret)
		return ret;

	/* Bypass */
	tmp &= ~BYPASS_MASK;
	writel_relaxed(tmp, pll->base + GNRL_CTL);

	return 0;
}

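/*
 * Program a new rate on the fractional PLL. If mdiv/pdiv are unchanged,
 * only sdiv/kdiv are rewritten (glitch free); otherwise the PLL goes
 * through the full reset/bypass sequence and we wait for lock.
 */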
static int clk_pll1443x_set_rate(struct clk_hw *hw, unsigned long drate,
				 unsigned long prate)
{
	struct clk_pll14xx *pll = to_clk_pll14xx(hw);
	struct imx_pll14xx_rate_table rate;
	u32 gnrl_ctl, div_ctl0;
	int ret;

	imx_pll14xx_calc_settings(pll, drate, prate, &rate);

	div_ctl0 = readl_relaxed(pll->base + DIV_CTL0);

	if (!clk_pll14xx_mp_change(&rate, div_ctl0)) {
		/* only sdiv and/or kdiv changed - no need to RESET PLL */
		div_ctl0 &= ~SDIV_MASK;
		div_ctl0 |= FIELD_PREP(SDIV_MASK, rate.sdiv);
		writel_relaxed(div_ctl0, pll->base + DIV_CTL0);

		writel_relaxed(FIELD_PREP(KDIV_MASK, rate.kdiv),
			       pll->base + DIV_CTL1);

		return 0;
	}

	/* Enable RST */
	gnrl_ctl = readl_relaxed(pll->base + GNRL_CTL);
	gnrl_ctl &= ~RST_MASK;
	writel_relaxed(gnrl_ctl, pll->base + GNRL_CTL);

	/* Enable BYPASS */
	gnrl_ctl |= BYPASS_MASK;
	writel_relaxed(gnrl_ctl, pll->base + GNRL_CTL);

	div_ctl0 = FIELD_PREP(MDIV_MASK, rate.mdiv) |
		   FIELD_PREP(PDIV_MASK, rate.pdiv) |
		   FIELD_PREP(SDIV_MASK, rate.sdiv);
	writel_relaxed(div_ctl0, pll->base + DIV_CTL0);

	writel_relaxed(FIELD_PREP(KDIV_MASK, rate.kdiv), pll->base + DIV_CTL1);

	/*
	 * According to the spec, t3 - t2 needs to be greater than
	 * both 1us and 1/FREF.
	 * FREF is FIN / Prediv; with prediv in [1, 63], 3us covers
	 * the worst case.
	 */
	udelay(3);

	/* Disable RST */
	gnrl_ctl |= RST_MASK;
	writel_relaxed(gnrl_ctl, pll->base + GNRL_CTL);

	/* Wait Lock */
	ret = clk_pll14xx_wait_lock(pll);
	if (ret)
		return ret;

	/* Bypass */
	gnrl_ctl &= ~BYPASS_MASK;
	writel_relaxed(gnrl_ctl, pll->base + GNRL_CTL);

	return 0;
}

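/*
 * Take the PLL out of reset (with bypass enabled while it locks) and
 * switch to the PLL output once the lock bit is set. Nothing to do if
 * the PLL is already running.
 */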
static int clk_pll14xx_prepare(struct clk_hw *hw)
{
	struct clk_pll14xx *pll = to_clk_pll14xx(hw);
	u32 val;
	int ret;

	/*
	 * RESETB = 1 from 0, PLL starts its normal
	 * operation after lock time
	 */
	val = readl_relaxed(pll->base + GNRL_CTL);
	if (val & RST_MASK)
		return 0;
	val |= BYPASS_MASK;
	writel_relaxed(val, pll->base + GNRL_CTL);
	val |= RST_MASK;
	writel_relaxed(val, pll->base + GNRL_CTL);

	ret = clk_pll14xx_wait_lock(pll);
	if (ret)
		return ret;

	val &= ~BYPASS_MASK;
	writel_relaxed(val, pll->base + GNRL_CTL);

	return 0;
}

static int clk_pll14xx_is_prepared(struct clk_hw *hw)
{
	struct clk_pll14xx *pll = to_clk_pll14xx(hw);
	u32 val;

	val = readl_relaxed(pll->base + GNRL_CTL);

	return (val & RST_MASK) ? 1 : 0;
}

static void clk_pll14xx_unprepare(struct clk_hw *hw)
{
	struct clk_pll14xx *pll = to_clk_pll14xx(hw);
	u32 val;

	/*
	 * Set RST to 0, power down mode is enabled and
	 * every digital block is reset
	 */
	val = readl_relaxed(pll->base + GNRL_CTL);
	val &= ~RST_MASK;
	writel_relaxed(val, pll->base + GNRL_CTL);
}

static const struct clk_ops clk_pll1416x_ops = {
	.prepare = clk_pll14xx_prepare,
	.unprepare = clk_pll14xx_unprepare,
	.is_prepared = clk_pll14xx_is_prepared,
	.recalc_rate = clk_pll14xx_recalc_rate,
	.round_rate = clk_pll1416x_round_rate,
	.set_rate = clk_pll1416x_set_rate,
};

static const struct clk_ops clk_pll1416x_min_ops = {
	.recalc_rate = clk_pll14xx_recalc_rate,
};

static const struct clk_ops clk_pll1443x_ops = {
	.prepare = clk_pll14xx_prepare,
	.unprepare = clk_pll14xx_unprepare,
	.is_prepared = clk_pll14xx_is_prepared,
	.recalc_rate = clk_pll14xx_recalc_rate,
	.round_rate = clk_pll1443x_round_rate,
	.set_rate = clk_pll1443x_set_rate,
};

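/*
 * Register a 14xx-series PLL as a clk_hw. The ops are picked based on the
 * PLL type (1416x integer vs. 1443x fractional); bypass is cleared at
 * registration so the PLL output is used.
 */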
struct clk_hw *imx_dev_clk_hw_pll14xx(struct device *dev, const char *name,
				      const char *parent_name, void __iomem *base,
				      const struct imx_pll14xx_clk *pll_clk)
{
	struct clk_pll14xx *pll;
	struct clk_hw *hw;
	struct clk_init_data init;
	int ret;
	u32 val;

	pll = kzalloc(sizeof(*pll), GFP_KERNEL);
	if (!pll)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.flags = pll_clk->flags;
	init.parent_names = &parent_name;
	init.num_parents = 1;

	switch (pll_clk->type) {
	case PLL_1416X:
		if (!pll_clk->rate_table)
			init.ops = &clk_pll1416x_min_ops;
		else
			init.ops = &clk_pll1416x_ops;
		break;
	case PLL_1443X:
		init.ops = &clk_pll1443x_ops;
		break;
	default:
		pr_err("Unknown pll type for pll clk %s\n", name);
		kfree(pll);
		return ERR_PTR(-EINVAL);
	}

	pll->base = base;
	pll->hw.init = &init;
	pll->type = pll_clk->type;
	pll->rate_table = pll_clk->rate_table;
	pll->rate_count = pll_clk->rate_count;

	val = readl_relaxed(pll->base + GNRL_CTL);
	val &= ~BYPASS_MASK;
	writel_relaxed(val, pll->base + GNRL_CTL);

	hw = &pll->hw;

	ret = clk_hw_register(dev, hw);
	if (ret) {
		pr_err("failed to register pll %s %d\n", name, ret);
		kfree(pll);
		return ERR_PTR(ret);
	}

	return hw;
}
EXPORT_SYMBOL_GPL(imx_dev_clk_hw_pll14xx);