// SPDX-License-Identifier: GPL-2.0
/*
 * MediaTek common clock driver
 *
 * Copyright (C) 2018 MediaTek Inc.
 * Author: Ryder Lee <ryder.lee@mediatek.com>
 */

#include <common.h>
#include <clk-uclass.h>
#include <div64.h>
#include <dm.h>
#include <asm/io.h>

#include "clk-mtk.h"

#define REG_CON0			0
#define REG_CON1			4

#define CON0_BASE_EN			BIT(0)
#define CON0_PWR_ON			BIT(0)
#define CON0_ISO_EN			BIT(1)
#define CON1_PCW_CHG			BIT(31)

#define POSTDIV_MASK			0x7
#define INTEGER_BITS			7

/* scpsys clock off control */
#define CLK_SCP_CFG0			0x200
#define CLK_SCP_CFG1			0x204
#define SCP_ARMCK_OFF_EN		GENMASK(9, 0)
#define SCP_AXICK_DCM_DIS_EN		BIT(0)
#define SCP_AXICK_26M_SEL_EN		BIT(4)

/* shared functions */

/*
 * In case rate-change propagation to the parent clocks is undesirable,
 * this function is called (recursively, via clk_get_rate()) to locate
 * the parent clock and compute an accurate frequency.
 */
static int mtk_clk_find_parent_rate(struct clk *clk, int id,
				    const struct driver *drv)
{
	struct clk parent = { .id = id, };

	if (drv) {
		struct udevice *dev;

		if (uclass_get_device_by_driver(UCLASS_CLK, drv, &dev))
			return -ENODEV;

		parent.dev = dev;
	} else {
		parent.dev = clk->dev;
	}

	return clk_get_rate(&parent);
}
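
/*
 * Illustrative sketch (not part of the driver): a topckgen clock whose
 * parent lives in apmixedsys resolves its rate roughly like this; the
 * clock ID below is hypothetical.
 *
 *	rate = mtk_clk_find_parent_rate(clk, CLK_APMIXED_SOME_PLL,
 *					DM_GET_DRIVER(mtk_clk_apmixedsys));
 *
 * clk_get_rate() on the constructed parent then dispatches to that
 * device's get_rate() op, which may walk further up the tree.
 */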

static int mtk_clk_mux_set_parent(void __iomem *base, u32 parent,
				  const struct mtk_composite *mux)
{
	u32 val, index = 0;

	while (mux->parent[index] != parent)
		if (++index == mux->num_parents)
			return -EINVAL;

	/* switch mux to the selected parent */
	val = readl(base + mux->mux_reg);
	val &= ~(mux->mux_mask << mux->mux_shift);

	val |= index << mux->mux_shift;
	writel(val, base + mux->mux_reg);

	return 0;
}
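
/*
 * Worked example (hypothetical composite): with mux_reg = 0x40,
 * mux_shift = 8, mux_mask = 0x3 and the requested parent found at
 * index 2, the read-modify-write above clears bits [9:8] of the mux
 * register at offset 0x40 and writes 2 << 8, leaving every other
 * field in that register untouched.
 */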

/* apmixedsys functions */

static unsigned long __mtk_pll_recalc_rate(const struct mtk_pll_data *pll,
					   u32 fin, u32 pcw, int postdiv)
{
	int pcwbits = pll->pcwbits;
	int pcwfbits;
	u64 vco;
	u8 c = 0;

	/* The fractional part of the PLL divider. */
	pcwfbits = pcwbits > INTEGER_BITS ? pcwbits - INTEGER_BITS : 0;

	vco = (u64)fin * pcw;

	if (pcwfbits && (vco & GENMASK(pcwfbits - 1, 0)))
		c = 1;

	vco >>= pcwfbits;

	if (c)
		vco++;

	return ((unsigned long)vco + postdiv - 1) / postdiv;
}
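
/*
 * Worked example (hypothetical values): with fin = 26 MHz,
 * pll->pcwbits = 22 (so pcwfbits = 15), pcw = 0x140000 (i.e. 40 << 15)
 * and postdiv = 2, the VCO is 26 MHz * 40 = 1040 MHz and the PLL
 * output is 1040 MHz / 2 = 520 MHz.
 */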

/**
 * MediaTek PLLs are configured through their pcw value. The pcw value
 * describes a divider in the PLL feedback loop which consists of 7 bits
 * for the integer part and the remaining bits (if present) for the
 * fractional part. They also have a 3-bit power-of-two post divider.
 */
static void mtk_pll_set_rate_regs(struct clk *clk, u32 pcw, int postdiv)
{
	struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
	const struct mtk_pll_data *pll = &priv->tree->plls[clk->id];
	u32 val;

	/* set postdiv */
	val = readl(priv->base + pll->pd_reg);
	val &= ~(POSTDIV_MASK << pll->pd_shift);
	val |= (ffs(postdiv) - 1) << pll->pd_shift;

	/* postdiv and pcw need to be set together if they share a register */
	if (pll->pd_reg != pll->pcw_reg) {
		writel(val, priv->base + pll->pd_reg);
		val = readl(priv->base + pll->pcw_reg);
	}

	/* set pcw */
	val &= ~GENMASK(pll->pcw_shift + pll->pcwbits - 1, pll->pcw_shift);
	val |= pcw << pll->pcw_shift;
	val &= ~CON1_PCW_CHG;
	writel(val, priv->base + pll->pcw_reg);

	val |= CON1_PCW_CHG;
	writel(val, priv->base + pll->pcw_reg);

	udelay(20);
}
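
/*
 * Illustrative case (hypothetical PLL layout): if pd_reg and pcw_reg
 * point at the same register, with the post divider at pd_shift = 24
 * and the pcw field at pcw_shift = 0, the intermediate write above is
 * skipped and a single write updates both fields; PCW_CHG is then
 * raised in a second write so the PLL picks up the new setting.
 */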

/**
 * mtk_pll_calc_values - calculate good values for a given target frequency.
 * @clk:	The clk
 * @pcw:	The pcw value (output)
 * @postdiv:	The post divider (output)
 * @freq:	The desired target frequency
 */
static void mtk_pll_calc_values(struct clk *clk, u32 *pcw, u32 *postdiv,
				u32 freq)
{
	struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
	const struct mtk_pll_data *pll = &priv->tree->plls[clk->id];
	unsigned long fmin = 1000 * MHZ;
	u64 _pcw;
	u32 val;

	if (freq > pll->fmax)
		freq = pll->fmax;

	for (val = 0; val < 5; val++) {
		*postdiv = 1 << val;
		if ((u64)freq * *postdiv >= fmin)
			break;
	}

	/* _pcw = freq * postdiv * 2^pcwfbits / xtal2_rate */
	_pcw = ((u64)freq << val) << (pll->pcwbits - INTEGER_BITS);
	do_div(_pcw, priv->tree->xtal2_rate);

	*pcw = (u32)_pcw;
}
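
/*
 * Worked example (hypothetical numbers): asking for 520 MHz with a
 * 26 MHz xtal2_rate and pcwbits = 22 first picks postdiv = 2, since
 * 520 MHz * 2 = 1040 MHz is the smallest multiple at or above the
 * 1 GHz VCO floor, and then yields pcw = 1040 MHz * 2^15 / 26 MHz
 * = 0x140000, matching the recalc example above.
 */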

static ulong mtk_apmixedsys_set_rate(struct clk *clk, ulong rate)
{
	u32 pcw = 0;
	u32 postdiv;

	mtk_pll_calc_values(clk, &pcw, &postdiv, rate);
	mtk_pll_set_rate_regs(clk, pcw, postdiv);

	return 0;
}

static ulong mtk_apmixedsys_get_rate(struct clk *clk)
{
	struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
	const struct mtk_pll_data *pll = &priv->tree->plls[clk->id];
	u32 postdiv;
	u32 pcw;

	postdiv = (readl(priv->base + pll->pd_reg) >> pll->pd_shift) &
		   POSTDIV_MASK;
	postdiv = 1 << postdiv;

	pcw = readl(priv->base + pll->pcw_reg) >> pll->pcw_shift;
	pcw &= GENMASK(pll->pcwbits - 1, 0);

	return __mtk_pll_recalc_rate(pll, priv->tree->xtal2_rate,
				     pcw, postdiv);
}

static int mtk_apmixedsys_enable(struct clk *clk)
{
	struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
	const struct mtk_pll_data *pll = &priv->tree->plls[clk->id];
	u32 r;

	r = readl(priv->base + pll->pwr_reg) | CON0_PWR_ON;
	writel(r, priv->base + pll->pwr_reg);
	udelay(1);

	r = readl(priv->base + pll->pwr_reg) & ~CON0_ISO_EN;
	writel(r, priv->base + pll->pwr_reg);
	udelay(1);

	r = readl(priv->base + pll->reg + REG_CON0);
	r |= pll->en_mask;
	writel(r, priv->base + pll->reg + REG_CON0);

	udelay(20);

	if (pll->flags & HAVE_RST_BAR) {
		r = readl(priv->base + pll->reg + REG_CON0);
		r |= pll->rst_bar_mask;
		writel(r, priv->base + pll->reg + REG_CON0);
	}

	return 0;
}

static int mtk_apmixedsys_disable(struct clk *clk)
{
	struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
	const struct mtk_pll_data *pll = &priv->tree->plls[clk->id];
	u32 r;

	if (pll->flags & HAVE_RST_BAR) {
		r = readl(priv->base + pll->reg + REG_CON0);
		r &= ~pll->rst_bar_mask;
		writel(r, priv->base + pll->reg + REG_CON0);
	}

	r = readl(priv->base + pll->reg + REG_CON0);
	r &= ~CON0_BASE_EN;
	writel(r, priv->base + pll->reg + REG_CON0);

	r = readl(priv->base + pll->pwr_reg) | CON0_ISO_EN;
	writel(r, priv->base + pll->pwr_reg);

	r = readl(priv->base + pll->pwr_reg) & ~CON0_PWR_ON;
	writel(r, priv->base + pll->pwr_reg);

	return 0;
}

/* topckgen functions */

static ulong mtk_factor_recalc_rate(const struct mtk_fixed_factor *fdiv,
				    ulong parent_rate)
{
	u64 rate = parent_rate * fdiv->mult;

	do_div(rate, fdiv->div);

	return rate;
}

static int mtk_topckgen_get_factor_rate(struct clk *clk, u32 off)
{
	struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
	const struct mtk_fixed_factor *fdiv = &priv->tree->fdivs[off];
	ulong rate;

	switch (fdiv->flags & CLK_PARENT_MASK) {
	case CLK_PARENT_APMIXED:
		rate = mtk_clk_find_parent_rate(clk, fdiv->parent,
				DM_GET_DRIVER(mtk_clk_apmixedsys));
		break;
	case CLK_PARENT_TOPCKGEN:
		rate = mtk_clk_find_parent_rate(clk, fdiv->parent, NULL);
		break;

	default:
		rate = priv->tree->xtal_rate;
	}

	return mtk_factor_recalc_rate(fdiv, rate);
}

static int mtk_topckgen_get_mux_rate(struct clk *clk, u32 off)
{
	struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
	const struct mtk_composite *mux = &priv->tree->muxes[off];
	u32 index;

	index = readl(priv->base + mux->mux_reg);
	index &= mux->mux_mask << mux->mux_shift;
	index = index >> mux->mux_shift;

	if (mux->parent[index])
		return mtk_clk_find_parent_rate(clk, mux->parent[index],
						NULL);

	return priv->tree->xtal_rate;
}

static ulong mtk_topckgen_get_rate(struct clk *clk)
{
	struct mtk_clk_priv *priv = dev_get_priv(clk->dev);

	if (clk->id < priv->tree->fdivs_offs)
		return priv->tree->fclks[clk->id].rate;
	else if (clk->id < priv->tree->muxes_offs)
		return mtk_topckgen_get_factor_rate(clk, clk->id -
						    priv->tree->fdivs_offs);
	else
		return mtk_topckgen_get_mux_rate(clk, clk->id -
						 priv->tree->muxes_offs);
}
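
/*
 * Illustrative ID layout (hypothetical tree): with fdivs_offs = 10 and
 * muxes_offs = 30, IDs 0-9 are fixed-rate clocks read straight from
 * fclks[id], IDs 10-29 are fixed factors looked up at fdivs[id - 10]
 * by mtk_topckgen_get_factor_rate(), and IDs 30 and up are composite
 * muxes looked up at muxes[id - 30] by mtk_topckgen_get_mux_rate().
 */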

static int mtk_topckgen_enable(struct clk *clk)
{
	struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
	const struct mtk_composite *mux;
	u32 val;

	if (clk->id < priv->tree->muxes_offs)
		return 0;

	mux = &priv->tree->muxes[clk->id - priv->tree->muxes_offs];
	if (mux->gate_shift < 0)
		return 0;

	/* enable clock gate */
	val = readl(priv->base + mux->gate_reg);
	val &= ~BIT(mux->gate_shift);
	writel(val, priv->base + mux->gate_reg);

	if (mux->flags & CLK_DOMAIN_SCPSYS) {
		/* enable scpsys clock off control */
		writel(SCP_ARMCK_OFF_EN, priv->base + CLK_SCP_CFG0);
		writel(SCP_AXICK_DCM_DIS_EN | SCP_AXICK_26M_SEL_EN,
		       priv->base + CLK_SCP_CFG1);
	}

	return 0;
}

static int mtk_topckgen_disable(struct clk *clk)
{
	struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
	const struct mtk_composite *mux;
	u32 val;

	if (clk->id < priv->tree->muxes_offs)
		return 0;

	mux = &priv->tree->muxes[clk->id - priv->tree->muxes_offs];
	if (mux->gate_shift < 0)
		return 0;

	/* disable clock gate */
	val = readl(priv->base + mux->gate_reg);
	val |= BIT(mux->gate_shift);
	writel(val, priv->base + mux->gate_reg);

	return 0;
}

static int mtk_topckgen_set_parent(struct clk *clk, struct clk *parent)
{
	struct mtk_clk_priv *priv = dev_get_priv(clk->dev);

	if (clk->id < priv->tree->muxes_offs)
		return 0;

	return mtk_clk_mux_set_parent(priv->base, parent->id,
			&priv->tree->muxes[clk->id - priv->tree->muxes_offs]);
}

/* CG functions */

static int mtk_clk_gate_enable(struct clk *clk)
{
	struct mtk_cg_priv *priv = dev_get_priv(clk->dev);
	const struct mtk_gate *gate = &priv->gates[clk->id];
	u32 bit = BIT(gate->shift);

	switch (gate->flags & CLK_GATE_MASK) {
	case CLK_GATE_SETCLR:
		writel(bit, priv->base + gate->regs->clr_ofs);
		break;
	case CLK_GATE_NO_SETCLR_INV:
		clrsetbits_le32(priv->base + gate->regs->sta_ofs, bit, bit);
		break;

	default:
		return -EINVAL;
	}

	return 0;
}

static int mtk_clk_gate_disable(struct clk *clk)
{
	struct mtk_cg_priv *priv = dev_get_priv(clk->dev);
	const struct mtk_gate *gate = &priv->gates[clk->id];
	u32 bit = BIT(gate->shift);

	switch (gate->flags & CLK_GATE_MASK) {
	case CLK_GATE_SETCLR:
		writel(bit, priv->base + gate->regs->set_ofs);
		break;
	case CLK_GATE_NO_SETCLR_INV:
		clrsetbits_le32(priv->base + gate->regs->sta_ofs, bit, 0);
		break;

	default:
		return -EINVAL;
	}

	return 0;
}

static ulong mtk_clk_gate_get_rate(struct clk *clk)
{
	struct mtk_cg_priv *priv = dev_get_priv(clk->dev);
	const struct mtk_gate *gate = &priv->gates[clk->id];

	switch (gate->flags & CLK_PARENT_MASK) {
	case CLK_PARENT_APMIXED:
		return mtk_clk_find_parent_rate(clk, gate->parent,
				DM_GET_DRIVER(mtk_clk_apmixedsys));
	case CLK_PARENT_TOPCKGEN:
		return mtk_clk_find_parent_rate(clk, gate->parent,
				DM_GET_DRIVER(mtk_clk_topckgen));

	default:
		return priv->tree->xtal_rate;
	}
}
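
/*
 * Illustrative gate entry (hypothetical values, not from a real SoC
 * table): a struct mtk_gate with .shift = 5, .regs pointing at a
 * set/clr/sta register triplet and
 * .flags = CLK_GATE_SETCLR | CLK_PARENT_TOPCKGEN is enabled by writing
 * BIT(5) to regs->clr_ofs, disabled by writing BIT(5) to regs->set_ofs,
 * and reports the rate of its topckgen parent via
 * mtk_clk_find_parent_rate().
 */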

const struct clk_ops mtk_clk_apmixedsys_ops = {
	.enable = mtk_apmixedsys_enable,
	.disable = mtk_apmixedsys_disable,
	.set_rate = mtk_apmixedsys_set_rate,
	.get_rate = mtk_apmixedsys_get_rate,
};

const struct clk_ops mtk_clk_topckgen_ops = {
	.enable = mtk_topckgen_enable,
	.disable = mtk_topckgen_disable,
	.get_rate = mtk_topckgen_get_rate,
	.set_parent = mtk_topckgen_set_parent,
};

const struct clk_ops mtk_clk_gate_ops = {
	.enable = mtk_clk_gate_enable,
	.disable = mtk_clk_gate_disable,
	.get_rate = mtk_clk_gate_get_rate,
};

int mtk_common_clk_init(struct udevice *dev,
			const struct mtk_clk_tree *tree)
{
	struct mtk_clk_priv *priv = dev_get_priv(dev);

	priv->base = dev_read_addr_ptr(dev);
	if (!priv->base)
		return -ENOENT;

	priv->tree = tree;

	return 0;
}

int mtk_common_clk_gate_init(struct udevice *dev,
			     const struct mtk_clk_tree *tree,
			     const struct mtk_gate *gates)
{
	struct mtk_cg_priv *priv = dev_get_priv(dev);

	priv->base = dev_read_addr_ptr(dev);
	if (!priv->base)
		return -ENOENT;

	priv->tree = tree;
	priv->gates = gates;

	return 0;
}
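
/*
 * Usage sketch (hypothetical SoC glue, for illustration only; the
 * "mt67xx" names and compatible data below are made up):
 *
 *	static int mt67xx_topckgen_probe(struct udevice *dev)
 *	{
 *		return mtk_common_clk_init(dev, &mt67xx_clk_tree);
 *	}
 *
 *	U_BOOT_DRIVER(mtk_clk_topckgen) = {
 *		.name = "mt67xx-topckgen",
 *		.id = UCLASS_CLK,
 *		.of_match = mt67xx_topckgen_compat,
 *		.probe = mt67xx_topckgen_probe,
 *		.priv_auto_alloc_size = sizeof(struct mtk_clk_priv),
 *		.ops = &mtk_clk_topckgen_ops,
 *	};
 *
 * Naming the driver mtk_clk_topckgen also lets the gate driver resolve
 * it through DM_GET_DRIVER(mtk_clk_topckgen) as done above.
 */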