/*
 * Copyright (c) 2013, The Linux Foundation. All rights reserved.
 *
 * This software is licensed under the terms of the GNU General Public
 * License version 2, as published by the Free Software Foundation, and
 * may be copied, distributed, and modified under those terms.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include <linux/kernel.h>
#include <linux/bitops.h>
#include <linux/err.h>
#include <linux/export.h>
#include <linux/clk-provider.h>
#include <linux/regmap.h>

#include <asm/div64.h>

#include "clk-rcg.h"
#include "common.h"

static u32 ns_to_src(struct src_sel *s, u32 ns)
{
	ns >>= s->src_sel_shift;
	ns &= SRC_SEL_MASK;
	return ns;
}

static u32 src_to_ns(struct src_sel *s, u8 src, u32 ns)
{
	u32 mask;

	mask = SRC_SEL_MASK;
	mask <<= s->src_sel_shift;
	ns &= ~mask;

	ns |= src << s->src_sel_shift;
	return ns;
}

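/*
 * Translate the hardware source select value read from the NS register back
 * into a clock framework parent index using the per-clock parent_map table.
 */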
static u8 clk_rcg_get_parent(struct clk_hw *hw)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	int num_parents = __clk_get_num_parents(hw->clk);
	u32 ns;
	int i;

	regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	ns = ns_to_src(&rcg->s, ns);
	for (i = 0; i < num_parents; i++)
		if (ns == rcg->s.parent_map[i])
			return i;

	return -EINVAL;
}

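/*
 * A dyn_rcg has two banks of configuration registers; the bit at
 * mux_sel_bit in the bank register selects which bank the hardware is
 * currently running from.
 */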
static int reg_to_bank(struct clk_dyn_rcg *rcg, u32 bank)
{
	bank &= BIT(rcg->mux_sel_bit);
	return !!bank;
}

static u8 clk_dyn_rcg_get_parent(struct clk_hw *hw)
{
	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
	int num_parents = __clk_get_num_parents(hw->clk);
	u32 ns, reg;
	int bank;
	int i;
	struct src_sel *s;

	regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
	bank = reg_to_bank(rcg, reg);
	s = &rcg->s[bank];

	regmap_read(rcg->clkr.regmap, rcg->ns_reg[bank], &ns);
	ns = ns_to_src(s, ns);

	for (i = 0; i < num_parents; i++)
		if (ns == s->parent_map[i])
			return i;

	return -EINVAL;
}

static int clk_rcg_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	u32 ns;

	regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	ns = src_to_ns(&rcg->s, rcg->s.parent_map[index], ns);
	regmap_write(rcg->clkr.regmap, rcg->ns_reg, ns);

	return 0;
}

static u32 md_to_m(struct mn *mn, u32 md)
{
	md >>= mn->m_val_shift;
	md &= BIT(mn->width) - 1;
	return md;
}

static u32 ns_to_pre_div(struct pre_div *p, u32 ns)
{
	ns >>= p->pre_div_shift;
	ns &= BIT(p->pre_div_width) - 1;
	return ns;
}

static u32 pre_div_to_ns(struct pre_div *p, u8 pre_div, u32 ns)
{
	u32 mask;

	mask = BIT(p->pre_div_width) - 1;
	mask <<= p->pre_div_shift;
	ns &= ~mask;

	ns |= pre_div << p->pre_div_shift;
	return ns;
}

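/*
 * The MD register holds M in its upper field and the one's complement of N
 * in its lower field; when N is zero both fields are left cleared and the
 * M/N counter is not used.
 */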
static u32 mn_to_md(struct mn *mn, u32 m, u32 n, u32 md)
{
	u32 mask, mask_w;

	mask_w = BIT(mn->width) - 1;
	mask = (mask_w << mn->m_val_shift) | mask_w;
	md &= ~mask;

	if (n) {
		m <<= mn->m_val_shift;
		md |= m;
		md |= ~n & mask_w;
	}

	return md;
}

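/*
 * The N field of the NS register stores the one's complement of (N - M):
 * mn_to_ns() writes ~(n - m) and ns_m_to_n() recovers N by inverting the
 * field and adding M back in.
 */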
static u32 ns_m_to_n(struct mn *mn, u32 ns, u32 m)
{
	ns = ~ns >> mn->n_val_shift;
	ns &= BIT(mn->width) - 1;
	return ns + m;
}

static u32 reg_to_mnctr_mode(struct mn *mn, u32 val)
{
	val >>= mn->mnctr_mode_shift;
	val &= MNCTR_MODE_MASK;
	return val;
}

static u32 mn_to_ns(struct mn *mn, u32 m, u32 n, u32 ns)
{
	u32 mask;

	mask = BIT(mn->width) - 1;
	mask <<= mn->n_val_shift;
	ns &= ~mask;

	if (n) {
		n = n - m;
		n = ~n;
		n &= BIT(mn->width) - 1;
		n <<= mn->n_val_shift;
		ns |= n;
	}

	return ns;
}

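/*
 * Set or clear the M/N counter enable and mode bits: when N is non-zero the
 * counter is enabled in dual-edge mode, otherwise it is turned off entirely.
 */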
static u32 mn_to_reg(struct mn *mn, u32 m, u32 n, u32 val)
{
	u32 mask;

	mask = MNCTR_MODE_MASK << mn->mnctr_mode_shift;
	mask |= BIT(mn->mnctr_en_bit);
	val &= ~mask;

	if (n) {
		val |= BIT(mn->mnctr_en_bit);
		val |= MNCTR_MODE_DUAL << mn->mnctr_mode_shift;
	}

	return val;
}

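/*
 * Program the frequency described by @f into the register bank the hardware
 * is not currently running from. If the clock is enabled, the inactive bank
 * is written first and the mux is then flipped over to it, so the running
 * clock never sees a half-programmed configuration; if the clock is
 * disabled, the active bank is reprogrammed in place.
 */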
static void configure_bank(struct clk_dyn_rcg *rcg, const struct freq_tbl *f)
{
	u32 ns, md, reg;
	int bank, new_bank;
	struct mn *mn;
	struct pre_div *p;
	struct src_sel *s;
	bool enabled;
	u32 md_reg, ns_reg;
	bool banked_mn = !!rcg->mn[1].width;
	bool banked_p = !!rcg->p[1].pre_div_width;
	struct clk_hw *hw = &rcg->clkr.hw;

	enabled = __clk_is_enabled(hw->clk);

	regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
	bank = reg_to_bank(rcg, reg);
	new_bank = enabled ? !bank : bank;

	ns_reg = rcg->ns_reg[new_bank];
	regmap_read(rcg->clkr.regmap, ns_reg, &ns);

	if (banked_mn) {
		mn = &rcg->mn[new_bank];
		md_reg = rcg->md_reg[new_bank];

		ns |= BIT(mn->mnctr_reset_bit);
		regmap_write(rcg->clkr.regmap, ns_reg, ns);

		regmap_read(rcg->clkr.regmap, md_reg, &md);
		md = mn_to_md(mn, f->m, f->n, md);
		regmap_write(rcg->clkr.regmap, md_reg, md);

		ns = mn_to_ns(mn, f->m, f->n, ns);
		regmap_write(rcg->clkr.regmap, ns_reg, ns);

		/* Two NS registers means mode control is in NS register */
		if (rcg->ns_reg[0] != rcg->ns_reg[1]) {
			ns = mn_to_reg(mn, f->m, f->n, ns);
			regmap_write(rcg->clkr.regmap, ns_reg, ns);
		} else {
			reg = mn_to_reg(mn, f->m, f->n, reg);
			regmap_write(rcg->clkr.regmap, rcg->bank_reg, reg);
		}

		ns &= ~BIT(mn->mnctr_reset_bit);
		regmap_write(rcg->clkr.regmap, ns_reg, ns);
	}

	if (banked_p) {
		p = &rcg->p[new_bank];
		ns = pre_div_to_ns(p, f->pre_div - 1, ns);
	}

	s = &rcg->s[new_bank];
	ns = src_to_ns(s, s->parent_map[f->src], ns);
	regmap_write(rcg->clkr.regmap, ns_reg, ns);

	if (enabled) {
		regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
		reg ^= BIT(rcg->mux_sel_bit);
		regmap_write(rcg->clkr.regmap, rcg->bank_reg, reg);
	}
}

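/*
 * Switching parents reuses the banked programming path: read back the
 * current M/N and pre-divider settings, then hand them to configure_bank()
 * together with the new source.
 */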
static int clk_dyn_rcg_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
	u32 ns, md, reg;
	int bank;
	struct freq_tbl f = { 0 };
	bool banked_mn = !!rcg->mn[1].width;
	bool banked_p = !!rcg->p[1].pre_div_width;

	regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
	bank = reg_to_bank(rcg, reg);

	regmap_read(rcg->clkr.regmap, rcg->ns_reg[bank], &ns);

	if (banked_mn) {
		regmap_read(rcg->clkr.regmap, rcg->md_reg[bank], &md);
		f.m = md_to_m(&rcg->mn[bank], md);
		f.n = ns_m_to_n(&rcg->mn[bank], ns, f.m);
	}

	if (banked_p)
		f.pre_div = ns_to_pre_div(&rcg->p[bank], ns) + 1;

	f.src = index;
	configure_bank(rcg, &f);

	return 0;
}

/*
 * Calculate m/n:d rate
 *
 *          parent_rate     m
 *   rate = ----------- x  ---
 *            pre_div       n
 */
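/*
 * Worked example (illustrative values only): with parent_rate = 19200000, a
 * pre_div field value of 3 (divide by 4), m = 1, n = 4 and the M/N counter
 * enabled, calc_rate() returns 19200000 / 4 * 1 / 4 = 1200000 Hz.
 */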
static unsigned long
calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 pre_div)
{
	if (pre_div)
		rate /= pre_div + 1;

	if (mode) {
		u64 tmp = rate;
		tmp *= m;
		do_div(tmp, n);
		rate = tmp;
	}

	return rate;
}

static unsigned long
clk_rcg_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	u32 pre_div, m = 0, n = 0, ns, md, mode = 0;
	struct mn *mn = &rcg->mn;

	regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	pre_div = ns_to_pre_div(&rcg->p, ns);

	if (rcg->mn.width) {
		regmap_read(rcg->clkr.regmap, rcg->md_reg, &md);
		m = md_to_m(mn, md);
		n = ns_m_to_n(mn, ns, m);
		/* MN counter mode is in hw.enable_reg sometimes */
		if (rcg->clkr.enable_reg != rcg->ns_reg)
			regmap_read(rcg->clkr.regmap, rcg->clkr.enable_reg, &mode);
		else
			mode = ns;
		mode = reg_to_mnctr_mode(mn, mode);
	}

	return calc_rate(parent_rate, m, n, mode, pre_div);
}

static unsigned long
clk_dyn_rcg_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
	u32 m, n, pre_div, ns, md, mode, reg;
	int bank;
	struct mn *mn;
	bool banked_p = !!rcg->p[1].pre_div_width;
	bool banked_mn = !!rcg->mn[1].width;

	regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
	bank = reg_to_bank(rcg, reg);

	regmap_read(rcg->clkr.regmap, rcg->ns_reg[bank], &ns);
	m = n = pre_div = mode = 0;

	if (banked_mn) {
		mn = &rcg->mn[bank];
		regmap_read(rcg->clkr.regmap, rcg->md_reg[bank], &md);
		m = md_to_m(mn, md);
		n = ns_m_to_n(mn, ns, m);
		/* Two NS registers means mode control is in NS register */
		if (rcg->ns_reg[0] != rcg->ns_reg[1])
			reg = ns;
		mode = reg_to_mnctr_mode(mn, reg);
	}

	if (banked_p)
		pre_div = ns_to_pre_div(&rcg->p[bank], ns);

	return calc_rate(parent_rate, m, n, mode, pre_div);
}

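/*
 * Look up @rate in the frequency table and report the parent and parent
 * rate the framework should use. With CLK_SET_RATE_PARENT the required
 * parent rate is derived by working the dividers backwards
 * (rate * pre_div * n / m); otherwise the parent's current rate is used.
 */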
static long _freq_tbl_determine_rate(struct clk_hw *hw,
		const struct freq_tbl *f, unsigned long rate,
		unsigned long min_rate, unsigned long max_rate,
		unsigned long *p_rate, struct clk_hw **p_hw)
{
	unsigned long clk_flags;
	struct clk *p;

	f = qcom_find_freq(f, rate);
	if (!f)
		return -EINVAL;

	clk_flags = __clk_get_flags(hw->clk);
	p = clk_get_parent_by_index(hw->clk, f->src);
	if (clk_flags & CLK_SET_RATE_PARENT) {
		rate = rate * f->pre_div;
		if (f->n) {
			u64 tmp = rate;
			tmp = tmp * f->n;
			do_div(tmp, f->m);
			rate = tmp;
		}
	} else {
		rate = __clk_get_rate(p);
	}
	*p_hw = __clk_get_hw(p);
	*p_rate = rate;

	return f->freq;
}

static long clk_rcg_determine_rate(struct clk_hw *hw, unsigned long rate,
		unsigned long min_rate, unsigned long max_rate,
		unsigned long *p_rate, struct clk_hw **p)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, rate, min_rate,
			max_rate, p_rate, p);
}

static long clk_dyn_rcg_determine_rate(struct clk_hw *hw, unsigned long rate,
		unsigned long min_rate, unsigned long max_rate,
		unsigned long *p_rate, struct clk_hw **p)
{
	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, rate, min_rate,
			max_rate, p_rate, p);
}

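/*
 * Bypass clocks follow a single, fixed frequency table entry: the rate is
 * simply whatever the corresponding parent rounds the request to.
 */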
static long clk_rcg_bypass_determine_rate(struct clk_hw *hw, unsigned long rate,
		unsigned long min_rate, unsigned long max_rate,
		unsigned long *p_rate, struct clk_hw **p_hw)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	struct clk *p;

	p = clk_get_parent_by_index(hw->clk, f->src);
	*p_hw = __clk_get_hw(p);
	*p_rate = __clk_round_rate(p, rate);

	return *p_rate;
}

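/*
 * Program a frequency table entry into a non-banked RCG: hold the M/N
 * counter in reset, update the MD register and the counter mode, write the
 * new N and pre-divider values to the NS register, then release the reset.
 */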
static int __clk_rcg_set_rate(struct clk_rcg *rcg, const struct freq_tbl *f)
{
	u32 ns, md, ctl;
	struct mn *mn = &rcg->mn;
	u32 mask = 0;
	unsigned int reset_reg;

	if (rcg->mn.reset_in_cc)
		reset_reg = rcg->clkr.enable_reg;
	else
		reset_reg = rcg->ns_reg;

	if (rcg->mn.width) {
		mask = BIT(mn->mnctr_reset_bit);
		regmap_update_bits(rcg->clkr.regmap, reset_reg, mask, mask);

		regmap_read(rcg->clkr.regmap, rcg->md_reg, &md);
		md = mn_to_md(mn, f->m, f->n, md);
		regmap_write(rcg->clkr.regmap, rcg->md_reg, md);

		regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
		/* MN counter mode is in hw.enable_reg sometimes */
		if (rcg->clkr.enable_reg != rcg->ns_reg) {
			regmap_read(rcg->clkr.regmap, rcg->clkr.enable_reg, &ctl);
			ctl = mn_to_reg(mn, f->m, f->n, ctl);
			regmap_write(rcg->clkr.regmap, rcg->clkr.enable_reg, ctl);
		} else {
			ns = mn_to_reg(mn, f->m, f->n, ns);
		}
		ns = mn_to_ns(mn, f->m, f->n, ns);
	} else {
		regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	}

	ns = pre_div_to_ns(&rcg->p, f->pre_div - 1, ns);
	regmap_write(rcg->clkr.regmap, rcg->ns_reg, ns);

	regmap_update_bits(rcg->clkr.regmap, reset_reg, mask, 0);

	return 0;
}

static int clk_rcg_set_rate(struct clk_hw *hw, unsigned long rate,
			    unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	const struct freq_tbl *f;

	f = qcom_find_freq(rcg->freq_tbl, rate);
	if (!f)
		return -EINVAL;

	return __clk_rcg_set_rate(rcg, f);
}

static int clk_rcg_bypass_set_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);

	return __clk_rcg_set_rate(rcg, rcg->freq_tbl);
}

/*
 * This type of clock has a glitch-free mux that switches between the output of
 * the M/N counter and an always on clock source (XO). When clk_set_rate() is
 * called we need to make sure that we don't switch to the M/N counter if it
 * isn't clocking because the mux will get stuck and the clock will stop
 * outputting a clock. This can happen if the framework isn't aware that this
 * clock is on and so clk_set_rate() doesn't turn on the new parent. To fix
 * this we switch the mux in the enable/disable ops and reprogram the M/N
 * counter in the set_rate op. We also make sure to switch away from the M/N
 * counter in set_rate if software thinks the clock is off.
 */
static int clk_rcg_lcc_set_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	const struct freq_tbl *f;
	int ret;
	u32 gfm = BIT(10);

	f = qcom_find_freq(rcg->freq_tbl, rate);
	if (!f)
		return -EINVAL;

	/* Switch to XO to avoid glitches */
	regmap_update_bits(rcg->clkr.regmap, rcg->ns_reg, gfm, 0);
	ret = __clk_rcg_set_rate(rcg, f);
	/* Switch back to M/N if it's clocking */
	if (__clk_is_enabled(hw->clk))
		regmap_update_bits(rcg->clkr.regmap, rcg->ns_reg, gfm, gfm);

	return ret;
}

static int clk_rcg_lcc_enable(struct clk_hw *hw)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	u32 gfm = BIT(10);

	/* Use M/N */
	return regmap_update_bits(rcg->clkr.regmap, rcg->ns_reg, gfm, gfm);
}

static void clk_rcg_lcc_disable(struct clk_hw *hw)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	u32 gfm = BIT(10);

	/* Use XO */
	regmap_update_bits(rcg->clkr.regmap, rcg->ns_reg, gfm, 0);
}

static int __clk_dyn_rcg_set_rate(struct clk_hw *hw, unsigned long rate)
{
	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
	const struct freq_tbl *f;

	f = qcom_find_freq(rcg->freq_tbl, rate);
	if (!f)
		return -EINVAL;

	configure_bank(rcg, f);

	return 0;
}

static int clk_dyn_rcg_set_rate(struct clk_hw *hw, unsigned long rate,
			    unsigned long parent_rate)
{
	return __clk_dyn_rcg_set_rate(hw, rate);
}

static int clk_dyn_rcg_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_dyn_rcg_set_rate(hw, rate);
}

const struct clk_ops clk_rcg_ops = {
	.enable = clk_enable_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_determine_rate,
	.set_rate = clk_rcg_set_rate,
};
EXPORT_SYMBOL_GPL(clk_rcg_ops);
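
/*
 * A minimal sketch of how a provider driver might describe an RCG that uses
 * these ops. All offsets, shifts and table names below are made up for
 * illustration only; real instances live in the platform gcc/mmcc/lcc
 * drivers.
 *
 *	static struct clk_rcg example_src = {
 *		.ns_reg = 0x0058,
 *		.md_reg = 0x0054,
 *		.mn = {
 *			.mnctr_en_bit = 8,
 *			.mnctr_reset_bit = 7,
 *			.mnctr_mode_shift = 5,
 *			.n_val_shift = 16,
 *			.m_val_shift = 16,
 *			.width = 8,
 *		},
 *		.p = {
 *			.pre_div_shift = 3,
 *			.pre_div_width = 2,
 *		},
 *		.s = {
 *			.src_sel_shift = 0,
 *			.parent_map = example_parent_map,
 *		},
 *		.freq_tbl = example_freq_tbl,
 *		.clkr = {
 *			.enable_reg = 0x0058,
 *			.enable_mask = BIT(11),
 *			.hw.init = &(struct clk_init_data){
 *				.name = "example_src",
 *				.parent_names = example_parent_names,
 *				.num_parents = 3,
 *				.ops = &clk_rcg_ops,
 *			},
 *		},
 *	};
 */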

const struct clk_ops clk_rcg_bypass_ops = {
	.enable = clk_enable_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_bypass_determine_rate,
	.set_rate = clk_rcg_bypass_set_rate,
};
EXPORT_SYMBOL_GPL(clk_rcg_bypass_ops);

const struct clk_ops clk_rcg_lcc_ops = {
	.enable = clk_rcg_lcc_enable,
	.disable = clk_rcg_lcc_disable,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_determine_rate,
	.set_rate = clk_rcg_lcc_set_rate,
};
EXPORT_SYMBOL_GPL(clk_rcg_lcc_ops);

const struct clk_ops clk_dyn_rcg_ops = {
	.enable = clk_enable_regmap,
	.is_enabled = clk_is_enabled_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_dyn_rcg_get_parent,
	.set_parent = clk_dyn_rcg_set_parent,
	.recalc_rate = clk_dyn_rcg_recalc_rate,
	.determine_rate = clk_dyn_rcg_determine_rate,
	.set_rate = clk_dyn_rcg_set_rate,
	.set_rate_and_parent = clk_dyn_rcg_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_dyn_rcg_ops);