/*
 * Copyright (c) 2013, The Linux Foundation. All rights reserved.
 *
 * This software is licensed under the terms of the GNU General Public
 * License version 2, as published by the Free Software Foundation, and
 * may be copied, distributed, and modified under those terms.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include <linux/kernel.h>
#include <linux/bitops.h>
#include <linux/err.h>
#include <linux/export.h>
#include <linux/clk-provider.h>
#include <linux/regmap.h>

#include <asm/div64.h>

#include "clk-rcg.h"
#include "common.h"

static u32 ns_to_src(struct src_sel *s, u32 ns)
{
	ns >>= s->src_sel_shift;
	ns &= SRC_SEL_MASK;
	return ns;
}

static u32 src_to_ns(struct src_sel *s, u8 src, u32 ns)
{
	u32 mask;

	mask = SRC_SEL_MASK;
	mask <<= s->src_sel_shift;
	ns &= ~mask;

	ns |= src << s->src_sel_shift;
	return ns;
}

static u8 clk_rcg_get_parent(struct clk_hw *hw)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	int num_parents = clk_hw_get_num_parents(hw);
	u32 ns;
	int i, ret;

	ret = regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	if (ret)
		goto err;
	ns = ns_to_src(&rcg->s, ns);
	for (i = 0; i < num_parents; i++)
		if (ns == rcg->s.parent_map[i].cfg)
			return i;

err:
	pr_debug("%s: Clock %s has invalid parent, using default.\n",
		 __func__, clk_hw_get_name(hw));
	return 0;
}

static int reg_to_bank(struct clk_dyn_rcg *rcg, u32 bank)
{
	bank &= BIT(rcg->mux_sel_bit);
	return !!bank;
}

static u8 clk_dyn_rcg_get_parent(struct clk_hw *hw)
{
	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
	int num_parents = clk_hw_get_num_parents(hw);
	u32 ns, reg;
	int bank;
	int i, ret;
	struct src_sel *s;

	ret = regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
	if (ret)
		goto err;
	bank = reg_to_bank(rcg, reg);
	s = &rcg->s[bank];

	ret = regmap_read(rcg->clkr.regmap, rcg->ns_reg[bank], &ns);
	if (ret)
		goto err;
	ns = ns_to_src(s, ns);

	for (i = 0; i < num_parents; i++)
		if (ns == s->parent_map[i].cfg)
			return i;

err:
	pr_debug("%s: Clock %s has invalid parent, using default.\n",
		 __func__, clk_hw_get_name(hw));
	return 0;
}

static int clk_rcg_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	u32 ns;

	regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	ns = src_to_ns(&rcg->s, rcg->s.parent_map[index].cfg, ns);
	regmap_write(rcg->clkr.regmap, rcg->ns_reg, ns);

	return 0;
}

static u32 md_to_m(struct mn *mn, u32 md)
{
	md >>= mn->m_val_shift;
	md &= BIT(mn->width) - 1;
	return md;
}

static u32 ns_to_pre_div(struct pre_div *p, u32 ns)
{
	ns >>= p->pre_div_shift;
	ns &= BIT(p->pre_div_width) - 1;
	return ns;
}

static u32 pre_div_to_ns(struct pre_div *p, u8 pre_div, u32 ns)
{
	u32 mask;

	mask = BIT(p->pre_div_width) - 1;
	mask <<= p->pre_div_shift;
	ns &= ~mask;

	ns |= pre_div << p->pre_div_shift;
	return ns;
}

/*
 * Program the M and D fields of an MD register. The D field holds the one's
 * complement of N; when N is zero the M/N counter is unused and both fields
 * are left cleared.
 */
static u32 mn_to_md(struct mn *mn, u32 m, u32 n, u32 md)
{
	u32 mask, mask_w;

	mask_w = BIT(mn->width) - 1;
	mask = (mask_w << mn->m_val_shift) | mask_w;
	md &= ~mask;

	if (n) {
		m <<= mn->m_val_shift;
		md |= m;
		md |= ~n & mask_w;
	}

	return md;
}

/*
 * Recover N from an NS register value: the N field stores ~(N - M), so invert
 * it and add M back.
 */
static u32 ns_m_to_n(struct mn *mn, u32 ns, u32 m)
{
	ns = ~ns >> mn->n_val_shift;
	ns &= BIT(mn->width) - 1;
	return ns + m;
}

static u32 reg_to_mnctr_mode(struct mn *mn, u32 val)
{
	val >>= mn->mnctr_mode_shift;
	val &= MNCTR_MODE_MASK;
	return val;
}

/*
 * Program the N field of an NS register with ~(N - M), or leave it cleared
 * when the M/N counter is unused (N == 0).
 */
static u32 mn_to_ns(struct mn *mn, u32 m, u32 n, u32 ns)
{
	u32 mask;

	mask = BIT(mn->width) - 1;
	mask <<= mn->n_val_shift;
	ns &= ~mask;

	if (n) {
		n = n - m;
		n = ~n;
		n &= BIT(mn->width) - 1;
		n <<= mn->n_val_shift;
		ns |= n;
	}

	return ns;
}

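/*
 * Worked example of the M/N/D encoding implemented by mn_to_md(), mn_to_ns()
 * and ns_m_to_n() above (the numbers are illustrative and not taken from any
 * real clock): with a counter width of 8 bits, M = 1 and N = 4,
 *
 *   M field of MD = 1
 *   D field of MD = ~N & 0xff       = ~4 & 0xff       = 0xfb
 *   N field of NS = ~(N - M) & 0xff = ~(4 - 1) & 0xff = 0xfc
 *
 * and ns_m_to_n() recovers N as (~0xfc & 0xff) + M = 3 + 1 = 4.
 */
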
static u32 mn_to_reg(struct mn *mn, u32 m, u32 n, u32 val)
{
	u32 mask;

	mask = MNCTR_MODE_MASK << mn->mnctr_mode_shift;
	mask |= BIT(mn->mnctr_en_bit);
	val &= ~mask;

	if (n) {
		val |= BIT(mn->mnctr_en_bit);
		val |= MNCTR_MODE_DUAL << mn->mnctr_mode_shift;
	}

	return val;
}

/*
 * Program the requested frequency settings into the register bank that is not
 * currently driving the clock and then, if the clock is enabled, flip the
 * glitch-free mux over to it. If the clock is disabled, the currently
 * selected bank is reprogrammed in place instead.
 */
static int configure_bank(struct clk_dyn_rcg *rcg, const struct freq_tbl *f)
{
	u32 ns, md, reg;
	int bank, new_bank, ret, index;
	struct mn *mn;
	struct pre_div *p;
	struct src_sel *s;
	bool enabled;
	u32 md_reg, ns_reg;
	bool banked_mn = !!rcg->mn[1].width;
	bool banked_p = !!rcg->p[1].pre_div_width;
	struct clk_hw *hw = &rcg->clkr.hw;

	enabled = __clk_is_enabled(hw->clk);

	ret = regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
	if (ret)
		return ret;
	bank = reg_to_bank(rcg, reg);
	new_bank = enabled ? !bank : bank;

	ns_reg = rcg->ns_reg[new_bank];
	ret = regmap_read(rcg->clkr.regmap, ns_reg, &ns);
	if (ret)
		return ret;

	if (banked_mn) {
		mn = &rcg->mn[new_bank];
		md_reg = rcg->md_reg[new_bank];

		ns |= BIT(mn->mnctr_reset_bit);
		ret = regmap_write(rcg->clkr.regmap, ns_reg, ns);
		if (ret)
			return ret;

		ret = regmap_read(rcg->clkr.regmap, md_reg, &md);
		if (ret)
			return ret;
		md = mn_to_md(mn, f->m, f->n, md);
		ret = regmap_write(rcg->clkr.regmap, md_reg, md);
		if (ret)
			return ret;
		ns = mn_to_ns(mn, f->m, f->n, ns);
		ret = regmap_write(rcg->clkr.regmap, ns_reg, ns);
		if (ret)
			return ret;

		/* Two NS registers means mode control is in NS register */
		if (rcg->ns_reg[0] != rcg->ns_reg[1]) {
			ns = mn_to_reg(mn, f->m, f->n, ns);
			ret = regmap_write(rcg->clkr.regmap, ns_reg, ns);
			if (ret)
				return ret;
		} else {
			reg = mn_to_reg(mn, f->m, f->n, reg);
			ret = regmap_write(rcg->clkr.regmap, rcg->bank_reg,
					   reg);
			if (ret)
				return ret;
		}

		ns &= ~BIT(mn->mnctr_reset_bit);
		ret = regmap_write(rcg->clkr.regmap, ns_reg, ns);
		if (ret)
			return ret;
	}

	if (banked_p) {
		p = &rcg->p[new_bank];
		ns = pre_div_to_ns(p, f->pre_div - 1, ns);
	}

	s = &rcg->s[new_bank];
	index = qcom_find_src_index(hw, s->parent_map, f->src);
	if (index < 0)
		return index;
	ns = src_to_ns(s, s->parent_map[index].cfg, ns);
	ret = regmap_write(rcg->clkr.regmap, ns_reg, ns);
	if (ret)
		return ret;

	if (enabled) {
		ret = regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
		if (ret)
			return ret;
		reg ^= BIT(rcg->mux_sel_bit);
		ret = regmap_write(rcg->clkr.regmap, rcg->bank_reg, reg);
		if (ret)
			return ret;
	}
	return 0;
}

static int clk_dyn_rcg_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
	u32 ns, md, reg;
	int bank;
	struct freq_tbl f = { 0 };
	bool banked_mn = !!rcg->mn[1].width;
	bool banked_p = !!rcg->p[1].pre_div_width;

	regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
	bank = reg_to_bank(rcg, reg);

	regmap_read(rcg->clkr.regmap, rcg->ns_reg[bank], &ns);

	if (banked_mn) {
		regmap_read(rcg->clkr.regmap, rcg->md_reg[bank], &md);
		f.m = md_to_m(&rcg->mn[bank], md);
		f.n = ns_m_to_n(&rcg->mn[bank], ns, f.m);
	}

	if (banked_p)
		f.pre_div = ns_to_pre_div(&rcg->p[bank], ns) + 1;

	f.src = qcom_find_src_index(hw, rcg->s[bank].parent_map, index);
	return configure_bank(rcg, &f);
}

/*
 * Calculate m/n:d rate
 *
 *          parent_rate     m
 *   rate = ----------- x  ---
 *            pre_div       n
 *
 * A worked numeric example follows the function below.
 */
static unsigned long
calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 pre_div)
{
	if (pre_div)
		rate /= pre_div + 1;

	if (mode) {
		u64 tmp = rate;
		tmp *= m;
		do_div(tmp, n);
		rate = tmp;
	}

	return rate;
}

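/*
 * Worked example for calc_rate() above (illustrative numbers only): with
 * parent_rate = 384 MHz, a pre_div field value of 3 (i.e. divide by 4),
 * m = 1, n = 4 and the M/N counter active (mode != 0):
 *
 *   rate = 384 MHz / (3 + 1) = 96 MHz
 *   rate = 96 MHz * 1 / 4    = 24 MHz
 *
 * With mode == 0 the M/N stage is skipped and the result stays at 96 MHz.
 */
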
static unsigned long
clk_rcg_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	u32 pre_div, m = 0, n = 0, ns, md, mode = 0;
	struct mn *mn = &rcg->mn;

	regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	pre_div = ns_to_pre_div(&rcg->p, ns);

	if (rcg->mn.width) {
		regmap_read(rcg->clkr.regmap, rcg->md_reg, &md);
		m = md_to_m(mn, md);
		n = ns_m_to_n(mn, ns, m);
		/* MN counter mode is in hw.enable_reg sometimes */
		if (rcg->clkr.enable_reg != rcg->ns_reg)
			regmap_read(rcg->clkr.regmap, rcg->clkr.enable_reg, &mode);
		else
			mode = ns;
		mode = reg_to_mnctr_mode(mn, mode);
	}

	return calc_rate(parent_rate, m, n, mode, pre_div);
}

static unsigned long
clk_dyn_rcg_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
	u32 m, n, pre_div, ns, md, mode, reg;
	int bank;
	struct mn *mn;
	bool banked_p = !!rcg->p[1].pre_div_width;
	bool banked_mn = !!rcg->mn[1].width;

	regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
	bank = reg_to_bank(rcg, reg);

	regmap_read(rcg->clkr.regmap, rcg->ns_reg[bank], &ns);
	m = n = pre_div = mode = 0;

	if (banked_mn) {
		mn = &rcg->mn[bank];
		regmap_read(rcg->clkr.regmap, rcg->md_reg[bank], &md);
		m = md_to_m(mn, md);
		n = ns_m_to_n(mn, ns, m);
		/* Two NS registers means mode control is in NS register */
		if (rcg->ns_reg[0] != rcg->ns_reg[1])
			reg = ns;
		mode = reg_to_mnctr_mode(mn, reg);
	}

	if (banked_p)
		pre_div = ns_to_pre_div(&rcg->p[bank], ns);

	return calc_rate(parent_rate, m, n, mode, pre_div);
}

static int _freq_tbl_determine_rate(struct clk_hw *hw, const struct freq_tbl *f,
		struct clk_rate_request *req,
		const struct parent_map *parent_map)
{
	unsigned long clk_flags, rate = req->rate;
	struct clk_hw *p;
	int index;

	f = qcom_find_freq(f, rate);
	if (!f)
		return -EINVAL;

	index = qcom_find_src_index(hw, parent_map, f->src);
	if (index < 0)
		return index;

	clk_flags = clk_hw_get_flags(hw);
	p = clk_hw_get_parent_by_index(hw, index);
	if (clk_flags & CLK_SET_RATE_PARENT) {
		rate = rate * f->pre_div;
		if (f->n) {
			u64 tmp = rate;
			tmp = tmp * f->n;
			do_div(tmp, f->m);
			rate = tmp;
		}
	} else {
		rate = clk_hw_get_rate(p);
	}
	req->best_parent_hw = p;
	req->best_parent_rate = rate;
	req->rate = f->freq;

	return 0;
}

static int clk_rcg_determine_rate(struct clk_hw *hw,
				  struct clk_rate_request *req)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req,
					rcg->s.parent_map);
}

static int clk_dyn_rcg_determine_rate(struct clk_hw *hw,
				      struct clk_rate_request *req)
{
	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
	u32 reg;
	int bank;
	struct src_sel *s;

	regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
	bank = reg_to_bank(rcg, reg);
	s = &rcg->s[bank];

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, s->parent_map);
}

static int clk_rcg_bypass_determine_rate(struct clk_hw *hw,
					 struct clk_rate_request *req)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	struct clk_hw *p;
	int index = qcom_find_src_index(hw, rcg->s.parent_map, f->src);

	req->best_parent_hw = p = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = clk_hw_round_rate(p, req->rate);
	req->rate = req->best_parent_rate;

	return 0;
}

static int __clk_rcg_set_rate(struct clk_rcg *rcg, const struct freq_tbl *f)
{
	u32 ns, md, ctl;
	struct mn *mn = &rcg->mn;
	u32 mask = 0;
	unsigned int reset_reg;

	if (rcg->mn.reset_in_cc)
		reset_reg = rcg->clkr.enable_reg;
	else
		reset_reg = rcg->ns_reg;

	if (rcg->mn.width) {
		mask = BIT(mn->mnctr_reset_bit);
		regmap_update_bits(rcg->clkr.regmap, reset_reg, mask, mask);

		regmap_read(rcg->clkr.regmap, rcg->md_reg, &md);
		md = mn_to_md(mn, f->m, f->n, md);
		regmap_write(rcg->clkr.regmap, rcg->md_reg, md);

		regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
		/* MN counter mode is in hw.enable_reg sometimes */
		if (rcg->clkr.enable_reg != rcg->ns_reg) {
			regmap_read(rcg->clkr.regmap, rcg->clkr.enable_reg, &ctl);
			ctl = mn_to_reg(mn, f->m, f->n, ctl);
			regmap_write(rcg->clkr.regmap, rcg->clkr.enable_reg, ctl);
		} else {
			ns = mn_to_reg(mn, f->m, f->n, ns);
		}
		ns = mn_to_ns(mn, f->m, f->n, ns);
	} else {
		regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	}

	ns = pre_div_to_ns(&rcg->p, f->pre_div - 1, ns);
	regmap_write(rcg->clkr.regmap, rcg->ns_reg, ns);

	regmap_update_bits(rcg->clkr.regmap, reset_reg, mask, 0);

	return 0;
}

static int clk_rcg_set_rate(struct clk_hw *hw, unsigned long rate,
			    unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	const struct freq_tbl *f;

	f = qcom_find_freq(rcg->freq_tbl, rate);
	if (!f)
		return -EINVAL;

	return __clk_rcg_set_rate(rcg, f);
}

static int clk_rcg_bypass_set_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);

	return __clk_rcg_set_rate(rcg, rcg->freq_tbl);
}

static int clk_rcg_bypass2_determine_rate(struct clk_hw *hw,
				struct clk_rate_request *req)
{
	struct clk_hw *p;

	p = req->best_parent_hw;
	req->best_parent_rate = clk_hw_round_rate(p, req->rate);
	req->rate = req->best_parent_rate;

	return 0;
}

static int clk_rcg_bypass2_set_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	struct freq_tbl f = { 0 };
	u32 ns, src;
	int i, ret, num_parents = clk_hw_get_num_parents(hw);

	ret = regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	if (ret)
		return ret;

	src = ns_to_src(&rcg->s, ns);
	f.pre_div = ns_to_pre_div(&rcg->p, ns) + 1;

	for (i = 0; i < num_parents; i++) {
		if (src == rcg->s.parent_map[i].cfg) {
			f.src = rcg->s.parent_map[i].src;
			return __clk_rcg_set_rate(rcg, &f);
		}
	}

	return -EINVAL;
}

static int clk_rcg_bypass2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Read the hardware to determine parent during set_rate */
	return clk_rcg_bypass2_set_rate(hw, rate, parent_rate);
}

struct frac_entry {
	int num;
	int den;
};

static const struct frac_entry pixel_table[] = {
	{ 1, 2 },
	{ 1, 3 },
	{ 3, 16 },
	{ }
};

static int clk_rcg_pixel_determine_rate(struct clk_hw *hw,
		struct clk_rate_request *req)
{
	int delta = 100000;
	const struct frac_entry *frac = pixel_table;
	unsigned long request, src_rate;

	for (; frac->num; frac++) {
		request = (req->rate * frac->den) / frac->num;

		src_rate = clk_hw_round_rate(req->best_parent_hw, request);

		if ((src_rate < (request - delta)) ||
			(src_rate > (request + delta)))
			continue;

		req->best_parent_rate = src_rate;
		req->rate = (src_rate * frac->num) / frac->den;
		return 0;
	}

	return -EINVAL;
}

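/*
 * Example of the fraction search above (illustrative numbers only): for a
 * requested pixel rate of 75 MHz, the 1/2 entry asks the parent for
 * 75 MHz * 2 / 1 = 150 MHz. If the parent can deliver a rate within the
 * 100 kHz delta window, that rate is accepted and req->rate is reported
 * back as src_rate * 1 / 2.
 */
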
static int clk_rcg_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	int delta = 100000;
	const struct frac_entry *frac = pixel_table;
	unsigned long request;
	struct freq_tbl f = { 0 };
	u32 ns, src;
	int i, ret, num_parents = clk_hw_get_num_parents(hw);

	ret = regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	if (ret)
		return ret;

	src = ns_to_src(&rcg->s, ns);

	for (i = 0; i < num_parents; i++) {
		if (src == rcg->s.parent_map[i].cfg) {
			f.src = rcg->s.parent_map[i].src;
			break;
		}
	}

	/* Bypass the pre-divider */
	f.pre_div = 1;

	/* Find an m/n pair that matches parent_rate to within 100 kHz */
	for (; frac->num; frac++) {
		request = (rate * frac->den) / frac->num;

		if ((parent_rate < (request - delta)) ||
			(parent_rate > (request + delta)))
			continue;

		f.m = frac->num;
		f.n = frac->den;

		return __clk_rcg_set_rate(rcg, &f);
	}

	return -EINVAL;
}

static int clk_rcg_pixel_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return clk_rcg_pixel_set_rate(hw, rate, parent_rate);
}

static int clk_rcg_esc_determine_rate(struct clk_hw *hw,
		struct clk_rate_request *req)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	int pre_div_max = BIT(rcg->p.pre_div_width);
	int div;
	unsigned long src_rate;

	if (req->rate == 0)
		return -EINVAL;

	src_rate = clk_hw_get_rate(req->best_parent_hw);

	div = src_rate / req->rate;

	if (div >= 1 && div <= pre_div_max) {
		req->best_parent_rate = src_rate;
		req->rate = src_rate / div;
		return 0;
	}

	return -EINVAL;
}

static int clk_rcg_esc_set_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	struct freq_tbl f = { 0 };
	int pre_div_max = BIT(rcg->p.pre_div_width);
	int div;
	u32 ns;
	int i, ret, num_parents = clk_hw_get_num_parents(hw);

	if (rate == 0)
		return -EINVAL;

	ret = regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	if (ret)
		return ret;

	ns = ns_to_src(&rcg->s, ns);

	for (i = 0; i < num_parents; i++) {
		if (ns == rcg->s.parent_map[i].cfg) {
			f.src = rcg->s.parent_map[i].src;
			break;
		}
	}

	div = parent_rate / rate;

	if (div >= 1 && div <= pre_div_max) {
		f.pre_div = div;
		return __clk_rcg_set_rate(rcg, &f);
	}

	return -EINVAL;
}

static int clk_rcg_esc_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return clk_rcg_esc_set_rate(hw, rate, parent_rate);
}

/*
 * This type of clock has a glitch-free mux that switches between the output
 * of the M/N counter and an always-on clock source (XO). When clk_set_rate()
 * is called we need to make sure that we don't switch to the M/N counter if
 * it isn't clocking, because then the mux gets stuck and the clock stops
 * running. This can happen if the framework isn't aware that this clock is
 * on and so clk_set_rate() doesn't turn on the new parent. To fix this we
 * switch the mux in the enable/disable ops and reprogram the M/N counter in
 * the set_rate op. We also make sure to switch away from the M/N counter in
 * set_rate if software thinks the clock is off.
 */
static int clk_rcg_lcc_set_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	const struct freq_tbl *f;
	int ret;
	u32 gfm = BIT(10);

	f = qcom_find_freq(rcg->freq_tbl, rate);
	if (!f)
		return -EINVAL;

	/* Switch to XO to avoid glitches */
	regmap_update_bits(rcg->clkr.regmap, rcg->ns_reg, gfm, 0);
	ret = __clk_rcg_set_rate(rcg, f);
	/* Switch back to M/N if it's clocking */
	if (__clk_is_enabled(hw->clk))
		regmap_update_bits(rcg->clkr.regmap, rcg->ns_reg, gfm, gfm);

	return ret;
}

static int clk_rcg_lcc_enable(struct clk_hw *hw)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	u32 gfm = BIT(10);

	/* Use M/N */
	return regmap_update_bits(rcg->clkr.regmap, rcg->ns_reg, gfm, gfm);
}

static void clk_rcg_lcc_disable(struct clk_hw *hw)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	u32 gfm = BIT(10);

	/* Use XO */
	regmap_update_bits(rcg->clkr.regmap, rcg->ns_reg, gfm, 0);
}

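/*
 * Illustrative call sequence (not part of this driver) showing how the ops
 * above cooperate; "clk" is assumed to be a consumer handle for a clock
 * that uses clk_rcg_lcc_ops:
 *
 *	clk_set_rate(clk, rate);	// clock off: M/N reprogrammed, mux stays on XO
 *	clk_prepare_enable(clk);	// clk_rcg_lcc_enable() switches the mux to M/N
 *	clk_set_rate(clk, new_rate);	// mux parked on XO while M/N changes, then restored
 *	clk_disable_unprepare(clk);	// clk_rcg_lcc_disable() parks the mux back on XO
 */
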
static int __clk_dyn_rcg_set_rate(struct clk_hw *hw, unsigned long rate)
{
	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
	const struct freq_tbl *f;

	f = qcom_find_freq(rcg->freq_tbl, rate);
	if (!f)
		return -EINVAL;

	return configure_bank(rcg, f);
}

static int clk_dyn_rcg_set_rate(struct clk_hw *hw, unsigned long rate,
			    unsigned long parent_rate)
{
	return __clk_dyn_rcg_set_rate(hw, rate);
}

static int clk_dyn_rcg_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_dyn_rcg_set_rate(hw, rate);
}

const struct clk_ops clk_rcg_ops = {
	.enable = clk_enable_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_determine_rate,
	.set_rate = clk_rcg_set_rate,
};
EXPORT_SYMBOL_GPL(clk_rcg_ops);

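/*
 * Minimal sketch of how a platform clock controller driver might describe an
 * RCG handled by clk_rcg_ops. Everything below is hypothetical: the register
 * offsets, bit positions, parent map, frequency table and names are
 * placeholders, loosely modelled on drivers such as gcc-msm8960.c, and do not
 * correspond to any real SoC.
 *
 *	static const struct parent_map example_map[] = {
 *		{ P_PXO, 0 },
 *		{ P_PLL8, 3 },
 *	};
 *
 *	static const struct freq_tbl example_freq_tbl[] = {
 *		{ 19200000, P_PXO, 1, 0, 0 },
 *		{ 48000000, P_PLL8, 4, 1, 2 },
 *		{ }
 *	};
 *
 *	static struct clk_rcg example_src = {
 *		.ns_reg = 0x0054,
 *		.md_reg = 0x0050,
 *		.mn = {
 *			.mnctr_en_bit = 8,
 *			.mnctr_reset_bit = 7,
 *			.mnctr_mode_shift = 5,
 *			.n_val_shift = 16,
 *			.m_val_shift = 16,
 *			.width = 8,
 *		},
 *		.p = {
 *			.pre_div_shift = 3,
 *			.pre_div_width = 2,
 *		},
 *		.s = {
 *			.src_sel_shift = 0,
 *			.parent_map = example_map,
 *		},
 *		.freq_tbl = example_freq_tbl,
 *		.clkr = {
 *			.enable_reg = 0x0054,
 *			.enable_mask = BIT(11),
 *			.hw.init = &(struct clk_init_data){
 *				.name = "example_src",
 *				.parent_names = (const char *[]){ "pxo", "pll8" },
 *				.num_parents = 2,
 *				.ops = &clk_rcg_ops,
 *			},
 *		},
 *	};
 */
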
const struct clk_ops clk_rcg_bypass_ops = {
	.enable = clk_enable_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_bypass_determine_rate,
	.set_rate = clk_rcg_bypass_set_rate,
};
EXPORT_SYMBOL_GPL(clk_rcg_bypass_ops);

const struct clk_ops clk_rcg_bypass2_ops = {
	.enable = clk_enable_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_bypass2_determine_rate,
	.set_rate = clk_rcg_bypass2_set_rate,
	.set_rate_and_parent = clk_rcg_bypass2_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg_bypass2_ops);

const struct clk_ops clk_rcg_pixel_ops = {
	.enable = clk_enable_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_pixel_determine_rate,
	.set_rate = clk_rcg_pixel_set_rate,
	.set_rate_and_parent = clk_rcg_pixel_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg_pixel_ops);

const struct clk_ops clk_rcg_esc_ops = {
	.enable = clk_enable_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_esc_determine_rate,
	.set_rate = clk_rcg_esc_set_rate,
	.set_rate_and_parent = clk_rcg_esc_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg_esc_ops);

const struct clk_ops clk_rcg_lcc_ops = {
	.enable = clk_rcg_lcc_enable,
	.disable = clk_rcg_lcc_disable,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_determine_rate,
	.set_rate = clk_rcg_lcc_set_rate,
};
EXPORT_SYMBOL_GPL(clk_rcg_lcc_ops);

const struct clk_ops clk_dyn_rcg_ops = {
	.enable = clk_enable_regmap,
	.is_enabled = clk_is_enabled_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_dyn_rcg_get_parent,
	.set_parent = clk_dyn_rcg_set_parent,
	.recalc_rate = clk_dyn_rcg_recalc_rate,
	.determine_rate = clk_dyn_rcg_determine_rate,
	.set_rate = clk_dyn_rcg_set_rate,
	.set_rate_and_parent = clk_dyn_rcg_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_dyn_rcg_ops);
897