xref: /openbmc/linux/drivers/clk/qcom/clk-rcg2.c (revision 2359ccdd)
/*
 * Copyright (c) 2013, The Linux Foundation. All rights reserved.
 *
 * This software is licensed under the terms of the GNU General Public
 * License version 2, as published by the Free Software Foundation, and
 * may be copied, distributed, and modified under those terms.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include <linux/kernel.h>
#include <linux/bitops.h>
#include <linux/err.h>
#include <linux/bug.h>
#include <linux/export.h>
#include <linux/clk-provider.h>
#include <linux/delay.h>
#include <linux/regmap.h>
#include <linux/math64.h>

#include <asm/div64.h>

#include "clk-rcg.h"
#include "common.h"

#define CMD_REG			0x0
#define CMD_UPDATE		BIT(0)
#define CMD_ROOT_EN		BIT(1)
#define CMD_DIRTY_CFG		BIT(4)
#define CMD_DIRTY_N		BIT(5)
#define CMD_DIRTY_M		BIT(6)
#define CMD_DIRTY_D		BIT(7)
#define CMD_ROOT_OFF		BIT(31)

#define CFG_REG			0x4
#define CFG_SRC_DIV_SHIFT	0
#define CFG_SRC_SEL_SHIFT	8
#define CFG_SRC_SEL_MASK	(0x7 << CFG_SRC_SEL_SHIFT)
#define CFG_MODE_SHIFT		12
#define CFG_MODE_MASK		(0x3 << CFG_MODE_SHIFT)
#define CFG_MODE_DUAL_EDGE	(0x2 << CFG_MODE_SHIFT)

#define M_REG			0x8
#define N_REG			0xc
#define D_REG			0x10

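/*
 * Frequency table rounding policy: CEIL picks the lowest table entry at or
 * above the requested rate, FLOOR picks the highest entry at or below it.
 */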
enum freq_policy {
	FLOOR,
	CEIL,
};

static int clk_rcg2_is_enabled(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cmd;
	int ret;

	ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
	if (ret)
		return ret;

	return (cmd & CMD_ROOT_OFF) == 0;
}

static u8 clk_rcg2_get_parent(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int num_parents = clk_hw_get_num_parents(hw);
	u32 cfg;
	int i, ret;

	ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	if (ret)
		goto err;

	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++)
		if (cfg == rcg->parent_map[i].cfg)
			return i;

err:
	pr_debug("%s: Clock %s has invalid parent, using default.\n",
		 __func__, clk_hw_get_name(hw));
	return 0;
}

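/*
 * Latch a new configuration: setting CMD_UPDATE tells the RCG to switch to
 * the values currently held in CFG/M/N/D; the hardware clears the bit once
 * the switch has completed, which is polled for up to 500 us below.
 */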
static int update_config(struct clk_rcg2 *rcg)
{
	int count, ret;
	u32 cmd;
	struct clk_hw *hw = &rcg->clkr.hw;
	const char *name = clk_hw_get_name(hw);

	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				 CMD_UPDATE, CMD_UPDATE);
	if (ret)
		return ret;

	/* Wait for update to take effect */
	for (count = 500; count > 0; count--) {
		ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
		if (ret)
			return ret;
		if (!(cmd & CMD_UPDATE))
			return 0;
		udelay(1);
	}

	WARN(1, "%s: rcg didn't update its configuration.", name);
	return 0;
}

static int clk_rcg2_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;
	u32 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;

	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				 CFG_SRC_SEL_MASK, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}

/*
 * Calculate the m/n:d rate
 *
 *          parent_rate     m
 *   rate = ----------- x  ---
 *              div         n
 *
 * where div is the effective half-integer divider. The hid_div argument is
 * the raw CFG divider field, which encodes div as (2 * div - 1), hence the
 * rate * 2 / (hid_div + 1) step below.
 */
static unsigned long
calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 hid_div)
{
	if (hid_div) {
		rate *= 2;
		rate /= hid_div + 1;
	}

	if (mode) {
		u64 tmp = rate;
		tmp *= m;
		do_div(tmp, n);
		rate = tmp;
	}

	return rate;
}
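/*
 * Illustrative example (values assumed, not from a real frequency plan):
 * with an 800 MHz parent and a raw hid_div field of 3, the divider is
 * (3 + 1) / 2 = 2, giving 400 MHz; enabling MND mode with m = 1, n = 4
 * scales this further to 100 MHz.
 */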

static unsigned long
clk_rcg2_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg, hid_div, m = 0, n = 0, mode = 0, mask;

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);

	if (rcg->mnd_width) {
		mask = BIT(rcg->mnd_width) - 1;
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + M_REG, &m);
		m &= mask;
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + N_REG, &n);
		n = ~n;
		n &= mask;
		n += m;
		mode = cfg & CFG_MODE_MASK;
		mode >>= CFG_MODE_SHIFT;
	}

	mask = BIT(rcg->hid_width) - 1;
	hid_div = cfg >> CFG_SRC_DIV_SHIFT;
	hid_div &= mask;

	return calc_rate(parent_rate, m, n, mode, hid_div);
}

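/*
 * Pick a frequency table entry for the requested rate. When the clock may
 * propagate rate requests (CLK_SET_RATE_PARENT), the parent rate implied by
 * the table entry is reconstructed by inverting the pre-divider and M/N
 * scaling; otherwise the parent's current rate is used as-is.
 */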
static int _freq_tbl_determine_rate(struct clk_hw *hw, const struct freq_tbl *f,
				    struct clk_rate_request *req,
				    enum freq_policy policy)
{
	unsigned long clk_flags, rate = req->rate;
	struct clk_hw *p;
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int index;

	switch (policy) {
	case FLOOR:
		f = qcom_find_freq_floor(f, rate);
		break;
	case CEIL:
		f = qcom_find_freq(f, rate);
		break;
	default:
		return -EINVAL;
	}

	if (!f)
		return -EINVAL;

	index = qcom_find_src_index(hw, rcg->parent_map, f->src);
	if (index < 0)
		return index;

	clk_flags = clk_hw_get_flags(hw);
	p = clk_hw_get_parent_by_index(hw, index);
	if (clk_flags & CLK_SET_RATE_PARENT) {
		if (f->pre_div) {
			rate /= 2;
			rate *= f->pre_div + 1;
		}

		if (f->n) {
			u64 tmp = rate;
			tmp = tmp * f->n;
			do_div(tmp, f->m);
			rate = tmp;
		}
	} else {
		rate = clk_hw_get_rate(p);
	}
	req->best_parent_hw = p;
	req->best_parent_rate = rate;
	req->rate = f->freq;

	return 0;
}

static int clk_rcg2_determine_rate(struct clk_hw *hw,
				   struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, CEIL);
}

static int clk_rcg2_determine_floor_rate(struct clk_hw *hw,
					 struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, FLOOR);
}

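/*
 * Program a frequency table entry into the RCG. The M/N counter registers
 * use an inverted encoding: M_REG holds M directly, N_REG holds ~(N - M)
 * and D_REG holds ~N (which, per the usual RCG encoding, corresponds to a
 * nominally 50% duty cycle). clk_rcg2_recalc_rate() undoes the N_REG
 * encoding when reading the rate back. Dual-edge mode is only enabled when
 * M/N division is actually in use (f->n != 0 and M != N).
 */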
static int clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
{
	u32 cfg, mask;
	struct clk_hw *hw = &rcg->clkr.hw;
	int ret, index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	if (index < 0)
		return index;

	if (rcg->mnd_width && f->n) {
		mask = BIT(rcg->mnd_width) - 1;
		ret = regmap_update_bits(rcg->clkr.regmap,
				rcg->cmd_rcgr + M_REG, mask, f->m);
		if (ret)
			return ret;

		ret = regmap_update_bits(rcg->clkr.regmap,
				rcg->cmd_rcgr + N_REG, mask, ~(f->n - f->m));
		if (ret)
			return ret;

		ret = regmap_update_bits(rcg->clkr.regmap,
				rcg->cmd_rcgr + D_REG, mask, ~f->n);
		if (ret)
			return ret;
	}

	mask = BIT(rcg->hid_width) - 1;
	mask |= CFG_SRC_SEL_MASK | CFG_MODE_MASK;
	cfg = f->pre_div << CFG_SRC_DIV_SHIFT;
	cfg |= rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
	if (rcg->mnd_width && f->n && (f->m != f->n))
		cfg |= CFG_MODE_DUAL_EDGE;
	ret = regmap_update_bits(rcg->clkr.regmap,
			rcg->cmd_rcgr + CFG_REG, mask, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}

static int __clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
			       enum freq_policy policy)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f;

	switch (policy) {
	case FLOOR:
		f = qcom_find_freq_floor(rcg->freq_tbl, rate);
		break;
	case CEIL:
		f = qcom_find_freq(rcg->freq_tbl, rate);
		break;
	default:
		return -EINVAL;
	}

	if (!f)
		return -EINVAL;

	return clk_rcg2_configure(rcg, f);
}

static int clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
			    unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate, CEIL);
}

static int clk_rcg2_set_floor_rate(struct clk_hw *hw, unsigned long rate,
				   unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate, FLOOR);
}

static int clk_rcg2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_rcg2_set_rate(hw, rate, CEIL);
}

static int clk_rcg2_set_floor_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_rcg2_set_rate(hw, rate, FLOOR);
}

const struct clk_ops clk_rcg2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_set_rate,
	.set_rate_and_parent = clk_rcg2_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_ops);

const struct clk_ops clk_rcg2_floor_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_floor_rate,
	.set_rate = clk_rcg2_set_floor_rate,
	.set_rate_and_parent = clk_rcg2_set_floor_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_floor_ops);

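/*
 * M/N fractions used by the display (eDP/pixel) clocks below. Each entry
 * scales the fixed link-derived parent rate by num/den to reach one of the
 * pixel clocks noted in the comments; a zeroed entry terminates the table.
 */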
struct frac_entry {
	int num;
	int den;
};

static const struct frac_entry frac_table_675m[] = {	/* link rate of 270M */
	{ 52, 295 },	/* 119 M */
	{ 11, 57 },	/* 130.25 M */
	{ 63, 307 },	/* 138.50 M */
	{ 11, 50 },	/* 148.50 M */
	{ 47, 206 },	/* 154 M */
	{ 31, 100 },	/* 205.25 M */
	{ 107, 269 },	/* 268.50 M */
	{ },
};

static const struct frac_entry frac_table_810m[] = { /* link rate of 162M */
	{ 31, 211 },	/* 119 M */
	{ 32, 199 },	/* 130.25 M */
	{ 63, 307 },	/* 138.50 M */
	{ 11, 60 },	/* 148.50 M */
	{ 50, 263 },	/* 154 M */
	{ 31, 120 },	/* 205.25 M */
	{ 119, 359 },	/* 268.50 M */
	{ },
};

static int clk_edp_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;
	s64 src_rate = parent_rate;
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;

	if (src_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		request = rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((src_rate < (request - delta)) ||
		    (src_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}

	return -EINVAL;
}

static int clk_edp_pixel_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Parent index is set statically in frequency table */
	return clk_edp_pixel_set_rate(hw, rate, parent_rate);
}

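/*
 * The eDP pixel clock always uses the parent named by the first frequency
 * table entry; determine_rate() therefore forces that parent and then
 * searches the fraction table for an entry whose implied parent rate is
 * within 100 kHz of the actual link clock.
 */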
static int clk_edp_pixel_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	/* Force the correct parent */
	req->best_parent_hw = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = clk_hw_get_rate(req->best_parent_hw);

	if (req->best_parent_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		request = req->rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((req->best_parent_rate < (request - delta)) ||
		    (req->best_parent_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		hid_div >>= CFG_SRC_DIV_SHIFT;
		hid_div &= mask;

		req->rate = calc_rate(req->best_parent_rate,
				      frac->num, frac->den,
				      !!frac->den, hid_div);
		return 0;
	}

	return -EINVAL;
}

const struct clk_ops clk_edp_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_edp_pixel_set_rate,
	.set_rate_and_parent = clk_edp_pixel_set_rate_and_parent,
	.determine_rate = clk_edp_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_edp_pixel_ops);

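/*
 * The DSI byte clock only uses the half-integer pre-divider: the divider
 * field is programmed with DIV_ROUND_UP(2 * parent_rate, rate) - 1, so no
 * M/N counter is involved. The parent comes from the first frequency table
 * entry.
 */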
static int clk_byte_determine_rate(struct clk_hw *hw,
				   struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
	unsigned long parent_rate, div;
	u32 mask = BIT(rcg->hid_width) - 1;
	struct clk_hw *p;

	if (req->rate == 0)
		return -EINVAL;

	req->best_parent_hw = p = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = parent_rate = clk_hw_round_rate(p, req->rate);

	div = DIV_ROUND_UP((2 * parent_rate), req->rate) - 1;
	div = min_t(u32, div, mask);

	req->rate = calc_rate(parent_rate, 0, 0, 0, div);

	return 0;
}

static int clk_byte_set_rate(struct clk_hw *hw, unsigned long rate,
			 unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	unsigned long div;
	u32 mask = BIT(rcg->hid_width) - 1;

	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	f.pre_div = div;

	return clk_rcg2_configure(rcg, &f);
}

static int clk_byte_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Parent index is set statically in frequency table */
	return clk_byte_set_rate(hw, rate, parent_rate);
}

const struct clk_ops clk_byte_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte_set_rate,
	.set_rate_and_parent = clk_byte_set_rate_and_parent,
	.determine_rate = clk_byte_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte_ops);

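/*
 * byte2 differs from the byte clock above in that it does not force a
 * parent from the frequency table: determine_rate() rounds against whatever
 * parent the framework proposes, and set_rate() reads CFG back to keep the
 * currently selected source.
 */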
static int clk_byte2_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	unsigned long parent_rate, div;
	u32 mask = BIT(rcg->hid_width) - 1;
	struct clk_hw *p;
	unsigned long rate = req->rate;

	if (rate == 0)
		return -EINVAL;

	p = req->best_parent_hw;
	req->best_parent_rate = parent_rate = clk_hw_round_rate(p, rate);

	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	req->rate = calc_rate(parent_rate, 0, 0, 0, div);

	return 0;
}

static int clk_byte2_set_rate(struct clk_hw *hw, unsigned long rate,
			 unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	unsigned long div;
	int i, num_parents = clk_hw_get_num_parents(hw);
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 cfg;

	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	f.pre_div = div;

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++) {
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			return clk_rcg2_configure(rcg, &f);
		}
	}

	return -EINVAL;
}

static int clk_byte2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Read the hardware to determine parent during set_rate */
	return clk_byte2_set_rate(hw, rate, parent_rate);
}

const struct clk_ops clk_byte2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte2_set_rate,
	.set_rate_and_parent = clk_byte2_set_rate_and_parent,
	.determine_rate = clk_byte2_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte2_ops);

static const struct frac_entry frac_table_pixel[] = {
	{ 3, 8 },
	{ 2, 9 },
	{ 4, 9 },
	{ 1, 1 },
	{ }
};

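/*
 * For the pixel clock, try each num/den fraction in turn: ask the parent to
 * round rate * den / num and accept the first combination that lands within
 * 100 kHz of that request.
 */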
static int clk_pixel_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	unsigned long request, src_rate;
	int delta = 100000;
	const struct frac_entry *frac = frac_table_pixel;

	for (; frac->num; frac++) {
		request = (req->rate * frac->den) / frac->num;

		src_rate = clk_hw_round_rate(req->best_parent_hw, request);
		if ((src_rate < (request - delta)) ||
			(src_rate > (request + delta)))
			continue;

		req->best_parent_rate = src_rate;
		req->rate = (src_rate * frac->num) / frac->den;
		return 0;
	}

	return -EINVAL;
}

static int clk_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	const struct frac_entry *frac = frac_table_pixel;
	unsigned long request;
	int delta = 100000;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div, cfg;
	int i, num_parents = clk_hw_get_num_parents(hw);

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++)
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			break;
		}

	for (; frac->num; frac++) {
		request = (rate * frac->den) / frac->num;

		if ((parent_rate < (request - delta)) ||
			(parent_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}
	return -EINVAL;
}

static int clk_pixel_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate, u8 index)
{
	return clk_pixel_set_rate(hw, rate, parent_rate);
}

const struct clk_ops clk_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_pixel_set_rate,
	.set_rate_and_parent = clk_pixel_set_rate_and_parent,
	.determine_rate = clk_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_pixel_ops);

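/*
 * The gfx3d RCG ping-pongs between two alternate PLL sources (p8/p2 below)
 * so that the PLL currently driving the GPU is never reprogrammed in place:
 * a request at the XO or fixed p9 rate is served directly, anything else is
 * capped at the p9 rate and routed to whichever alternate is not the
 * current parent (preferring one that already runs at the requested rate).
 */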
static int clk_gfx3d_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	struct clk_rate_request parent_req = { };
	struct clk_hw *p2, *p8, *p9, *xo;
	unsigned long p9_rate;
	int ret;

	xo = clk_hw_get_parent_by_index(hw, 0);
	if (req->rate == clk_hw_get_rate(xo)) {
		req->best_parent_hw = xo;
		return 0;
	}

	p9 = clk_hw_get_parent_by_index(hw, 2);
	p2 = clk_hw_get_parent_by_index(hw, 3);
	p8 = clk_hw_get_parent_by_index(hw, 4);

	/* PLL9 is a fixed rate PLL */
	p9_rate = clk_hw_get_rate(p9);

	parent_req.rate = req->rate = min(req->rate, p9_rate);
	if (req->rate == p9_rate) {
		req->rate = req->best_parent_rate = p9_rate;
		req->best_parent_hw = p9;
		return 0;
	}

	if (req->best_parent_hw == p9) {
		/* Are we going back to a previously used rate? */
		if (clk_hw_get_rate(p8) == req->rate)
			req->best_parent_hw = p8;
		else
			req->best_parent_hw = p2;
	} else if (req->best_parent_hw == p8) {
		req->best_parent_hw = p2;
	} else {
		req->best_parent_hw = p8;
	}

	ret = __clk_determine_rate(req->best_parent_hw, &parent_req);
	if (ret)
		return ret;

	req->rate = req->best_parent_rate = parent_req.rate;

	return 0;
}

static int clk_gfx3d_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate, u8 index)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg;
	int ret;

	/* Just mux it, we don't use the division or m/n hardware */
	cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
	ret = regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}

static int clk_gfx3d_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	/*
	 * We should never get here; clk_gfx3d_determine_rate() should always
	 * make us use a different parent than what we're currently using, so
	 * clk_gfx3d_set_rate_and_parent() should always be called.
	 */
	return 0;
}

const struct clk_ops clk_gfx3d_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_gfx3d_set_rate,
	.set_rate_and_parent = clk_gfx3d_set_rate_and_parent,
	.determine_rate = clk_gfx3d_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_gfx3d_ops);
792