/*
 * Copyright (c) 2013, The Linux Foundation. All rights reserved.
 *
 * This software is licensed under the terms of the GNU General Public
 * License version 2, as published by the Free Software Foundation, and
 * may be copied, distributed, and modified under those terms.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include <linux/kernel.h>
#include <linux/bitops.h>
#include <linux/err.h>
#include <linux/bug.h>
#include <linux/export.h>
#include <linux/clk-provider.h>
#include <linux/delay.h>
#include <linux/regmap.h>
#include <linux/math64.h>

#include <asm/div64.h>

#include "clk-rcg.h"
#include "common.h"

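/* All register offsets below are relative to the RCG's cmd_rcgr base. */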
#define CMD_REG			0x0
#define CMD_UPDATE		BIT(0)
#define CMD_ROOT_EN		BIT(1)
#define CMD_DIRTY_CFG		BIT(4)
#define CMD_DIRTY_N		BIT(5)
#define CMD_DIRTY_M		BIT(6)
#define CMD_DIRTY_D		BIT(7)
#define CMD_ROOT_OFF		BIT(31)

#define CFG_REG			0x4
#define CFG_SRC_DIV_SHIFT	0
#define CFG_SRC_SEL_SHIFT	8
#define CFG_SRC_SEL_MASK	(0x7 << CFG_SRC_SEL_SHIFT)
#define CFG_MODE_SHIFT		12
#define CFG_MODE_MASK		(0x3 << CFG_MODE_SHIFT)
#define CFG_MODE_DUAL_EDGE	(0x2 << CFG_MODE_SHIFT)

#define M_REG			0x8
#define N_REG			0xc
#define D_REG			0x10

static int clk_rcg2_is_enabled(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cmd;
	int ret;

	ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
	if (ret)
		return ret;

	return (cmd & CMD_ROOT_OFF) == 0;
}

static u8 clk_rcg2_get_parent(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int num_parents = clk_hw_get_num_parents(hw);
	u32 cfg;
	int i, ret;

	ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	if (ret)
		goto err;

	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++)
		if (cfg == rcg->parent_map[i].cfg)
			return i;

err:
	pr_debug("%s: Clock %s has invalid parent, using default.\n",
		 __func__, clk_hw_get_name(hw));
	return 0;
}

static int update_config(struct clk_rcg2 *rcg)
{
	int count, ret;
	u32 cmd;
	struct clk_hw *hw = &rcg->clkr.hw;
	const char *name = clk_hw_get_name(hw);

	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				 CMD_UPDATE, CMD_UPDATE);
	if (ret)
		return ret;

	/* Wait for update to take effect */
	for (count = 500; count > 0; count--) {
		ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
		if (ret)
			return ret;
		if (!(cmd & CMD_UPDATE))
			return 0;
		udelay(1);
	}

	WARN(1, "%s: rcg didn't update its configuration.", name);
	return 0;
}

static int clk_rcg2_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;
	u32 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;

	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				 CFG_SRC_SEL_MASK, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}

/*
 * Calculate m/n:d rate
 *
 *          parent_rate     m
 *   rate = ----------- x  ---
 *            hid_div       n
 */
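/*
 * Note that hid_div is the raw CFG divider field: the code below computes
 * rate * 2 / (hid_div + 1), i.e. the field encodes (2 * divider - 1).
 * A field value of 1 therefore leaves the parent undivided and a value of
 * 3 divides it by two.
 */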
static unsigned long
calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 hid_div)
{
	if (hid_div) {
		rate *= 2;
		rate /= hid_div + 1;
	}

	if (mode) {
		u64 tmp = rate;
		tmp *= m;
		do_div(tmp, n);
		rate = tmp;
	}

	return rate;
}

static unsigned long
clk_rcg2_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg, hid_div, m = 0, n = 0, mode = 0, mask;

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);

	if (rcg->mnd_width) {
		mask = BIT(rcg->mnd_width) - 1;
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + M_REG, &m);
		m &= mask;
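		/*
		 * The N register holds not(n - m) (see clk_rcg2_configure()),
		 * so invert it and add m back to recover n.
		 */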
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + N_REG, &n);
		n = ~n;
		n &= mask;
		n += m;
		mode = cfg & CFG_MODE_MASK;
		mode >>= CFG_MODE_SHIFT;
	}

	mask = BIT(rcg->hid_width) - 1;
	hid_div = cfg >> CFG_SRC_DIV_SHIFT;
	hid_div &= mask;

	return calc_rate(parent_rate, m, n, mode, hid_div);
}

static int _freq_tbl_determine_rate(struct clk_hw *hw,
		const struct freq_tbl *f, struct clk_rate_request *req)
{
	unsigned long clk_flags, rate = req->rate;
	struct clk_hw *p;
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int index;

	f = qcom_find_freq(f, rate);
	if (!f)
		return -EINVAL;

	index = qcom_find_src_index(hw, rcg->parent_map, f->src);
	if (index < 0)
		return index;

	clk_flags = clk_hw_get_flags(hw);
	p = clk_hw_get_parent_by_index(hw, index);
	if (clk_flags & CLK_SET_RATE_PARENT) {
		if (f->pre_div) {
			rate /= 2;
			rate *= f->pre_div + 1;
		}

		if (f->n) {
			u64 tmp = rate;
			tmp = tmp * f->n;
			do_div(tmp, f->m);
			rate = tmp;
		}
	} else {
		rate = clk_hw_get_rate(p);
	}
	req->best_parent_hw = p;
	req->best_parent_rate = rate;
	req->rate = f->freq;

	return 0;
}

static int clk_rcg2_determine_rate(struct clk_hw *hw,
				   struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req);
}

static int clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
{
	u32 cfg, mask;
	struct clk_hw *hw = &rcg->clkr.hw;
	int ret, index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	if (index < 0)
		return index;

	if (rcg->mnd_width && f->n) {
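		/*
		 * M is programmed directly; N and D are written in inverted
		 * form, as not(n - m) and not(n) respectively.
		 */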
		mask = BIT(rcg->mnd_width) - 1;
		ret = regmap_update_bits(rcg->clkr.regmap,
				rcg->cmd_rcgr + M_REG, mask, f->m);
		if (ret)
			return ret;

		ret = regmap_update_bits(rcg->clkr.regmap,
				rcg->cmd_rcgr + N_REG, mask, ~(f->n - f->m));
		if (ret)
			return ret;

		ret = regmap_update_bits(rcg->clkr.regmap,
				rcg->cmd_rcgr + D_REG, mask, ~f->n);
		if (ret)
			return ret;
	}

	mask = BIT(rcg->hid_width) - 1;
	mask |= CFG_SRC_SEL_MASK | CFG_MODE_MASK;
	cfg = f->pre_div << CFG_SRC_DIV_SHIFT;
	cfg |= rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
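	/* Use dual-edge mode whenever the M/N counter actually divides (m != n) */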
	if (rcg->mnd_width && f->n && (f->m != f->n))
		cfg |= CFG_MODE_DUAL_EDGE;
	ret = regmap_update_bits(rcg->clkr.regmap,
			rcg->cmd_rcgr + CFG_REG, mask, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}

static int __clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f;

	f = qcom_find_freq(rcg->freq_tbl, rate);
	if (!f)
		return -EINVAL;

	return clk_rcg2_configure(rcg, f);
}

static int clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
			    unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate);
}

static int clk_rcg2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_rcg2_set_rate(hw, rate);
}

const struct clk_ops clk_rcg2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_set_rate,
	.set_rate_and_parent = clk_rcg2_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_ops);
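
/*
 * Example (a minimal sketch, not part of this driver): a clock provider
 * using clk_rcg2_ops typically describes each RCG with a struct clk_rcg2.
 * The name, register offset, parent map and frequency table below are
 * hypothetical placeholders rather than real hardware values.
 *
 *	static const struct parent_map example_parent_map[] = {
 *		{ .src = 0, .cfg = 0 },
 *	};
 *
 *	static const struct freq_tbl ftbl_example_clk_src[] = {
 *		{ .freq = 19200000, .src = 0, .pre_div = 1 },
 *		{ }
 *	};
 *
 *	static struct clk_rcg2 example_clk_src = {
 *		.cmd_rcgr = 0x1000,
 *		.mnd_width = 8,
 *		.hid_width = 5,
 *		.parent_map = example_parent_map,
 *		.freq_tbl = ftbl_example_clk_src,
 *		.clkr.hw.init = &(struct clk_init_data){
 *			.name = "example_clk_src",
 *			.parent_names = (const char *[]){ "xo" },
 *			.num_parents = 1,
 *			.ops = &clk_rcg2_ops,
 *		},
 *	};
 */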

struct frac_entry {
	int num;
	int den;
};

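/*
 * m/n fractions for the eDP pixel clock, applied on top of a fixed
 * 675 MHz or 810 MHz parent rate (see clk_edp_pixel_set_rate()).
 */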
static const struct frac_entry frac_table_675m[] = {	/* link rate of 270M */
	{ 52, 295 },	/* 119 M */
	{ 11, 57 },	/* 130.25 M */
	{ 63, 307 },	/* 138.50 M */
	{ 11, 50 },	/* 148.50 M */
	{ 47, 206 },	/* 154 M */
	{ 31, 100 },	/* 205.25 M */
	{ 107, 269 },	/* 268.50 M */
	{ },
};

static const struct frac_entry frac_table_810m[] = {	/* link rate of 162M */
	{ 31, 211 },	/* 119 M */
	{ 32, 199 },	/* 130.25 M */
	{ 63, 307 },	/* 138.50 M */
	{ 11, 60 },	/* 148.50 M */
	{ 50, 263 },	/* 154 M */
	{ 31, 120 },	/* 205.25 M */
	{ 119, 359 },	/* 268.50 M */
	{ },
};

static int clk_edp_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	const struct frac_entry *frac;
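	/* Allowed mismatch, in Hz, between the parent rate and the ideal request */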
	int delta = 100000;
	s64 src_rate = parent_rate;
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;

	if (src_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		request = rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((src_rate < (request - delta)) ||
		    (src_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}

	return -EINVAL;
}

static int clk_edp_pixel_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Parent index is set statically in frequency table */
	return clk_edp_pixel_set_rate(hw, rate, parent_rate);
}

static int clk_edp_pixel_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	/* Force the correct parent */
	req->best_parent_hw = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = clk_hw_get_rate(req->best_parent_hw);

	if (req->best_parent_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		request = req->rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((req->best_parent_rate < (request - delta)) ||
		    (req->best_parent_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		hid_div >>= CFG_SRC_DIV_SHIFT;
		hid_div &= mask;

		req->rate = calc_rate(req->best_parent_rate,
				      frac->num, frac->den,
				      !!frac->den, hid_div);
		return 0;
	}

	return -EINVAL;
}

const struct clk_ops clk_edp_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_edp_pixel_set_rate,
	.set_rate_and_parent = clk_edp_pixel_set_rate_and_parent,
	.determine_rate = clk_edp_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_edp_pixel_ops);

static int clk_byte_determine_rate(struct clk_hw *hw,
				   struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
	unsigned long parent_rate, div;
	u32 mask = BIT(rcg->hid_width) - 1;
	struct clk_hw *p;

	if (req->rate == 0)
		return -EINVAL;

	req->best_parent_hw = p = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = parent_rate = clk_hw_round_rate(p, req->rate);

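	/*
	 * The CFG divider field encodes 2 * divider - 1 (see calc_rate()),
	 * hence the DIV_ROUND_UP(2 * parent_rate, rate) - 1 below.
	 */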
	div = DIV_ROUND_UP((2 * parent_rate), req->rate) - 1;
	div = min_t(u32, div, mask);

	req->rate = calc_rate(parent_rate, 0, 0, 0, div);

	return 0;
}

static int clk_byte_set_rate(struct clk_hw *hw, unsigned long rate,
			 unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	unsigned long div;
	u32 mask = BIT(rcg->hid_width) - 1;

	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	f.pre_div = div;

	return clk_rcg2_configure(rcg, &f);
}

static int clk_byte_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Parent index is set statically in frequency table */
	return clk_byte_set_rate(hw, rate, parent_rate);
}

const struct clk_ops clk_byte_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte_set_rate,
	.set_rate_and_parent = clk_byte_set_rate_and_parent,
	.determine_rate = clk_byte_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte_ops);

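/* Supported num/den ratios between the pixel clock and its parent rate. */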
static const struct frac_entry frac_table_pixel[] = {
	{ 3, 8 },
	{ 2, 9 },
	{ 4, 9 },
	{ 1, 1 },
	{ }
};

static int clk_pixel_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	unsigned long request, src_rate;
	int delta = 100000;
	const struct freq_tbl *f = rcg->freq_tbl;
	const struct frac_entry *frac = frac_table_pixel;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	req->best_parent_hw = clk_hw_get_parent_by_index(hw, index);

	for (; frac->num; frac++) {
		request = (req->rate * frac->den) / frac->num;

		src_rate = clk_hw_round_rate(req->best_parent_hw, request);
		if ((src_rate < (request - delta)) ||
			(src_rate > (request + delta)))
			continue;

		req->best_parent_rate = src_rate;
		req->rate = (src_rate * frac->num) / frac->den;
		return 0;
	}

	return -EINVAL;
}

static int clk_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	const struct frac_entry *frac = frac_table_pixel;
	unsigned long request;
	int delta = 100000;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;

	for (; frac->num; frac++) {
		request = (rate * frac->den) / frac->num;

		if ((parent_rate < (request - delta)) ||
			(parent_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}
	return -EINVAL;
}

static int clk_pixel_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate, u8 index)
{
	/* Parent index is set statically in frequency table */
	return clk_pixel_set_rate(hw, rate, parent_rate);
}

const struct clk_ops clk_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_pixel_set_rate,
	.set_rate_and_parent = clk_pixel_set_rate_and_parent,
	.determine_rate = clk_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_pixel_ops);