// SPDX-License-Identifier: GPL-2.0+
/*
 * Driver for Renesas Versaclock 3
 *
 * Copyright (C) 2023 Renesas Electronics Corp.
 */

#include <linux/clk-provider.h>
#include <linux/i2c.h>
#include <linux/limits.h>
#include <linux/module.h>
#include <linux/regmap.h>

#define NUM_CONFIG_REGISTERS 37

#define VC3_GENERAL_CTR 0x0
#define VC3_GENERAL_CTR_DIV1_SRC_SEL BIT(3)
#define VC3_GENERAL_CTR_PLL3_REFIN_SEL BIT(2)

#define VC3_PLL3_M_DIVIDER 0x3
#define VC3_PLL3_M_DIV1 BIT(7)
#define VC3_PLL3_M_DIV2 BIT(6)
#define VC3_PLL3_M_DIV(n) ((n) & GENMASK(5, 0))

#define VC3_PLL3_N_DIVIDER 0x4
#define VC3_PLL3_LOOP_FILTER_N_DIV_MSB 0x5

#define VC3_PLL3_CHARGE_PUMP_CTRL 0x6
#define VC3_PLL3_CHARGE_PUMP_CTRL_OUTDIV3_SRC_SEL BIT(7)

#define VC3_PLL1_CTRL_OUTDIV5 0x7
#define VC3_PLL1_CTRL_OUTDIV5_PLL1_MDIV_DOUBLER BIT(7)

#define VC3_PLL1_M_DIVIDER 0x8
#define VC3_PLL1_M_DIV1 BIT(7)
#define VC3_PLL1_M_DIV2 BIT(6)
#define VC3_PLL1_M_DIV(n) ((n) & GENMASK(5, 0))

#define VC3_PLL1_VCO_N_DIVIDER 0x9
#define VC3_PLL1_LOOP_FILTER_N_DIV_MSB 0x0a

#define VC3_OUT_DIV1_DIV2_CTRL 0xf

#define VC3_PLL2_FB_INT_DIV_MSB 0x10
#define VC3_PLL2_FB_INT_DIV_LSB 0x11
#define VC3_PLL2_FB_FRC_DIV_MSB 0x12
#define VC3_PLL2_FB_FRC_DIV_LSB 0x13

#define VC3_PLL2_M_DIVIDER 0x1a
#define VC3_PLL2_MDIV_DOUBLER BIT(7)
#define VC3_PLL2_M_DIV1 BIT(6)
#define VC3_PLL2_M_DIV2 BIT(5)
#define VC3_PLL2_M_DIV(n) ((n) & GENMASK(4, 0))

#define VC3_OUT_DIV3_DIV4_CTRL 0x1b

#define VC3_PLL_OP_CTRL 0x1c
#define VC3_PLL_OP_CTRL_PLL2_REFIN_SEL 6

#define VC3_OUTPUT_CTR 0x1d
#define VC3_OUTPUT_CTR_DIV4_SRC_SEL BIT(3)

#define VC3_SE2_CTRL_REG0 0x1f
#define VC3_SE2_CTRL_REG0_SE2_CLK_SEL BIT(6)

#define VC3_SE3_DIFF1_CTRL_REG 0x21
#define VC3_SE3_DIFF1_CTRL_REG_SE3_CLK_SEL BIT(6)

#define VC3_DIFF1_CTRL_REG 0x22
#define VC3_DIFF1_CTRL_REG_DIFF1_CLK_SEL BIT(7)

#define VC3_DIFF2_CTRL_REG 0x23
#define VC3_DIFF2_CTRL_REG_DIFF2_CLK_SEL BIT(7)

#define VC3_SE1_DIV4_CTRL 0x24
#define VC3_SE1_DIV4_CTRL_SE1_CLK_SEL BIT(3)

#define VC3_PLL1_VCO_MIN 300000000UL
#define VC3_PLL1_VCO_MAX 600000000UL

#define VC3_PLL2_VCO_MIN 400000000UL
#define VC3_PLL2_VCO_MAX 1200000000UL

#define VC3_PLL3_VCO_MIN 300000000UL
#define VC3_PLL3_VCO_MAX 800000000UL

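/* 2^16 is the denominator of PLL2's 16-bit fractional feedback divider. */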
#define VC3_2_POW_16 (U16_MAX + 1)
#define VC3_DIV_MASK(width) ((1 << (width)) - 1)

enum vc3_pfd_mux {
	VC3_PFD2_MUX,
	VC3_PFD3_MUX,
};

enum vc3_pfd {
	VC3_PFD1,
	VC3_PFD2,
	VC3_PFD3,
};

enum vc3_pll {
	VC3_PLL1,
	VC3_PLL2,
	VC3_PLL3,
};

enum vc3_div_mux {
	VC3_DIV1_MUX,
	VC3_DIV3_MUX,
	VC3_DIV4_MUX,
};

enum vc3_div {
	VC3_DIV1,
	VC3_DIV2,
	VC3_DIV3,
	VC3_DIV4,
	VC3_DIV5,
};

enum vc3_clk {
	VC3_REF,
	VC3_SE1,
	VC3_SE2,
	VC3_SE3,
	VC3_DIFF1,
	VC3_DIFF2,
};

enum vc3_clk_mux {
	VC3_SE1_MUX = VC3_SE1 - 1,
	VC3_SE2_MUX = VC3_SE2 - 1,
	VC3_SE3_MUX = VC3_SE3 - 1,
	VC3_DIFF1_MUX = VC3_DIFF1 - 1,
	VC3_DIFF2_MUX = VC3_DIFF2 - 1,
};

struct vc3_clk_data {
	u8 offs;
	u8 bitmsk;
};

struct vc3_pfd_data {
	u8 num;
	u8 offs;
	u8 mdiv1_bitmsk;
	u8 mdiv2_bitmsk;
};

struct vc3_pll_data {
	u8 num;
	u8 int_div_msb_offs;
	u8 int_div_lsb_offs;
	unsigned long vco_min;
	unsigned long vco_max;
};

struct vc3_div_data {
	u8 offs;
	const struct clk_div_table *table;
	u8 shift;
	u8 width;
	u8 flags;
};

struct vc3_hw_data {
	struct clk_hw hw;
	struct regmap *regmap;
	const void *data;

	u32 div_int;
	u32 div_frc;
};

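/*
 * Register value to divisor mappings for the output dividers. DIV2, DIV4 and
 * DIV5 share one table; DIV1 and DIV3 each have their own.
 */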
static const struct clk_div_table div1_divs[] = {
	{ .val = 0, .div = 1, }, { .val = 1, .div = 4, },
	{ .val = 2, .div = 5, }, { .val = 3, .div = 6, },
	{ .val = 4, .div = 2, }, { .val = 5, .div = 8, },
	{ .val = 6, .div = 10, }, { .val = 7, .div = 12, },
	{ .val = 8, .div = 4, }, { .val = 9, .div = 16, },
	{ .val = 10, .div = 20, }, { .val = 11, .div = 24, },
	{ .val = 12, .div = 8, }, { .val = 13, .div = 32, },
	{ .val = 14, .div = 40, }, { .val = 15, .div = 48, },
	{}
};

static const struct clk_div_table div245_divs[] = {
	{ .val = 0, .div = 1, }, { .val = 1, .div = 3, },
	{ .val = 2, .div = 5, }, { .val = 3, .div = 10, },
	{ .val = 4, .div = 2, }, { .val = 5, .div = 6, },
	{ .val = 6, .div = 10, }, { .val = 7, .div = 20, },
	{ .val = 8, .div = 4, }, { .val = 9, .div = 12, },
	{ .val = 10, .div = 20, }, { .val = 11, .div = 40, },
	{ .val = 12, .div = 5, }, { .val = 13, .div = 15, },
	{ .val = 14, .div = 25, }, { .val = 15, .div = 50, },
	{}
};

static const struct clk_div_table div3_divs[] = {
	{ .val = 0, .div = 1, }, { .val = 1, .div = 3, },
	{ .val = 2, .div = 5, }, { .val = 3, .div = 10, },
	{ .val = 4, .div = 2, }, { .val = 5, .div = 6, },
	{ .val = 6, .div = 10, }, { .val = 7, .div = 20, },
	{ .val = 8, .div = 4, }, { .val = 9, .div = 12, },
	{ .val = 10, .div = 20, }, { .val = 11, .div = 40, },
	{ .val = 12, .div = 8, }, { .val = 13, .div = 24, },
	{ .val = 14, .div = 40, }, { .val = 15, .div = 80, },
	{}
};

static struct clk_hw *clk_out[6];

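/*
 * PFD reference muxes: PFD2 and PFD3 take their reference either from the
 * external input (index 0) or from the DIV2 output.
 */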
static unsigned char vc3_pfd_mux_get_parent(struct clk_hw *hw)
{
	struct vc3_hw_data *vc3 = container_of(hw, struct vc3_hw_data, hw);
	const struct vc3_clk_data *pfd_mux = vc3->data;
	u32 src;

	regmap_read(vc3->regmap, pfd_mux->offs, &src);

	return !!(src & pfd_mux->bitmsk);
}

static int vc3_pfd_mux_set_parent(struct clk_hw *hw, u8 index)
{
	struct vc3_hw_data *vc3 = container_of(hw, struct vc3_hw_data, hw);
	const struct vc3_clk_data *pfd_mux = vc3->data;

	regmap_update_bits(vc3->regmap, pfd_mux->offs, pfd_mux->bitmsk,
			   index ? pfd_mux->bitmsk : 0);
	return 0;
}

static const struct clk_ops vc3_pfd_mux_ops = {
	.determine_rate = clk_hw_determine_rate_no_reparent,
	.set_parent = vc3_pfd_mux_set_parent,
	.get_parent = vc3_pfd_mux_get_parent,
};

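/*
 * The PFDs model the PLL input pre-dividers: the reference is either passed
 * through (optionally doubled for PLL1/PLL2), divided by a fixed 2, or
 * divided by the programmable M divider.
 */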
static unsigned long vc3_pfd_recalc_rate(struct clk_hw *hw,
					 unsigned long parent_rate)
{
	struct vc3_hw_data *vc3 = container_of(hw, struct vc3_hw_data, hw);
	const struct vc3_pfd_data *pfd = vc3->data;
	unsigned int prediv, premul;
	unsigned long rate;
	u8 mdiv;

	regmap_read(vc3->regmap, pfd->offs, &prediv);
	if (pfd->num == VC3_PFD1) {
		/* If bypass_prediv is set, the PLL is fed from Ref_in directly. */
		if (prediv & pfd->mdiv1_bitmsk) {
			/* Check whether the doubler is enabled. */
			regmap_read(vc3->regmap, VC3_PLL1_CTRL_OUTDIV5, &premul);
			if (premul & VC3_PLL1_CTRL_OUTDIV5_PLL1_MDIV_DOUBLER)
				parent_rate *= 2;
			return parent_rate;
		}
		mdiv = VC3_PLL1_M_DIV(prediv);
	} else if (pfd->num == VC3_PFD2) {
		/* If bypass_prediv is set, the PLL is fed from Ref_in directly. */
		if (prediv & pfd->mdiv1_bitmsk) {
			regmap_read(vc3->regmap, VC3_PLL2_M_DIVIDER, &premul);
			/* Check whether the doubler is enabled. */
			if (premul & VC3_PLL2_MDIV_DOUBLER)
				parent_rate *= 2;
			return parent_rate;
		}

		mdiv = VC3_PLL2_M_DIV(prediv);
	} else {
		/* If bypass_prediv is set, the PLL is fed from Ref_in directly. */
		if (prediv & pfd->mdiv1_bitmsk)
			return parent_rate;

		mdiv = VC3_PLL3_M_DIV(prediv);
	}

	if (prediv & pfd->mdiv2_bitmsk)
		rate = parent_rate / 2;
	else
		rate = parent_rate / mdiv;

	return rate;
}

static long vc3_pfd_round_rate(struct clk_hw *hw, unsigned long rate,
			       unsigned long *parent_rate)
{
	struct vc3_hw_data *vc3 = container_of(hw, struct vc3_hw_data, hw);
	const struct vc3_pfd_data *pfd = vc3->data;
	unsigned long idiv;

	/* The PLL cannot operate with an input clock above 50 MHz. */
	if (rate > 50000000)
		return -EINVAL;

	/* CLKIN is within range of the PLL input, feed it directly to the PLL. */
	if (*parent_rate <= 50000000)
		return *parent_rate;

	idiv = DIV_ROUND_UP(*parent_rate, rate);
	if (pfd->num == VC3_PFD1 || pfd->num == VC3_PFD3) {
		if (idiv > 63)
			return -EINVAL;
	} else {
		if (idiv > 31)
			return -EINVAL;
	}

	return *parent_rate / idiv;
}

static int vc3_pfd_set_rate(struct clk_hw *hw, unsigned long rate,
			    unsigned long parent_rate)
{
	struct vc3_hw_data *vc3 = container_of(hw, struct vc3_hw_data, hw);
	const struct vc3_pfd_data *pfd = vc3->data;
	unsigned long idiv;
	u8 div;

	/* CLKIN is within range of the PLL input, feed it directly to the PLL. */
	if (parent_rate <= 50000000) {
		regmap_update_bits(vc3->regmap, pfd->offs, pfd->mdiv1_bitmsk,
				   pfd->mdiv1_bitmsk);
		regmap_update_bits(vc3->regmap, pfd->offs, pfd->mdiv2_bitmsk, 0);
		return 0;
	}

	idiv = DIV_ROUND_UP(parent_rate, rate);
	/* We have a dedicated divide-by-2 predivider. */
	if (idiv == 2) {
		regmap_update_bits(vc3->regmap, pfd->offs, pfd->mdiv2_bitmsk,
				   pfd->mdiv2_bitmsk);
		regmap_update_bits(vc3->regmap, pfd->offs, pfd->mdiv1_bitmsk, 0);
	} else {
		if (pfd->num == VC3_PFD1)
			div = VC3_PLL1_M_DIV(idiv);
		else if (pfd->num == VC3_PFD2)
			div = VC3_PLL2_M_DIV(idiv);
		else
			div = VC3_PLL3_M_DIV(idiv);

		regmap_write(vc3->regmap, pfd->offs, div);
	}

	return 0;
}

static const struct clk_ops vc3_pfd_ops = {
	.recalc_rate = vc3_pfd_recalc_rate,
	.round_rate = vc3_pfd_round_rate,
	.set_rate = vc3_pfd_set_rate,
};

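/*
 * PLL feedback dividers: the integer part is 11 bits wide. PLL2 additionally
 * has a 16-bit fractional part, so its rate is
 * parent * (div_int + div_frc / 2^16); PLL1 and PLL3 are integer-only.
 */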
static unsigned long vc3_pll_recalc_rate(struct clk_hw *hw,
					 unsigned long parent_rate)
{
	struct vc3_hw_data *vc3 = container_of(hw, struct vc3_hw_data, hw);
	const struct vc3_pll_data *pll = vc3->data;
	u32 div_int, div_frc, val;
	unsigned long rate;

	regmap_read(vc3->regmap, pll->int_div_msb_offs, &val);
	div_int = (val & GENMASK(2, 0)) << 8;
	regmap_read(vc3->regmap, pll->int_div_lsb_offs, &val);
	div_int |= val;

	if (pll->num == VC3_PLL2) {
		regmap_read(vc3->regmap, VC3_PLL2_FB_FRC_DIV_MSB, &val);
		div_frc = val << 8;
		regmap_read(vc3->regmap, VC3_PLL2_FB_FRC_DIV_LSB, &val);
		div_frc |= val;
		rate = (parent_rate *
			(div_int * VC3_2_POW_16 + div_frc) / VC3_2_POW_16);
	} else {
		rate = parent_rate * div_int;
	}

	return rate;
}

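/*
 * Worked example (assuming a 25 MHz reference into PLL2): requesting 490 MHz
 * gives div_int = 19 and div_frc = 0x9999, i.e. roughly 489.9996 MHz.
 */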
static long vc3_pll_round_rate(struct clk_hw *hw, unsigned long rate,
			       unsigned long *parent_rate)
{
	struct vc3_hw_data *vc3 = container_of(hw, struct vc3_hw_data, hw);
	const struct vc3_pll_data *pll = vc3->data;
	u64 div_frc;

	if (rate < pll->vco_min)
		rate = pll->vco_min;
	if (rate > pll->vco_max)
		rate = pll->vco_max;

	vc3->div_int = rate / *parent_rate;

	if (pll->num == VC3_PLL2) {
		if (vc3->div_int > 0x7ff)
			rate = *parent_rate * 0x7ff;

		/* Determine the best fractional part, which is 16 bits wide. */
		div_frc = rate % *parent_rate;
		div_frc *= BIT(16) - 1;

		vc3->div_frc = min_t(u64, div64_ul(div_frc, *parent_rate), U16_MAX);
		rate = (*parent_rate *
			(vc3->div_int * VC3_2_POW_16 + vc3->div_frc) / VC3_2_POW_16);
	} else {
		rate = *parent_rate * vc3->div_int;
	}

	return rate;
}

static int vc3_pll_set_rate(struct clk_hw *hw, unsigned long rate,
			    unsigned long parent_rate)
{
	struct vc3_hw_data *vc3 = container_of(hw, struct vc3_hw_data, hw);
	const struct vc3_pll_data *pll = vc3->data;
	u32 val;

	regmap_read(vc3->regmap, pll->int_div_msb_offs, &val);
	val = (val & 0xf8) | ((vc3->div_int >> 8) & 0x7);
	regmap_write(vc3->regmap, pll->int_div_msb_offs, val);
	regmap_write(vc3->regmap, pll->int_div_lsb_offs, vc3->div_int & 0xff);

	if (pll->num == VC3_PLL2) {
		regmap_write(vc3->regmap, VC3_PLL2_FB_FRC_DIV_MSB,
			     vc3->div_frc >> 8);
		regmap_write(vc3->regmap, VC3_PLL2_FB_FRC_DIV_LSB,
			     vc3->div_frc & 0xff);
	}

	return 0;
}

static const struct clk_ops vc3_pll_ops = {
	.recalc_rate = vc3_pll_recalc_rate,
	.round_rate = vc3_pll_round_rate,
	.set_rate = vc3_pll_set_rate,
};

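/*
 * Divider input muxes: DIV1 selects between PLL1 and the external input,
 * DIV3 between PLL2 and PLL3, and DIV4 between PLL2 and the external input.
 */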
static unsigned char vc3_div_mux_get_parent(struct clk_hw *hw)
{
	struct vc3_hw_data *vc3 = container_of(hw, struct vc3_hw_data, hw);
	const struct vc3_clk_data *div_mux = vc3->data;
	u32 src;

	regmap_read(vc3->regmap, div_mux->offs, &src);

	return !!(src & div_mux->bitmsk);
}

static int vc3_div_mux_set_parent(struct clk_hw *hw, u8 index)
{
	struct vc3_hw_data *vc3 = container_of(hw, struct vc3_hw_data, hw);
	const struct vc3_clk_data *div_mux = vc3->data;

	regmap_update_bits(vc3->regmap, div_mux->offs, div_mux->bitmsk,
			   index ? div_mux->bitmsk : 0);

	return 0;
}

static const struct clk_ops vc3_div_mux_ops = {
	.determine_rate = clk_hw_determine_rate_no_reparent,
	.set_parent = vc3_div_mux_set_parent,
	.get_parent = vc3_div_mux_get_parent,
};

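/*
 * Look up the divisor for a given register value; the flag argument is
 * currently unused. Returns 0 if the value is not in the table.
 */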
static unsigned int vc3_get_div(const struct clk_div_table *table,
				unsigned int val, unsigned long flag)
{
	const struct clk_div_table *clkt;

	for (clkt = table; clkt->div; clkt++)
		if (clkt->val == val)
			return clkt->div;

	return 0;
}

static unsigned long vc3_div_recalc_rate(struct clk_hw *hw,
					 unsigned long parent_rate)
{
	struct vc3_hw_data *vc3 = container_of(hw, struct vc3_hw_data, hw);
	const struct vc3_div_data *div_data = vc3->data;
	unsigned int val;

	regmap_read(vc3->regmap, div_data->offs, &val);
	val >>= div_data->shift;
	val &= VC3_DIV_MASK(div_data->width);

	return divider_recalc_rate(hw, parent_rate, val, div_data->table,
				   div_data->flags, div_data->width);
}

static long vc3_div_round_rate(struct clk_hw *hw, unsigned long rate,
			       unsigned long *parent_rate)
{
	struct vc3_hw_data *vc3 = container_of(hw, struct vc3_hw_data, hw);
	const struct vc3_div_data *div_data = vc3->data;
	unsigned int bestdiv;

	/* If the divider is read-only, just return the current rate. */
	if (div_data->flags & CLK_DIVIDER_READ_ONLY) {
		regmap_read(vc3->regmap, div_data->offs, &bestdiv);
		bestdiv >>= div_data->shift;
		bestdiv &= VC3_DIV_MASK(div_data->width);
		bestdiv = vc3_get_div(div_data->table, bestdiv, div_data->flags);
		return DIV_ROUND_UP(*parent_rate, bestdiv);
	}

	return divider_round_rate(hw, rate, parent_rate, div_data->table,
				  div_data->width, div_data->flags);
}

static int vc3_div_set_rate(struct clk_hw *hw, unsigned long rate,
			    unsigned long parent_rate)
{
	struct vc3_hw_data *vc3 = container_of(hw, struct vc3_hw_data, hw);
	const struct vc3_div_data *div_data = vc3->data;
	unsigned int value;

	value = divider_get_val(rate, parent_rate, div_data->table,
				div_data->width, div_data->flags);
	regmap_update_bits(vc3->regmap, div_data->offs,
			   VC3_DIV_MASK(div_data->width) << div_data->shift,
			   value << div_data->shift);
	return 0;
}

static const struct clk_ops vc3_div_ops = {
	.recalc_rate = vc3_div_recalc_rate,
	.round_rate = vc3_div_round_rate,
	.set_rate = vc3_div_set_rate,
};

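/*
 * Output mux rate selection: if no parent can supply the requested rate
 * directly, bump the request to the integer multiple of the original rate
 * closest to the best parent rate and try again.
 */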
static int vc3_clk_mux_determine_rate(struct clk_hw *hw,
				      struct clk_rate_request *req)
{
	int ret;
	int frc;

	ret = clk_mux_determine_rate_flags(hw, req, CLK_SET_RATE_PARENT);
	if (ret) {
		/*
		 * Retry with the request scaled to the nearest integer
		 * multiple if the best parent is at least as fast.
		 */
		if (req->best_parent_rate >= req->rate) {
			frc = DIV_ROUND_CLOSEST_ULL(req->best_parent_rate,
						    req->rate);
			req->rate *= frc;
			return clk_mux_determine_rate_flags(hw, req,
							    CLK_SET_RATE_PARENT);
		}
		ret = 0;
	}

	return ret;
}

static unsigned char vc3_clk_mux_get_parent(struct clk_hw *hw)
{
	struct vc3_hw_data *vc3 = container_of(hw, struct vc3_hw_data, hw);
	const struct vc3_clk_data *clk_mux = vc3->data;
	u32 val;

	regmap_read(vc3->regmap, clk_mux->offs, &val);

	return !!(val & clk_mux->bitmsk);
}

static int vc3_clk_mux_set_parent(struct clk_hw *hw, u8 index)
{
	struct vc3_hw_data *vc3 = container_of(hw, struct vc3_hw_data, hw);
	const struct vc3_clk_data *clk_mux = vc3->data;

	regmap_update_bits(vc3->regmap, clk_mux->offs,
			   clk_mux->bitmsk, index ? clk_mux->bitmsk : 0);
	return 0;
}

static const struct clk_ops vc3_clk_mux_ops = {
	.determine_rate = vc3_clk_mux_determine_rate,
	.set_parent = vc3_clk_mux_set_parent,
	.get_parent = vc3_clk_mux_get_parent,
};

static bool vc3_regmap_is_writeable(struct device *dev, unsigned int reg)
{
	return true;
}

static const struct regmap_config vc3_regmap_config = {
	.reg_bits = 8,
	.val_bits = 8,
	.cache_type = REGCACHE_RBTREE,
	.max_register = 0x24,
	.writeable_reg = vc3_regmap_is_writeable,
};

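/*
 * clk_div is fully defined further down; it is forward-declared here because
 * the DIV2 output feeds the PFD reference muxes.
 */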
static struct vc3_hw_data clk_div[5];

static const struct clk_parent_data pfd_mux_parent_data[] = {
	{ .index = 0, },
	{ .hw = &clk_div[VC3_DIV2].hw }
};

static struct vc3_hw_data clk_pfd_mux[] = {
	[VC3_PFD2_MUX] = {
		.data = &(struct vc3_clk_data) {
			.offs = VC3_PLL_OP_CTRL,
			.bitmsk = BIT(VC3_PLL_OP_CTRL_PLL2_REFIN_SEL)
		},
		.hw.init = &(struct clk_init_data){
			.name = "pfd2_mux",
			.ops = &vc3_pfd_mux_ops,
			.parent_data = pfd_mux_parent_data,
			.num_parents = 2,
			.flags = CLK_SET_RATE_PARENT | CLK_SET_RATE_NO_REPARENT
		}
	},
	[VC3_PFD3_MUX] = {
		.data = &(struct vc3_clk_data) {
			.offs = VC3_GENERAL_CTR,
			.bitmsk = BIT(VC3_GENERAL_CTR_PLL3_REFIN_SEL)
		},
		.hw.init = &(struct clk_init_data){
			.name = "pfd3_mux",
			.ops = &vc3_pfd_mux_ops,
			.parent_data = pfd_mux_parent_data,
			.num_parents = 2,
			.flags = CLK_SET_RATE_PARENT | CLK_SET_RATE_NO_REPARENT
		}
	}
};

static struct vc3_hw_data clk_pfd[] = {
	[VC3_PFD1] = {
		.data = &(struct vc3_pfd_data) {
			.num = VC3_PFD1,
			.offs = VC3_PLL1_M_DIVIDER,
			.mdiv1_bitmsk = VC3_PLL1_M_DIV1,
			.mdiv2_bitmsk = VC3_PLL1_M_DIV2
		},
		.hw.init = &(struct clk_init_data){
			.name = "pfd1",
			.ops = &vc3_pfd_ops,
			.parent_data = &(const struct clk_parent_data) {
				.index = 0
			},
			.num_parents = 1,
			.flags = CLK_SET_RATE_PARENT
		}
	},
	[VC3_PFD2] = {
		.data = &(struct vc3_pfd_data) {
			.num = VC3_PFD2,
			.offs = VC3_PLL2_M_DIVIDER,
			.mdiv1_bitmsk = VC3_PLL2_M_DIV1,
			.mdiv2_bitmsk = VC3_PLL2_M_DIV2
		},
		.hw.init = &(struct clk_init_data){
			.name = "pfd2",
			.ops = &vc3_pfd_ops,
			.parent_hws = (const struct clk_hw *[]) {
				&clk_pfd_mux[VC3_PFD2_MUX].hw
			},
			.num_parents = 1,
			.flags = CLK_SET_RATE_PARENT
		}
	},
	[VC3_PFD3] = {
		.data = &(struct vc3_pfd_data) {
			.num = VC3_PFD3,
			.offs = VC3_PLL3_M_DIVIDER,
			.mdiv1_bitmsk = VC3_PLL3_M_DIV1,
			.mdiv2_bitmsk = VC3_PLL3_M_DIV2
		},
		.hw.init = &(struct clk_init_data){
			.name = "pfd3",
			.ops = &vc3_pfd_ops,
			.parent_hws = (const struct clk_hw *[]) {
				&clk_pfd_mux[VC3_PFD3_MUX].hw
			},
			.num_parents = 1,
			.flags = CLK_SET_RATE_PARENT
		}
	}
};

static struct vc3_hw_data clk_pll[] = {
	[VC3_PLL1] = {
		.data = &(struct vc3_pll_data) {
			.num = VC3_PLL1,
			.int_div_msb_offs = VC3_PLL1_LOOP_FILTER_N_DIV_MSB,
			.int_div_lsb_offs = VC3_PLL1_VCO_N_DIVIDER,
			.vco_min = VC3_PLL1_VCO_MIN,
			.vco_max = VC3_PLL1_VCO_MAX
		},
		.hw.init = &(struct clk_init_data){
			.name = "pll1",
			.ops = &vc3_pll_ops,
			.parent_hws = (const struct clk_hw *[]) {
				&clk_pfd[VC3_PFD1].hw
			},
			.num_parents = 1,
			.flags = CLK_SET_RATE_PARENT
		}
	},
	[VC3_PLL2] = {
		.data = &(struct vc3_pll_data) {
			.num = VC3_PLL2,
			.int_div_msb_offs = VC3_PLL2_FB_INT_DIV_MSB,
			.int_div_lsb_offs = VC3_PLL2_FB_INT_DIV_LSB,
			.vco_min = VC3_PLL2_VCO_MIN,
			.vco_max = VC3_PLL2_VCO_MAX
		},
		.hw.init = &(struct clk_init_data){
			.name = "pll2",
			.ops = &vc3_pll_ops,
			.parent_hws = (const struct clk_hw *[]) {
				&clk_pfd[VC3_PFD2].hw
			},
			.num_parents = 1,
			.flags = CLK_SET_RATE_PARENT
		}
	},
	[VC3_PLL3] = {
		.data = &(struct vc3_pll_data) {
			.num = VC3_PLL3,
			.int_div_msb_offs = VC3_PLL3_LOOP_FILTER_N_DIV_MSB,
			.int_div_lsb_offs = VC3_PLL3_N_DIVIDER,
			.vco_min = VC3_PLL3_VCO_MIN,
			.vco_max = VC3_PLL3_VCO_MAX
		},
		.hw.init = &(struct clk_init_data){
			.name = "pll3",
			.ops = &vc3_pll_ops,
			.parent_hws = (const struct clk_hw *[]) {
				&clk_pfd[VC3_PFD3].hw
			},
			.num_parents = 1,
			.flags = CLK_SET_RATE_PARENT
		}
	}
};

static const struct clk_parent_data div_mux_parent_data[][2] = {
	[VC3_DIV1_MUX] = {
		{ .hw = &clk_pll[VC3_PLL1].hw },
		{ .index = 0 }
	},
	[VC3_DIV3_MUX] = {
		{ .hw = &clk_pll[VC3_PLL2].hw },
		{ .hw = &clk_pll[VC3_PLL3].hw }
	},
	[VC3_DIV4_MUX] = {
		{ .hw = &clk_pll[VC3_PLL2].hw },
		{ .index = 0 }
	}
};

static struct vc3_hw_data clk_div_mux[] = {
	[VC3_DIV1_MUX] = {
		.data = &(struct vc3_clk_data) {
			.offs = VC3_GENERAL_CTR,
			.bitmsk = VC3_GENERAL_CTR_DIV1_SRC_SEL
		},
		.hw.init = &(struct clk_init_data){
			.name = "div1_mux",
			.ops = &vc3_div_mux_ops,
			.parent_data = div_mux_parent_data[VC3_DIV1_MUX],
			.num_parents = 2,
			.flags = CLK_SET_RATE_PARENT | CLK_SET_RATE_NO_REPARENT
		}
	},
	[VC3_DIV3_MUX] = {
		.data = &(struct vc3_clk_data) {
			.offs = VC3_PLL3_CHARGE_PUMP_CTRL,
			.bitmsk = VC3_PLL3_CHARGE_PUMP_CTRL_OUTDIV3_SRC_SEL
		},
		.hw.init = &(struct clk_init_data){
			.name = "div3_mux",
			.ops = &vc3_div_mux_ops,
			.parent_data = div_mux_parent_data[VC3_DIV3_MUX],
			.num_parents = 2,
			.flags = CLK_SET_RATE_PARENT | CLK_SET_RATE_NO_REPARENT
		}
	},
	[VC3_DIV4_MUX] = {
		.data = &(struct vc3_clk_data) {
			.offs = VC3_OUTPUT_CTR,
			.bitmsk = VC3_OUTPUT_CTR_DIV4_SRC_SEL
		},
		.hw.init = &(struct clk_init_data){
			.name = "div4_mux",
			.ops = &vc3_div_mux_ops,
			.parent_data = div_mux_parent_data[VC3_DIV4_MUX],
			.num_parents = 2,
			.flags = CLK_SET_RATE_PARENT | CLK_SET_RATE_NO_REPARENT
		}
	}
};

static struct vc3_hw_data clk_div[] = {
	[VC3_DIV1] = {
		.data = &(struct vc3_div_data) {
			.offs = VC3_OUT_DIV1_DIV2_CTRL,
			.table = div1_divs,
			.shift = 4,
			.width = 4,
			.flags = CLK_DIVIDER_READ_ONLY
		},
		.hw.init = &(struct clk_init_data){
			.name = "div1",
			.ops = &vc3_div_ops,
			.parent_hws = (const struct clk_hw *[]) {
				&clk_div_mux[VC3_DIV1_MUX].hw
			},
			.num_parents = 1,
			.flags = CLK_SET_RATE_PARENT
		}
	},
	[VC3_DIV2] = {
		.data = &(struct vc3_div_data) {
			.offs = VC3_OUT_DIV1_DIV2_CTRL,
			.table = div245_divs,
			.shift = 0,
			.width = 4,
			.flags = CLK_DIVIDER_READ_ONLY
		},
		.hw.init = &(struct clk_init_data){
			.name = "div2",
			.ops = &vc3_div_ops,
			.parent_hws = (const struct clk_hw *[]) {
				&clk_pll[VC3_PLL1].hw
			},
			.num_parents = 1,
			.flags = CLK_SET_RATE_PARENT
		}
	},
	[VC3_DIV3] = {
		.data = &(struct vc3_div_data) {
			.offs = VC3_OUT_DIV3_DIV4_CTRL,
			.table = div3_divs,
			.shift = 4,
			.width = 4,
			.flags = CLK_DIVIDER_READ_ONLY
		},
		.hw.init = &(struct clk_init_data){
			.name = "div3",
			.ops = &vc3_div_ops,
			.parent_hws = (const struct clk_hw *[]) {
				&clk_div_mux[VC3_DIV3_MUX].hw
			},
			.num_parents = 1,
			.flags = CLK_SET_RATE_PARENT
		}
	},
	[VC3_DIV4] = {
		.data = &(struct vc3_div_data) {
			.offs = VC3_OUT_DIV3_DIV4_CTRL,
			.table = div245_divs,
			.shift = 0,
			.width = 4,
			.flags = CLK_DIVIDER_READ_ONLY
		},
		.hw.init = &(struct clk_init_data){
			.name = "div4",
			.ops = &vc3_div_ops,
			.parent_hws = (const struct clk_hw *[]) {
				&clk_div_mux[VC3_DIV4_MUX].hw
			},
			.num_parents = 1,
			.flags = CLK_SET_RATE_PARENT
		}
	},
	[VC3_DIV5] = {
		.data = &(struct vc3_div_data) {
			.offs = VC3_PLL1_CTRL_OUTDIV5,
			.table = div245_divs,
			.shift = 0,
			.width = 4,
			.flags = CLK_DIVIDER_READ_ONLY
		},
		.hw.init = &(struct clk_init_data){
			.name = "div5",
			.ops = &vc3_div_ops,
			.parent_hws = (const struct clk_hw *[]) {
				&clk_pll[VC3_PLL3].hw
			},
			.num_parents = 1,
			.flags = CLK_SET_RATE_PARENT
		}
	}
};

static struct vc3_hw_data clk_mux[] = {
	[VC3_SE1_MUX] = {
		.data = &(struct vc3_clk_data) {
			.offs = VC3_SE1_DIV4_CTRL,
			.bitmsk = VC3_SE1_DIV4_CTRL_SE1_CLK_SEL
		},
		.hw.init = &(struct clk_init_data){
			.name = "se1_mux",
			.ops = &vc3_clk_mux_ops,
			.parent_hws = (const struct clk_hw *[]) {
				&clk_div[VC3_DIV5].hw,
				&clk_div[VC3_DIV4].hw
			},
			.num_parents = 2,
			.flags = CLK_SET_RATE_PARENT
		}
	},
	[VC3_SE2_MUX] = {
		.data = &(struct vc3_clk_data) {
			.offs = VC3_SE2_CTRL_REG0,
			.bitmsk = VC3_SE2_CTRL_REG0_SE2_CLK_SEL
		},
		.hw.init = &(struct clk_init_data){
			.name = "se2_mux",
			.ops = &vc3_clk_mux_ops,
			.parent_hws = (const struct clk_hw *[]) {
				&clk_div[VC3_DIV5].hw,
				&clk_div[VC3_DIV4].hw
			},
			.num_parents = 2,
			.flags = CLK_SET_RATE_PARENT
		}
	},
	[VC3_SE3_MUX] = {
		.data = &(struct vc3_clk_data) {
			.offs = VC3_SE3_DIFF1_CTRL_REG,
			.bitmsk = VC3_SE3_DIFF1_CTRL_REG_SE3_CLK_SEL
		},
		.hw.init = &(struct clk_init_data){
			.name = "se3_mux",
			.ops = &vc3_clk_mux_ops,
			.parent_hws = (const struct clk_hw *[]) {
				&clk_div[VC3_DIV2].hw,
				&clk_div[VC3_DIV4].hw
			},
			.num_parents = 2,
			.flags = CLK_SET_RATE_PARENT
		}
	},
	[VC3_DIFF1_MUX] = {
		.data = &(struct vc3_clk_data) {
			.offs = VC3_DIFF1_CTRL_REG,
			.bitmsk = VC3_DIFF1_CTRL_REG_DIFF1_CLK_SEL
		},
		.hw.init = &(struct clk_init_data){
			.name = "diff1_mux",
			.ops = &vc3_clk_mux_ops,
			.parent_hws = (const struct clk_hw *[]) {
				&clk_div[VC3_DIV1].hw,
				&clk_div[VC3_DIV3].hw
			},
			.num_parents = 2,
			.flags = CLK_SET_RATE_PARENT
		}
	},
	[VC3_DIFF2_MUX] = {
		.data = &(struct vc3_clk_data) {
			.offs = VC3_DIFF2_CTRL_REG,
			.bitmsk = VC3_DIFF2_CTRL_REG_DIFF2_CLK_SEL
		},
		.hw.init = &(struct clk_init_data){
			.name = "diff2_mux",
			.ops = &vc3_clk_mux_ops,
			.parent_hws = (const struct clk_hw *[]) {
				&clk_div[VC3_DIV1].hw,
				&clk_div[VC3_DIV3].hw
			},
			.num_parents = 2,
			.flags = CLK_SET_RATE_PARENT
		}
	}
};

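/*
 * Output clocks are indexed as in enum vc3_clk: 0 is the buffered reference,
 * 1..5 are SE1, SE2, SE3, DIFF1 and DIFF2.
 */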
static struct clk_hw *vc3_of_clk_get(struct of_phandle_args *clkspec,
				     void *data)
{
	unsigned int idx = clkspec->args[0];
	struct clk_hw **clkout_hw = data;

	if (idx >= ARRAY_SIZE(clk_out)) {
		pr_err("invalid clk index %u for provider %pOF\n", idx, clkspec->np);
		return ERR_PTR(-EINVAL);
	}

	return clkout_hw[idx];
}

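/*
 * Probe optionally programs a full register image from the "renesas,settings"
 * DT property, then registers the clock tree bottom-up: PFD muxes, PFDs,
 * PLLs, divider muxes, dividers, output muxes and finally the outputs.
 */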
static int vc3_probe(struct i2c_client *client)
{
	struct device *dev = &client->dev;
	u8 settings[NUM_CONFIG_REGISTERS];
	struct regmap *regmap;
	const char *name;
	int ret, i;

	regmap = devm_regmap_init_i2c(client, &vc3_regmap_config);
	if (IS_ERR(regmap))
		return dev_err_probe(dev, PTR_ERR(regmap),
				     "failed to allocate register map\n");

	ret = of_property_read_u8_array(dev->of_node, "renesas,settings",
					settings, ARRAY_SIZE(settings));
	if (!ret) {
		/*
		 * A raw settings array was specified in the DT. Write the
		 * settings to the device immediately.
		 */
		for (i = 0; i < NUM_CONFIG_REGISTERS; i++) {
			ret = regmap_write(regmap, i, settings[i]);
			if (ret) {
				dev_err(dev, "error writing to chip (%i)\n", ret);
				return ret;
			}
		}
	} else if (ret == -EOVERFLOW) {
		dev_err(&client->dev, "EOVERFLOW reg settings. ARRAY_SIZE: %zu\n",
			ARRAY_SIZE(settings));
		return ret;
	}

	/* Register PFD muxes */
	for (i = 0; i < ARRAY_SIZE(clk_pfd_mux); i++) {
		clk_pfd_mux[i].regmap = regmap;
		ret = devm_clk_hw_register(dev, &clk_pfd_mux[i].hw);
		if (ret)
			return dev_err_probe(dev, ret, "%s failed\n",
					     clk_pfd_mux[i].hw.init->name);
	}

	/* Register PFDs */
	for (i = 0; i < ARRAY_SIZE(clk_pfd); i++) {
		clk_pfd[i].regmap = regmap;
		ret = devm_clk_hw_register(dev, &clk_pfd[i].hw);
		if (ret)
			return dev_err_probe(dev, ret, "%s failed\n",
					     clk_pfd[i].hw.init->name);
	}

	/* Register PLLs */
	for (i = 0; i < ARRAY_SIZE(clk_pll); i++) {
		clk_pll[i].regmap = regmap;
		ret = devm_clk_hw_register(dev, &clk_pll[i].hw);
		if (ret)
			return dev_err_probe(dev, ret, "%s failed\n",
					     clk_pll[i].hw.init->name);
	}

	/* Register divider muxes */
	for (i = 0; i < ARRAY_SIZE(clk_div_mux); i++) {
		clk_div_mux[i].regmap = regmap;
		ret = devm_clk_hw_register(dev, &clk_div_mux[i].hw);
		if (ret)
			return dev_err_probe(dev, ret, "%s failed\n",
					     clk_div_mux[i].hw.init->name);
	}

	/* Register dividers */
	for (i = 0; i < ARRAY_SIZE(clk_div); i++) {
		clk_div[i].regmap = regmap;
		ret = devm_clk_hw_register(dev, &clk_div[i].hw);
		if (ret)
			return dev_err_probe(dev, ret, "%s failed\n",
					     clk_div[i].hw.init->name);
	}

	/* Register clk muxes */
	for (i = 0; i < ARRAY_SIZE(clk_mux); i++) {
		clk_mux[i].regmap = regmap;
		ret = devm_clk_hw_register(dev, &clk_mux[i].hw);
		if (ret)
			return dev_err_probe(dev, ret, "%s failed\n",
					     clk_mux[i].hw.init->name);
	}

	/* Register clk outputs */
	for (i = 0; i < ARRAY_SIZE(clk_out); i++) {
		switch (i) {
		case VC3_DIFF2:
			name = "diff2";
			break;
		case VC3_DIFF1:
			name = "diff1";
			break;
		case VC3_SE3:
			name = "se3";
			break;
		case VC3_SE2:
			name = "se2";
			break;
		case VC3_SE1:
			name = "se1";
			break;
		case VC3_REF:
			name = "ref";
			break;
		default:
			return dev_err_probe(dev, -EINVAL, "invalid clk output %d\n", i);
		}

		if (i == VC3_REF)
			clk_out[i] = devm_clk_hw_register_fixed_factor_index(dev,
				name, 0, CLK_SET_RATE_PARENT, 1, 1);
		else
			clk_out[i] = devm_clk_hw_register_fixed_factor_parent_hw(dev,
				name, &clk_mux[i - 1].hw, CLK_SET_RATE_PARENT, 1, 1);

		if (IS_ERR(clk_out[i]))
			return PTR_ERR(clk_out[i]);
	}

	ret = devm_of_clk_add_hw_provider(dev, vc3_of_clk_get, clk_out);
	if (ret)
		return dev_err_probe(dev, ret, "unable to add clk provider\n");

	return ret;
}

static const struct of_device_id dev_ids[] = {
	{ .compatible = "renesas,5p35023" },
	{ /* Sentinel */ }
};
MODULE_DEVICE_TABLE(of, dev_ids);

static struct i2c_driver vc3_driver = {
	.driver = {
		.name = "vc3",
		.of_match_table = of_match_ptr(dev_ids),
	},
	.probe = vc3_probe,
};
module_i2c_driver(vc3_driver);

MODULE_AUTHOR("Biju Das <biju.das.jz@bp.renesas.com>");
MODULE_DESCRIPTION("Renesas VersaClock 3 driver");
MODULE_LICENSE("GPL");