xref: /openbmc/linux/drivers/clk/clk-composite.c (revision 9b68f30b)
// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (c) 2013 NVIDIA CORPORATION.  All rights reserved.
 */

#include <linux/clk-provider.h>
#include <linux/device.h>
#include <linux/err.h>
#include <linux/slab.h>

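/*
 * The callbacks below are thin wrappers that forward each operation to the
 * corresponding component (mux, rate or gate).  __clk_hw_set_clk() first
 * copies the composite's clk and core pointers into the component's clk_hw,
 * so the component ops act on the composite clock's state.
 */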
static u8 clk_composite_get_parent(struct clk_hw *hw)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *mux_hw = composite->mux_hw;

	__clk_hw_set_clk(mux_hw, hw);

	return mux_ops->get_parent(mux_hw);
}

static int clk_composite_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *mux_hw = composite->mux_hw;

	__clk_hw_set_clk(mux_hw, hw);

	return mux_ops->set_parent(mux_hw, index);
}

static unsigned long clk_composite_recalc_rate(struct clk_hw *hw,
					       unsigned long parent_rate)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	struct clk_hw *rate_hw = composite->rate_hw;

	__clk_hw_set_clk(rate_hw, hw);

	return rate_ops->recalc_rate(rate_hw, parent_rate);
}

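/*
 * Evaluate one candidate parent for the rate component: prefer its
 * .determine_rate op when available, otherwise fall back to .round_rate.
 * On success, req->rate holds the rate achievable via parent_hw and
 * req->best_parent_rate the parent rate it assumes.
 */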
static int clk_composite_determine_rate_for_parent(struct clk_hw *rate_hw,
						   struct clk_rate_request *req,
						   struct clk_hw *parent_hw,
						   const struct clk_ops *rate_ops)
{
	long rate;

	req->best_parent_hw = parent_hw;
	req->best_parent_rate = clk_hw_get_rate(parent_hw);

	if (rate_ops->determine_rate)
		return rate_ops->determine_rate(rate_hw, req);

	rate = rate_ops->round_rate(rate_hw, req->rate,
				    &req->best_parent_rate);
	if (rate < 0)
		return rate;

	req->rate = rate;

	return 0;
}

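/*
 * When there is both a rate component that can round/determine rates and a
 * mux whose parent can be changed, walk every candidate parent and pick the
 * one whose achievable rate is closest to the request (an exact match ends
 * the search early).  CLK_SET_RATE_NO_REPARENT restricts the search to the
 * current parent.  Without such a mux/rate pair, the request is simply
 * forwarded to whichever component implements .determine_rate.
 */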
static int clk_composite_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *rate_hw = composite->rate_hw;
	struct clk_hw *mux_hw = composite->mux_hw;
	struct clk_hw *parent;
	unsigned long rate_diff;
	unsigned long best_rate_diff = ULONG_MAX;
	unsigned long best_rate = 0;
	int i, ret;

	if (rate_hw && rate_ops &&
	    (rate_ops->determine_rate || rate_ops->round_rate) &&
	    mux_hw && mux_ops && mux_ops->set_parent) {
		req->best_parent_hw = NULL;

		if (clk_hw_get_flags(hw) & CLK_SET_RATE_NO_REPARENT) {
			struct clk_rate_request tmp_req;

			parent = clk_hw_get_parent(mux_hw);

			clk_hw_forward_rate_request(hw, req, parent, &tmp_req, req->rate);
			ret = clk_composite_determine_rate_for_parent(rate_hw,
								      &tmp_req,
								      parent,
								      rate_ops);
			if (ret)
				return ret;

			req->rate = tmp_req.rate;
			req->best_parent_hw = tmp_req.best_parent_hw;
			req->best_parent_rate = tmp_req.best_parent_rate;

			return 0;
		}

		for (i = 0; i < clk_hw_get_num_parents(mux_hw); i++) {
			struct clk_rate_request tmp_req;

			parent = clk_hw_get_parent_by_index(mux_hw, i);
			if (!parent)
				continue;

			clk_hw_forward_rate_request(hw, req, parent, &tmp_req, req->rate);
			ret = clk_composite_determine_rate_for_parent(rate_hw,
								      &tmp_req,
								      parent,
								      rate_ops);
			if (ret)
				continue;

			rate_diff = abs(req->rate - tmp_req.rate);

			if (!rate_diff || !req->best_parent_hw
				       || best_rate_diff > rate_diff) {
				req->best_parent_hw = parent;
				req->best_parent_rate = tmp_req.best_parent_rate;
				best_rate_diff = rate_diff;
				best_rate = tmp_req.rate;
			}

			if (!rate_diff)
				return 0;
		}

		req->rate = best_rate;
		return 0;
	} else if (rate_hw && rate_ops && rate_ops->determine_rate) {
		__clk_hw_set_clk(rate_hw, hw);
		return rate_ops->determine_rate(rate_hw, req);
	} else if (mux_hw && mux_ops && mux_ops->determine_rate) {
		__clk_hw_set_clk(mux_hw, hw);
		return mux_ops->determine_rate(mux_hw, req);
	} else {
		pr_err("clk: clk_composite_determine_rate function called, but no mux or rate callback set!\n");
		return -EINVAL;
	}
}

static long clk_composite_round_rate(struct clk_hw *hw, unsigned long rate,
				     unsigned long *prate)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	struct clk_hw *rate_hw = composite->rate_hw;

	__clk_hw_set_clk(rate_hw, hw);

	return rate_ops->round_rate(rate_hw, rate, prate);
}

static int clk_composite_set_rate(struct clk_hw *hw, unsigned long rate,
				  unsigned long parent_rate)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	struct clk_hw *rate_hw = composite->rate_hw;

	__clk_hw_set_clk(rate_hw, hw);

	return rate_ops->set_rate(rate_hw, rate, parent_rate);
}

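/*
 * Apply a new rate and parent in an order that avoids momentarily
 * overshooting the requested rate: if the current rate setting combined
 * with the new parent rate would come out above the target, program the
 * rate first and then switch the parent; otherwise switch the parent first.
 */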
static int clk_composite_set_rate_and_parent(struct clk_hw *hw,
					     unsigned long rate,
					     unsigned long parent_rate,
					     u8 index)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *rate_hw = composite->rate_hw;
	struct clk_hw *mux_hw = composite->mux_hw;
	unsigned long temp_rate;

	__clk_hw_set_clk(rate_hw, hw);
	__clk_hw_set_clk(mux_hw, hw);

	temp_rate = rate_ops->recalc_rate(rate_hw, parent_rate);
	if (temp_rate > rate) {
		rate_ops->set_rate(rate_hw, rate, parent_rate);
		mux_ops->set_parent(mux_hw, index);
	} else {
		mux_ops->set_parent(mux_hw, index);
		rate_ops->set_rate(rate_hw, rate, parent_rate);
	}

	return 0;
}

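/* Gate callbacks: plain pass-throughs to the gate component's ops. */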
static int clk_composite_is_enabled(struct clk_hw *hw)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *gate_ops = composite->gate_ops;
	struct clk_hw *gate_hw = composite->gate_hw;

	__clk_hw_set_clk(gate_hw, hw);

	return gate_ops->is_enabled(gate_hw);
}

static int clk_composite_enable(struct clk_hw *hw)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *gate_ops = composite->gate_ops;
	struct clk_hw *gate_hw = composite->gate_hw;

	__clk_hw_set_clk(gate_hw, hw);

	return gate_ops->enable(gate_hw);
}

static void clk_composite_disable(struct clk_hw *hw)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *gate_ops = composite->gate_ops;
	struct clk_hw *gate_hw = composite->gate_hw;

	__clk_hw_set_clk(gate_hw, hw);

	gate_ops->disable(gate_hw);
}

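/*
 * Build the composite's clk_ops from whichever components were supplied.
 * A mux must implement .get_parent, a rate component .recalc_rate, and a
 * gate all of .is_enabled/.enable/.disable; otherwise registration fails
 * with -EINVAL.  .set_rate_and_parent is only offered when both a
 * parent-settable mux and a rate-settable rate component are present.
 */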
static struct clk_hw *__clk_hw_register_composite(struct device *dev,
			const char *name, const char * const *parent_names,
			const struct clk_parent_data *pdata, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw *hw;
	struct clk_init_data init = {};
	struct clk_composite *composite;
	struct clk_ops *clk_composite_ops;
	int ret;

	composite = kzalloc(sizeof(*composite), GFP_KERNEL);
	if (!composite)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.flags = flags;
	if (parent_names)
		init.parent_names = parent_names;
	else
		init.parent_data = pdata;
	init.num_parents = num_parents;
	hw = &composite->hw;

	clk_composite_ops = &composite->ops;

	if (mux_hw && mux_ops) {
		if (!mux_ops->get_parent) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}

		composite->mux_hw = mux_hw;
		composite->mux_ops = mux_ops;
		clk_composite_ops->get_parent = clk_composite_get_parent;
		if (mux_ops->set_parent)
			clk_composite_ops->set_parent = clk_composite_set_parent;
		if (mux_ops->determine_rate)
			clk_composite_ops->determine_rate = clk_composite_determine_rate;
	}

	if (rate_hw && rate_ops) {
		if (!rate_ops->recalc_rate) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}
		clk_composite_ops->recalc_rate = clk_composite_recalc_rate;

		if (rate_ops->determine_rate)
			clk_composite_ops->determine_rate =
				clk_composite_determine_rate;
		else if (rate_ops->round_rate)
			clk_composite_ops->round_rate =
				clk_composite_round_rate;

		/* .set_rate requires either .round_rate or .determine_rate */
		if (rate_ops->set_rate) {
			if (rate_ops->determine_rate || rate_ops->round_rate)
				clk_composite_ops->set_rate =
						clk_composite_set_rate;
			else
				WARN(1, "%s: missing round_rate or determine_rate op\n",
						__func__);
		}

		composite->rate_hw = rate_hw;
		composite->rate_ops = rate_ops;
	}

	if (mux_hw && mux_ops && rate_hw && rate_ops) {
		if (mux_ops->set_parent && rate_ops->set_rate)
			clk_composite_ops->set_rate_and_parent =
				clk_composite_set_rate_and_parent;
	}

	if (gate_hw && gate_ops) {
		if (!gate_ops->is_enabled || !gate_ops->enable ||
		    !gate_ops->disable) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}

		composite->gate_hw = gate_hw;
		composite->gate_ops = gate_ops;
		clk_composite_ops->is_enabled = clk_composite_is_enabled;
		clk_composite_ops->enable = clk_composite_enable;
		clk_composite_ops->disable = clk_composite_disable;
	}

	init.ops = clk_composite_ops;
	composite->hw.init = &init;

	ret = clk_hw_register(dev, hw);
	if (ret) {
		hw = ERR_PTR(ret);
		goto err;
	}

	if (composite->mux_hw)
		composite->mux_hw->clk = hw->clk;

	if (composite->rate_hw)
		composite->rate_hw->clk = hw->clk;

	if (composite->gate_hw)
		composite->gate_hw->clk = hw->clk;

	return hw;

err:
	kfree(composite);
	return hw;
}

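/*
 * clk_hw_register_composite - register a composite clock built from
 * separately prepared mux, rate and gate components.
 *
 * Illustrative sketch only (the names "parents", "mux", "div" and "gate"
 * are hypothetical, pre-initialised struct clk_mux, clk_divider and
 * clk_gate instances); a provider combining all three components with the
 * generic basic-clock ops could do:
 *
 *	hw = clk_hw_register_composite(dev, "uart_sclk", parents,
 *				       ARRAY_SIZE(parents),
 *				       &mux->hw, &clk_mux_ops,
 *				       &div->hw, &clk_divider_ops,
 *				       &gate->hw, &clk_gate_ops,
 *				       CLK_SET_RATE_PARENT);
 *
 * Any of the three component pairs may be NULL to leave that capability
 * out of the composite.
 */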
struct clk_hw *clk_hw_register_composite(struct device *dev, const char *name,
			const char * const *parent_names, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	return __clk_hw_register_composite(dev, name, parent_names, NULL,
					   num_parents, mux_hw, mux_ops,
					   rate_hw, rate_ops, gate_hw,
					   gate_ops, flags);
}
EXPORT_SYMBOL_GPL(clk_hw_register_composite);

struct clk_hw *clk_hw_register_composite_pdata(struct device *dev,
			const char *name,
			const struct clk_parent_data *parent_data,
			int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	return __clk_hw_register_composite(dev, name, NULL, parent_data,
					   num_parents, mux_hw, mux_ops,
					   rate_hw, rate_ops, gate_hw,
					   gate_ops, flags);
}

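/*
 * struct clk based wrappers around the clk_hw registration helpers above,
 * for callers that still deal in struct clk rather than struct clk_hw.
 */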
struct clk *clk_register_composite(struct device *dev, const char *name,
			const char * const *parent_names, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw *hw;

	hw = clk_hw_register_composite(dev, name, parent_names, num_parents,
			mux_hw, mux_ops, rate_hw, rate_ops, gate_hw, gate_ops,
			flags);
	if (IS_ERR(hw))
		return ERR_CAST(hw);
	return hw->clk;
}
EXPORT_SYMBOL_GPL(clk_register_composite);

struct clk *clk_register_composite_pdata(struct device *dev, const char *name,
			const struct clk_parent_data *parent_data,
			int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw *hw;

	hw = clk_hw_register_composite_pdata(dev, name, parent_data,
			num_parents, mux_hw, mux_ops, rate_hw, rate_ops,
			gate_hw, gate_ops, flags);
	if (IS_ERR(hw))
		return ERR_CAST(hw);
	return hw->clk;
}

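/*
 * Unregistration helpers: remove the clock from the framework and free the
 * struct clk_composite allocated at registration time.  The component
 * mux/rate/gate structures are owned by the caller and are not freed here.
 */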
void clk_unregister_composite(struct clk *clk)
{
	struct clk_composite *composite;
	struct clk_hw *hw;

	hw = __clk_get_hw(clk);
	if (!hw)
		return;

	composite = to_clk_composite(hw);

	clk_unregister(clk);
	kfree(composite);
}

void clk_hw_unregister_composite(struct clk_hw *hw)
{
	struct clk_composite *composite;

	composite = to_clk_composite(hw);

	clk_hw_unregister(hw);
	kfree(composite);
}
EXPORT_SYMBOL_GPL(clk_hw_unregister_composite);

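/*
 * Device-managed registration: the composite is torn down automatically
 * via clk_hw_unregister_composite() when the device is unbound.
 */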
static void devm_clk_hw_release_composite(struct device *dev, void *res)
{
	clk_hw_unregister_composite(*(struct clk_hw **)res);
}

static struct clk_hw *__devm_clk_hw_register_composite(struct device *dev,
			const char *name, const char * const *parent_names,
			const struct clk_parent_data *pdata, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw **ptr, *hw;

	ptr = devres_alloc(devm_clk_hw_release_composite, sizeof(*ptr),
			   GFP_KERNEL);
	if (!ptr)
		return ERR_PTR(-ENOMEM);

	hw = __clk_hw_register_composite(dev, name, parent_names, pdata,
					 num_parents, mux_hw, mux_ops, rate_hw,
					 rate_ops, gate_hw, gate_ops, flags);

	if (!IS_ERR(hw)) {
		*ptr = hw;
		devres_add(dev, ptr);
	} else {
		devres_free(ptr);
	}

	return hw;
}

struct clk_hw *devm_clk_hw_register_composite_pdata(struct device *dev,
			const char *name,
			const struct clk_parent_data *parent_data,
			int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	return __devm_clk_hw_register_composite(dev, name, NULL, parent_data,
						num_parents, mux_hw, mux_ops,
						rate_hw, rate_ops, gate_hw,
						gate_ops, flags);
}