// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (c) 2013 NVIDIA CORPORATION.  All rights reserved.
 */

#include <linux/clk-provider.h>
#include <linux/device.h>
#include <linux/err.h>
#include <linux/slab.h>

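/*
 * A composite clock combines up to three component clocks - a mux, a rate
 * clock (typically a divider) and a gate - behind a single struct clk_hw.
 * Each clk_ops callback below points the relevant component's clk_hw at the
 * composite's struct clk via __clk_hw_set_clk() and then forwards the call
 * to the component's own ops.
 *
 * Illustrative registration sketch (hypothetical provider code, not part of
 * this file; register fields omitted), built from the generic
 * mux/divider/gate helpers declared in clk-provider.h:
 *
 *	static const char * const foo_parents[] = { "pll_a", "pll_b" };
 *	static struct clk_mux foo_mux;
 *	static struct clk_divider foo_div;
 *	static struct clk_gate foo_gate;
 *
 *	hw = clk_hw_register_composite(dev, "foo", foo_parents,
 *				       ARRAY_SIZE(foo_parents),
 *				       &foo_mux.hw, &clk_mux_ops,
 *				       &foo_div.hw, &clk_divider_ops,
 *				       &foo_gate.hw, &clk_gate_ops, 0);
 */
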
static u8 clk_composite_get_parent(struct clk_hw *hw)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *mux_hw = composite->mux_hw;

	__clk_hw_set_clk(mux_hw, hw);

	return mux_ops->get_parent(mux_hw);
}

static int clk_composite_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *mux_hw = composite->mux_hw;

	__clk_hw_set_clk(mux_hw, hw);

	return mux_ops->set_parent(mux_hw, index);
}

static unsigned long clk_composite_recalc_rate(struct clk_hw *hw,
					    unsigned long parent_rate)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	struct clk_hw *rate_hw = composite->rate_hw;

	__clk_hw_set_clk(rate_hw, hw);

	return rate_ops->recalc_rate(rate_hw, parent_rate);
}

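/*
 * Ask the rate component what it could produce from the given candidate
 * parent, preferring the component's .determine_rate over the older
 * .round_rate callback when both are implemented.
 */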
static int clk_composite_determine_rate_for_parent(struct clk_hw *rate_hw,
						   struct clk_rate_request *req,
						   struct clk_hw *parent_hw,
						   const struct clk_ops *rate_ops)
{
	long rate;

	req->best_parent_hw = parent_hw;
	req->best_parent_rate = clk_hw_get_rate(parent_hw);

	if (rate_ops->determine_rate)
		return rate_ops->determine_rate(rate_hw, req);

	rate = rate_ops->round_rate(rate_hw, req->rate,
				    &req->best_parent_rate);
	if (rate < 0)
		return rate;

	req->rate = rate;

	return 0;
}

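/*
 * A composite clock handles a rate request in one of three ways, depending
 * on which components it was registered with:
 *
 *  - a mux that can switch parents plus a rate component that can round:
 *    unless CLK_SET_RATE_NO_REPARENT is set, try every reachable parent and
 *    pick the one whose achievable rate is closest to the request;
 *  - a rate component with .determine_rate: forward the request to it;
 *  - a mux with .determine_rate: forward the request to it.
 */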
static int clk_composite_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *rate_hw = composite->rate_hw;
	struct clk_hw *mux_hw = composite->mux_hw;
	struct clk_hw *parent;
	unsigned long rate_diff;
	unsigned long best_rate_diff = ULONG_MAX;
	unsigned long best_rate = 0;
	int i, ret;

	if (rate_hw && rate_ops &&
	    (rate_ops->determine_rate || rate_ops->round_rate) &&
	    mux_hw && mux_ops && mux_ops->set_parent) {
		req->best_parent_hw = NULL;

		if (clk_hw_get_flags(hw) & CLK_SET_RATE_NO_REPARENT) {
			struct clk_rate_request tmp_req = *req;

			parent = clk_hw_get_parent(mux_hw);

			ret = clk_composite_determine_rate_for_parent(rate_hw,
								      &tmp_req,
								      parent,
								      rate_ops);
			if (ret)
				return ret;

			req->rate = tmp_req.rate;
			req->best_parent_hw = tmp_req.best_parent_hw;
			req->best_parent_rate = tmp_req.best_parent_rate;

			return 0;
		}

		for (i = 0; i < clk_hw_get_num_parents(mux_hw); i++) {
			struct clk_rate_request tmp_req = *req;

			parent = clk_hw_get_parent_by_index(mux_hw, i);
			if (!parent)
				continue;

			ret = clk_composite_determine_rate_for_parent(rate_hw,
								      &tmp_req,
								      parent,
								      rate_ops);
			if (ret)
				continue;

			rate_diff = abs(req->rate - tmp_req.rate);

			if (!rate_diff || !req->best_parent_hw
				       || best_rate_diff > rate_diff) {
				req->best_parent_hw = parent;
				req->best_parent_rate = tmp_req.best_parent_rate;
				best_rate_diff = rate_diff;
				best_rate = tmp_req.rate;
			}

			if (!rate_diff)
				return 0;
		}

		req->rate = best_rate;
		return 0;
	} else if (rate_hw && rate_ops && rate_ops->determine_rate) {
		__clk_hw_set_clk(rate_hw, hw);
		return rate_ops->determine_rate(rate_hw, req);
	} else if (mux_hw && mux_ops && mux_ops->determine_rate) {
		__clk_hw_set_clk(mux_hw, hw);
		return mux_ops->determine_rate(mux_hw, req);
	} else {
		pr_err("clk: clk_composite_determine_rate function called, but no mux or rate callback set!\n");
		return -EINVAL;
	}
}

static long clk_composite_round_rate(struct clk_hw *hw, unsigned long rate,
				  unsigned long *prate)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	struct clk_hw *rate_hw = composite->rate_hw;

	__clk_hw_set_clk(rate_hw, hw);

	return rate_ops->round_rate(rate_hw, rate, prate);
}

static int clk_composite_set_rate(struct clk_hw *hw, unsigned long rate,
			       unsigned long parent_rate)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	struct clk_hw *rate_hw = composite->rate_hw;

	__clk_hw_set_clk(rate_hw, hw);

	return rate_ops->set_rate(rate_hw, rate, parent_rate);
}

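/*
 * The two programming steps are ordered by first checking what rate the
 * current rate-component setting would produce from the new parent: if that
 * would overshoot the requested rate, the rate component is reprogrammed
 * before the mux is switched, otherwise the mux is switched first.  The
 * intent is to avoid running downstream consumers above the requested rate
 * while the change is in flight.
 */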
static int clk_composite_set_rate_and_parent(struct clk_hw *hw,
					     unsigned long rate,
					     unsigned long parent_rate,
					     u8 index)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *rate_hw = composite->rate_hw;
	struct clk_hw *mux_hw = composite->mux_hw;
	unsigned long temp_rate;

	__clk_hw_set_clk(rate_hw, hw);
	__clk_hw_set_clk(mux_hw, hw);

	temp_rate = rate_ops->recalc_rate(rate_hw, parent_rate);
	if (temp_rate > rate) {
		rate_ops->set_rate(rate_hw, rate, parent_rate);
		mux_ops->set_parent(mux_hw, index);
	} else {
		mux_ops->set_parent(mux_hw, index);
		rate_ops->set_rate(rate_hw, rate, parent_rate);
	}

	return 0;
}

static int clk_composite_is_enabled(struct clk_hw *hw)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *gate_ops = composite->gate_ops;
	struct clk_hw *gate_hw = composite->gate_hw;

	__clk_hw_set_clk(gate_hw, hw);

	return gate_ops->is_enabled(gate_hw);
}

static int clk_composite_enable(struct clk_hw *hw)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *gate_ops = composite->gate_ops;
	struct clk_hw *gate_hw = composite->gate_hw;

	__clk_hw_set_clk(gate_hw, hw);

	return gate_ops->enable(gate_hw);
}

static void clk_composite_disable(struct clk_hw *hw)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *gate_ops = composite->gate_ops;
	struct clk_hw *gate_hw = composite->gate_hw;

	__clk_hw_set_clk(gate_hw, hw);

	gate_ops->disable(gate_hw);
}

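/*
 * Assemble composite->ops from whichever component ops were supplied, then
 * register the resulting clk_hw.  Each component has a minimum requirement:
 * a mux must implement .get_parent, a rate component must implement
 * .recalc_rate, and a gate must implement .enable, .disable and .is_enabled.
 * .set_rate is only wired up when .round_rate or .determine_rate is also
 * provided.
 */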
static struct clk_hw *__clk_hw_register_composite(struct device *dev,
			const char *name, const char * const *parent_names,
			const struct clk_parent_data *pdata, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw *hw;
	struct clk_init_data init = {};
	struct clk_composite *composite;
	struct clk_ops *clk_composite_ops;
	int ret;

	composite = kzalloc(sizeof(*composite), GFP_KERNEL);
	if (!composite)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.flags = flags;
	if (parent_names)
		init.parent_names = parent_names;
	else
		init.parent_data = pdata;
	init.num_parents = num_parents;
	hw = &composite->hw;

	clk_composite_ops = &composite->ops;

	if (mux_hw && mux_ops) {
		if (!mux_ops->get_parent) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}

		composite->mux_hw = mux_hw;
		composite->mux_ops = mux_ops;
		clk_composite_ops->get_parent = clk_composite_get_parent;
		if (mux_ops->set_parent)
			clk_composite_ops->set_parent = clk_composite_set_parent;
		if (mux_ops->determine_rate)
			clk_composite_ops->determine_rate = clk_composite_determine_rate;
	}

	if (rate_hw && rate_ops) {
		if (!rate_ops->recalc_rate) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}
		clk_composite_ops->recalc_rate = clk_composite_recalc_rate;

		if (rate_ops->determine_rate)
			clk_composite_ops->determine_rate =
				clk_composite_determine_rate;
		else if (rate_ops->round_rate)
			clk_composite_ops->round_rate =
				clk_composite_round_rate;

		/* .set_rate requires either .round_rate or .determine_rate */
		if (rate_ops->set_rate) {
			if (rate_ops->determine_rate || rate_ops->round_rate)
				clk_composite_ops->set_rate =
						clk_composite_set_rate;
			else
				WARN(1, "%s: missing round_rate op is required\n",
						__func__);
		}

		composite->rate_hw = rate_hw;
		composite->rate_ops = rate_ops;
	}

	if (mux_hw && mux_ops && rate_hw && rate_ops) {
		if (mux_ops->set_parent && rate_ops->set_rate)
			clk_composite_ops->set_rate_and_parent =
			clk_composite_set_rate_and_parent;
	}

	if (gate_hw && gate_ops) {
		if (!gate_ops->is_enabled || !gate_ops->enable ||
		    !gate_ops->disable) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}

		composite->gate_hw = gate_hw;
		composite->gate_ops = gate_ops;
		clk_composite_ops->is_enabled = clk_composite_is_enabled;
		clk_composite_ops->enable = clk_composite_enable;
		clk_composite_ops->disable = clk_composite_disable;
	}

	init.ops = clk_composite_ops;
	composite->hw.init = &init;

	ret = clk_hw_register(dev, hw);
	if (ret) {
		hw = ERR_PTR(ret);
		goto err;
	}

	if (composite->mux_hw)
		composite->mux_hw->clk = hw->clk;

	if (composite->rate_hw)
		composite->rate_hw->clk = hw->clk;

	if (composite->gate_hw)
		composite->gate_hw->clk = hw->clk;

	return hw;

err:
	kfree(composite);
	return hw;
}

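/**
 * clk_hw_register_composite - register a composite clock assembled from
 * optional mux, rate and gate components
 * @dev: device that is registering this clock, may be NULL
 * @name: name of this composite clock
 * @parent_names: array of possible parent clock names
 * @num_parents: number of entries in @parent_names
 * @mux_hw: clk_hw of the mux component, or NULL if there is no mux
 * @mux_ops: clk_ops of the mux component, or NULL
 * @rate_hw: clk_hw of the rate component (e.g. a divider), or NULL
 * @rate_ops: clk_ops of the rate component, or NULL
 * @gate_hw: clk_hw of the gate component, or NULL if there is no gate
 * @gate_ops: clk_ops of the gate component, or NULL
 * @flags: framework-level flags for this clock
 *
 * Return: the registered clk_hw on success, or an ERR_PTR() on failure.
 */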
struct clk_hw *clk_hw_register_composite(struct device *dev, const char *name,
			const char * const *parent_names, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	return __clk_hw_register_composite(dev, name, parent_names, NULL,
					   num_parents, mux_hw, mux_ops,
					   rate_hw, rate_ops, gate_hw,
					   gate_ops, flags);
}
EXPORT_SYMBOL_GPL(clk_hw_register_composite);

struct clk_hw *clk_hw_register_composite_pdata(struct device *dev,
			const char *name,
			const struct clk_parent_data *parent_data,
			int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	return __clk_hw_register_composite(dev, name, NULL, parent_data,
					   num_parents, mux_hw, mux_ops,
					   rate_hw, rate_ops, gate_hw,
					   gate_ops, flags);
}

struct clk *clk_register_composite(struct device *dev, const char *name,
			const char * const *parent_names, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw *hw;

	hw = clk_hw_register_composite(dev, name, parent_names, num_parents,
			mux_hw, mux_ops, rate_hw, rate_ops, gate_hw, gate_ops,
			flags);
	if (IS_ERR(hw))
		return ERR_CAST(hw);
	return hw->clk;
}
EXPORT_SYMBOL_GPL(clk_register_composite);

struct clk *clk_register_composite_pdata(struct device *dev, const char *name,
			const struct clk_parent_data *parent_data,
			int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw *hw;

	hw = clk_hw_register_composite_pdata(dev, name, parent_data,
			num_parents, mux_hw, mux_ops, rate_hw, rate_ops,
			gate_hw, gate_ops, flags);
	if (IS_ERR(hw))
		return ERR_CAST(hw);
	return hw->clk;
}

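/*
 * Unregistration helpers: remove the composite from the clk framework and
 * free the struct clk_composite allocated at registration time.  The
 * component mux/rate/gate descriptors are owned by the caller and are not
 * freed here.
 */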
void clk_unregister_composite(struct clk *clk)
{
	struct clk_composite *composite;
	struct clk_hw *hw;

	hw = __clk_get_hw(clk);
	if (!hw)
		return;

	composite = to_clk_composite(hw);

	clk_unregister(clk);
	kfree(composite);
}

void clk_hw_unregister_composite(struct clk_hw *hw)
{
	struct clk_composite *composite;

	composite = to_clk_composite(hw);

	clk_hw_unregister(hw);
	kfree(composite);
}
EXPORT_SYMBOL_GPL(clk_hw_unregister_composite);

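/*
 * Device-managed registration: the devres release callback unregisters the
 * composite automatically when the owning device is unbound, so users of the
 * devm_ variant need no explicit unregister call.
 */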
static void devm_clk_hw_release_composite(struct device *dev, void *res)
{
	clk_hw_unregister_composite(*(struct clk_hw **)res);
}

static struct clk_hw *__devm_clk_hw_register_composite(struct device *dev,
			const char *name, const char * const *parent_names,
			const struct clk_parent_data *pdata, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw **ptr, *hw;

	ptr = devres_alloc(devm_clk_hw_release_composite, sizeof(*ptr),
			   GFP_KERNEL);
	if (!ptr)
		return ERR_PTR(-ENOMEM);

	hw = __clk_hw_register_composite(dev, name, parent_names, pdata,
					 num_parents, mux_hw, mux_ops, rate_hw,
					 rate_ops, gate_hw, gate_ops, flags);

	if (!IS_ERR(hw)) {
		*ptr = hw;
		devres_add(dev, ptr);
	} else {
		devres_free(ptr);
	}

	return hw;
}

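/*
 * Device-managed variant of clk_hw_register_composite_pdata(): parents are
 * described with struct clk_parent_data and the clock is unregistered
 * automatically when @dev is unbound.
 */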
struct clk_hw *devm_clk_hw_register_composite_pdata(struct device *dev,
			const char *name,
			const struct clk_parent_data *parent_data,
			int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	return __devm_clk_hw_register_composite(dev, name, NULL, parent_data,
						num_parents, mux_hw, mux_ops,
						rate_hw, rate_ops, gate_hw,
						gate_ops, flags);
}