// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (c) 2013 NVIDIA CORPORATION. All rights reserved.
 */

#include <linux/clk-provider.h>
#include <linux/device.h>
#include <linux/err.h>
#include <linux/slab.h>

/*
 * A composite clock aggregates up to three separately-implemented components
 * (a mux, a rate clock such as a divider, and a gate) behind one struct
 * clk_hw.  Each clk_ops callback below simply points the relevant component's
 * clk_hw at the composite's struct clk (via __clk_hw_set_clk) and forwards
 * the call to that component's own ops.
 */

/* Forward .get_parent to the mux component. */
static u8 clk_composite_get_parent(struct clk_hw *hw)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *mux_hw = composite->mux_hw;

	__clk_hw_set_clk(mux_hw, hw);

	return mux_ops->get_parent(mux_hw);
}

/* Forward .set_parent to the mux component. */
static int clk_composite_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *mux_hw = composite->mux_hw;

	__clk_hw_set_clk(mux_hw, hw);

	return mux_ops->set_parent(mux_hw, index);
}

/* Forward .recalc_rate to the rate component. */
static unsigned long clk_composite_recalc_rate(struct clk_hw *hw,
					       unsigned long parent_rate)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	struct clk_hw *rate_hw = composite->rate_hw;

	__clk_hw_set_clk(rate_hw, hw);

	return rate_ops->recalc_rate(rate_hw, parent_rate);
}

/*
 * Ask the rate component what rate it could achieve when fed from
 * @parent_hw.  Prefers the rate component's .determine_rate; falls back to
 * .round_rate.  On success req->rate holds the achievable rate and
 * req->best_parent_rate the corresponding parent rate.
 */
static int clk_composite_determine_rate_for_parent(struct clk_hw *rate_hw,
						   struct clk_rate_request *req,
						   struct clk_hw *parent_hw,
						   const struct clk_ops *rate_ops)
{
	long rate;

	req->best_parent_hw = parent_hw;
	req->best_parent_rate = clk_hw_get_rate(parent_hw);

	if (rate_ops->determine_rate)
		return rate_ops->determine_rate(rate_hw, req);

	/* round_rate returns a negative errno on failure */
	rate = rate_ops->round_rate(rate_hw, req->rate,
				    &req->best_parent_rate);
	if (rate < 0)
		return rate;

	req->rate = rate;

	return 0;
}

/*
 * .determine_rate for the composite.  When both a reparentable mux and a
 * rate component exist, every candidate parent is tried and the one whose
 * achievable rate is closest to the request wins (exact match short-
 * circuits).  With CLK_SET_RATE_NO_REPARENT only the current parent is
 * considered.  Otherwise the call is forwarded to whichever single
 * component implements .determine_rate.
 */
static int clk_composite_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *rate_hw = composite->rate_hw;
	struct clk_hw *mux_hw = composite->mux_hw;
	struct clk_hw *parent;
	unsigned long rate_diff;
	unsigned long best_rate_diff = ULONG_MAX;
	unsigned long best_rate = 0;
	int i, ret;

	if (rate_hw && rate_ops &&
	    (rate_ops->determine_rate || rate_ops->round_rate) &&
	    mux_hw && mux_ops && mux_ops->set_parent) {
		req->best_parent_hw = NULL;

		if (clk_hw_get_flags(hw) & CLK_SET_RATE_NO_REPARENT) {
			/* Reparenting forbidden: only probe the current parent. */
			struct clk_rate_request tmp_req = *req;

			parent = clk_hw_get_parent(mux_hw);

			ret = clk_composite_determine_rate_for_parent(rate_hw,
								      &tmp_req,
								      parent,
								      rate_ops);
			if (ret)
				return ret;

			req->rate = tmp_req.rate;
			req->best_parent_rate = tmp_req.best_parent_rate;

			return 0;
		}

		for (i = 0; i < clk_hw_get_num_parents(mux_hw); i++) {
			/* Work on a copy so a failed probe can't corrupt @req. */
			struct clk_rate_request tmp_req = *req;

			parent = clk_hw_get_parent_by_index(mux_hw, i);
			if (!parent)
				continue;

			ret = clk_composite_determine_rate_for_parent(rate_hw,
								      &tmp_req,
								      parent,
								      rate_ops);
			if (ret)
				continue;

			rate_diff = abs(req->rate - tmp_req.rate);

			/*
			 * Keep this parent if it is an exact match, the first
			 * usable one, or closer than the best seen so far.
			 */
			if (!rate_diff || !req->best_parent_hw
				       || best_rate_diff > rate_diff) {
				req->best_parent_hw = parent;
				req->best_parent_rate = tmp_req.best_parent_rate;
				best_rate_diff = rate_diff;
				best_rate = tmp_req.rate;
			}

			if (!rate_diff)
				return 0;
		}

		req->rate = best_rate;
		return 0;
	} else if (rate_hw && rate_ops && rate_ops->determine_rate) {
		__clk_hw_set_clk(rate_hw, hw);
		return rate_ops->determine_rate(rate_hw, req);
	} else if (mux_hw && mux_ops && mux_ops->determine_rate) {
		__clk_hw_set_clk(mux_hw, hw);
		return mux_ops->determine_rate(mux_hw, req);
	} else {
		pr_err("clk: clk_composite_determine_rate function called, but no mux or rate callback set!\n");
		return -EINVAL;
	}
}

/* Forward .round_rate to the rate component. */
static long clk_composite_round_rate(struct clk_hw *hw, unsigned long rate,
				     unsigned long *prate)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	struct clk_hw *rate_hw = composite->rate_hw;

	__clk_hw_set_clk(rate_hw, hw);

	return rate_ops->round_rate(rate_hw, rate, prate);
}

/* Forward .set_rate to the rate component. */
static int clk_composite_set_rate(struct clk_hw *hw, unsigned long rate,
				  unsigned long parent_rate)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	struct clk_hw *rate_hw = composite->rate_hw;

	__clk_hw_set_clk(rate_hw, hw);

	return rate_ops->set_rate(rate_hw, rate, parent_rate);
}

/*
 * Change rate and parent together.  The two component updates are ordered
 * by comparing the rate the rate component would produce from the new
 * parent_rate against the target: if it would overshoot, the rate is
 * lowered before the mux switches; otherwise the mux switches first —
 * keeping the intermediate rate from exceeding the request.
 */
static int clk_composite_set_rate_and_parent(struct clk_hw *hw,
					     unsigned long rate,
					     unsigned long parent_rate,
					     u8 index)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *rate_hw = composite->rate_hw;
	struct clk_hw *mux_hw = composite->mux_hw;
	unsigned long temp_rate;

	__clk_hw_set_clk(rate_hw, hw);
	__clk_hw_set_clk(mux_hw, hw);

	temp_rate = rate_ops->recalc_rate(rate_hw, parent_rate);
	if (temp_rate > rate) {
		rate_ops->set_rate(rate_hw, rate, parent_rate);
		mux_ops->set_parent(mux_hw, index);
	} else {
		mux_ops->set_parent(mux_hw, index);
		rate_ops->set_rate(rate_hw, rate, parent_rate);
	}

	return 0;
}

/* Forward .is_enabled to the gate component. */
static int clk_composite_is_enabled(struct clk_hw *hw)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *gate_ops = composite->gate_ops;
	struct clk_hw *gate_hw = composite->gate_hw;

	__clk_hw_set_clk(gate_hw, hw);

	return gate_ops->is_enabled(gate_hw);
}

/* Forward .enable to the gate component. */
static int clk_composite_enable(struct clk_hw *hw)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *gate_ops = composite->gate_ops;
	struct clk_hw *gate_hw = composite->gate_hw;

	__clk_hw_set_clk(gate_hw, hw);

	return gate_ops->enable(gate_hw);
}

/* Forward .disable to the gate component. */
static void clk_composite_disable(struct clk_hw *hw)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *gate_ops = composite->gate_ops;
	struct clk_hw *gate_hw = composite->gate_hw;

	__clk_hw_set_clk(gate_hw, hw);

	gate_ops->disable(gate_hw);
}

/*
 * Common registration worker.  Validates that each supplied component
 * provides the ops the composite will forward to, populates the
 * composite's clk_ops with the matching forwarders, then registers the
 * resulting clk_hw.  Parent names are given either as an array of strings
 * (@parent_names) or as clk_parent_data (@pdata) — exactly one of the two.
 * Returns the registered clk_hw or an ERR_PTR; on failure the composite
 * allocation is freed here.
 */
static struct clk_hw *__clk_hw_register_composite(struct device *dev,
			const char *name, const char * const *parent_names,
			const struct clk_parent_data *pdata, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw *hw;
	struct clk_init_data init = {};
	struct clk_composite *composite;
	struct clk_ops *clk_composite_ops;
	int ret;

	composite = kzalloc(sizeof(*composite), GFP_KERNEL);
	if (!composite)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.flags = flags;
	if (parent_names)
		init.parent_names = parent_names;
	else
		init.parent_data = pdata;
	init.num_parents = num_parents;
	hw = &composite->hw;

	clk_composite_ops = &composite->ops;

	if (mux_hw && mux_ops) {
		/* A mux component must at least report its parent. */
		if (!mux_ops->get_parent) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}

		composite->mux_hw = mux_hw;
		composite->mux_ops = mux_ops;
		clk_composite_ops->get_parent = clk_composite_get_parent;
		if (mux_ops->set_parent)
			clk_composite_ops->set_parent = clk_composite_set_parent;
		if (mux_ops->determine_rate)
			clk_composite_ops->determine_rate = clk_composite_determine_rate;
	}

	if (rate_hw && rate_ops) {
		/* A rate component must at least report its rate. */
		if (!rate_ops->recalc_rate) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}
		clk_composite_ops->recalc_rate = clk_composite_recalc_rate;

		if (rate_ops->determine_rate)
			clk_composite_ops->determine_rate =
				clk_composite_determine_rate;
		else if (rate_ops->round_rate)
			clk_composite_ops->round_rate =
				clk_composite_round_rate;

		/* .set_rate requires either .round_rate or .determine_rate */
		if (rate_ops->set_rate) {
			if (rate_ops->determine_rate || rate_ops->round_rate)
				clk_composite_ops->set_rate =
						clk_composite_set_rate;
			else
				WARN(1, "%s: missing round_rate op is required\n",
						__func__);
		}

		composite->rate_hw = rate_hw;
		composite->rate_ops = rate_ops;
	}

	if (mux_hw && mux_ops && rate_hw && rate_ops) {
		/* Atomic rate+parent changes need both underlying ops. */
		if (mux_ops->set_parent && rate_ops->set_rate)
			clk_composite_ops->set_rate_and_parent =
			clk_composite_set_rate_and_parent;
	}

	if (gate_hw && gate_ops) {
		/* A gate component must implement the full on/off trio. */
		if (!gate_ops->is_enabled || !gate_ops->enable ||
		    !gate_ops->disable) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}

		composite->gate_hw = gate_hw;
		composite->gate_ops = gate_ops;
		clk_composite_ops->is_enabled = clk_composite_is_enabled;
		clk_composite_ops->enable = clk_composite_enable;
		clk_composite_ops->disable = clk_composite_disable;
	}

	init.ops = clk_composite_ops;
	composite->hw.init = &init;

	ret = clk_hw_register(dev, hw);
	if (ret) {
		hw = ERR_PTR(ret);
		goto err;
	}

	/* Point every component at the registered clk. */
	if (composite->mux_hw)
		composite->mux_hw->clk = hw->clk;

	if (composite->rate_hw)
		composite->rate_hw->clk = hw->clk;

	if (composite->gate_hw)
		composite->gate_hw->clk = hw->clk;

	return hw;

err:
	kfree(composite);
	return hw;
}

/* Register a composite clock whose parents are named by string. */
struct clk_hw *clk_hw_register_composite(struct device *dev, const char *name,
			const char * const *parent_names, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	return __clk_hw_register_composite(dev, name, parent_names, NULL,
					   num_parents, mux_hw, mux_ops,
					   rate_hw, rate_ops, gate_hw,
					   gate_ops, flags);
}
EXPORT_SYMBOL_GPL(clk_hw_register_composite);

/* Register a composite clock whose parents are given as clk_parent_data. */
struct clk_hw *clk_hw_register_composite_pdata(struct device *dev,
			const char *name,
			const struct clk_parent_data *parent_data,
			int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	return __clk_hw_register_composite(dev, name, NULL, parent_data,
					   num_parents, mux_hw, mux_ops,
					   rate_hw, rate_ops, gate_hw,
					   gate_ops, flags);
}

/* Legacy struct clk * wrapper around clk_hw_register_composite(). */
struct clk *clk_register_composite(struct device *dev, const char *name,
			const char * const *parent_names, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw *hw;

	hw = clk_hw_register_composite(dev, name, parent_names, num_parents,
			mux_hw, mux_ops, rate_hw, rate_ops, gate_hw, gate_ops,
			flags);
	if (IS_ERR(hw))
		return ERR_CAST(hw);
	return hw->clk;
}
EXPORT_SYMBOL_GPL(clk_register_composite);

/* Legacy struct clk * wrapper around clk_hw_register_composite_pdata(). */
struct clk *clk_register_composite_pdata(struct device *dev, const char *name,
			const struct clk_parent_data *parent_data,
			int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw *hw;

	hw = clk_hw_register_composite_pdata(dev, name, parent_data,
					     num_parents, mux_hw, mux_ops,
					     rate_hw, rate_ops,
					     gate_hw, gate_ops, flags);
	if (IS_ERR(hw))
		return ERR_CAST(hw);
	return hw->clk;
}

/* Unregister a composite clock and free the composite wrapper it owns. */
void clk_unregister_composite(struct clk *clk)
{
	struct clk_composite *composite;
	struct clk_hw *hw;

	hw = __clk_get_hw(clk);
	if (!hw)
		return;

	composite = to_clk_composite(hw);

	clk_unregister(clk);
	kfree(composite);
}

/* clk_hw counterpart of clk_unregister_composite(). */
void clk_hw_unregister_composite(struct clk_hw *hw)
{
	struct clk_composite *composite;

	composite = to_clk_composite(hw);

	clk_hw_unregister(hw);
	kfree(composite);
}
EXPORT_SYMBOL_GPL(clk_hw_unregister_composite);

/* devres release callback: tear down the managed composite clock. */
static void devm_clk_hw_release_composite(struct device *dev, void *res)
{
	clk_hw_unregister_composite(*(struct clk_hw **)res);
}

/*
 * Device-managed registration worker: registers via
 * __clk_hw_register_composite() and arranges automatic unregistration
 * through devres when @dev is unbound.
 */
static struct clk_hw *__devm_clk_hw_register_composite(struct device *dev,
			const char *name, const char * const *parent_names,
			const struct clk_parent_data *pdata, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw **ptr, *hw;

	ptr = devres_alloc(devm_clk_hw_release_composite, sizeof(*ptr),
			   GFP_KERNEL);
	if (!ptr)
		return ERR_PTR(-ENOMEM);

	hw = __clk_hw_register_composite(dev, name, parent_names, pdata,
					 num_parents, mux_hw, mux_ops, rate_hw,
					 rate_ops, gate_hw, gate_ops, flags);

	if (!IS_ERR(hw)) {
		*ptr = hw;
		devres_add(dev, ptr);
	} else {
		devres_free(ptr);
	}

	return hw;
}

/* Device-managed variant of clk_hw_register_composite_pdata(). */
struct clk_hw *devm_clk_hw_register_composite_pdata(struct device *dev,
			const char *name,
			const struct clk_parent_data *parent_data,
			int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	return __devm_clk_hw_register_composite(dev, name, NULL, parent_data,
						num_parents, mux_hw, mux_ops,
						rate_hw, rate_ops, gate_hw,
						gate_ops, flags);
}