/*
 * Copyright (c) 2013, The Linux Foundation. All rights reserved.
 *
 * This software is licensed under the terms of the GNU General Public
 * License version 2, as published by the Free Software Foundation, and
 * may be copied, distributed, and modified under those terms.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include <linux/kernel.h>
#include <linux/bitops.h>
#include <linux/err.h>
#include <linux/bug.h>
#include <linux/export.h>
#include <linux/clk-provider.h>
#include <linux/delay.h>
#include <linux/regmap.h>
#include <linux/math64.h>

#include <asm/div64.h>

#include "clk-rcg.h"
#include "common.h"

#define CMD_REG			0x0
#define CMD_UPDATE		BIT(0)
#define CMD_ROOT_EN		BIT(1)
#define CMD_DIRTY_CFG		BIT(4)
#define CMD_DIRTY_N		BIT(5)
#define CMD_DIRTY_M		BIT(6)
#define CMD_DIRTY_D		BIT(7)
#define CMD_ROOT_OFF		BIT(31)

#define CFG_REG			0x4
#define CFG_SRC_DIV_SHIFT	0
#define CFG_SRC_SEL_SHIFT	8
#define CFG_SRC_SEL_MASK	(0x7 << CFG_SRC_SEL_SHIFT)
#define CFG_MODE_SHIFT		12
#define CFG_MODE_MASK		(0x3 << CFG_MODE_SHIFT)
#define CFG_MODE_DUAL_EDGE	(0x2 << CFG_MODE_SHIFT)

#define M_REG			0x8
#define N_REG			0xc
#define D_REG			0x10

enum freq_policy {
	FLOOR,
	CEIL,
};

static int clk_rcg2_is_enabled(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cmd;
	int ret;

	ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
	if (ret)
		return ret;

	return (cmd & CMD_ROOT_OFF) == 0;
}

static u8 clk_rcg2_get_parent(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int num_parents = clk_hw_get_num_parents(hw);
	u32 cfg;
	int i, ret;

	ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	if (ret)
		goto err;

	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++)
		if (cfg == rcg->parent_map[i].cfg)
			return i;

err:
	pr_debug("%s: Clock %s has invalid parent, using default.\n",
		 __func__, clk_hw_get_name(hw));
	return 0;
}

static int update_config(struct clk_rcg2 *rcg)
{
	int count, ret;
	u32 cmd;
	struct clk_hw *hw = &rcg->clkr.hw;
	const char *name = clk_hw_get_name(hw);

	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				 CMD_UPDATE, CMD_UPDATE);
	if (ret)
		return ret;

	/* Wait for update to take effect */
	for (count = 500; count > 0; count--) {
		ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				  &cmd);
		if (ret)
			return ret;
		if (!(cmd & CMD_UPDATE))
			return 0;
		udelay(1);
	}

	WARN(1, "%s: rcg didn't update its configuration.", name);
	return 0;
}

static int clk_rcg2_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;
	u32 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;

	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				 CFG_SRC_SEL_MASK, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}

/*
 * Calculate m/n:d rate
 *
 *          parent_rate     m
 *   rate = ----------- x  ---
 *            hid_div       n
 */
static unsigned long
calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 hid_div)
{
	if (hid_div) {
		rate *= 2;
		rate /= hid_div + 1;
	}

	if (mode) {
		u64 tmp = rate;
		tmp *= m;
		do_div(tmp, n);
		rate = tmp;
	}

	return rate;
}
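/*
 * Illustrative example (values invented for this comment): with
 * parent_rate = 19200000, a CFG divider field of 3 (a half-integer
 * divide-by-2, since the field encodes 2 * div - 1, as implied by
 * calc_rate() above and clk_byte_set_rate() below) and M/N = 1/4 with
 * the MND counter active, calc_rate() returns
 * 19200000 * 2 / (3 + 1) * 1 / 4 = 2400000.
 */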
static unsigned long
clk_rcg2_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg, hid_div, m = 0, n = 0, mode = 0, mask;

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);

	if (rcg->mnd_width) {
		mask = BIT(rcg->mnd_width) - 1;
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + M_REG, &m);
		m &= mask;
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + N_REG, &n);
		n = ~n;
		n &= mask;
		n += m;
		mode = cfg & CFG_MODE_MASK;
		mode >>= CFG_MODE_SHIFT;
	}

	mask = BIT(rcg->hid_width) - 1;
	hid_div = cfg >> CFG_SRC_DIV_SHIFT;
	hid_div &= mask;

	return calc_rate(parent_rate, m, n, mode, hid_div);
}

static int _freq_tbl_determine_rate(struct clk_hw *hw, const struct freq_tbl *f,
				    struct clk_rate_request *req,
				    enum freq_policy policy)
{
	unsigned long clk_flags, rate = req->rate;
	struct clk_hw *p;
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int index;

	switch (policy) {
	case FLOOR:
		f = qcom_find_freq_floor(f, rate);
		break;
	case CEIL:
		f = qcom_find_freq(f, rate);
		break;
	default:
		return -EINVAL;
	}

	if (!f)
		return -EINVAL;

	index = qcom_find_src_index(hw, rcg->parent_map, f->src);
	if (index < 0)
		return index;

	clk_flags = clk_hw_get_flags(hw);
	p = clk_hw_get_parent_by_index(hw, index);
	if (clk_flags & CLK_SET_RATE_PARENT) {
		if (f->pre_div) {
			rate /= 2;
			rate *= f->pre_div + 1;
		}

		if (f->n) {
			u64 tmp = rate;
			tmp = tmp * f->n;
			do_div(tmp, f->m);
			rate = tmp;
		}
	} else {
		rate = clk_hw_get_rate(p);
	}
	req->best_parent_hw = p;
	req->best_parent_rate = rate;
	req->rate = f->freq;

	return 0;
}

static int clk_rcg2_determine_rate(struct clk_hw *hw,
				   struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, CEIL);
}

static int clk_rcg2_determine_floor_rate(struct clk_hw *hw,
					 struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, FLOOR);
}
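/*
 * Register encoding, as programmed by clk_rcg2_configure() below (described
 * from the code, not from a datasheet): for an MND clock running at m/n,
 * the M register holds m, the N register holds ~(n - m) and the D register
 * holds ~n, each truncated to mnd_width bits; dual-edge mode is selected
 * whenever the MND counter is used with m != n.
 */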
static int clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
{
	u32 cfg, mask;
	struct clk_hw *hw = &rcg->clkr.hw;
	int ret, index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	if (index < 0)
		return index;

	if (rcg->mnd_width && f->n) {
		mask = BIT(rcg->mnd_width) - 1;
		ret = regmap_update_bits(rcg->clkr.regmap,
					 rcg->cmd_rcgr + M_REG, mask, f->m);
		if (ret)
			return ret;

		ret = regmap_update_bits(rcg->clkr.regmap,
					 rcg->cmd_rcgr + N_REG, mask,
					 ~(f->n - f->m));
		if (ret)
			return ret;

		ret = regmap_update_bits(rcg->clkr.regmap,
					 rcg->cmd_rcgr + D_REG, mask, ~f->n);
		if (ret)
			return ret;
	}

	mask = BIT(rcg->hid_width) - 1;
	mask |= CFG_SRC_SEL_MASK | CFG_MODE_MASK;
	cfg = f->pre_div << CFG_SRC_DIV_SHIFT;
	cfg |= rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
	if (rcg->mnd_width && f->n && (f->m != f->n))
		cfg |= CFG_MODE_DUAL_EDGE;
	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				 mask, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}

static int __clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
			       enum freq_policy policy)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f;

	switch (policy) {
	case FLOOR:
		f = qcom_find_freq_floor(rcg->freq_tbl, rate);
		break;
	case CEIL:
		f = qcom_find_freq(rcg->freq_tbl, rate);
		break;
	default:
		return -EINVAL;
	}

	if (!f)
		return -EINVAL;

	return clk_rcg2_configure(rcg, f);
}

static int clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
			     unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate, CEIL);
}

static int clk_rcg2_set_floor_rate(struct clk_hw *hw, unsigned long rate,
				   unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate, FLOOR);
}

static int clk_rcg2_set_rate_and_parent(struct clk_hw *hw,
					unsigned long rate,
					unsigned long parent_rate, u8 index)
{
	return __clk_rcg2_set_rate(hw, rate, CEIL);
}

static int clk_rcg2_set_floor_rate_and_parent(struct clk_hw *hw,
					      unsigned long rate,
					      unsigned long parent_rate,
					      u8 index)
{
	return __clk_rcg2_set_rate(hw, rate, FLOOR);
}

const struct clk_ops clk_rcg2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_set_rate,
	.set_rate_and_parent = clk_rcg2_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_ops);

const struct clk_ops clk_rcg2_floor_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_floor_rate,
	.set_rate = clk_rcg2_set_floor_rate,
	.set_rate_and_parent = clk_rcg2_set_floor_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_floor_ops);

static int clk_rcg2_shared_force_enable(struct clk_hw *hw, unsigned long rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const char *name = clk_hw_get_name(hw);
	int ret, count;

	/* force enable RCG */
	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				 CMD_ROOT_EN, CMD_ROOT_EN);
	if (ret)
		return ret;

	/* wait for RCG to turn ON */
	for (count = 500; count > 0; count--) {
		ret = clk_rcg2_is_enabled(hw);
		if (ret)
			break;
		udelay(1);
	}
	if (!count)
		pr_err("%s: RCG did not turn on\n", name);

	/* set clock rate */
	ret = __clk_rcg2_set_rate(hw, rate, CEIL);
	if (ret)
		return ret;

	/* clear force enable RCG */
	return regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				  CMD_ROOT_EN, 0);
}

static int clk_rcg2_shared_set_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	/* cache the rate */
	rcg->current_freq = rate;

	if (!__clk_is_enabled(hw->clk))
		return 0;

	return clk_rcg2_shared_force_enable(hw, rcg->current_freq);
}

static unsigned long
clk_rcg2_shared_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return rcg->current_freq = clk_rcg2_recalc_rate(hw, parent_rate);
}
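/*
 * "Shared" RCG semantics (as implemented here): clk_rcg2_shared_set_rate()
 * only caches the requested rate while the clock is disabled,
 * clk_rcg2_shared_enable() below reprograms that cached rate with the root
 * force-enabled, and clk_rcg2_shared_disable() parks the RCG at the lowest
 * frequency-table entry (XO) rather than gating it, presumably because the
 * root is shared with other consumers and must keep running.
 */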
static int clk_rcg2_shared_enable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return clk_rcg2_shared_force_enable(hw, rcg->current_freq);
}

static void clk_rcg2_shared_disable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	/* switch to XO, which is the lowest entry in the freq table */
	clk_rcg2_shared_set_rate(hw, rcg->freq_tbl[0].freq, 0);
}

const struct clk_ops clk_rcg2_shared_ops = {
	.enable = clk_rcg2_shared_enable,
	.disable = clk_rcg2_shared_disable,
	.get_parent = clk_rcg2_get_parent,
	.recalc_rate = clk_rcg2_shared_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_shared_set_rate,
};
EXPORT_SYMBOL_GPL(clk_rcg2_shared_ops);

struct frac_entry {
	int num;
	int den;
};

static const struct frac_entry frac_table_675m[] = {	/* link rate of 270M */
	{ 52, 295 },	/* 119 M */
	{ 11, 57 },	/* 130.25 M */
	{ 63, 307 },	/* 138.50 M */
	{ 11, 50 },	/* 148.50 M */
	{ 47, 206 },	/* 154 M */
	{ 31, 100 },	/* 205.25 M */
	{ 107, 269 },	/* 268.50 M */
	{ },
};

static const struct frac_entry frac_table_810m[] = {	/* Link rate of 162M */
	{ 31, 211 },	/* 119 M */
	{ 32, 199 },	/* 130.25 M */
	{ 63, 307 },	/* 138.50 M */
	{ 11, 60 },	/* 148.50 M */
	{ 50, 263 },	/* 154 M */
	{ 31, 120 },	/* 205.25 M */
	{ 119, 359 },	/* 268.50 M */
	{ },
};

static int clk_edp_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
				  unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;
	s64 src_rate = parent_rate;
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;

	if (src_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		request = rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((src_rate < (request - delta)) ||
		    (src_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
			    &hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}

	return -EINVAL;
}

static int clk_edp_pixel_set_rate_and_parent(struct clk_hw *hw,
					     unsigned long rate,
					     unsigned long parent_rate,
					     u8 index)
{
	/* Parent index is set statically in frequency table */
	return clk_edp_pixel_set_rate(hw, rate, parent_rate);
}
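/*
 * Worked example for the matching loops above and below, using the
 * { 11, 60 } entry of frac_table_810m: for a requested rate of 148.5 MHz,
 * request = 148500000 * 60 / 11 = 810000000, which matches an 810 MHz
 * parent within the 100 kHz delta, so M/N is programmed to 11/60.
 */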
static int clk_edp_pixel_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	/* Force the correct parent */
	req->best_parent_hw = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = clk_hw_get_rate(req->best_parent_hw);

	if (req->best_parent_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		request = req->rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((req->best_parent_rate < (request - delta)) ||
		    (req->best_parent_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
			    &hid_div);
		hid_div >>= CFG_SRC_DIV_SHIFT;
		hid_div &= mask;

		req->rate = calc_rate(req->best_parent_rate,
				      frac->num, frac->den,
				      !!frac->den, hid_div);
		return 0;
	}

	return -EINVAL;
}

const struct clk_ops clk_edp_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_edp_pixel_set_rate,
	.set_rate_and_parent = clk_edp_pixel_set_rate_and_parent,
	.determine_rate = clk_edp_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_edp_pixel_ops);

static int clk_byte_determine_rate(struct clk_hw *hw,
				   struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
	unsigned long parent_rate, div;
	u32 mask = BIT(rcg->hid_width) - 1;
	struct clk_hw *p;

	if (req->rate == 0)
		return -EINVAL;

	req->best_parent_hw = p = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = parent_rate = clk_hw_round_rate(p, req->rate);

	div = DIV_ROUND_UP((2 * parent_rate), req->rate) - 1;
	div = min_t(u32, div, mask);

	req->rate = calc_rate(parent_rate, 0, 0, 0, div);

	return 0;
}

static int clk_byte_set_rate(struct clk_hw *hw, unsigned long rate,
			     unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	unsigned long div;
	u32 mask = BIT(rcg->hid_width) - 1;

	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	f.pre_div = div;

	return clk_rcg2_configure(rcg, &f);
}

static int clk_byte_set_rate_and_parent(struct clk_hw *hw,
					unsigned long rate,
					unsigned long parent_rate, u8 index)
{
	/* Parent index is set statically in frequency table */
	return clk_byte_set_rate(hw, rate, parent_rate);
}

const struct clk_ops clk_byte_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte_set_rate,
	.set_rate_and_parent = clk_byte_set_rate_and_parent,
	.determine_rate = clk_byte_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte_ops);
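/*
 * The byte and byte2 clocks use only the half-integer divider, no MND
 * counter.  Illustrative example (parent rate invented for this comment):
 * for parent_rate = 750000000 and a requested 187500000, the divider field
 * is DIV_ROUND_UP(2 * 750000000, 187500000) - 1 = 7, i.e. a divide-by-4,
 * and calc_rate() returns 750000000 * 2 / (7 + 1) = 187500000.
 */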
static int clk_byte2_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	unsigned long parent_rate, div;
	u32 mask = BIT(rcg->hid_width) - 1;
	struct clk_hw *p;
	unsigned long rate = req->rate;

	if (rate == 0)
		return -EINVAL;

	p = req->best_parent_hw;
	req->best_parent_rate = parent_rate = clk_hw_round_rate(p, rate);

	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	req->rate = calc_rate(parent_rate, 0, 0, 0, div);

	return 0;
}

static int clk_byte2_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	unsigned long div;
	int i, num_parents = clk_hw_get_num_parents(hw);
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 cfg;

	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	f.pre_div = div;

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++) {
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			return clk_rcg2_configure(rcg, &f);
		}
	}

	return -EINVAL;
}

static int clk_byte2_set_rate_and_parent(struct clk_hw *hw,
					 unsigned long rate,
					 unsigned long parent_rate, u8 index)
{
	/* Read the hardware to determine parent during set_rate */
	return clk_byte2_set_rate(hw, rate, parent_rate);
}

const struct clk_ops clk_byte2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte2_set_rate,
	.set_rate_and_parent = clk_byte2_set_rate_and_parent,
	.determine_rate = clk_byte2_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte2_ops);

static const struct frac_entry frac_table_pixel[] = {
	{ 3, 8 },
	{ 2, 9 },
	{ 4, 9 },
	{ 1, 1 },
	{ }
};

static int clk_pixel_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	unsigned long request, src_rate;
	int delta = 100000;
	const struct frac_entry *frac = frac_table_pixel;

	for (; frac->num; frac++) {
		request = (req->rate * frac->den) / frac->num;

		src_rate = clk_hw_round_rate(req->best_parent_hw, request);
		if ((src_rate < (request - delta)) ||
		    (src_rate > (request + delta)))
			continue;

		req->best_parent_rate = src_rate;
		req->rate = (src_rate * frac->num) / frac->den;
		return 0;
	}

	return -EINVAL;
}

static int clk_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	const struct frac_entry *frac = frac_table_pixel;
	unsigned long request;
	int delta = 100000;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div, cfg;
	int i, num_parents = clk_hw_get_num_parents(hw);

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++)
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			break;
		}

	for (; frac->num; frac++) {
		request = (rate * frac->den) / frac->num;

		if ((parent_rate < (request - delta)) ||
		    (parent_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
			    &hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}

	return -EINVAL;
}

static int clk_pixel_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
					 unsigned long parent_rate, u8 index)
{
	return clk_pixel_set_rate(hw, rate, parent_rate);
}

const struct clk_ops clk_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_pixel_set_rate,
	.set_rate_and_parent = clk_pixel_set_rate_and_parent,
	.determine_rate = clk_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_pixel_ops);
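/*
 * The gfx3d RCG is only re-muxed between its PLL sources; the divider and
 * MND hardware are left untouched (see clk_gfx3d_set_rate_and_parent()).
 * clk_gfx3d_determine_rate() below alternates between the p2 and p8
 * parents, with p9 reserved for the fixed maximum rate and index 0 for the
 * XO rate, presumably so that a new rate is always programmed on the PLL
 * that is not currently feeding the RCG before switching over.
 */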
static int clk_gfx3d_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	struct clk_rate_request parent_req = { };
	struct clk_hw *p2, *p8, *p9, *xo;
	unsigned long p9_rate;
	int ret;

	xo = clk_hw_get_parent_by_index(hw, 0);
	if (req->rate == clk_hw_get_rate(xo)) {
		req->best_parent_hw = xo;
		return 0;
	}

	p9 = clk_hw_get_parent_by_index(hw, 2);
	p2 = clk_hw_get_parent_by_index(hw, 3);
	p8 = clk_hw_get_parent_by_index(hw, 4);

	/* PLL9 is a fixed rate PLL */
	p9_rate = clk_hw_get_rate(p9);

	parent_req.rate = req->rate = min(req->rate, p9_rate);
	if (req->rate == p9_rate) {
		req->rate = req->best_parent_rate = p9_rate;
		req->best_parent_hw = p9;
		return 0;
	}

	if (req->best_parent_hw == p9) {
		/* Are we going back to a previously used rate? */
		if (clk_hw_get_rate(p8) == req->rate)
			req->best_parent_hw = p8;
		else
			req->best_parent_hw = p2;
	} else if (req->best_parent_hw == p8) {
		req->best_parent_hw = p2;
	} else {
		req->best_parent_hw = p8;
	}

	ret = __clk_determine_rate(req->best_parent_hw, &parent_req);
	if (ret)
		return ret;

	req->rate = req->best_parent_rate = parent_req.rate;

	return 0;
}

static int clk_gfx3d_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
					 unsigned long parent_rate, u8 index)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg;
	int ret;

	/* Just mux it, we don't use the division or m/n hardware */
	cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
	ret = regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}

static int clk_gfx3d_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	/*
	 * We should never get here; clk_gfx3d_determine_rate() should always
	 * make us use a different parent than what we're currently using, so
	 * clk_gfx3d_set_rate_and_parent() should always be called.
	 */
	return 0;
}

const struct clk_ops clk_gfx3d_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_gfx3d_set_rate,
	.set_rate_and_parent = clk_gfx3d_set_rate_and_parent,
	.determine_rate = clk_gfx3d_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_gfx3d_ops);