// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2013 Boris BREZILLON <b.brezillon@overkiz.com>
 */

#include <linux/clk-provider.h>
#include <linux/clkdev.h>
#include <linux/clk.h>
#include <linux/clk/at91_pmc.h>
#include <linux/of.h>
#include <linux/mfd/syscon.h>
#include <linux/regmap.h>

#include "pmc.h"

#define MASTER_PRES_MASK	0x7
#define MASTER_PRES_MAX		MASTER_PRES_MASK
#define MASTER_DIV_SHIFT	8
#define MASTER_DIV_MASK		0x7

#define PMC_MCR_CSS_SHIFT	(16)

#define MASTER_MAX_ID		4

#define to_clk_master(hw) container_of(hw, struct clk_master, hw)

struct clk_master {
	struct clk_hw hw;
	struct regmap *regmap;
	spinlock_t *lock;
	const struct clk_master_layout *layout;
	const struct clk_master_characteristics *characteristics;
	struct at91_clk_pms pms;
	u32 *mux_table;
	u32 mckr;
	int chg_pid;
	u8 id;
	u8 parent;
	u8 div;
	u32 safe_div;
};

/* MCK div reference to be used by notifier. */
static struct clk_master *master_div;

static inline bool clk_master_ready(struct clk_master *master)
{
	unsigned int bit = master->id ? AT91_PMC_MCKXRDY : AT91_PMC_MCKRDY;
	unsigned int status;

	regmap_read(master->regmap, AT91_PMC_SR, &status);

	return !!(status & bit);
}

static int clk_master_prepare(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;

	spin_lock_irqsave(master->lock, flags);

	while (!clk_master_ready(master))
		cpu_relax();

	spin_unlock_irqrestore(master->lock, flags);

	return 0;
}

static int clk_master_is_prepared(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	bool status;

	spin_lock_irqsave(master->lock, flags);
	status = clk_master_ready(master);
	spin_unlock_irqrestore(master->lock, flags);

	return status;
}

static unsigned long clk_master_div_recalc_rate(struct clk_hw *hw,
						unsigned long parent_rate)
{
	u8 div;
	unsigned long flags, rate = parent_rate;
	struct clk_master *master = to_clk_master(hw);
	const struct clk_master_layout *layout = master->layout;
	const struct clk_master_characteristics *characteristics =
						master->characteristics;
	unsigned int mckr;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &mckr);
	spin_unlock_irqrestore(master->lock, flags);

	mckr &= layout->mask;

	div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;

	rate /= characteristics->divisors[div];

	if (rate < characteristics->output.min)
		pr_warn("master clk div is underclocked");
	else if (rate > characteristics->output.max)
		pr_warn("master clk div is overclocked");

	return rate;
}

static int clk_master_div_save_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	struct clk_hw *parent_hw = clk_hw_get_parent(hw);
	unsigned long flags;
	unsigned int mckr, div;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &mckr);
	spin_unlock_irqrestore(master->lock, flags);

	mckr &= master->layout->mask;
	div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
	div = master->characteristics->divisors[div];

	master->pms.parent_rate = clk_hw_get_rate(parent_hw);
	master->pms.rate = DIV_ROUND_CLOSEST(master->pms.parent_rate, div);

	return 0;
}

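/*
 * Context restore only checks that the divider read back from MCKR still
 * matches the rate saved at suspend time; the actual reprogramming is
 * expected to have been done by the bootloader/firmware.
 */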
static void clk_master_div_restore_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	unsigned int mckr;
	u8 div;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &mckr);
	spin_unlock_irqrestore(master->lock, flags);

	mckr &= master->layout->mask;
	div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
	div = master->characteristics->divisors[div];

	if (div != DIV_ROUND_CLOSEST(master->pms.parent_rate, master->pms.rate))
		pr_warn("MCKR DIV not configured properly by firmware!\n");
}

static const struct clk_ops master_div_ops = {
	.prepare = clk_master_prepare,
	.is_prepared = clk_master_is_prepared,
	.recalc_rate = clk_master_div_recalc_rate,
	.save_context = clk_master_div_save_context,
	.restore_context = clk_master_div_restore_context,
};

/* This function must be called with lock acquired. */
static int clk_master_div_set(struct clk_master *master,
			      unsigned long parent_rate, int div)
{
	const struct clk_master_characteristics *characteristics =
						master->characteristics;
	unsigned long rate = parent_rate;
	unsigned int max_div = 0, div_index = 0, max_div_index = 0;
	unsigned int i, mckr, tmp;
	int ret;

	for (i = 0; i < ARRAY_SIZE(characteristics->divisors); i++) {
		if (!characteristics->divisors[i])
			break;

		if (div == characteristics->divisors[i])
			div_index = i;

		if (max_div < characteristics->divisors[i]) {
			max_div = characteristics->divisors[i];
			max_div_index = i;
		}
	}

	if (div > max_div)
		div_index = max_div_index;

	ret = regmap_read(master->regmap, master->layout->offset, &mckr);
	if (ret)
		return ret;

	mckr &= master->layout->mask;
	tmp = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
	if (tmp == div_index)
		return 0;

	rate /= characteristics->divisors[div_index];
	if (rate < characteristics->output.min)
		pr_warn("master clk div is underclocked");
	else if (rate > characteristics->output.max)
		pr_warn("master clk div is overclocked");

	mckr &= ~(MASTER_DIV_MASK << MASTER_DIV_SHIFT);
	mckr |= (div_index << MASTER_DIV_SHIFT);
	ret = regmap_write(master->regmap, master->layout->offset, mckr);
	if (ret)
		return ret;

	while (!clk_master_ready(master))
		cpu_relax();

	master->div = characteristics->divisors[div_index];

	return 0;
}

static unsigned long clk_master_div_recalc_rate_chg(struct clk_hw *hw,
						    unsigned long parent_rate)
{
	struct clk_master *master = to_clk_master(hw);

	return DIV_ROUND_CLOSEST_ULL(parent_rate, master->div);
}

static void clk_master_div_restore_context_chg(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	int ret;

	spin_lock_irqsave(master->lock, flags);
	ret = clk_master_div_set(master, master->pms.parent_rate,
				 DIV_ROUND_CLOSEST(master->pms.parent_rate,
						   master->pms.rate));
	spin_unlock_irqrestore(master->lock, flags);
	if (ret)
		pr_warn("Failed to restore MCK DIV clock\n");
}

static const struct clk_ops master_div_ops_chg = {
	.prepare = clk_master_prepare,
	.is_prepared = clk_master_is_prepared,
	.recalc_rate = clk_master_div_recalc_rate_chg,
	.save_context = clk_master_div_save_context,
	.restore_context = clk_master_div_restore_context_chg,
};

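/*
 * Parent (PLL) rate-change notifier for the MCK divider: before the parent
 * rate changes, switch to the pre-computed safe divider so the new parent
 * rate cannot overclock the MCK DIV domain; afterwards, pick the largest
 * output rate that still fits within characteristics->output.max.
 */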
static int clk_master_div_notifier_fn(struct notifier_block *notifier,
				      unsigned long code, void *data)
{
	const struct clk_master_characteristics *characteristics =
						master_div->characteristics;
	struct clk_notifier_data *cnd = data;
	unsigned long flags, new_parent_rate, new_rate;
	unsigned int mckr, div, new_div = 0;
	int ret, i;
	long tmp_diff;
	long best_diff = -1;

	spin_lock_irqsave(master_div->lock, flags);
	switch (code) {
	case PRE_RATE_CHANGE:
		/*
		 * We want to avoid any overclocking of MCK DIV domain. To do
		 * this we set a safe divider (the underclocking is not of
		 * interest as we can go as low as 32KHz). The relation
		 * between this clock and its parents is as follows:
		 *
		 * FRAC PLL -> DIV PLL -> MCK DIV
		 *
		 * With the proper safe divider we should be good even with FRAC
		 * PLL at its maximum value.
		 */
		ret = regmap_read(master_div->regmap, master_div->layout->offset,
				  &mckr);
		if (ret) {
			ret = NOTIFY_STOP_MASK;
			goto unlock;
		}

		mckr &= master_div->layout->mask;
		div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;

		/* Switch to safe divider. */
		clk_master_div_set(master_div,
				   cnd->old_rate * characteristics->divisors[div],
				   master_div->safe_div);
		break;

	case POST_RATE_CHANGE:
		/*
		 * At this point we want to restore MCK DIV domain to its maximum
		 * allowed rate.
		 */
		ret = regmap_read(master_div->regmap, master_div->layout->offset,
				  &mckr);
		if (ret) {
			ret = NOTIFY_STOP_MASK;
			goto unlock;
		}

		mckr &= master_div->layout->mask;
		div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
		new_parent_rate = cnd->new_rate * characteristics->divisors[div];

		for (i = 0; i < ARRAY_SIZE(characteristics->divisors); i++) {
			if (!characteristics->divisors[i])
				break;

			new_rate = DIV_ROUND_CLOSEST_ULL(new_parent_rate,
							 characteristics->divisors[i]);

			tmp_diff = characteristics->output.max - new_rate;
			if (tmp_diff < 0)
				continue;

			if (best_diff < 0 || best_diff > tmp_diff) {
				new_div = characteristics->divisors[i];
				best_diff = tmp_diff;
			}

			if (!tmp_diff)
				break;
		}

		if (!new_div) {
			ret = NOTIFY_STOP_MASK;
			goto unlock;
		}

		/* Update the div to preserve MCK DIV clock rate. */
		clk_master_div_set(master_div, new_parent_rate, new_div);

		ret = NOTIFY_OK;
		break;

	default:
		ret = NOTIFY_DONE;
		break;
	}

unlock:
	spin_unlock_irqrestore(master_div->lock, flags);

	return ret;
}

static struct notifier_block clk_master_div_notifier = {
	.notifier_call = clk_master_div_notifier_fn,
};

static void clk_sama7g5_master_best_diff(struct clk_rate_request *req,
					 struct clk_hw *parent,
					 unsigned long parent_rate,
					 long *best_rate,
					 long *best_diff,
					 u32 div)
{
	unsigned long tmp_rate, tmp_diff;

	if (div == MASTER_PRES_MAX)
		tmp_rate = parent_rate / 3;
	else
		tmp_rate = parent_rate >> div;

	tmp_diff = abs(req->rate - tmp_rate);

	if (*best_diff < 0 || *best_diff >= tmp_diff) {
		*best_rate = tmp_rate;
		*best_diff = tmp_diff;
		req->best_parent_rate = parent_rate;
		req->best_parent_hw = parent;
	}
}

static unsigned long clk_master_pres_recalc_rate(struct clk_hw *hw,
						 unsigned long parent_rate)
{
	struct clk_master *master = to_clk_master(hw);
	const struct clk_master_characteristics *characteristics =
						master->characteristics;
	unsigned long flags;
	unsigned int val, pres;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &val);
	spin_unlock_irqrestore(master->lock, flags);

	val &= master->layout->mask;
	pres = (val >> master->layout->pres_shift) & MASTER_PRES_MASK;
	if (pres == MASTER_PRES_MAX && characteristics->have_div3_pres)
		pres = 3;
	else
		pres = (1 << pres);

	return DIV_ROUND_CLOSEST_ULL(parent_rate, pres);
}

static u8 clk_master_pres_get_parent(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	unsigned int mckr;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &mckr);
	spin_unlock_irqrestore(master->lock, flags);

	mckr &= master->layout->mask;

	return mckr & AT91_PMC_CSS;
}

static int clk_master_pres_save_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	struct clk_hw *parent_hw = clk_hw_get_parent(hw);
	unsigned long flags;
	unsigned int val, pres;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &val);
	spin_unlock_irqrestore(master->lock, flags);

	val &= master->layout->mask;
	pres = (val >> master->layout->pres_shift) & MASTER_PRES_MASK;
	if (pres == MASTER_PRES_MAX && master->characteristics->have_div3_pres)
		pres = 3;
	else
		pres = (1 << pres);

	master->pms.parent = val & AT91_PMC_CSS;
	master->pms.parent_rate = clk_hw_get_rate(parent_hw);
	master->pms.rate = DIV_ROUND_CLOSEST_ULL(master->pms.parent_rate, pres);

	return 0;
}

static void clk_master_pres_restore_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	unsigned int val, pres;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &val);
	spin_unlock_irqrestore(master->lock, flags);

	val &= master->layout->mask;
	pres = (val >> master->layout->pres_shift) & MASTER_PRES_MASK;
	if (pres == MASTER_PRES_MAX && master->characteristics->have_div3_pres)
		pres = 3;
	else
		pres = (1 << pres);

	if (master->pms.rate !=
	    DIV_ROUND_CLOSEST_ULL(master->pms.parent_rate, pres) ||
	    (master->pms.parent != (val & AT91_PMC_CSS)))
		pr_warn("MCKR PRES was not configured properly by firmware!\n");
}

static const struct clk_ops master_pres_ops = {
	.prepare = clk_master_prepare,
	.is_prepared = clk_master_is_prepared,
	.recalc_rate = clk_master_pres_recalc_rate,
	.get_parent = clk_master_pres_get_parent,
	.save_context = clk_master_pres_save_context,
	.restore_context = clk_master_pres_restore_context,
};

static struct clk_hw * __init
at91_clk_register_master_internal(struct regmap *regmap,
		const char *name, int num_parents,
		const char **parent_names,
		const struct clk_master_layout *layout,
		const struct clk_master_characteristics *characteristics,
		const struct clk_ops *ops, spinlock_t *lock, u32 flags)
{
	struct clk_master *master;
	struct clk_init_data init;
	struct clk_hw *hw;
	unsigned int mckr;
	unsigned long irqflags;
	int ret;

	if (!name || !num_parents || !parent_names || !lock)
		return ERR_PTR(-EINVAL);

	master = kzalloc(sizeof(*master), GFP_KERNEL);
	if (!master)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.ops = ops;
	init.parent_names = parent_names;
	init.num_parents = num_parents;
	init.flags = flags;

	master->hw.init = &init;
	master->layout = layout;
	master->characteristics = characteristics;
	master->regmap = regmap;
	master->lock = lock;

	if (ops == &master_div_ops_chg) {
		spin_lock_irqsave(master->lock, irqflags);
		regmap_read(master->regmap, master->layout->offset, &mckr);
		spin_unlock_irqrestore(master->lock, irqflags);

		mckr &= layout->mask;
		mckr = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
		master->div = characteristics->divisors[mckr];
	}

	hw = &master->hw;
	ret = clk_hw_register(NULL, &master->hw);
	if (ret) {
		kfree(master);
		hw = ERR_PTR(ret);
	}

	return hw;
}

struct clk_hw * __init
at91_clk_register_master_pres(struct regmap *regmap,
		const char *name, int num_parents,
		const char **parent_names,
		const struct clk_master_layout *layout,
		const struct clk_master_characteristics *characteristics,
		spinlock_t *lock)
{
	return at91_clk_register_master_internal(regmap, name, num_parents,
						 parent_names, layout,
						 characteristics,
						 &master_pres_ops,
						 lock, CLK_SET_RATE_GATE);
}

struct clk_hw * __init
at91_clk_register_master_div(struct regmap *regmap,
		const char *name, const char *parent_name,
		const struct clk_master_layout *layout,
		const struct clk_master_characteristics *characteristics,
		spinlock_t *lock, u32 flags, u32 safe_div)
{
	const struct clk_ops *ops;
	struct clk_hw *hw;

	if (flags & CLK_SET_RATE_GATE)
		ops = &master_div_ops;
	else
		ops = &master_div_ops_chg;

	hw = at91_clk_register_master_internal(regmap, name, 1,
					       &parent_name, layout,
					       characteristics, ops,
					       lock, flags);

	if (!IS_ERR(hw) && safe_div) {
		master_div = to_clk_master(hw);
		master_div->safe_div = safe_div;
		clk_notifier_register(hw->clk,
				      &clk_master_div_notifier);
	}

	return hw;
}

static unsigned long
clk_sama7g5_master_recalc_rate(struct clk_hw *hw,
			       unsigned long parent_rate)
{
	struct clk_master *master = to_clk_master(hw);

	return DIV_ROUND_CLOSEST_ULL(parent_rate, (1 << master->div));
}

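/*
 * Rate selection for the SAMA7G5 master clocks: try every fixed prescaler
 * (power-of-two shifts plus the special /3 value) against each available
 * parent, then, if a changeable parent (chg_pid) exists, also ask that
 * parent for rate * prescaler and keep whichever combination lands closest
 * to the requested rate.
 */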
static int clk_sama7g5_master_determine_rate(struct clk_hw *hw,
					     struct clk_rate_request *req)
{
	struct clk_master *master = to_clk_master(hw);
	struct clk_rate_request req_parent = *req;
	struct clk_hw *parent;
	long best_rate = LONG_MIN, best_diff = LONG_MIN;
	unsigned long parent_rate;
	unsigned int div, i;

	/* First: check the dividers of MCR. */
	for (i = 0; i < clk_hw_get_num_parents(hw); i++) {
		parent = clk_hw_get_parent_by_index(hw, i);
		if (!parent)
			continue;

		parent_rate = clk_hw_get_rate(parent);
		if (!parent_rate)
			continue;

		for (div = 0; div < MASTER_PRES_MAX + 1; div++) {
			clk_sama7g5_master_best_diff(req, parent, parent_rate,
						     &best_rate, &best_diff,
						     div);
			if (!best_diff)
				break;
		}

		if (!best_diff)
			break;
	}

	/* Second: try to request rate from the changeable parent. */
	if (master->chg_pid < 0)
		goto end;

	parent = clk_hw_get_parent_by_index(hw, master->chg_pid);
	if (!parent)
		goto end;

	for (div = 0; div < MASTER_PRES_MAX + 1; div++) {
		if (div == MASTER_PRES_MAX)
			req_parent.rate = req->rate * 3;
		else
			req_parent.rate = req->rate << div;

		if (__clk_determine_rate(parent, &req_parent))
			continue;

		clk_sama7g5_master_best_diff(req, parent, req_parent.rate,
					     &best_rate, &best_diff, div);

		if (!best_diff)
			break;
	}

end:
	pr_debug("MCK: %s, best_rate = %ld, parent clk: %s @ %ld\n",
		 __func__, best_rate,
		 __clk_get_name((req->best_parent_hw)->clk),
		 req->best_parent_rate);

	if (best_rate < 0)
		return -EINVAL;

	req->rate = best_rate;

	return 0;
}

static u8 clk_sama7g5_master_get_parent(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	u8 index;

	spin_lock_irqsave(master->lock, flags);
	index = clk_mux_val_to_index(&master->hw, master->mux_table, 0,
				     master->parent);
	spin_unlock_irqrestore(master->lock, flags);

	return index;
}

static int clk_sama7g5_master_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;

	if (index >= clk_hw_get_num_parents(hw))
		return -EINVAL;

	spin_lock_irqsave(master->lock, flags);
	master->parent = clk_mux_index_to_val(master->mux_table, 0, index);
	spin_unlock_irqrestore(master->lock, flags);

	return 0;
}

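/*
 * Program the per-master MCR_V2 register: the master ID is written first to
 * select which master clock the register accesses, the current value is read
 * back, and then EN/CSS/DIV are updated with the CMD bit set. The ready wait
 * is only needed when the clock source (CSS) actually changes.
 */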
static void clk_sama7g5_master_set(struct clk_master *master,
				   unsigned int status)
{
	unsigned long flags;
	unsigned int val, cparent;
	unsigned int enable = status ? AT91_PMC_MCR_V2_EN : 0;
	unsigned int parent = master->parent << PMC_MCR_CSS_SHIFT;
	unsigned int div = master->div << MASTER_DIV_SHIFT;

	spin_lock_irqsave(master->lock, flags);

	regmap_write(master->regmap, AT91_PMC_MCR_V2,
		     AT91_PMC_MCR_V2_ID(master->id));
	regmap_read(master->regmap, AT91_PMC_MCR_V2, &val);
	regmap_update_bits(master->regmap, AT91_PMC_MCR_V2,
			   enable | AT91_PMC_MCR_V2_CSS | AT91_PMC_MCR_V2_DIV |
			   AT91_PMC_MCR_V2_CMD | AT91_PMC_MCR_V2_ID_MSK,
			   enable | parent | div | AT91_PMC_MCR_V2_CMD |
			   AT91_PMC_MCR_V2_ID(master->id));

	cparent = (val & AT91_PMC_MCR_V2_CSS) >> PMC_MCR_CSS_SHIFT;

	/* Wait here only if parent is being changed. */
	while ((cparent != master->parent) && !clk_master_ready(master))
		cpu_relax();

	spin_unlock_irqrestore(master->lock, flags);
}

static int clk_sama7g5_master_enable(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);

	clk_sama7g5_master_set(master, 1);

	return 0;
}

static void clk_sama7g5_master_disable(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;

	spin_lock_irqsave(master->lock, flags);

	regmap_write(master->regmap, AT91_PMC_MCR_V2, master->id);
	regmap_update_bits(master->regmap, AT91_PMC_MCR_V2,
			   AT91_PMC_MCR_V2_EN | AT91_PMC_MCR_V2_CMD |
			   AT91_PMC_MCR_V2_ID_MSK,
			   AT91_PMC_MCR_V2_CMD |
			   AT91_PMC_MCR_V2_ID(master->id));

	spin_unlock_irqrestore(master->lock, flags);
}

static int clk_sama7g5_master_is_enabled(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	unsigned int val;

	spin_lock_irqsave(master->lock, flags);

	regmap_write(master->regmap, AT91_PMC_MCR_V2, master->id);
	regmap_read(master->regmap, AT91_PMC_MCR_V2, &val);

	spin_unlock_irqrestore(master->lock, flags);

	return !!(val & AT91_PMC_MCR_V2_EN);
}

static int clk_sama7g5_master_set_rate(struct clk_hw *hw, unsigned long rate,
				       unsigned long parent_rate)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long div, flags;

	div = DIV_ROUND_CLOSEST(parent_rate, rate);
	if ((div > (1 << (MASTER_PRES_MAX - 1))) || (div & (div - 1)))
		return -EINVAL;

	if (div == 3)
		div = MASTER_PRES_MAX;
	else if (div)
		div = ffs(div) - 1;

	spin_lock_irqsave(master->lock, flags);
	master->div = div;
	spin_unlock_irqrestore(master->lock, flags);

	return 0;
}

static int clk_sama7g5_master_save_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);

	master->pms.status = clk_sama7g5_master_is_enabled(hw);

	return 0;
}

static void clk_sama7g5_master_restore_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);

	if (master->pms.status)
		clk_sama7g5_master_set(master, master->pms.status);
}

static const struct clk_ops sama7g5_master_ops = {
	.enable = clk_sama7g5_master_enable,
	.disable = clk_sama7g5_master_disable,
	.is_enabled = clk_sama7g5_master_is_enabled,
	.recalc_rate = clk_sama7g5_master_recalc_rate,
	.determine_rate = clk_sama7g5_master_determine_rate,
	.set_rate = clk_sama7g5_master_set_rate,
	.get_parent = clk_sama7g5_master_get_parent,
	.set_parent = clk_sama7g5_master_set_parent,
	.save_context = clk_sama7g5_master_save_context,
	.restore_context = clk_sama7g5_master_restore_context,
};

struct clk_hw * __init
at91_clk_sama7g5_register_master(struct regmap *regmap,
				 const char *name, int num_parents,
				 const char **parent_names,
				 u32 *mux_table,
				 spinlock_t *lock, u8 id,
				 bool critical, int chg_pid)
{
	struct clk_master *master;
	struct clk_hw *hw;
	struct clk_init_data init;
	unsigned long flags;
	unsigned int val;
	int ret;

	if (!name || !num_parents || !parent_names || !mux_table ||
	    !lock || id > MASTER_MAX_ID)
		return ERR_PTR(-EINVAL);

	master = kzalloc(sizeof(*master), GFP_KERNEL);
	if (!master)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.ops = &sama7g5_master_ops;
	init.parent_names = parent_names;
	init.num_parents = num_parents;
	init.flags = CLK_SET_RATE_GATE | CLK_SET_PARENT_GATE;
	if (chg_pid >= 0)
		init.flags |= CLK_SET_RATE_PARENT;
	if (critical)
		init.flags |= CLK_IS_CRITICAL;

	master->hw.init = &init;
	master->regmap = regmap;
	master->id = id;
	master->chg_pid = chg_pid;
	master->lock = lock;
	master->mux_table = mux_table;

	spin_lock_irqsave(master->lock, flags);
	regmap_write(master->regmap, AT91_PMC_MCR_V2, master->id);
	regmap_read(master->regmap, AT91_PMC_MCR_V2, &val);
	master->parent = (val & AT91_PMC_MCR_V2_CSS) >> PMC_MCR_CSS_SHIFT;
	master->div = (val & AT91_PMC_MCR_V2_DIV) >> MASTER_DIV_SHIFT;
	spin_unlock_irqrestore(master->lock, flags);

	hw = &master->hw;
	ret = clk_hw_register(NULL, &master->hw);
	if (ret) {
		kfree(master);
		hw = ERR_PTR(ret);
	}

	return hw;
}

const struct clk_master_layout at91rm9200_master_layout = {
	.mask = 0x31F,
	.pres_shift = 2,
	.offset = AT91_PMC_MCKR,
};

const struct clk_master_layout at91sam9x5_master_layout = {
	.mask = 0x373,
	.pres_shift = 4,
	.offset = AT91_PMC_MCKR,
};