// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2013 Boris BREZILLON <b.brezillon@overkiz.com>
 */

#include <linux/clk-provider.h>
#include <linux/clkdev.h>
#include <linux/clk.h>
#include <linux/clk/at91_pmc.h>
#include <linux/of.h>
#include <linux/mfd/syscon.h>
#include <linux/regmap.h>

#include "pmc.h"

/* Field widths/positions of the PRES and DIV fields in MCKR/MCR. */
#define MASTER_PRES_MASK	0x7
#define MASTER_PRES_MAX		MASTER_PRES_MASK
#define MASTER_DIV_SHIFT	8
#define MASTER_DIV_MASK		0x7

/* Shift of the clock-source-selection (CSS) field in the MCR_V2 register. */
#define PMC_MCR_CSS_SHIFT	(16)

/* Highest valid master clock id accepted by at91_clk_sama7g5_register_master(). */
#define MASTER_MAX_ID		4

#define to_clk_master(hw) container_of(hw, struct clk_master, hw)

/*
 * Driver-private state for one master clock.
 *
 * @hw:			common clk framework handle
 * @regmap:		PMC register map used for all register accesses
 * @lock:		spinlock serializing PMC register read-modify-write
 * @layout:		register offset/mask/shift description (per SoC)
 * @characteristics:	divisor table and min/max output rate limits
 * @pms:		saved parent/rate/status for suspend/resume context
 * @mux_table:		parent-index <-> register-value mapping (SAMA7G5)
 * @mckr:		cached MCKR value (NOTE(review): appears unused here —
 *			confirm against the rest of the driver)
 * @chg_pid:		index of the changeable parent, or negative if none
 * @id:			master clock id (selects MCR_V2 bank on SAMA7G5)
 * @parent:		cached CSS (parent selection) register value
 * @div:		cached divider (divisor value for MCK DIV, PRES
 *			exponent for SAMA7G5 master)
 * @safe_div:		divisor to switch to while a parent PLL reconfigures
 */
struct clk_master {
	struct clk_hw hw;
	struct regmap *regmap;
	spinlock_t *lock;
	const struct clk_master_layout *layout;
	const struct clk_master_characteristics *characteristics;
	struct at91_clk_pms pms;
	u32 *mux_table;
	u32 mckr;
	int chg_pid;
	u8 id;
	u8 parent;
	u8 div;
	u32 safe_div;
};

/* MCK div reference to be used by notifier. */
static struct clk_master *master_div;

/*
 * Return true when the hardware reports the master clock as ready.
 * Non-zero ids poll MCKXRDY, id 0 polls MCKRDY, both in PMC_SR.
 */
static inline bool clk_master_ready(struct clk_master *master)
{
	unsigned int bit = master->id ? AT91_PMC_MCKXRDY : AT91_PMC_MCKRDY;
	unsigned int status;

	regmap_read(master->regmap, AT91_PMC_SR, &status);

	return !!(status & bit);
}

/* Busy-wait (under the PMC lock) until the master clock is ready. */
static int clk_master_prepare(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;

	spin_lock_irqsave(master->lock, flags);

	while (!clk_master_ready(master))
		cpu_relax();

	spin_unlock_irqrestore(master->lock, flags);

	return 0;
}

/* Report the ready bit as the "prepared" state (no waiting). */
static int clk_master_is_prepared(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	bool status;

	spin_lock_irqsave(master->lock, flags);
	status = clk_master_ready(master);
	spin_unlock_irqrestore(master->lock, flags);

	return status;
}

/*
 * Compute the MCK DIV output rate by reading the DIV field from the
 * layout-described register and dividing the parent rate by the
 * corresponding entry in the characteristics divisor table.  Warns (but
 * still returns the computed rate) when outside the documented output range.
 */
static unsigned long clk_master_div_recalc_rate(struct clk_hw *hw,
						unsigned long parent_rate)
{
	u8 div;
	unsigned long flags, rate = parent_rate;
	struct clk_master *master = to_clk_master(hw);
	const struct clk_master_layout *layout = master->layout;
	const struct clk_master_characteristics *characteristics =
						master->characteristics;
	unsigned int mckr;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &mckr);
	spin_unlock_irqrestore(master->lock, flags);

	mckr &= layout->mask;

	div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;

	rate /= characteristics->divisors[div];

	if (rate < characteristics->output.min)
		pr_warn("master clk div is underclocked");
	else if (rate > characteristics->output.max)
		pr_warn("master clk div is overclocked");

	return rate;
}

/*
 * Save the current parent rate and effective MCK DIV rate so that
 * restore_context can verify the firmware reprogrammed the same divider.
 */
static int clk_master_div_save_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	struct clk_hw *parent_hw = clk_hw_get_parent(hw);
	unsigned long flags;
	unsigned int mckr, div;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &mckr);
	spin_unlock_irqrestore(master->lock, flags);

	mckr &= master->layout->mask;
	div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
	div = master->characteristics->divisors[div];

	master->pms.parent_rate = clk_hw_get_rate(parent_hw);
	master->pms.rate = DIV_ROUND_CLOSEST(master->pms.parent_rate, div);

	return 0;
}

/*
 * After resume, re-read the divider and warn if it no longer matches the
 * rate saved by clk_master_div_save_context() (hardware is expected to be
 * restored by firmware, not rewritten here).
 */
static void clk_master_div_restore_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	unsigned int mckr;
	u8 div;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &mckr);
	spin_unlock_irqrestore(master->lock, flags);

	mckr &= master->layout->mask;
	div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
	div = master->characteristics->divisors[div];

	if (div != DIV_ROUND_CLOSEST(master->pms.parent_rate, master->pms.rate))
		pr_warn("MCKR DIV not configured properly by firmware!\n");
}

static const struct clk_ops master_div_ops = {
	.prepare = clk_master_prepare,
	.is_prepared = clk_master_is_prepared,
	.recalc_rate = clk_master_div_recalc_rate,
	.save_context = clk_master_div_save_context,
	.restore_context = clk_master_div_restore_context,
};

/*
 * Program the MCK DIV field so that the divisor equals @div (clamped to
 * the largest supported divisor), wait for the clock to settle, and cache
 * the resulting divisor in master->div.
 *
 * This function must be called with lock acquired.
 */
static int clk_master_div_set(struct clk_master *master,
			      unsigned long parent_rate, int div)
{
	const struct clk_master_characteristics *characteristics =
						master->characteristics;
	unsigned long rate = parent_rate;
	unsigned int max_div = 0, div_index = 0, max_div_index = 0;
	unsigned int i, mckr, tmp;
	int ret;

	/* Scan the (zero-terminated) divisor table for an exact match and
	 * remember the largest divisor as a fallback. */
	for (i = 0; i < ARRAY_SIZE(characteristics->divisors); i++) {
		if (!characteristics->divisors[i])
			break;

		if (div == characteristics->divisors[i])
			div_index = i;

		if (max_div < characteristics->divisors[i]) {
			max_div = characteristics->divisors[i];
			max_div_index = i;
		}
	}

	if (div > max_div)
		div_index = max_div_index;

	ret = regmap_read(master->regmap, master->layout->offset, &mckr);
	if (ret)
		return ret;

	mckr &= master->layout->mask;
	tmp = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
	if (tmp == div_index)
		return 0;	/* already programmed; nothing to do */

	rate /= characteristics->divisors[div_index];
	if (rate < characteristics->output.min)
		pr_warn("master clk div is underclocked");
	else if (rate > characteristics->output.max)
		pr_warn("master clk div is overclocked");

	mckr &= ~(MASTER_DIV_MASK << MASTER_DIV_SHIFT);
	mckr |= (div_index << MASTER_DIV_SHIFT);
	ret = regmap_write(master->regmap, master->layout->offset, mckr);
	if (ret)
		return ret;

	/* Wait for the new divider to take effect before returning. */
	while (!clk_master_ready(master))
		cpu_relax();

	master->div = characteristics->divisors[div_index];

	return 0;
}

/*
 * recalc_rate variant for the changeable (non-SET_RATE_GATE) MCK DIV:
 * uses the divisor cached by clk_master_div_set() instead of reading
 * the register.
 */
static unsigned long clk_master_div_recalc_rate_chg(struct clk_hw *hw,
						    unsigned long parent_rate)
{
	struct clk_master *master = to_clk_master(hw);

	return DIV_ROUND_CLOSEST_ULL(parent_rate, master->div);
}

/*
 * restore_context variant for the changeable MCK DIV: actively rewrite
 * the divider derived from the saved parent rate and rate.
 */
static void clk_master_div_restore_context_chg(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	int ret;

	spin_lock_irqsave(master->lock, flags);
	ret = clk_master_div_set(master, master->pms.parent_rate,
				 DIV_ROUND_CLOSEST(master->pms.parent_rate,
						   master->pms.rate));
	spin_unlock_irqrestore(master->lock, flags);
	if (ret)
		pr_warn("Failed to restore MCK DIV clock\n");
}

static const struct clk_ops master_div_ops_chg = {
	.prepare = clk_master_prepare,
	.is_prepared = clk_master_is_prepared,
	.recalc_rate = clk_master_div_recalc_rate_chg,
	.save_context = clk_master_div_save_context,
	.restore_context = clk_master_div_restore_context_chg,
};

/*
 * Clock-rate-change notifier for the global master_div clock: on
 * PRE_RATE_CHANGE switch to the safe divider so the MCK DIV domain can
 * never be overclocked while the parent PLL moves; on POST_RATE_CHANGE
 * pick the smallest divisor that keeps the output within output.max.
 */
static int clk_master_div_notifier_fn(struct notifier_block *notifier,
				      unsigned long code, void *data)
{
	const struct clk_master_characteristics *characteristics =
						master_div->characteristics;
	struct clk_notifier_data *cnd = data;
	unsigned long flags, new_parent_rate, new_rate;
	unsigned int mckr, div, new_div = 0;
	int ret, i;
	long tmp_diff;
	long best_diff = -1;

	spin_lock_irqsave(master_div->lock, flags);
	switch (code) {
	case PRE_RATE_CHANGE:
		/*
		 * We want to avoid any overclocking of MCK DIV domain. To do
		 * this we set a safe divider (the underclocking is not of
		 * interest as we can go as low as 32KHz). The relation
		 * b/w this clock and its parents are as follows:
		 *
		 * FRAC PLL -> DIV PLL -> MCK DIV
		 *
		 * With the proper safe divider we should be good even with FRAC
		 * PLL at its maximum value.
		 */
		ret = regmap_read(master_div->regmap, master_div->layout->offset,
				  &mckr);
		if (ret) {
			ret = NOTIFY_STOP_MASK;
			goto unlock;
		}

		mckr &= master_div->layout->mask;
		div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;

		/* Switch to safe divider. */
		clk_master_div_set(master_div,
				   cnd->old_rate * characteristics->divisors[div],
				   master_div->safe_div);
		break;

	case POST_RATE_CHANGE:
		/*
		 * At this point we want to restore MCK DIV domain to its maximum
		 * allowed rate.
		 */
		ret = regmap_read(master_div->regmap, master_div->layout->offset,
				  &mckr);
		if (ret) {
			ret = NOTIFY_STOP_MASK;
			goto unlock;
		}

		mckr &= master_div->layout->mask;
		div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
		/* Recover the parent rate from the currently-set divisor. */
		new_parent_rate = cnd->new_rate * characteristics->divisors[div];

		/* Choose the divisor whose output is closest to (but not
		 * above) the maximum allowed output rate. */
		for (i = 0; i < ARRAY_SIZE(characteristics->divisors); i++) {
			if (!characteristics->divisors[i])
				break;

			new_rate = DIV_ROUND_CLOSEST_ULL(new_parent_rate,
							 characteristics->divisors[i]);

			tmp_diff = characteristics->output.max - new_rate;
			if (tmp_diff < 0)
				continue;

			if (best_diff < 0 || best_diff > tmp_diff) {
				new_div = characteristics->divisors[i];
				best_diff = tmp_diff;
			}

			if (!tmp_diff)
				break;
		}

		if (!new_div) {
			ret = NOTIFY_STOP_MASK;
			goto unlock;
		}

		/* Update the div to preserve MCK DIV clock rate. */
		clk_master_div_set(master_div, new_parent_rate,
				   new_div);

		ret = NOTIFY_OK;
		break;

	default:
		ret = NOTIFY_DONE;
		break;
	}

unlock:
	spin_unlock_irqrestore(master_div->lock, flags);

	return ret;
}

static struct notifier_block clk_master_div_notifier = {
	.notifier_call = clk_master_div_notifier_fn,
};

/*
 * Helper for determine_rate: compute the rate produced by prescaler
 * exponent @div on @parent_rate (MASTER_PRES_MAX encodes divide-by-3)
 * and record it in @req when it beats the current best candidate.
 */
static void clk_sama7g5_master_best_diff(struct clk_rate_request *req,
					 struct clk_hw *parent,
					 unsigned long parent_rate,
					 long *best_rate,
					 long *best_diff,
					 u32 div)
{
	unsigned long tmp_rate, tmp_diff;

	if (div == MASTER_PRES_MAX)
		tmp_rate = parent_rate / 3;
	else
		tmp_rate = parent_rate >> div;

	tmp_diff = abs(req->rate - tmp_rate);

	if (*best_diff < 0 || *best_diff >= tmp_diff) {
		*best_rate = tmp_rate;
		*best_diff = tmp_diff;
		req->best_parent_rate = parent_rate;
		req->best_parent_hw = parent;
	}
}

/*
 * Compute the MCK PRES output rate: parent rate divided by 2^PRES, or by
 * 3 when PRES reads as MASTER_PRES_MAX on SoCs with a div3 prescaler.
 */
static unsigned long clk_master_pres_recalc_rate(struct clk_hw *hw,
						 unsigned long parent_rate)
{
	struct clk_master *master = to_clk_master(hw);
	const struct clk_master_characteristics *characteristics =
						master->characteristics;
	unsigned long flags;
	unsigned int val, pres;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &val);
	spin_unlock_irqrestore(master->lock, flags);

	val &= master->layout->mask;
	pres = (val >> master->layout->pres_shift) & MASTER_PRES_MASK;
	if (pres == MASTER_PRES_MAX && characteristics->have_div3_pres)
		pres = 3;
	else
		pres = (1 << pres);

	return DIV_ROUND_CLOSEST_ULL(parent_rate, pres);
}

/* Read the parent selection (CSS field) from the MCKR register. */
static u8 clk_master_pres_get_parent(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	unsigned int mckr;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &mckr);
	spin_unlock_irqrestore(master->lock, flags);

	mckr &= master->layout->mask;

	return mckr & AT91_PMC_CSS;
}

/*
 * Save parent selection, parent rate and prescaled rate so that
 * restore_context can verify the firmware restored the same setup.
 */
static int clk_master_pres_save_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	struct clk_hw *parent_hw = clk_hw_get_parent(hw);
	unsigned long flags;
	unsigned int val, pres;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &val);
	spin_unlock_irqrestore(master->lock, flags);

	val &= master->layout->mask;
	pres = (val >> master->layout->pres_shift) & MASTER_PRES_MASK;
	if (pres == MASTER_PRES_MAX && master->characteristics->have_div3_pres)
		pres = 3;
	else
		pres = (1 << pres);

	master->pms.parent = val & AT91_PMC_CSS;
	master->pms.parent_rate = clk_hw_get_rate(parent_hw);
	master->pms.rate = DIV_ROUND_CLOSEST_ULL(master->pms.parent_rate, pres);

	return 0;
}

/*
 * After resume, warn if the prescaler or parent selection no longer
 * matches what clk_master_pres_save_context() recorded.
 */
static void clk_master_pres_restore_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	unsigned int val, pres;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &val);
	spin_unlock_irqrestore(master->lock, flags);

	val &= master->layout->mask;
	pres = (val >> master->layout->pres_shift) & MASTER_PRES_MASK;
	if (pres == MASTER_PRES_MAX && master->characteristics->have_div3_pres)
		pres = 3;
	else
		pres = (1 << pres);

	if (master->pms.rate !=
	    DIV_ROUND_CLOSEST_ULL(master->pms.parent_rate, pres) ||
	    (master->pms.parent != (val & AT91_PMC_CSS)))
		pr_warn("MCKR PRES was not configured properly by firmware!\n");
}

static const struct clk_ops master_pres_ops = {
	.prepare = clk_master_prepare,
	.is_prepared = clk_master_is_prepared,
	.recalc_rate = clk_master_pres_recalc_rate,
	.get_parent = clk_master_pres_get_parent,
	.save_context = clk_master_pres_save_context,
	.restore_context = clk_master_pres_restore_context,
};

/*
 * Common registration helper for the PRES and DIV master clocks.
 * For the changeable-div ops, the current divisor is read from hardware
 * and cached so recalc_rate_chg works before any set_rate call.
 * Returns the registered clk_hw or an ERR_PTR on failure.
 */
static struct clk_hw * __init
at91_clk_register_master_internal(struct regmap *regmap,
		const char *name, int num_parents,
		const char **parent_names,
		const struct clk_master_layout *layout,
		const struct clk_master_characteristics *characteristics,
		const struct clk_ops *ops, spinlock_t *lock, u32 flags)
{
	struct clk_master *master;
	struct clk_init_data init;
	struct clk_hw *hw;
	unsigned int mckr;
	unsigned long irqflags;
	int ret;

	if (!name || !num_parents || !parent_names || !lock)
		return ERR_PTR(-EINVAL);

	master = kzalloc(sizeof(*master), GFP_KERNEL);
	if (!master)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.ops = ops;
	init.parent_names = parent_names;
	init.num_parents = num_parents;
	init.flags = flags;

	master->hw.init = &init;
	master->layout = layout;
	master->characteristics = characteristics;
	master->regmap = regmap;
	master->lock = lock;

	if (ops == &master_div_ops_chg) {
		spin_lock_irqsave(master->lock, irqflags);
		regmap_read(master->regmap, master->layout->offset, &mckr);
		spin_unlock_irqrestore(master->lock, irqflags);

		mckr &= layout->mask;
		mckr = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
		master->div = characteristics->divisors[mckr];
	}

	hw = &master->hw;
	ret = clk_hw_register(NULL, &master->hw);
	if (ret) {
		kfree(master);
		hw = ERR_PTR(ret);
	}

	return hw;
}

/* Register the MCK PRES (prescaler) clock; rate changes are gated. */
struct clk_hw * __init
at91_clk_register_master_pres(struct regmap *regmap,
		const char *name, int num_parents,
		const char **parent_names,
		const struct clk_master_layout *layout,
		const struct clk_master_characteristics *characteristics,
		spinlock_t *lock)
{
	return at91_clk_register_master_internal(regmap, name, num_parents,
						 parent_names, layout,
						 characteristics,
						 &master_pres_ops,
						 lock, CLK_SET_RATE_GATE);
}

/*
 * Register the MCK DIV (divider) clock.  When @safe_div is non-zero the
 * clock becomes the global master_div and a rate-change notifier is
 * installed to protect the domain while a parent PLL reconfigures.
 */
struct clk_hw * __init
at91_clk_register_master_div(struct regmap *regmap,
		const char *name, const char *parent_name,
		const struct clk_master_layout *layout,
		const struct clk_master_characteristics *characteristics,
		spinlock_t *lock, u32 flags, u32 safe_div)
{
	const struct clk_ops *ops;
	struct clk_hw *hw;

	if (flags & CLK_SET_RATE_GATE)
		ops = &master_div_ops;
	else
		ops = &master_div_ops_chg;

	hw = at91_clk_register_master_internal(regmap, name, 1,
					       &parent_name, layout,
					       characteristics, ops,
					       lock, flags);

	if (!IS_ERR(hw) && safe_div) {
		master_div = to_clk_master(hw);
		master_div->safe_div = safe_div;
		clk_notifier_register(hw->clk,
				      &clk_master_div_notifier);
	}

	return hw;
}

/*
 * SAMA7G5 master clock rate: master->div holds the PRES exponent here,
 * so the output is parent_rate / 2^div.
 */
static unsigned long
clk_sama7g5_master_recalc_rate(struct clk_hw *hw,
			       unsigned long parent_rate)
{
	struct clk_master *master = to_clk_master(hw);

	return DIV_ROUND_CLOSEST_ULL(parent_rate, (1 << master->div));
}

/*
 * Pick the best parent and prescaler for the requested rate: first scan
 * the fixed-rate parents with every MCR divider, then (if configured)
 * ask the changeable parent to provide a matching rate.
 */
static int clk_sama7g5_master_determine_rate(struct clk_hw *hw,
					     struct clk_rate_request *req)
{
	struct clk_master *master = to_clk_master(hw);
	struct clk_hw *parent;
	long best_rate = LONG_MIN, best_diff = LONG_MIN;
	unsigned long parent_rate;
	unsigned int div, i;

	/* First: check the dividers of MCR. */
	for (i = 0; i < clk_hw_get_num_parents(hw); i++) {
		parent = clk_hw_get_parent_by_index(hw, i);
		if (!parent)
			continue;

		parent_rate = clk_hw_get_rate(parent);
		if (!parent_rate)
			continue;

		for (div = 0; div < MASTER_PRES_MAX + 1; div++) {
			clk_sama7g5_master_best_diff(req, parent, parent_rate,
						     &best_rate, &best_diff,
						     div);
			if (!best_diff)
				break;
		}

		if (!best_diff)
			break;
	}

	/* Second: try to request rate from changeable parent. */
	if (master->chg_pid < 0)
		goto end;

	parent = clk_hw_get_parent_by_index(hw, master->chg_pid);
	if (!parent)
		goto end;

	for (div = 0; div < MASTER_PRES_MAX + 1; div++) {
		struct clk_rate_request req_parent;
		unsigned long req_rate;

		/* Ask the parent for rate * divisor so that dividing it
		 * back yields the requested rate. */
		if (div == MASTER_PRES_MAX)
			req_rate = req->rate * 3;
		else
			req_rate = req->rate << div;

		clk_hw_forward_rate_request(hw, req, parent, &req_parent, req_rate);
		if (__clk_determine_rate(parent, &req_parent))
			continue;

		clk_sama7g5_master_best_diff(req, parent, req_parent.rate,
					     &best_rate, &best_diff, div);

		if (!best_diff)
			break;
	}

end:
	pr_debug("MCK: %s, best_rate = %ld, parent clk: %s @ %ld\n",
		 __func__, best_rate,
		 __clk_get_name((req->best_parent_hw)->clk),
		 req->best_parent_rate);

	if (best_rate < 0)
		return -EINVAL;

	req->rate = best_rate;

	return 0;
}

/* Map the cached CSS register value back to a parent index. */
static u8 clk_sama7g5_master_get_parent(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	u8 index;

	spin_lock_irqsave(master->lock, flags);
	index = clk_mux_val_to_index(&master->hw, master->mux_table, 0,
				     master->parent);
	spin_unlock_irqrestore(master->lock, flags);

	return index;
}

/*
 * Cache the new parent selection; the hardware is only written when the
 * clock is (re-)enabled via clk_sama7g5_master_set().
 */
static int clk_sama7g5_master_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;

	if (index >= clk_hw_get_num_parents(hw))
		return -EINVAL;

	spin_lock_irqsave(master->lock, flags);
	master->parent = clk_mux_index_to_val(master->mux_table, 0, index);
	spin_unlock_irqrestore(master->lock, flags);

	return 0;
}

/*
 * Write the cached parent/divider (and optionally the enable bit) for
 * this master's MCR_V2 bank.  The id is written first to select the bank
 * before the read-modify-write; the ready bit is only polled when the
 * parent actually changes.
 */
static void clk_sama7g5_master_set(struct clk_master *master,
				   unsigned int status)
{
	unsigned long flags;
	unsigned int val, cparent;
	unsigned int enable = status ? AT91_PMC_MCR_V2_EN : 0;
	unsigned int parent = master->parent << PMC_MCR_CSS_SHIFT;
	unsigned int div = master->div << MASTER_DIV_SHIFT;

	spin_lock_irqsave(master->lock, flags);

	regmap_write(master->regmap, AT91_PMC_MCR_V2,
		     AT91_PMC_MCR_V2_ID(master->id));
	regmap_read(master->regmap, AT91_PMC_MCR_V2, &val);
	regmap_update_bits(master->regmap, AT91_PMC_MCR_V2,
			   enable | AT91_PMC_MCR_V2_CSS | AT91_PMC_MCR_V2_DIV |
			   AT91_PMC_MCR_V2_CMD | AT91_PMC_MCR_V2_ID_MSK,
			   enable | parent | div | AT91_PMC_MCR_V2_CMD |
			   AT91_PMC_MCR_V2_ID(master->id));

	cparent = (val & AT91_PMC_MCR_V2_CSS) >> PMC_MCR_CSS_SHIFT;

	/* Wait here only if parent is being changed. */
	while ((cparent != master->parent) && !clk_master_ready(master))
		cpu_relax();

	spin_unlock_irqrestore(master->lock, flags);
}

/* Enable this SAMA7G5 master clock with the cached parent/divider. */
static int clk_sama7g5_master_enable(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);

	clk_sama7g5_master_set(master, 1);

	return 0;
}

/* Clear the enable bit in this master's MCR_V2 bank. */
static void clk_sama7g5_master_disable(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;

	spin_lock_irqsave(master->lock, flags);

	regmap_write(master->regmap, AT91_PMC_MCR_V2, master->id);
	regmap_update_bits(master->regmap, AT91_PMC_MCR_V2,
			   AT91_PMC_MCR_V2_EN | AT91_PMC_MCR_V2_CMD |
			   AT91_PMC_MCR_V2_ID_MSK,
			   AT91_PMC_MCR_V2_CMD |
			   AT91_PMC_MCR_V2_ID(master->id));

	spin_unlock_irqrestore(master->lock, flags);
}

/* Read back the enable bit from this master's MCR_V2 bank. */
static int clk_sama7g5_master_is_enabled(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	unsigned int val;

	spin_lock_irqsave(master->lock, flags);

	regmap_write(master->regmap, AT91_PMC_MCR_V2, master->id);
	regmap_read(master->regmap, AT91_PMC_MCR_V2, &val);

	spin_unlock_irqrestore(master->lock, flags);

	return !!(val & AT91_PMC_MCR_V2_EN);
}

/*
 * Cache the PRES exponent for the requested rate.  Only power-of-two
 * divisors up to 2^(MASTER_PRES_MAX-1), plus the special divide-by-3
 * (encoded as MASTER_PRES_MAX), are accepted; hardware is written later
 * by clk_sama7g5_master_set().
 */
static int clk_sama7g5_master_set_rate(struct clk_hw *hw, unsigned long rate,
				       unsigned long parent_rate)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long div, flags;

	div = DIV_ROUND_CLOSEST(parent_rate, rate);
	/* Reject non-power-of-two or too-large divisors (div & (div - 1)
	 * is non-zero for any non-power-of-two, including 3, which is
	 * special-cased below because it never reaches this test). */
	if ((div > (1 << (MASTER_PRES_MAX - 1))) || (div & (div - 1)))
		return -EINVAL;

	if (div == 3)
		div = MASTER_PRES_MAX;
	else if (div)
		div = ffs(div) - 1;

	spin_lock_irqsave(master->lock, flags);
	master->div = div;
	spin_unlock_irqrestore(master->lock, flags);

	return 0;
}

/* Save the enable state for restore after suspend. */
static int clk_sama7g5_master_save_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);

	master->pms.status = clk_sama7g5_master_is_enabled(hw);

	return 0;
}

/* Re-program and re-enable the clock if it was enabled before suspend. */
static void clk_sama7g5_master_restore_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);

	if (master->pms.status)
		clk_sama7g5_master_set(master, master->pms.status);
}

static const struct clk_ops sama7g5_master_ops = {
	.enable = clk_sama7g5_master_enable,
	.disable = clk_sama7g5_master_disable,
	.is_enabled = clk_sama7g5_master_is_enabled,
	.recalc_rate = clk_sama7g5_master_recalc_rate,
	.determine_rate = clk_sama7g5_master_determine_rate,
	.set_rate = clk_sama7g5_master_set_rate,
	.get_parent = clk_sama7g5_master_get_parent,
	.set_parent = clk_sama7g5_master_set_parent,
	.save_context = clk_sama7g5_master_save_context,
	.restore_context = clk_sama7g5_master_restore_context,
};

/*
 * Register one SAMA7G5 master clock (MCR_V2 bank @id).  The current
 * parent and divider are read from hardware at registration so the
 * cached state starts in sync.  Returns the clk_hw or an ERR_PTR.
 */
struct clk_hw * __init
at91_clk_sama7g5_register_master(struct regmap *regmap,
				 const char *name, int num_parents,
				 const char **parent_names,
				 u32 *mux_table,
				 spinlock_t *lock, u8 id,
				 bool critical, int chg_pid)
{
	struct clk_master *master;
	struct clk_hw *hw;
	struct clk_init_data init;
	unsigned long flags;
	unsigned int val;
	int ret;

	if (!name || !num_parents || !parent_names || !mux_table ||
	    !lock || id > MASTER_MAX_ID)
		return ERR_PTR(-EINVAL);

	master = kzalloc(sizeof(*master), GFP_KERNEL);
	if (!master)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.ops = &sama7g5_master_ops;
	init.parent_names = parent_names;
	init.num_parents = num_parents;
	init.flags = CLK_SET_RATE_GATE | CLK_SET_PARENT_GATE;
	if (chg_pid >= 0)
		init.flags |= CLK_SET_RATE_PARENT;
	if (critical)
		init.flags |= CLK_IS_CRITICAL;

	master->hw.init = &init;
	master->regmap = regmap;
	master->id = id;
	master->chg_pid = chg_pid;
	master->lock = lock;
	master->mux_table = mux_table;

	/* Seed the cached parent/div from the current hardware state. */
	spin_lock_irqsave(master->lock, flags);
	regmap_write(master->regmap, AT91_PMC_MCR_V2, master->id);
	regmap_read(master->regmap, AT91_PMC_MCR_V2, &val);
	master->parent = (val & AT91_PMC_MCR_V2_CSS) >> PMC_MCR_CSS_SHIFT;
	master->div = (val & AT91_PMC_MCR_V2_DIV) >> MASTER_DIV_SHIFT;
	spin_unlock_irqrestore(master->lock, flags);

	hw = &master->hw;
	ret = clk_hw_register(NULL, &master->hw);
	if (ret) {
		kfree(master);
		hw = ERR_PTR(ret);
	}

	return hw;
}

/* Per-SoC MCKR register layouts. */
const struct clk_master_layout at91rm9200_master_layout = {
	.mask = 0x31F,
	.pres_shift = 2,
	.offset = AT91_PMC_MCKR,
};

const struct clk_master_layout at91sam9x5_master_layout = {
	.mask = 0x373,
	.pres_shift = 4,
	.offset = AT91_PMC_MCKR,
};