/*
 * Copyright (C) 2015 Atmel Corporation,
 *                    Nicolas Ferre <nicolas.ferre@atmel.com>
 *
 * Based on clk-programmable & clk-peripheral drivers by Boris BREZILLON.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */

#include <linux/clk-provider.h>
#include <linux/clkdev.h>
#include <linux/clk/at91_pmc.h>
#include <linux/of.h>
#include <linux/mfd/syscon.h>
#include <linux/regmap.h>

#include "pmc.h"

#define PERIPHERAL_MAX		64
#define PERIPHERAL_ID_MIN	2

#define GENERATED_SOURCE_MAX	6
#define GENERATED_MAX_DIV	255

struct clk_generated {
	struct clk_hw hw;
	struct regmap *regmap;
	struct clk_range range;
	spinlock_t *lock;
	u32 id;
	u32 gckdiv;
	u8 parent_id;
};

#define to_clk_generated(hw) \
	container_of(hw, struct clk_generated, hw)
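
/*
 * Note on register access: AT91_PMC_PCR is a single, indirectly indexed
 * register. The helpers below therefore write the peripheral ID (PID)
 * first to select which generated clock the command applies to, and only
 * then issue the actual read or write (with AT91_PMC_PCR_CMD set for
 * writes), holding the PMC lock so the two-step sequence cannot be
 * interleaved with another PCR access.
 */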

static int clk_generated_enable(struct clk_hw *hw)
{
	struct clk_generated *gck = to_clk_generated(hw);
	unsigned long flags;

	pr_debug("GCLK: %s, gckdiv = %d, parent id = %d\n",
		 __func__, gck->gckdiv, gck->parent_id);

	spin_lock_irqsave(gck->lock, flags);
	regmap_write(gck->regmap, AT91_PMC_PCR,
		     (gck->id & AT91_PMC_PCR_PID_MASK));
	regmap_update_bits(gck->regmap, AT91_PMC_PCR,
			   AT91_PMC_PCR_GCKDIV_MASK | AT91_PMC_PCR_GCKCSS_MASK |
			   AT91_PMC_PCR_CMD | AT91_PMC_PCR_GCKEN,
			   AT91_PMC_PCR_GCKCSS(gck->parent_id) |
			   AT91_PMC_PCR_CMD |
			   AT91_PMC_PCR_GCKDIV(gck->gckdiv) |
			   AT91_PMC_PCR_GCKEN);
	spin_unlock_irqrestore(gck->lock, flags);
	return 0;
}

static void clk_generated_disable(struct clk_hw *hw)
{
	struct clk_generated *gck = to_clk_generated(hw);
	unsigned long flags;

	spin_lock_irqsave(gck->lock, flags);
	regmap_write(gck->regmap, AT91_PMC_PCR,
		     (gck->id & AT91_PMC_PCR_PID_MASK));
	regmap_update_bits(gck->regmap, AT91_PMC_PCR,
			   AT91_PMC_PCR_CMD | AT91_PMC_PCR_GCKEN,
			   AT91_PMC_PCR_CMD);
	spin_unlock_irqrestore(gck->lock, flags);
}

static int clk_generated_is_enabled(struct clk_hw *hw)
{
	struct clk_generated *gck = to_clk_generated(hw);
	unsigned long flags;
	unsigned int status;

	spin_lock_irqsave(gck->lock, flags);
	regmap_write(gck->regmap, AT91_PMC_PCR,
		     (gck->id & AT91_PMC_PCR_PID_MASK));
	regmap_read(gck->regmap, AT91_PMC_PCR, &status);
	spin_unlock_irqrestore(gck->lock, flags);

	return status & AT91_PMC_PCR_GCKEN ? 1 : 0;
}

static unsigned long
clk_generated_recalc_rate(struct clk_hw *hw,
			  unsigned long parent_rate)
{
	struct clk_generated *gck = to_clk_generated(hw);

	return DIV_ROUND_CLOSEST(parent_rate, gck->gckdiv + 1);
}

static int clk_generated_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_generated *gck = to_clk_generated(hw);
	struct clk_hw *parent = NULL;
	long best_rate = -EINVAL;
	unsigned long tmp_rate, min_rate;
	int best_diff = -1;
	int tmp_diff;
	int i;

	for (i = 0; i < clk_hw_get_num_parents(hw); i++) {
		u32 div;
		unsigned long parent_rate;

		parent = clk_hw_get_parent_by_index(hw, i);
		if (!parent)
			continue;

		parent_rate = clk_hw_get_rate(parent);
		min_rate = DIV_ROUND_CLOSEST(parent_rate, GENERATED_MAX_DIV + 1);
		if (!parent_rate ||
		    (gck->range.max && min_rate > gck->range.max))
			continue;

		for (div = 1; div < GENERATED_MAX_DIV + 2; div++) {
			tmp_rate = DIV_ROUND_CLOSEST(parent_rate, div);
			tmp_diff = abs(req->rate - tmp_rate);

			if (best_diff < 0 || best_diff > tmp_diff) {
				best_rate = tmp_rate;
				best_diff = tmp_diff;
				req->best_parent_rate = parent_rate;
				req->best_parent_hw = parent;
			}

			if (!best_diff || tmp_rate < req->rate)
				break;
		}

		if (!best_diff)
			break;
	}

	pr_debug("GCLK: %s, best_rate = %ld, parent clk: %s @ %ld\n",
		 __func__, best_rate,
		 __clk_get_name((req->best_parent_hw)->clk),
		 req->best_parent_rate);

	if (best_rate < 0)
		return best_rate;

	req->rate = best_rate;
	return 0;
}

/* No modification of hardware as we have the flag CLK_SET_PARENT_GATE set */
static int clk_generated_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_generated *gck = to_clk_generated(hw);

	if (index >= clk_hw_get_num_parents(hw))
		return -EINVAL;

	gck->parent_id = index;
	return 0;
}

static u8 clk_generated_get_parent(struct clk_hw *hw)
{
	struct clk_generated *gck = to_clk_generated(hw);

	return gck->parent_id;
}

/* No modification of hardware as we have the flag CLK_SET_RATE_GATE set */
static int clk_generated_set_rate(struct clk_hw *hw,
				  unsigned long rate,
				  unsigned long parent_rate)
{
	struct clk_generated *gck = to_clk_generated(hw);
	u32 div;

	if (!rate)
		return -EINVAL;

	if (gck->range.max && rate > gck->range.max)
		return -EINVAL;

	div = DIV_ROUND_CLOSEST(parent_rate, rate);
	if (div > GENERATED_MAX_DIV + 1 || !div)
		return -EINVAL;

	gck->gckdiv = div - 1;
	return 0;
}

static const struct clk_ops generated_ops = {
	.enable = clk_generated_enable,
	.disable = clk_generated_disable,
	.is_enabled = clk_generated_is_enabled,
	.recalc_rate = clk_generated_recalc_rate,
	.determine_rate = clk_generated_determine_rate,
	.get_parent = clk_generated_get_parent,
	.set_parent = clk_generated_set_parent,
	.set_rate = clk_generated_set_rate,
};

/**
 * clk_generated_startup - Initialize a given clock to its default parent and
 * divisor parameter.
 *
 * @gck:	Generated clock to set the startup parameters for.
 *
 * Take parameters from the hardware and update local clock configuration
 * accordingly.
 */
static void clk_generated_startup(struct clk_generated *gck)
{
	u32 tmp;
	unsigned long flags;

	spin_lock_irqsave(gck->lock, flags);
	regmap_write(gck->regmap, AT91_PMC_PCR,
		     (gck->id & AT91_PMC_PCR_PID_MASK));
	regmap_read(gck->regmap, AT91_PMC_PCR, &tmp);
	spin_unlock_irqrestore(gck->lock, flags);

	gck->parent_id = (tmp & AT91_PMC_PCR_GCKCSS_MASK)
					>> AT91_PMC_PCR_GCKCSS_OFFSET;
	gck->gckdiv = (tmp & AT91_PMC_PCR_GCKDIV_MASK)
					>> AT91_PMC_PCR_GCKDIV_OFFSET;
}

static struct clk_hw * __init
at91_clk_register_generated(struct regmap *regmap, spinlock_t *lock,
			    const char *name, const char **parent_names,
			    u8 num_parents, u8 id,
			    const struct clk_range *range)
{
	struct clk_generated *gck;
	struct clk_init_data init;
	struct clk_hw *hw;
	int ret;

	gck = kzalloc(sizeof(*gck), GFP_KERNEL);
	if (!gck)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.ops = &generated_ops;
	init.parent_names = parent_names;
	init.num_parents = num_parents;
	init.flags = CLK_SET_RATE_GATE | CLK_SET_PARENT_GATE;

	gck->id = id;
	gck->hw.init = &init;
	gck->regmap = regmap;
	gck->lock = lock;
	gck->range = *range;

	clk_generated_startup(gck);
	hw = &gck->hw;
	ret = clk_hw_register(NULL, &gck->hw);
	if (ret) {
		kfree(gck);
		hw = ERR_PTR(ret);
	} else {
		pmc_register_id(id);
	}

	return hw;
}
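
/*
 * Illustrative (not authoritative) device-tree layout parsed below: one
 * "atmel,sama5d2-clk-generated" node under the PMC listing the candidate
 * parents in "clocks", with one child node per generated clock whose "reg"
 * is the peripheral ID and whose optional "atmel,clk-output-range" bounds
 * the output rate. Node names and parent phandles are examples only:
 *
 *	gck {
 *		compatible = "atmel,sama5d2-clk-generated";
 *		clocks = <&clk32k>, <&main>, <&plladiv>, <&utmi>, <&mck>;
 *
 *		tcb0_gclk {
 *			reg = <35>;
 *			atmel,clk-output-range = <0 83000000>;
 *		};
 *	};
 */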

static void __init of_sama5d2_clk_generated_setup(struct device_node *np)
{
	int num;
	u32 id;
	const char *name;
	struct clk_hw *hw;
	unsigned int num_parents;
	const char *parent_names[GENERATED_SOURCE_MAX];
	struct device_node *gcknp;
	struct clk_range range = CLK_RANGE(0, 0);
	struct regmap *regmap;

	num_parents = of_clk_get_parent_count(np);
	if (num_parents == 0 || num_parents > GENERATED_SOURCE_MAX)
		return;

	of_clk_parent_fill(np, parent_names, num_parents);

	num = of_get_child_count(np);
	if (!num || num > PERIPHERAL_MAX)
		return;

	regmap = syscon_node_to_regmap(of_get_parent(np));
	if (IS_ERR(regmap))
		return;

	for_each_child_of_node(np, gcknp) {
		if (of_property_read_u32(gcknp, "reg", &id))
			continue;

		if (id < PERIPHERAL_ID_MIN || id >= PERIPHERAL_MAX)
			continue;

		if (of_property_read_string(np, "clock-output-names", &name))
			name = gcknp->name;

		of_at91_get_clk_range(gcknp, "atmel,clk-output-range",
				      &range);

		hw = at91_clk_register_generated(regmap, &pmc_pcr_lock, name,
						 parent_names, num_parents,
						 id, &range);
		if (IS_ERR(hw))
			continue;

		of_clk_add_hw_provider(gcknp, of_clk_hw_simple_get, hw);
	}
}
CLK_OF_DECLARE(of_sama5d2_clk_generated_setup, "atmel,sama5d2-clk-generated",
	       of_sama5d2_clk_generated_setup);