clk-sam9x60-pll.c
// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (C) 2019 Microchip Technology Inc.
 *
 */

#include <linux/bitfield.h>
#include <linux/clk.h>
#include <linux/clk-provider.h>
#include <linux/clkdev.h>
#include <linux/clk/at91_pmc.h>
#include <linux/of.h>
#include <linux/mfd/syscon.h>
#include <linux/regmap.h>

#include "pmc.h"

#define PMC_PLL_CTRL0_DIV_MSK	GENMASK(7, 0)
#define PMC_PLL_CTRL1_MUL_MSK	GENMASK(31, 24)
#define PMC_PLL_CTRL1_FRACR_MSK	GENMASK(21, 0)

#define PLL_DIV_MAX		(FIELD_GET(PMC_PLL_CTRL0_DIV_MSK, UINT_MAX) + 1)
#define UPLL_DIV		2
#define PLL_MUL_MAX		(FIELD_GET(PMC_PLL_CTRL1_MUL_MSK, UINT_MAX) + 1)

#define FCORE_MIN		(600000000)
#define FCORE_MAX		(1200000000)

#define PLL_MAX_ID		7

struct sam9x60_pll_core {
	struct regmap *regmap;
	spinlock_t *lock;
	const struct clk_pll_characteristics *characteristics;
	const struct clk_pll_layout *layout;
	struct clk_hw hw;
	u8 id;
};

struct sam9x60_frac {
	struct sam9x60_pll_core core;
	struct at91_clk_pms pms;
	u32 frac;
	u16 mul;
};

struct sam9x60_div {
	struct sam9x60_pll_core core;
	struct at91_clk_pms pms;
	u8 div;
	u8 safe_div;
};

#define to_sam9x60_pll_core(hw)	container_of(hw, struct sam9x60_pll_core, hw)
#define to_sam9x60_frac(core)	container_of(core, struct sam9x60_frac, core)
#define to_sam9x60_div(core)	container_of(core, struct sam9x60_div, core)

static struct sam9x60_div *notifier_div;

static inline bool sam9x60_pll_ready(struct regmap *regmap, int id)
{
	unsigned int status;

	regmap_read(regmap, AT91_PMC_PLL_ISR0, &status);

	return !!(status & BIT(id));
}

static bool sam9x60_frac_pll_ready(struct regmap *regmap, u8 id)
{
	return sam9x60_pll_ready(regmap, id);
}

static unsigned long sam9x60_frac_pll_recalc_rate(struct clk_hw *hw,
						  unsigned long parent_rate)
{
	struct sam9x60_pll_core *core = to_sam9x60_pll_core(hw);
	struct sam9x60_frac *frac = to_sam9x60_frac(core);

	return parent_rate * (frac->mul + 1) +
		DIV_ROUND_CLOSEST_ULL((u64)parent_rate * frac->frac, (1 << 22));
}

static int sam9x60_frac_pll_set(struct sam9x60_pll_core *core)
{
	struct sam9x60_frac *frac = to_sam9x60_frac(core);
	struct regmap *regmap = core->regmap;
	unsigned int val, cfrac, cmul;
	unsigned long flags;

	spin_lock_irqsave(core->lock, flags);

	regmap_update_bits(regmap, AT91_PMC_PLL_UPDT,
			   AT91_PMC_PLL_UPDT_ID_MSK, core->id);
	regmap_read(regmap, AT91_PMC_PLL_CTRL1, &val);
	cmul = (val & core->layout->mul_mask) >> core->layout->mul_shift;
	cfrac = (val & core->layout->frac_mask) >> core->layout->frac_shift;

	if (sam9x60_frac_pll_ready(regmap, core->id) &&
	    (cmul == frac->mul && cfrac == frac->frac))
		goto unlock;

	/* Recommended value for PMC_PLL_ACR */
	if (core->characteristics->upll)
		val = AT91_PMC_PLL_ACR_DEFAULT_UPLL;
	else
		val = AT91_PMC_PLL_ACR_DEFAULT_PLLA;
	regmap_write(regmap, AT91_PMC_PLL_ACR, val);

	regmap_write(regmap, AT91_PMC_PLL_CTRL1,
		     (frac->mul << core->layout->mul_shift) |
		     (frac->frac << core->layout->frac_shift));

	if (core->characteristics->upll) {
		/* Enable the UTMI internal bandgap */
		val |= AT91_PMC_PLL_ACR_UTMIBG;
		regmap_write(regmap, AT91_PMC_PLL_ACR, val);

		udelay(10);

		/* Enable the UTMI internal regulator */
		val |= AT91_PMC_PLL_ACR_UTMIVR;
		regmap_write(regmap, AT91_PMC_PLL_ACR, val);

		udelay(10);
	}

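	/*
	 * Commit the new settings: select this PLL id and trigger an UPDATE,
	 * enable the lock detector and the PLL itself, trigger a second
	 * UPDATE, then busy-wait until PMC_PLL_ISR0 reports the PLL as locked.
	 */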
	regmap_update_bits(regmap, AT91_PMC_PLL_UPDT,
			   AT91_PMC_PLL_UPDT_UPDATE | AT91_PMC_PLL_UPDT_ID_MSK,
			   AT91_PMC_PLL_UPDT_UPDATE | core->id);

	regmap_update_bits(regmap, AT91_PMC_PLL_CTRL0,
			   AT91_PMC_PLL_CTRL0_ENLOCK | AT91_PMC_PLL_CTRL0_ENPLL,
			   AT91_PMC_PLL_CTRL0_ENLOCK | AT91_PMC_PLL_CTRL0_ENPLL);

	regmap_update_bits(regmap, AT91_PMC_PLL_UPDT,
			   AT91_PMC_PLL_UPDT_UPDATE | AT91_PMC_PLL_UPDT_ID_MSK,
			   AT91_PMC_PLL_UPDT_UPDATE | core->id);

	while (!sam9x60_pll_ready(regmap, core->id))
		cpu_relax();

unlock:
	spin_unlock_irqrestore(core->lock, flags);

	return 0;
}

static int sam9x60_frac_pll_prepare(struct clk_hw *hw)
{
	struct sam9x60_pll_core *core = to_sam9x60_pll_core(hw);

	return sam9x60_frac_pll_set(core);
}

static void sam9x60_frac_pll_unprepare(struct clk_hw *hw)
{
	struct sam9x60_pll_core *core = to_sam9x60_pll_core(hw);
	struct regmap *regmap = core->regmap;
	unsigned long flags;

	spin_lock_irqsave(core->lock, flags);

	regmap_update_bits(regmap, AT91_PMC_PLL_UPDT,
			   AT91_PMC_PLL_UPDT_ID_MSK, core->id);

	regmap_update_bits(regmap, AT91_PMC_PLL_CTRL0, AT91_PMC_PLL_CTRL0_ENPLL, 0);

	if (core->characteristics->upll)
		regmap_update_bits(regmap, AT91_PMC_PLL_ACR,
				   AT91_PMC_PLL_ACR_UTMIBG | AT91_PMC_PLL_ACR_UTMIVR, 0);

	regmap_update_bits(regmap, AT91_PMC_PLL_UPDT,
			   AT91_PMC_PLL_UPDT_UPDATE | AT91_PMC_PLL_UPDT_ID_MSK,
			   AT91_PMC_PLL_UPDT_UPDATE | core->id);

	spin_unlock_irqrestore(core->lock, flags);
}

static int sam9x60_frac_pll_is_prepared(struct clk_hw *hw)
{
	struct sam9x60_pll_core *core = to_sam9x60_pll_core(hw);

	return sam9x60_pll_ready(core->regmap, core->id);
}

static long sam9x60_frac_pll_compute_mul_frac(struct sam9x60_pll_core *core,
					      unsigned long rate,
					      unsigned long parent_rate,
					      bool update)
{
	struct sam9x60_frac *frac = to_sam9x60_frac(core);
	unsigned long tmprate, remainder;
	unsigned long nmul = 0;
	unsigned long nfrac = 0;

	if (rate < FCORE_MIN || rate > FCORE_MAX)
		return -ERANGE;

	/*
	 * Calculate the multiplier associated with the current
	 * divider that provides the closest rate to the requested one.
	 */
	nmul = mult_frac(rate, 1, parent_rate);
	tmprate = mult_frac(parent_rate, nmul, 1);
	remainder = rate - tmprate;

	if (remainder) {
		nfrac = DIV_ROUND_CLOSEST_ULL((u64)remainder * (1 << 22),
					      parent_rate);

		tmprate += DIV_ROUND_CLOSEST_ULL((u64)nfrac * parent_rate,
						 (1 << 22));
	}

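	/*
	 * Worked example (illustrative, assuming a 24 MHz parent): a 600 MHz
	 * request gives nmul = 25 and tmprate = 600 MHz with no remainder, so
	 * the stored multiplier field becomes 24 and FRACR stays 0. Requests
	 * that are not an integer multiple of the parent are approximated by
	 * the 22-bit fractional part, worth frac / 2^22 of the parent rate,
	 * as in sam9x60_frac_pll_recalc_rate() above.
	 */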
	/* Check if the resulting rate is valid. */
	if (tmprate < FCORE_MIN || tmprate > FCORE_MAX)
		return -ERANGE;

	if (update) {
		frac->mul = nmul - 1;
		frac->frac = nfrac;
	}

	return tmprate;
}

static long sam9x60_frac_pll_round_rate(struct clk_hw *hw, unsigned long rate,
					unsigned long *parent_rate)
{
	struct sam9x60_pll_core *core = to_sam9x60_pll_core(hw);

	return sam9x60_frac_pll_compute_mul_frac(core, rate, *parent_rate, false);
}

static int sam9x60_frac_pll_set_rate(struct clk_hw *hw, unsigned long rate,
				     unsigned long parent_rate)
{
	struct sam9x60_pll_core *core = to_sam9x60_pll_core(hw);

	return sam9x60_frac_pll_compute_mul_frac(core, rate, parent_rate, true);
}

static int sam9x60_frac_pll_set_rate_chg(struct clk_hw *hw, unsigned long rate,
					 unsigned long parent_rate)
{
	struct sam9x60_pll_core *core = to_sam9x60_pll_core(hw);
	struct sam9x60_frac *frac = to_sam9x60_frac(core);
	struct regmap *regmap = core->regmap;
	unsigned long irqflags;
	unsigned int val, cfrac, cmul;
	long ret;

	ret = sam9x60_frac_pll_compute_mul_frac(core, rate, parent_rate, true);
	if (ret <= 0)
		return ret;

	spin_lock_irqsave(core->lock, irqflags);

	regmap_update_bits(regmap, AT91_PMC_PLL_UPDT, AT91_PMC_PLL_UPDT_ID_MSK,
			   core->id);
	regmap_read(regmap, AT91_PMC_PLL_CTRL1, &val);
	cmul = (val & core->layout->mul_mask) >> core->layout->mul_shift;
	cfrac = (val & core->layout->frac_mask) >> core->layout->frac_shift;

	if (cmul == frac->mul && cfrac == frac->frac)
		goto unlock;

	regmap_write(regmap, AT91_PMC_PLL_CTRL1,
		     (frac->mul << core->layout->mul_shift) |
		     (frac->frac << core->layout->frac_shift));

	regmap_update_bits(regmap, AT91_PMC_PLL_UPDT,
			   AT91_PMC_PLL_UPDT_UPDATE | AT91_PMC_PLL_UPDT_ID_MSK,
			   AT91_PMC_PLL_UPDT_UPDATE | core->id);

	regmap_update_bits(regmap, AT91_PMC_PLL_CTRL0,
			   AT91_PMC_PLL_CTRL0_ENLOCK | AT91_PMC_PLL_CTRL0_ENPLL,
			   AT91_PMC_PLL_CTRL0_ENLOCK |
			   AT91_PMC_PLL_CTRL0_ENPLL);

	regmap_update_bits(regmap, AT91_PMC_PLL_UPDT,
			   AT91_PMC_PLL_UPDT_UPDATE | AT91_PMC_PLL_UPDT_ID_MSK,
			   AT91_PMC_PLL_UPDT_UPDATE | core->id);

	while (!sam9x60_pll_ready(regmap, core->id))
		cpu_relax();

unlock:
	spin_unlock_irqrestore(core->lock, irqflags);

	return ret;
}

static int sam9x60_frac_pll_save_context(struct clk_hw *hw)
{
	struct sam9x60_pll_core *core = to_sam9x60_pll_core(hw);
	struct sam9x60_frac *frac = to_sam9x60_frac(core);

	frac->pms.status = sam9x60_pll_ready(core->regmap, core->id);

	return 0;
}

static void sam9x60_frac_pll_restore_context(struct clk_hw *hw)
{
	struct sam9x60_pll_core *core = to_sam9x60_pll_core(hw);
	struct sam9x60_frac *frac = to_sam9x60_frac(core);

	if (frac->pms.status)
		sam9x60_frac_pll_set(core);
}

static const struct clk_ops sam9x60_frac_pll_ops = {
	.prepare = sam9x60_frac_pll_prepare,
	.unprepare = sam9x60_frac_pll_unprepare,
	.is_prepared = sam9x60_frac_pll_is_prepared,
	.recalc_rate = sam9x60_frac_pll_recalc_rate,
	.round_rate = sam9x60_frac_pll_round_rate,
	.set_rate = sam9x60_frac_pll_set_rate,
	.save_context = sam9x60_frac_pll_save_context,
	.restore_context = sam9x60_frac_pll_restore_context,
};

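/*
 * The _chg variant below is used when the clock is registered without
 * CLK_SET_RATE_GATE: its set_rate reprograms a possibly running PLL right
 * away, whereas the ops above only cache the new mul/frac values, which are
 * written to the hardware on the next prepare.
 */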
static const struct clk_ops sam9x60_frac_pll_ops_chg = {
	.prepare = sam9x60_frac_pll_prepare,
	.unprepare = sam9x60_frac_pll_unprepare,
	.is_prepared = sam9x60_frac_pll_is_prepared,
	.recalc_rate = sam9x60_frac_pll_recalc_rate,
	.round_rate = sam9x60_frac_pll_round_rate,
	.set_rate = sam9x60_frac_pll_set_rate_chg,
	.save_context = sam9x60_frac_pll_save_context,
	.restore_context = sam9x60_frac_pll_restore_context,
};

/* This function should be called with the spinlock acquired. */
static void sam9x60_div_pll_set_div(struct sam9x60_pll_core *core, u32 div,
				    bool enable)
{
	struct regmap *regmap = core->regmap;
	u32 ena_msk = enable ? core->layout->endiv_mask : 0;
	u32 ena_val = enable ? (1 << core->layout->endiv_shift) : 0;

	regmap_update_bits(regmap, AT91_PMC_PLL_CTRL0,
			   core->layout->div_mask | ena_msk,
			   (div << core->layout->div_shift) | ena_val);

	regmap_update_bits(regmap, AT91_PMC_PLL_UPDT,
			   AT91_PMC_PLL_UPDT_UPDATE | AT91_PMC_PLL_UPDT_ID_MSK,
			   AT91_PMC_PLL_UPDT_UPDATE | core->id);

	while (!sam9x60_pll_ready(regmap, core->id))
		cpu_relax();
}

static int sam9x60_div_pll_set(struct sam9x60_pll_core *core)
{
	struct sam9x60_div *div = to_sam9x60_div(core);
	struct regmap *regmap = core->regmap;
	unsigned long flags;
	unsigned int val, cdiv;

	spin_lock_irqsave(core->lock, flags);
	regmap_update_bits(regmap, AT91_PMC_PLL_UPDT,
			   AT91_PMC_PLL_UPDT_ID_MSK, core->id);
	regmap_read(regmap, AT91_PMC_PLL_CTRL0, &val);
	cdiv = (val & core->layout->div_mask) >> core->layout->div_shift;

	/* Stop if enabled and nothing changed. */
	if (!!(val & core->layout->endiv_mask) && cdiv == div->div)
		goto unlock;

	sam9x60_div_pll_set_div(core, div->div, 1);

unlock:
	spin_unlock_irqrestore(core->lock, flags);

	return 0;
}

static int sam9x60_div_pll_prepare(struct clk_hw *hw)
{
	struct sam9x60_pll_core *core = to_sam9x60_pll_core(hw);

	return sam9x60_div_pll_set(core);
}

static void sam9x60_div_pll_unprepare(struct clk_hw *hw)
{
	struct sam9x60_pll_core *core = to_sam9x60_pll_core(hw);
	struct regmap *regmap = core->regmap;
	unsigned long flags;

	spin_lock_irqsave(core->lock, flags);

	regmap_update_bits(regmap, AT91_PMC_PLL_UPDT,
			   AT91_PMC_PLL_UPDT_ID_MSK, core->id);

	regmap_update_bits(regmap, AT91_PMC_PLL_CTRL0,
			   core->layout->endiv_mask, 0);

	regmap_update_bits(regmap, AT91_PMC_PLL_UPDT,
			   AT91_PMC_PLL_UPDT_UPDATE | AT91_PMC_PLL_UPDT_ID_MSK,
			   AT91_PMC_PLL_UPDT_UPDATE | core->id);

	spin_unlock_irqrestore(core->lock, flags);
}

static int sam9x60_div_pll_is_prepared(struct clk_hw *hw)
{
	struct sam9x60_pll_core *core = to_sam9x60_pll_core(hw);
	struct regmap *regmap = core->regmap;
	unsigned long flags;
	unsigned int val;

	spin_lock_irqsave(core->lock, flags);

	regmap_update_bits(regmap, AT91_PMC_PLL_UPDT,
			   AT91_PMC_PLL_UPDT_ID_MSK, core->id);
	regmap_read(regmap, AT91_PMC_PLL_CTRL0, &val);

	spin_unlock_irqrestore(core->lock, flags);

	return !!(val & core->layout->endiv_mask);
}

static unsigned long sam9x60_div_pll_recalc_rate(struct clk_hw *hw,
						 unsigned long parent_rate)
{
	struct sam9x60_pll_core *core = to_sam9x60_pll_core(hw);
	struct sam9x60_div *div = to_sam9x60_div(core);

	return DIV_ROUND_CLOSEST_ULL(parent_rate, (div->div + 1));
}

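/*
 * The search below walks every divider candidate, asks the parent (the
 * fractional PLL) what it could provide for rate * divid and keeps the
 * candidate whose divided-down rate is closest to the request. Illustrative
 * example (assuming the parent can round to 600 MHz): a 200 MHz request is
 * best served by divid = 3, i.e. a DIV field value of 2.
 */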
static long sam9x60_div_pll_compute_div(struct sam9x60_pll_core *core,
					unsigned long *parent_rate,
					unsigned long rate)
{
	const struct clk_pll_characteristics *characteristics =
		core->characteristics;
	struct clk_hw *parent = clk_hw_get_parent(&core->hw);
	unsigned long tmp_rate, tmp_parent_rate, tmp_diff;
	long best_diff = -1, best_rate = -EINVAL;
	u32 divid;

	if (!rate)
		return 0;

	if (rate < characteristics->output[0].min ||
	    rate > characteristics->output[0].max)
		return -ERANGE;

	for (divid = 1; divid < core->layout->div_mask; divid++) {
		tmp_parent_rate = clk_hw_round_rate(parent, rate * divid);
		if (!tmp_parent_rate)
			continue;

		tmp_rate = DIV_ROUND_CLOSEST_ULL(tmp_parent_rate, divid);
		tmp_diff = abs(rate - tmp_rate);

		if (best_diff < 0 || best_diff > tmp_diff) {
			*parent_rate = tmp_parent_rate;
			best_rate = tmp_rate;
			best_diff = tmp_diff;
		}

		if (!best_diff)
			break;
	}

	if (best_rate < characteristics->output[0].min ||
	    best_rate > characteristics->output[0].max)
		return -ERANGE;

	return best_rate;
}

static long sam9x60_div_pll_round_rate(struct clk_hw *hw, unsigned long rate,
				       unsigned long *parent_rate)
{
	struct sam9x60_pll_core *core = to_sam9x60_pll_core(hw);

	return sam9x60_div_pll_compute_div(core, parent_rate, rate);
}

static int sam9x60_div_pll_set_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long parent_rate)
{
	struct sam9x60_pll_core *core = to_sam9x60_pll_core(hw);
	struct sam9x60_div *div = to_sam9x60_div(core);

	div->div = DIV_ROUND_CLOSEST(parent_rate, rate) - 1;

	return 0;
}

static int sam9x60_div_pll_set_rate_chg(struct clk_hw *hw, unsigned long rate,
					unsigned long parent_rate)
{
	struct sam9x60_pll_core *core = to_sam9x60_pll_core(hw);
	struct sam9x60_div *div = to_sam9x60_div(core);
	struct regmap *regmap = core->regmap;
	unsigned long irqflags;
	unsigned int val, cdiv;

	div->div = DIV_ROUND_CLOSEST(parent_rate, rate) - 1;

	spin_lock_irqsave(core->lock, irqflags);
	regmap_update_bits(regmap, AT91_PMC_PLL_UPDT, AT91_PMC_PLL_UPDT_ID_MSK,
			   core->id);
	regmap_read(regmap, AT91_PMC_PLL_CTRL0, &val);
	cdiv = (val & core->layout->div_mask) >> core->layout->div_shift;

	/* Stop if nothing changed. */
	if (cdiv == div->div)
		goto unlock;

	sam9x60_div_pll_set_div(core, div->div, 0);

unlock:
	spin_unlock_irqrestore(core->lock, irqflags);

	return 0;
}

static int sam9x60_div_pll_save_context(struct clk_hw *hw)
{
	struct sam9x60_pll_core *core = to_sam9x60_pll_core(hw);
	struct sam9x60_div *div = to_sam9x60_div(core);

	div->pms.status = sam9x60_div_pll_is_prepared(hw);

	return 0;
}

static void sam9x60_div_pll_restore_context(struct clk_hw *hw)
{
	struct sam9x60_pll_core *core = to_sam9x60_pll_core(hw);
	struct sam9x60_div *div = to_sam9x60_div(core);

	if (div->pms.status)
		sam9x60_div_pll_set(core);
}

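/*
 * Pre-rate-change notifier, armed by sam9x60_clk_register_div_pll() when a
 * non-zero safe_div is passed: before the parent fractional PLL retunes, the
 * divider is bumped to its safe value so that the clocks fed from it are not
 * overclocked while the new rate settles.
 */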
static int sam9x60_div_pll_notifier_fn(struct notifier_block *notifier,
				       unsigned long code, void *data)
{
	struct sam9x60_div *div = notifier_div;
	struct sam9x60_pll_core core = div->core;
	struct regmap *regmap = core.regmap;
	unsigned long irqflags;
	u32 val, cdiv;
	int ret = NOTIFY_DONE;

	if (code != PRE_RATE_CHANGE)
		return ret;

	/*
	 * We switch to the safe divider to avoid overclocking other domains
	 * fed by us while the frac PLL (our parent) is changed.
	 */
	div->div = div->safe_div;

	spin_lock_irqsave(core.lock, irqflags);
	regmap_update_bits(regmap, AT91_PMC_PLL_UPDT, AT91_PMC_PLL_UPDT_ID_MSK,
			   core.id);
	regmap_read(regmap, AT91_PMC_PLL_CTRL0, &val);
	cdiv = (val & core.layout->div_mask) >> core.layout->div_shift;

	/* Stop if nothing changed. */
	if (cdiv == div->safe_div)
		goto unlock;

	sam9x60_div_pll_set_div(&core, div->div, 0);
	ret = NOTIFY_OK;

unlock:
	spin_unlock_irqrestore(core.lock, irqflags);

	return ret;
}

static struct notifier_block sam9x60_div_pll_notifier = {
	.notifier_call = sam9x60_div_pll_notifier_fn,
};

static const struct clk_ops sam9x60_div_pll_ops = {
	.prepare = sam9x60_div_pll_prepare,
	.unprepare = sam9x60_div_pll_unprepare,
	.is_prepared = sam9x60_div_pll_is_prepared,
	.recalc_rate = sam9x60_div_pll_recalc_rate,
	.round_rate = sam9x60_div_pll_round_rate,
	.set_rate = sam9x60_div_pll_set_rate,
	.save_context = sam9x60_div_pll_save_context,
	.restore_context = sam9x60_div_pll_restore_context,
};

static const struct clk_ops sam9x60_div_pll_ops_chg = {
	.prepare = sam9x60_div_pll_prepare,
	.unprepare = sam9x60_div_pll_unprepare,
	.is_prepared = sam9x60_div_pll_is_prepared,
	.recalc_rate = sam9x60_div_pll_recalc_rate,
	.round_rate = sam9x60_div_pll_round_rate,
	.set_rate = sam9x60_div_pll_set_rate_chg,
	.save_context = sam9x60_div_pll_save_context,
	.restore_context = sam9x60_div_pll_restore_context,
};

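/*
 * Registration sketch (illustrative only: the variable names and parameter
 * values below are assumptions, not taken from this file; the real callers
 * are the SoC PMC setup files, e.g. drivers/clk/at91/sam9x60.c):
 *
 *	frac_hw = sam9x60_clk_register_frac_pll(regmap, &pmc_pll_lock,
 *						"pllack_fracck", "mainck",
 *						main_osc_hw, 0, &plla_charac,
 *						&pll_frac_layout,
 *						CLK_SET_RATE_GATE);
 *	div_hw = sam9x60_clk_register_div_pll(regmap, &pmc_pll_lock,
 *					      "pllack_divck", "pllack_fracck",
 *					      0, &plla_charac, &pll_div_layout,
 *					      0, 15);
 *
 * A non-zero safe_div on the divider arms the notifier defined above.
 */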
struct clk_hw * __init
sam9x60_clk_register_frac_pll(struct regmap *regmap, spinlock_t *lock,
			      const char *name, const char *parent_name,
			      struct clk_hw *parent_hw, u8 id,
			      const struct clk_pll_characteristics *characteristics,
			      const struct clk_pll_layout *layout, u32 flags)
{
	struct sam9x60_frac *frac;
	struct clk_hw *hw;
	struct clk_init_data init;
	unsigned long parent_rate, irqflags;
	unsigned int val;
	int ret;

	if (id > PLL_MAX_ID || !lock || !parent_hw)
		return ERR_PTR(-EINVAL);

	frac = kzalloc(sizeof(*frac), GFP_KERNEL);
	if (!frac)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.parent_names = &parent_name;
	init.num_parents = 1;
	if (flags & CLK_SET_RATE_GATE)
		init.ops = &sam9x60_frac_pll_ops;
	else
		init.ops = &sam9x60_frac_pll_ops_chg;

	init.flags = flags;

	frac->core.id = id;
	frac->core.hw.init = &init;
	frac->core.characteristics = characteristics;
	frac->core.layout = layout;
	frac->core.regmap = regmap;
	frac->core.lock = lock;

	spin_lock_irqsave(frac->core.lock, irqflags);
	if (sam9x60_pll_ready(regmap, id)) {
		regmap_update_bits(regmap, AT91_PMC_PLL_UPDT,
				   AT91_PMC_PLL_UPDT_ID_MSK, id);
		regmap_read(regmap, AT91_PMC_PLL_CTRL1, &val);
		frac->mul = FIELD_GET(PMC_PLL_CTRL1_MUL_MSK, val);
		frac->frac = FIELD_GET(PMC_PLL_CTRL1_FRACR_MSK, val);
	} else {
		/*
		 * This means the PLL is not set up by the bootloader. In this
		 * case we need to set the minimum rate for it. Otherwise a
		 * clock child of this PLL may be enabled before setting its
		 * rate, leading to this PLL being enabled at an unsupported
		 * rate, which would leave the PLL unable to lock at all.
		 */
		parent_rate = clk_hw_get_rate(parent_hw);
		if (!parent_rate) {
			hw = ERR_PTR(-EINVAL);
			goto free;
		}

		ret = sam9x60_frac_pll_compute_mul_frac(&frac->core, FCORE_MIN,
							parent_rate, true);
		if (ret <= 0) {
			hw = ERR_PTR(ret);
			goto free;
		}
	}
	spin_unlock_irqrestore(frac->core.lock, irqflags);

	hw = &frac->core.hw;
	ret = clk_hw_register(NULL, hw);
	if (ret) {
		kfree(frac);
		hw = ERR_PTR(ret);
	}

	return hw;

free:
	spin_unlock_irqrestore(frac->core.lock, irqflags);
	kfree(frac);
	return hw;
}

struct clk_hw * __init
sam9x60_clk_register_div_pll(struct regmap *regmap, spinlock_t *lock,
			     const char *name, const char *parent_name, u8 id,
			     const struct clk_pll_characteristics *characteristics,
			     const struct clk_pll_layout *layout, u32 flags,
			     u32 safe_div)
{
	struct sam9x60_div *div;
	struct clk_hw *hw;
	struct clk_init_data init;
	unsigned long irqflags;
	unsigned int val;
	int ret;

	/* We only support one changeable PLL. */
	if (id > PLL_MAX_ID || !lock || (safe_div && notifier_div))
		return ERR_PTR(-EINVAL);

	if (safe_div >= PLL_DIV_MAX)
		safe_div = PLL_DIV_MAX - 1;

	div = kzalloc(sizeof(*div), GFP_KERNEL);
	if (!div)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.parent_names = &parent_name;
	init.num_parents = 1;
	if (flags & CLK_SET_RATE_GATE)
		init.ops = &sam9x60_div_pll_ops;
	else
		init.ops = &sam9x60_div_pll_ops_chg;
	init.flags = flags;

	div->core.id = id;
	div->core.hw.init = &init;
	div->core.characteristics = characteristics;
	div->core.layout = layout;
	div->core.regmap = regmap;
	div->core.lock = lock;
	div->safe_div = safe_div;

	spin_lock_irqsave(div->core.lock, irqflags);

	regmap_update_bits(regmap, AT91_PMC_PLL_UPDT,
			   AT91_PMC_PLL_UPDT_ID_MSK, id);
	regmap_read(regmap, AT91_PMC_PLL_CTRL0, &val);
	div->div = FIELD_GET(PMC_PLL_CTRL0_DIV_MSK, val);

	spin_unlock_irqrestore(div->core.lock, irqflags);

	hw = &div->core.hw;
	ret = clk_hw_register(NULL, hw);
	if (ret) {
		kfree(div);
		hw = ERR_PTR(ret);
	} else if (div->safe_div) {
		notifier_div = div;
		clk_notifier_register(hw->clk, &sam9x60_div_pll_notifier);
	}

	return hw;
}