GitHub Repository: torvalds/linux
Path: blob/master/drivers/clk/at91/clk-master.c
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2013 Boris BREZILLON <[email protected]>
 */

#include <linux/clk-provider.h>
#include <linux/clkdev.h>
#include <linux/clk.h>
#include <linux/clk/at91_pmc.h>
#include <linux/of.h>
#include <linux/mfd/syscon.h>
#include <linux/regmap.h>

#include "pmc.h"

#define MASTER_PRES_MASK	0x7
#define MASTER_PRES_MAX		MASTER_PRES_MASK
#define MASTER_DIV_SHIFT	8
#define MASTER_DIV_MASK		0x7

#define PMC_MCR_CSS_SHIFT	(16)

#define MASTER_MAX_ID		9

#define to_clk_master(hw) container_of(hw, struct clk_master, hw)

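/*
 * Driver-private state for one PMC master clock. The same structure backs the
 * PRES and DIV clocks as well as the per-ID sama7g5 master clocks programmed
 * through AT91_PMC_MCR_V2; each variant only uses the fields it needs.
 */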
struct clk_master {
	struct clk_hw hw;
	struct regmap *regmap;
	spinlock_t *lock;
	const struct clk_master_layout *layout;
	const struct clk_master_characteristics *characteristics;
	struct at91_clk_pms pms;
	u32 *mux_table;
	u32 mckr;
	int chg_pid;
	u8 id;
	u8 parent;
	u8 div;
	u32 safe_div;
};

/* MCK div reference to be used by notifier. */
static struct clk_master *master_div;

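/*
 * Poll helper: returns true once the PMC status register reports the clock as
 * ready (AT91_PMC_MCKXRDY when master->id is set, AT91_PMC_MCKRDY otherwise).
 */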
static inline bool clk_master_ready(struct clk_master *master)
{
	unsigned int bit = master->id ? AT91_PMC_MCKXRDY : AT91_PMC_MCKRDY;
	unsigned int status;

	regmap_read(master->regmap, AT91_PMC_SR, &status);

	return !!(status & bit);
}

static int clk_master_prepare(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;

	spin_lock_irqsave(master->lock, flags);

	while (!clk_master_ready(master))
		cpu_relax();

	spin_unlock_irqrestore(master->lock, flags);

	return 0;
}

static int clk_master_is_prepared(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	bool status;

	spin_lock_irqsave(master->lock, flags);
	status = clk_master_ready(master);
	spin_unlock_irqrestore(master->lock, flags);

	return status;
}

static unsigned long clk_master_div_recalc_rate(struct clk_hw *hw,
						unsigned long parent_rate)
{
	u8 div;
	unsigned long flags, rate = parent_rate;
	struct clk_master *master = to_clk_master(hw);
	const struct clk_master_layout *layout = master->layout;
	const struct clk_master_characteristics *characteristics =
						master->characteristics;
	unsigned int mckr;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &mckr);
	spin_unlock_irqrestore(master->lock, flags);

	mckr &= layout->mask;

	div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;

	rate /= characteristics->divisors[div];

	if (rate < characteristics->output.min)
		pr_warn("master clk div is underclocked\n");
	else if (rate > characteristics->output.max)
		pr_warn("master clk div is overclocked\n");

	return rate;
}

static int clk_master_div_save_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	struct clk_hw *parent_hw = clk_hw_get_parent(hw);
	unsigned long flags;
	unsigned int mckr, div;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &mckr);
	spin_unlock_irqrestore(master->lock, flags);

	mckr &= master->layout->mask;
	div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
	div = master->characteristics->divisors[div];

	master->pms.parent_rate = clk_hw_get_rate(parent_hw);
	master->pms.rate = DIV_ROUND_CLOSEST(master->pms.parent_rate, div);

	return 0;
}

static void clk_master_div_restore_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	unsigned int mckr;
	u8 div;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &mckr);
	spin_unlock_irqrestore(master->lock, flags);

	mckr &= master->layout->mask;
	div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
	div = master->characteristics->divisors[div];

	if (div != DIV_ROUND_CLOSEST(master->pms.parent_rate, master->pms.rate))
		pr_warn("MCKR DIV not configured properly by firmware!\n");
}

static const struct clk_ops master_div_ops = {
	.prepare = clk_master_prepare,
	.is_prepared = clk_master_is_prepared,
	.recalc_rate = clk_master_div_recalc_rate,
	.save_context = clk_master_div_save_context,
	.restore_context = clk_master_div_restore_context,
};

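/*
 * Map the requested divider value to its index in characteristics->divisors[]
 * (falling back to the largest supported divider), program the MCKR DIV field
 * and busy-wait for the master clock ready flag.
 */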
/* This function must be called with lock acquired. */
static int clk_master_div_set(struct clk_master *master,
			      unsigned long parent_rate, int div)
{
	const struct clk_master_characteristics *characteristics =
						master->characteristics;
	unsigned long rate = parent_rate;
	unsigned int max_div = 0, div_index = 0, max_div_index = 0;
	unsigned int i, mckr, tmp;
	int ret;

	for (i = 0; i < ARRAY_SIZE(characteristics->divisors); i++) {
		if (!characteristics->divisors[i])
			break;

		if (div == characteristics->divisors[i])
			div_index = i;

		if (max_div < characteristics->divisors[i]) {
			max_div = characteristics->divisors[i];
			max_div_index = i;
		}
	}

	if (div > max_div)
		div_index = max_div_index;

	ret = regmap_read(master->regmap, master->layout->offset, &mckr);
	if (ret)
		return ret;

	mckr &= master->layout->mask;
	tmp = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
	if (tmp == div_index)
		return 0;

	rate /= characteristics->divisors[div_index];
	if (rate < characteristics->output.min)
		pr_warn("master clk div is underclocked\n");
	else if (rate > characteristics->output.max)
		pr_warn("master clk div is overclocked\n");

	mckr &= ~(MASTER_DIV_MASK << MASTER_DIV_SHIFT);
	mckr |= (div_index << MASTER_DIV_SHIFT);
	ret = regmap_write(master->regmap, master->layout->offset, mckr);
	if (ret)
		return ret;

	while (!clk_master_ready(master))
		cpu_relax();

	master->div = characteristics->divisors[div_index];

	return 0;
}

static unsigned long clk_master_div_recalc_rate_chg(struct clk_hw *hw,
						    unsigned long parent_rate)
{
	struct clk_master *master = to_clk_master(hw);

	return DIV_ROUND_CLOSEST_ULL(parent_rate, master->div);
}

static void clk_master_div_restore_context_chg(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	int ret;

	spin_lock_irqsave(master->lock, flags);
	ret = clk_master_div_set(master, master->pms.parent_rate,
				 DIV_ROUND_CLOSEST(master->pms.parent_rate,
						   master->pms.rate));
	spin_unlock_irqrestore(master->lock, flags);
	if (ret)
		pr_warn("Failed to restore MCK DIV clock\n");
}

static const struct clk_ops master_div_ops_chg = {
	.prepare = clk_master_prepare,
	.is_prepared = clk_master_is_prepared,
	.recalc_rate = clk_master_div_recalc_rate_chg,
	.save_context = clk_master_div_save_context,
	.restore_context = clk_master_div_restore_context_chg,
};

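/*
 * Rate-change notifier for the MCK DIV clock: on PRE_RATE_CHANGE the divider
 * is parked at the conservative safe_div value so the PLL reprogramming cannot
 * overclock the MCK domain; on POST_RATE_CHANGE the largest rate that still
 * fits under characteristics->output.max is selected again.
 */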
static int clk_master_div_notifier_fn(struct notifier_block *notifier,
				      unsigned long code, void *data)
{
	const struct clk_master_characteristics *characteristics =
						master_div->characteristics;
	struct clk_notifier_data *cnd = data;
	unsigned long flags, new_parent_rate, new_rate;
	unsigned int mckr, div, new_div = 0;
	int ret, i;
	long tmp_diff;
	long best_diff = -1;

	spin_lock_irqsave(master_div->lock, flags);
	switch (code) {
	case PRE_RATE_CHANGE:
		/*
		 * We want to avoid any overclocking of the MCK DIV domain. To
		 * do this we set a safe divider (the underclocking is not of
		 * interest as we can go as low as 32 kHz). The relationship
		 * between this clock and its parents is as follows:
		 *
		 * FRAC PLL -> DIV PLL -> MCK DIV
		 *
		 * With the proper safe divider we should be good even with
		 * the FRAC PLL at its maximum value.
		 */
		ret = regmap_read(master_div->regmap, master_div->layout->offset,
				  &mckr);
		if (ret) {
			ret = NOTIFY_STOP_MASK;
			goto unlock;
		}

		mckr &= master_div->layout->mask;
		div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;

		/* Switch to safe divider. */
		clk_master_div_set(master_div,
				   cnd->old_rate * characteristics->divisors[div],
				   master_div->safe_div);
		break;

	case POST_RATE_CHANGE:
		/*
		 * At this point we want to restore MCK DIV domain to its maximum
		 * allowed rate.
		 */
		ret = regmap_read(master_div->regmap, master_div->layout->offset,
				  &mckr);
		if (ret) {
			ret = NOTIFY_STOP_MASK;
			goto unlock;
		}

		mckr &= master_div->layout->mask;
		div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
		new_parent_rate = cnd->new_rate * characteristics->divisors[div];

		for (i = 0; i < ARRAY_SIZE(characteristics->divisors); i++) {
			if (!characteristics->divisors[i])
				break;

			new_rate = DIV_ROUND_CLOSEST_ULL(new_parent_rate,
							 characteristics->divisors[i]);

			tmp_diff = characteristics->output.max - new_rate;
			if (tmp_diff < 0)
				continue;

			if (best_diff < 0 || best_diff > tmp_diff) {
				new_div = characteristics->divisors[i];
				best_diff = tmp_diff;
			}

			if (!tmp_diff)
				break;
		}

		if (!new_div) {
			ret = NOTIFY_STOP_MASK;
			goto unlock;
		}

		/* Update the div to preserve MCK DIV clock rate. */
		clk_master_div_set(master_div, new_parent_rate,
				   new_div);

		ret = NOTIFY_OK;
		break;

	default:
		ret = NOTIFY_DONE;
		break;
	}

unlock:
	spin_unlock_irqrestore(master_div->lock, flags);

	return ret;
}

static struct notifier_block clk_master_div_notifier = {
	.notifier_call = clk_master_div_notifier_fn,
};

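/*
 * determine_rate() helper: "div" is a PRES exponent, i.e. the candidate rate
 * is parent_rate >> div, except that div == MASTER_PRES_MAX stands for the
 * divide-by-3 prescaler. Remember the candidate closest to the requested rate.
 */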
static void clk_sama7g5_master_best_diff(struct clk_rate_request *req,
					 struct clk_hw *parent,
					 unsigned long parent_rate,
					 long *best_rate,
					 long *best_diff,
					 u32 div)
{
	unsigned long tmp_rate, tmp_diff;

	if (div == MASTER_PRES_MAX)
		tmp_rate = parent_rate / 3;
	else
		tmp_rate = parent_rate >> div;

	tmp_diff = abs(req->rate - tmp_rate);

	if (*best_diff < 0 || *best_diff >= tmp_diff) {
		*best_rate = tmp_rate;
		*best_diff = tmp_diff;
		req->best_parent_rate = parent_rate;
		req->best_parent_hw = parent;
	}
}

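/*
 * PRES field encoding: a value of n selects a divide-by-2^n prescaler, except
 * that MASTER_PRES_MAX selects divide-by-3 on SoCs with have_div3_pres set.
 */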
static unsigned long clk_master_pres_recalc_rate(struct clk_hw *hw,
						 unsigned long parent_rate)
{
	struct clk_master *master = to_clk_master(hw);
	const struct clk_master_characteristics *characteristics =
						master->characteristics;
	unsigned long flags;
	unsigned int val, pres;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &val);
	spin_unlock_irqrestore(master->lock, flags);

	val &= master->layout->mask;
	pres = (val >> master->layout->pres_shift) & MASTER_PRES_MASK;
	if (pres == MASTER_PRES_MAX && characteristics->have_div3_pres)
		pres = 3;
	else
		pres = (1 << pres);

	return DIV_ROUND_CLOSEST_ULL(parent_rate, pres);
}

static u8 clk_master_pres_get_parent(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	unsigned int mckr;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &mckr);
	spin_unlock_irqrestore(master->lock, flags);

	mckr &= master->layout->mask;

	return mckr & AT91_PMC_CSS;
}

static int clk_master_pres_save_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	struct clk_hw *parent_hw = clk_hw_get_parent(hw);
	unsigned long flags;
	unsigned int val, pres;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &val);
	spin_unlock_irqrestore(master->lock, flags);

	val &= master->layout->mask;
	pres = (val >> master->layout->pres_shift) & MASTER_PRES_MASK;
	if (pres == MASTER_PRES_MAX && master->characteristics->have_div3_pres)
		pres = 3;
	else
		pres = (1 << pres);

	master->pms.parent = val & AT91_PMC_CSS;
	master->pms.parent_rate = clk_hw_get_rate(parent_hw);
	master->pms.rate = DIV_ROUND_CLOSEST_ULL(master->pms.parent_rate, pres);

	return 0;
}

static void clk_master_pres_restore_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	unsigned int val, pres;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &val);
	spin_unlock_irqrestore(master->lock, flags);

	val &= master->layout->mask;
	pres = (val >> master->layout->pres_shift) & MASTER_PRES_MASK;
	if (pres == MASTER_PRES_MAX && master->characteristics->have_div3_pres)
		pres = 3;
	else
		pres = (1 << pres);

	if (master->pms.rate !=
	    DIV_ROUND_CLOSEST_ULL(master->pms.parent_rate, pres) ||
	    (master->pms.parent != (val & AT91_PMC_CSS)))
		pr_warn("MCKR PRES was not configured properly by firmware!\n");
}

static const struct clk_ops master_pres_ops = {
	.prepare = clk_master_prepare,
	.is_prepared = clk_master_is_prepared,
	.recalc_rate = clk_master_pres_recalc_rate,
	.get_parent = clk_master_pres_get_parent,
	.save_context = clk_master_pres_save_context,
	.restore_context = clk_master_pres_restore_context,
};

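/*
 * Common registration path for the PRES and DIV clocks. When the changeable
 * DIV ops are used, the current divider is read back from MCKR so that
 * recalc_rate() gives a sensible value before the first set_rate().
 */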
static struct clk_hw * __init
at91_clk_register_master_internal(struct regmap *regmap,
		const char *name, int num_parents,
		const char **parent_names,
		struct clk_hw **parent_hws,
		const struct clk_master_layout *layout,
		const struct clk_master_characteristics *characteristics,
		const struct clk_ops *ops, spinlock_t *lock, u32 flags)
{
	struct clk_master *master;
	struct clk_init_data init = {};
	struct clk_hw *hw;
	unsigned int mckr;
	unsigned long irqflags;
	int ret;

	if (!name || !num_parents || !(parent_names || parent_hws) || !lock)
		return ERR_PTR(-EINVAL);

	master = kzalloc(sizeof(*master), GFP_KERNEL);
	if (!master)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.ops = ops;
	if (parent_hws)
		init.parent_hws = (const struct clk_hw **)parent_hws;
	else
		init.parent_names = parent_names;
	init.num_parents = num_parents;
	init.flags = flags;

	master->hw.init = &init;
	master->layout = layout;
	master->characteristics = characteristics;
	master->regmap = regmap;
	master->lock = lock;

	if (ops == &master_div_ops_chg) {
		spin_lock_irqsave(master->lock, irqflags);
		regmap_read(master->regmap, master->layout->offset, &mckr);
		spin_unlock_irqrestore(master->lock, irqflags);

		mckr &= layout->mask;
		mckr = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
		master->div = characteristics->divisors[mckr];
	}

	hw = &master->hw;
	ret = clk_hw_register(NULL, &master->hw);
	if (ret) {
		kfree(master);
		hw = ERR_PTR(ret);
	}

	return hw;
}

struct clk_hw * __init
at91_clk_register_master_pres(struct regmap *regmap,
		const char *name, int num_parents,
		const char **parent_names,
		struct clk_hw **parent_hws,
		const struct clk_master_layout *layout,
		const struct clk_master_characteristics *characteristics,
		spinlock_t *lock)
{
	return at91_clk_register_master_internal(regmap, name, num_parents,
						 parent_names, parent_hws, layout,
						 characteristics,
						 &master_pres_ops,
						 lock, CLK_SET_RATE_GATE);
}

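/*
 * A non-zero safe_div also registers the rate-change notifier above, so the
 * divider can be parked at safe_div while the parent PLL is reprogrammed
 * (see clk_master_div_notifier_fn()).
 */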
struct clk_hw * __init
at91_clk_register_master_div(struct regmap *regmap,
		const char *name, const char *parent_name,
		struct clk_hw *parent_hw, const struct clk_master_layout *layout,
		const struct clk_master_characteristics *characteristics,
		spinlock_t *lock, u32 flags, u32 safe_div)
{
	const struct clk_ops *ops;
	struct clk_hw *hw;

	if (flags & CLK_SET_RATE_GATE)
		ops = &master_div_ops;
	else
		ops = &master_div_ops_chg;

	hw = at91_clk_register_master_internal(regmap, name, 1,
					       parent_name ? &parent_name : NULL,
					       parent_hw ? &parent_hw : NULL, layout,
					       characteristics, ops,
					       lock, flags);

	if (!IS_ERR(hw) && safe_div) {
		master_div = to_clk_master(hw);
		master_div->safe_div = safe_div;
		clk_notifier_register(hw->clk,
				      &clk_master_div_notifier);
	}

	return hw;
}

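/*
 * The ops below handle the sama7g5-style master clocks, which are programmed
 * through the ID-indexed AT91_PMC_MCR_V2 register rather than MCKR.
 */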
static unsigned long
clk_sama7g5_master_recalc_rate(struct clk_hw *hw,
			       unsigned long parent_rate)
{
	struct clk_master *master = to_clk_master(hw);

	return DIV_ROUND_CLOSEST_ULL(parent_rate, (1 << master->div));
}

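/*
 * Rate selection runs in two passes: first every PRES value is tried against
 * the current rate of each parent, then, if a changeable parent (chg_pid) is
 * configured, that parent is also asked for the rate matching each PRES value
 * and the overall closest candidate wins.
 */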
static int clk_sama7g5_master_determine_rate(struct clk_hw *hw,
					     struct clk_rate_request *req)
{
	struct clk_master *master = to_clk_master(hw);
	struct clk_hw *parent;
	long best_rate = LONG_MIN, best_diff = LONG_MIN;
	unsigned long parent_rate;
	unsigned int div, i;

	/* First: check the dividers of MCR. */
	for (i = 0; i < clk_hw_get_num_parents(hw); i++) {
		parent = clk_hw_get_parent_by_index(hw, i);
		if (!parent)
			continue;

		parent_rate = clk_hw_get_rate(parent);
		if (!parent_rate)
			continue;

		for (div = 0; div < MASTER_PRES_MAX + 1; div++) {
			clk_sama7g5_master_best_diff(req, parent, parent_rate,
						     &best_rate, &best_diff,
						     div);
			if (!best_diff)
				break;
		}

		if (!best_diff)
			break;
	}

	/* Second: try to request rate from the changeable parent. */
	if (master->chg_pid < 0)
		goto end;

	parent = clk_hw_get_parent_by_index(hw, master->chg_pid);
	if (!parent)
		goto end;

	for (div = 0; div < MASTER_PRES_MAX + 1; div++) {
		struct clk_rate_request req_parent;
		unsigned long req_rate;

		if (div == MASTER_PRES_MAX)
			req_rate = req->rate * 3;
		else
			req_rate = req->rate << div;

		clk_hw_forward_rate_request(hw, req, parent, &req_parent, req_rate);
		if (__clk_determine_rate(parent, &req_parent))
			continue;

		clk_sama7g5_master_best_diff(req, parent, req_parent.rate,
					     &best_rate, &best_diff, div);

		if (!best_diff)
			break;
	}

end:
	pr_debug("MCK: %s, best_rate = %ld, parent clk: %s @ %ld\n",
		 __func__, best_rate,
		 __clk_get_name((req->best_parent_hw)->clk),
		 req->best_parent_rate);

	if (best_rate < 0)
		return -EINVAL;

	req->rate = best_rate;

	return 0;
}

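/*
 * get_parent()/set_parent() only operate on the cached mux value; the CSS
 * field itself is written by clk_sama7g5_master_set() when the clock is
 * enabled (the clock is registered with CLK_SET_PARENT_GATE).
 */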
static u8 clk_sama7g5_master_get_parent(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	u8 index;

	spin_lock_irqsave(master->lock, flags);
	index = clk_mux_val_to_index(&master->hw, master->mux_table, 0,
				     master->parent);
	spin_unlock_irqrestore(master->lock, flags);

	return index;
}

static int clk_sama7g5_master_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;

	if (index >= clk_hw_get_num_parents(hw))
		return -EINVAL;

	spin_lock_irqsave(master->lock, flags);
	master->parent = clk_mux_index_to_val(master->mux_table, 0, index);
	spin_unlock_irqrestore(master->lock, flags);

	return 0;
}

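/*
 * AT91_PMC_MCR_V2 is banked by master clock ID: write the ID first to select
 * the bank, read the current settings back, then issue a single CMD update
 * with the new CSS/DIV/EN values. The ready bit is only polled when the
 * parent (CSS) actually changes.
 */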
static void clk_sama7g5_master_set(struct clk_master *master,
				   unsigned int status)
{
	unsigned long flags;
	unsigned int val, cparent;
	unsigned int enable = status ? AT91_PMC_MCR_V2_EN : 0;
	unsigned int parent = master->parent << PMC_MCR_CSS_SHIFT;
	unsigned int div = master->div << MASTER_DIV_SHIFT;

	spin_lock_irqsave(master->lock, flags);

	regmap_write(master->regmap, AT91_PMC_MCR_V2,
		     AT91_PMC_MCR_V2_ID(master->id));
	regmap_read(master->regmap, AT91_PMC_MCR_V2, &val);
	regmap_update_bits(master->regmap, AT91_PMC_MCR_V2,
			   enable | AT91_PMC_MCR_V2_CSS | AT91_PMC_MCR_V2_DIV |
			   AT91_PMC_MCR_V2_CMD | AT91_PMC_MCR_V2_ID_MSK,
			   enable | parent | div | AT91_PMC_MCR_V2_CMD |
			   AT91_PMC_MCR_V2_ID(master->id));

	cparent = (val & AT91_PMC_MCR_V2_CSS) >> PMC_MCR_CSS_SHIFT;

	/* Wait here only if parent is being changed. */
	while ((cparent != master->parent) && !clk_master_ready(master))
		cpu_relax();

	spin_unlock_irqrestore(master->lock, flags);
}

static int clk_sama7g5_master_enable(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);

	clk_sama7g5_master_set(master, 1);

	return 0;
}

static void clk_sama7g5_master_disable(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;

	spin_lock_irqsave(master->lock, flags);

	regmap_write(master->regmap, AT91_PMC_MCR_V2, master->id);
	regmap_update_bits(master->regmap, AT91_PMC_MCR_V2,
			   AT91_PMC_MCR_V2_EN | AT91_PMC_MCR_V2_CMD |
			   AT91_PMC_MCR_V2_ID_MSK,
			   AT91_PMC_MCR_V2_CMD |
			   AT91_PMC_MCR_V2_ID(master->id));

	spin_unlock_irqrestore(master->lock, flags);
}

static int clk_sama7g5_master_is_enabled(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	unsigned int val;

	spin_lock_irqsave(master->lock, flags);

	regmap_write(master->regmap, AT91_PMC_MCR_V2, master->id);
	regmap_read(master->regmap, AT91_PMC_MCR_V2, &val);

	spin_unlock_irqrestore(master->lock, flags);

	return !!(val & AT91_PMC_MCR_V2_EN);
}

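/*
 * set_rate() only caches the PRES exponent derived from the requested divider;
 * the register itself is programmed by clk_sama7g5_master_set() when the clock
 * is (re-)enabled or its context is restored.
 */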
static int clk_sama7g5_master_set_rate(struct clk_hw *hw, unsigned long rate,
				       unsigned long parent_rate)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long div, flags;

	div = DIV_ROUND_CLOSEST(parent_rate, rate);
	if ((div > (1 << (MASTER_PRES_MAX - 1))) || (div & (div - 1)))
		return -EINVAL;

	if (div == 3)
		div = MASTER_PRES_MAX;
	else if (div)
		div = ffs(div) - 1;

	spin_lock_irqsave(master->lock, flags);
	master->div = div;
	spin_unlock_irqrestore(master->lock, flags);

	return 0;
}

static int clk_sama7g5_master_save_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);

	master->pms.status = clk_sama7g5_master_is_enabled(hw);

	return 0;
}

static void clk_sama7g5_master_restore_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);

	if (master->pms.status)
		clk_sama7g5_master_set(master, master->pms.status);
}

static const struct clk_ops sama7g5_master_ops = {
	.enable = clk_sama7g5_master_enable,
	.disable = clk_sama7g5_master_disable,
	.is_enabled = clk_sama7g5_master_is_enabled,
	.recalc_rate = clk_sama7g5_master_recalc_rate,
	.determine_rate = clk_sama7g5_master_determine_rate,
	.set_rate = clk_sama7g5_master_set_rate,
	.get_parent = clk_sama7g5_master_get_parent,
	.set_parent = clk_sama7g5_master_set_parent,
	.save_context = clk_sama7g5_master_save_context,
	.restore_context = clk_sama7g5_master_restore_context,
};

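/*
 * sama7g5 master clock registration: the initial parent and divider are read
 * back from the MCR_V2 bank selected by the clock ID so the registered clock
 * starts out matching the current hardware configuration.
 */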
struct clk_hw * __init
at91_clk_sama7g5_register_master(struct regmap *regmap,
				 const char *name, int num_parents,
				 const char **parent_names,
				 struct clk_hw **parent_hws,
				 u32 *mux_table,
				 spinlock_t *lock, u8 id,
				 bool critical, int chg_pid)
{
	struct clk_master *master;
	struct clk_hw *hw;
	struct clk_init_data init = {};
	unsigned long flags;
	unsigned int val;
	int ret;

	if (!name || !num_parents || !(parent_names || parent_hws) || !mux_table ||
	    !lock || id > MASTER_MAX_ID)
		return ERR_PTR(-EINVAL);

	master = kzalloc(sizeof(*master), GFP_KERNEL);
	if (!master)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.ops = &sama7g5_master_ops;
	if (parent_hws)
		init.parent_hws = (const struct clk_hw **)parent_hws;
	else
		init.parent_names = parent_names;
	init.num_parents = num_parents;
	init.flags = CLK_SET_RATE_GATE | CLK_SET_PARENT_GATE;
	if (chg_pid >= 0)
		init.flags |= CLK_SET_RATE_PARENT;
	if (critical)
		init.flags |= CLK_IS_CRITICAL;

	master->hw.init = &init;
	master->regmap = regmap;
	master->id = id;
	master->chg_pid = chg_pid;
	master->lock = lock;
	master->mux_table = mux_table;

	spin_lock_irqsave(master->lock, flags);
	regmap_write(master->regmap, AT91_PMC_MCR_V2, master->id);
	regmap_read(master->regmap, AT91_PMC_MCR_V2, &val);
	master->parent = (val & AT91_PMC_MCR_V2_CSS) >> PMC_MCR_CSS_SHIFT;
	master->div = (val & AT91_PMC_MCR_V2_DIV) >> MASTER_DIV_SHIFT;
	spin_unlock_irqrestore(master->lock, flags);

	hw = &master->hw;
	ret = clk_hw_register(NULL, &master->hw);
	if (ret) {
		kfree(master);
		hw = ERR_PTR(ret);
	}

	return hw;
}

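/*
 * MCKR field layouts: "mask" covers the CSS/PRES/MDIV bits handled by this
 * driver and "pres_shift" gives the position of the PRES field, which differs
 * between the at91rm9200 and at91sam9x5 register maps.
 */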
const struct clk_master_layout at91rm9200_master_layout = {
	.mask = 0x31F,
	.pres_shift = 2,
	.offset = AT91_PMC_MCKR,
};

const struct clk_master_layout at91sam9x5_master_layout = {
	.mask = 0x373,
	.pres_shift = 4,
	.offset = AT91_PMC_MCKR,
};