// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (c) 2013, 2018, The Linux Foundation. All rights reserved.
 */
5
6#include <linux/kernel.h>
7#include <linux/bitops.h>
8#include <linux/err.h>
9#include <linux/bug.h>
10#include <linux/export.h>
11#include <linux/clk-provider.h>
12#include <linux/delay.h>
13#include <linux/regmap.h>
14#include <linux/math64.h>
15
16#include <asm/div64.h>
17
18#include "clk-rcg.h"
19#include "common.h"
20
/* CMD register: update/root-enable handshake and dirty-status bits. */
#define CMD_REG			0x0
#define CMD_UPDATE		BIT(0)
#define CMD_ROOT_EN		BIT(1)
#define CMD_DIRTY_CFG		BIT(4)
#define CMD_DIRTY_N		BIT(5)
#define CMD_DIRTY_M		BIT(6)
#define CMD_DIRTY_D		BIT(7)
#define CMD_ROOT_OFF		BIT(31)

/* CFG register: source select, pre-divider and M/N mode fields. */
#define CFG_REG			0x4
#define CFG_SRC_DIV_SHIFT	0
#define CFG_SRC_SEL_SHIFT	8
#define CFG_SRC_SEL_MASK	(0x7 << CFG_SRC_SEL_SHIFT)
#define CFG_MODE_SHIFT		12
#define CFG_MODE_MASK		(0x3 << CFG_MODE_SHIFT)
#define CFG_MODE_DUAL_EDGE	(0x2 << CFG_MODE_SHIFT)
#define CFG_HW_CLK_CTRL_MASK	BIT(20)

/* M/N/D counter registers (N and D are stored in inverted form). */
#define M_REG			0x8
#define N_REG			0xc
#define D_REG			0x10

/* Rounding policy when matching a requested rate against the freq table. */
enum freq_policy {
	FLOOR,	/* round down to the nearest supported rate */
	CEIL,	/* round up to the nearest supported rate */
};
47
48static int clk_rcg2_is_enabled(struct clk_hw *hw)
49{
50 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
51 u32 cmd;
52 int ret;
53
54 ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
55 if (ret)
56 return ret;
57
58 return (cmd & CMD_ROOT_OFF) == 0;
59}
60
61static u8 clk_rcg2_get_parent(struct clk_hw *hw)
62{
63 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
64 int num_parents = clk_hw_get_num_parents(hw);
65 u32 cfg;
66 int i, ret;
67
68 ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
69 if (ret)
70 goto err;
71
72 cfg &= CFG_SRC_SEL_MASK;
73 cfg >>= CFG_SRC_SEL_SHIFT;
74
75 for (i = 0; i < num_parents; i++)
76 if (cfg == rcg->parent_map[i].cfg)
77 return i;
78
79err:
80 pr_debug("%s: Clock %s has invalid parent, using default.\n",
81 __func__, clk_hw_get_name(hw));
82 return 0;
83}
84
/*
 * Latch the shadow configuration (CFG/M/N/D) into the running state.
 *
 * Sets CMD_UPDATE and polls (up to ~500us) for the hardware to clear it,
 * which indicates the new configuration has taken effect.
 *
 * Returns 0 on success or a regmap error code.  NOTE(review): on timeout
 * this WARNs but still returns 0 — looks like a deliberate best-effort
 * choice so callers don't treat a stuck update as fatal; confirm before
 * changing.
 */
static int update_config(struct clk_rcg2 *rcg)
{
	int count, ret;
	u32 cmd;
	struct clk_hw *hw = &rcg->clkr.hw;
	const char *name = clk_hw_get_name(hw);

	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				 CMD_UPDATE, CMD_UPDATE);
	if (ret)
		return ret;

	/* Wait for update to take effect */
	for (count = 500; count > 0; count--) {
		ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
		if (ret)
			return ret;
		if (!(cmd & CMD_UPDATE))
			return 0;
		udelay(1);
	}

	WARN(1, "%s: rcg didn't update its configuration.", name);
	return 0;
}
110
111static int clk_rcg2_set_parent(struct clk_hw *hw, u8 index)
112{
113 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
114 int ret;
115 u32 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
116
117 ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
118 CFG_SRC_SEL_MASK, cfg);
119 if (ret)
120 return ret;
121
122 return update_config(rcg);
123}
124
/*
 * Calculate m/n:d rate
 *
 *          parent_rate     m
 *   rate = ----------- x  ---
 *            hid_div       n
 *
 * @hid_div is the raw register field: the effective half-integer divider
 * is (hid_div + 1) / 2, hence the "rate *= 2" below.  A @hid_div of 0
 * means bypass (no division).  Non-zero @mode means the M/N counter is
 * active and scales the result by m/n (64-bit intermediate to avoid
 * overflow).
 */
static unsigned long
calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 hid_div)
{
	if (hid_div) {
		rate *= 2;
		rate /= hid_div + 1;
	}

	if (mode) {
		u64 tmp = rate;
		tmp *= m;
		do_div(tmp, n);
		rate = tmp;
	}

	return rate;
}
149
/*
 * Compute the current output rate by decoding CFG and (when present) the
 * M/N counter registers.
 *
 * The hardware stores N as ~(n - m), so the true n is recovered below by
 * inverting, masking to mnd_width and adding m back.  When mnd_width is 0
 * the RCG has no M/N counter and only the pre-divider applies.
 */
static unsigned long
clk_rcg2_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg, hid_div, m = 0, n = 0, mode = 0, mask;

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);

	if (rcg->mnd_width) {
		mask = BIT(rcg->mnd_width) - 1;
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + M_REG, &m);
		m &= mask;
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + N_REG, &n);
		/* N_REG holds ~(n - m): undo the inversion, then add m. */
		n = ~n;
		n &= mask;
		n += m;
		mode = cfg & CFG_MODE_MASK;
		mode >>= CFG_MODE_SHIFT;
	}

	mask = BIT(rcg->hid_width) - 1;
	hid_div = cfg >> CFG_SRC_DIV_SHIFT;
	hid_div &= mask;

	return calc_rate(parent_rate, m, n, mode, hid_div);
}
176
177static int _freq_tbl_determine_rate(struct clk_hw *hw, const struct freq_tbl *f,
178 struct clk_rate_request *req,
179 enum freq_policy policy)
180{
181 unsigned long clk_flags, rate = req->rate;
182 struct clk_hw *p;
183 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
184 int index;
185
186 switch (policy) {
187 case FLOOR:
188 f = qcom_find_freq_floor(f, rate);
189 break;
190 case CEIL:
191 f = qcom_find_freq(f, rate);
192 break;
193 default:
194 return -EINVAL;
195 };
196
197 if (!f)
198 return -EINVAL;
199
200 index = qcom_find_src_index(hw, rcg->parent_map, f->src);
201 if (index < 0)
202 return index;
203
204 clk_flags = clk_hw_get_flags(hw);
205 p = clk_hw_get_parent_by_index(hw, index);
206 if (clk_flags & CLK_SET_RATE_PARENT) {
207 rate = f->freq;
208 if (f->pre_div) {
209 if (!rate)
210 rate = req->rate;
211 rate /= 2;
212 rate *= f->pre_div + 1;
213 }
214
215 if (f->n) {
216 u64 tmp = rate;
217 tmp = tmp * f->n;
218 do_div(tmp, f->m);
219 rate = tmp;
220 }
221 } else {
222 rate = clk_hw_get_rate(p);
223 }
224 req->best_parent_hw = p;
225 req->best_parent_rate = rate;
226 req->rate = f->freq;
227
228 return 0;
229}
230
231static int clk_rcg2_determine_rate(struct clk_hw *hw,
232 struct clk_rate_request *req)
233{
234 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
235
236 return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, CEIL);
237}
238
239static int clk_rcg2_determine_floor_rate(struct clk_hw *hw,
240 struct clk_rate_request *req)
241{
242 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
243
244 return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, FLOOR);
245}
246
/*
 * Write the CFG/M/N/D registers for @f without latching an update cycle.
 *
 * N is stored as ~(n - m) and D as ~n (the hardware's inverted encoding).
 * Dual-edge mode is selected whenever an M/N ratio other than 1:1 is in
 * use.  CFG_HW_CLK_CTRL_MASK is part of @mask, so hardware clock control
 * is always cleared by this write.
 *
 * Returns 0 on success or a negative errno.
 */
static int __clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
{
	u32 cfg, mask;
	struct clk_hw *hw = &rcg->clkr.hw;
	int ret, index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	if (index < 0)
		return index;

	if (rcg->mnd_width && f->n) {
		mask = BIT(rcg->mnd_width) - 1;
		ret = regmap_update_bits(rcg->clkr.regmap,
					 rcg->cmd_rcgr + M_REG, mask, f->m);
		if (ret)
			return ret;

		ret = regmap_update_bits(rcg->clkr.regmap,
					 rcg->cmd_rcgr + N_REG, mask, ~(f->n - f->m));
		if (ret)
			return ret;

		ret = regmap_update_bits(rcg->clkr.regmap,
					 rcg->cmd_rcgr + D_REG, mask, ~f->n);
		if (ret)
			return ret;
	}

	mask = BIT(rcg->hid_width) - 1;
	mask |= CFG_SRC_SEL_MASK | CFG_MODE_MASK | CFG_HW_CLK_CTRL_MASK;
	cfg = f->pre_div << CFG_SRC_DIV_SHIFT;
	cfg |= rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
	if (rcg->mnd_width && f->n && (f->m != f->n))
		cfg |= CFG_MODE_DUAL_EDGE;

	return regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				  mask, cfg);
}
284
/* Program @f into the RCG registers, then latch it with an update cycle. */
static int clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
{
	int ret = __clk_rcg2_configure(rcg, f);

	return ret ? ret : update_config(rcg);
}
295
296static int __clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
297 enum freq_policy policy)
298{
299 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
300 const struct freq_tbl *f;
301
302 switch (policy) {
303 case FLOOR:
304 f = qcom_find_freq_floor(rcg->freq_tbl, rate);
305 break;
306 case CEIL:
307 f = qcom_find_freq(rcg->freq_tbl, rate);
308 break;
309 default:
310 return -EINVAL;
311 };
312
313 if (!f)
314 return -EINVAL;
315
316 return clk_rcg2_configure(rcg, f);
317}
318
/* clk_ops.set_rate for standard RCGs: rounds the rate up (CEIL). */
static int clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
			     unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate, CEIL);
}
324
/* clk_ops.set_rate for floor-ops RCGs: rounds the rate down (FLOOR). */
static int clk_rcg2_set_floor_rate(struct clk_hw *hw, unsigned long rate,
				   unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate, FLOOR);
}
330
/*
 * clk_ops.set_rate_and_parent (CEIL): the parent is implied by the chosen
 * freq table entry, so @index is unused and this reduces to a set_rate.
 */
static int clk_rcg2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_rcg2_set_rate(hw, rate, CEIL);
}
336
/*
 * clk_ops.set_rate_and_parent (FLOOR): the parent is implied by the chosen
 * freq table entry, so @index is unused and this reduces to a set_rate.
 */
static int clk_rcg2_set_floor_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_rcg2_set_rate(hw, rate, FLOOR);
}
342
/* Standard RCG2 ops: rates are rounded up to the nearest table entry. */
const struct clk_ops clk_rcg2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_set_rate,
	.set_rate_and_parent = clk_rcg2_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_ops);
353
/* Floor RCG2 ops: rates are rounded down to the nearest table entry. */
const struct clk_ops clk_rcg2_floor_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_floor_rate,
	.set_rate = clk_rcg2_set_floor_rate,
	.set_rate_and_parent = clk_rcg2_set_floor_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_floor_ops);
364
/* One m/n fraction used to derive pixel-class rates from a fixed source. */
struct frac_entry {
	int num;	/* numerator (m) */
	int den;	/* denominator (n) */
};
369
/* M/N fractions for a 675 MHz source; zero-terminated. */
static const struct frac_entry frac_table_675m[] = {	/* link rate of 270M */
	{ 52, 295 },	/* 119 M */
	{ 11, 57 },	/* 130.25 M */
	{ 63, 307 },	/* 138.50 M */
	{ 11, 50 },	/* 148.50 M */
	{ 47, 206 },	/* 154 M */
	{ 31, 100 },	/* 205.25 M */
	{ 107, 269 },	/* 268.50 M */
	{ },
};
380
381static struct frac_entry frac_table_810m[] = { /* Link rate of 162M */
382 { 31, 211 }, /* 119 M */
383 { 32, 199 }, /* 130.25 M */
384 { 63, 307 }, /* 138.50 M */
385 { 11, 60 }, /* 148.50 M */
386 { 50, 263 }, /* 154 M */
387 { 31, 120 }, /* 205.25 M */
388 { 119, 359 }, /* 268.50 M */
389 { },
390};
391
/*
 * Set an eDP pixel clock rate by picking an m/n fraction that maps the
 * fixed parent (810 MHz or 675 MHz) onto @rate within +/- 100 kHz.
 *
 * The current pre-divider is read back from CFG and preserved; only the
 * M/N fraction is chosen here.  Returns -EINVAL if no fraction fits.
 */
static int clk_edp_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
				  unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;		/* match tolerance in Hz */
	s64 src_rate = parent_rate;
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;

	if (src_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		/* Parent rate this fraction would need to hit @rate. */
		request = rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((src_rate < (request - delta)) ||
		    (src_rate > (request + delta)))
			continue;

		/* Keep the currently-programmed pre-divider. */
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}

	return -EINVAL;
}
430
/* clk_ops.set_rate_and_parent for eDP pixel: @index is ignored. */
static int clk_edp_pixel_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Parent index is set statically in frequency table */
	return clk_edp_pixel_set_rate(hw, rate, parent_rate);
}
437
/*
 * determine_rate for eDP pixel clocks: force the table's fixed parent,
 * then find an m/n fraction that reproduces req->rate from that parent
 * within +/- 100 kHz.  The currently-programmed pre-divider is read back
 * and factored into the resulting rate.  Returns -EINVAL if no fraction
 * fits.
 */
static int clk_edp_pixel_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;		/* match tolerance in Hz */
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	/* Force the correct parent */
	req->best_parent_hw = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = clk_hw_get_rate(req->best_parent_hw);

	if (req->best_parent_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		/* Parent rate this fraction would need to hit req->rate. */
		request = req->rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((req->best_parent_rate < (request - delta)) ||
		    (req->best_parent_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		hid_div >>= CFG_SRC_DIV_SHIFT;
		hid_div &= mask;

		req->rate = calc_rate(req->best_parent_rate,
				      frac->num, frac->den,
				      !!frac->den, hid_div);
		return 0;
	}

	return -EINVAL;
}
480
/* Ops for eDP pixel-clock RCGs (fixed parent, fractional m/n matching). */
const struct clk_ops clk_edp_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_edp_pixel_set_rate,
	.set_rate_and_parent = clk_edp_pixel_set_rate_and_parent,
	.determine_rate = clk_edp_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_edp_pixel_ops);
491
492static int clk_byte_determine_rate(struct clk_hw *hw,
493 struct clk_rate_request *req)
494{
495 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
496 const struct freq_tbl *f = rcg->freq_tbl;
497 int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
498 unsigned long parent_rate, div;
499 u32 mask = BIT(rcg->hid_width) - 1;
500 struct clk_hw *p;
501
502 if (req->rate == 0)
503 return -EINVAL;
504
505 req->best_parent_hw = p = clk_hw_get_parent_by_index(hw, index);
506 req->best_parent_rate = parent_rate = clk_hw_round_rate(p, req->rate);
507
508 div = DIV_ROUND_UP((2 * parent_rate), req->rate) - 1;
509 div = min_t(u32, div, mask);
510
511 req->rate = calc_rate(parent_rate, 0, 0, 0, div);
512
513 return 0;
514}
515
516static int clk_byte_set_rate(struct clk_hw *hw, unsigned long rate,
517 unsigned long parent_rate)
518{
519 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
520 struct freq_tbl f = *rcg->freq_tbl;
521 unsigned long div;
522 u32 mask = BIT(rcg->hid_width) - 1;
523
524 div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
525 div = min_t(u32, div, mask);
526
527 f.pre_div = div;
528
529 return clk_rcg2_configure(rcg, &f);
530}
531
/* clk_ops.set_rate_and_parent for byte clocks: @index is ignored. */
static int clk_byte_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Parent index is set statically in frequency table */
	return clk_byte_set_rate(hw, rate, parent_rate);
}
538
/* Ops for DSI byte-clock RCGs (fixed parent from the freq table). */
const struct clk_ops clk_byte_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte_set_rate,
	.set_rate_and_parent = clk_byte_set_rate_and_parent,
	.determine_rate = clk_byte_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte_ops);
549
550static int clk_byte2_determine_rate(struct clk_hw *hw,
551 struct clk_rate_request *req)
552{
553 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
554 unsigned long parent_rate, div;
555 u32 mask = BIT(rcg->hid_width) - 1;
556 struct clk_hw *p;
557 unsigned long rate = req->rate;
558
559 if (rate == 0)
560 return -EINVAL;
561
562 p = req->best_parent_hw;
563 req->best_parent_rate = parent_rate = clk_hw_round_rate(p, rate);
564
565 div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
566 div = min_t(u32, div, mask);
567
568 req->rate = calc_rate(parent_rate, 0, 0, 0, div);
569
570 return 0;
571}
572
/*
 * set_rate for byte2 clocks: compute the half-integer divider, then read
 * the currently-selected source back from CFG so the freq_tbl entry keeps
 * the active parent.  Returns -EINVAL if the hardware source value does
 * not appear in parent_map.
 */
static int clk_byte2_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	unsigned long div;
	int i, num_parents = clk_hw_get_num_parents(hw);
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 cfg;

	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	f.pre_div = div;

	/* Preserve the currently-programmed source selection. */
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++) {
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			return clk_rcg2_configure(rcg, &f);
		}
	}

	return -EINVAL;
}
601
/* clk_ops.set_rate_and_parent for byte2 clocks: @index is ignored. */
static int clk_byte2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Read the hardware to determine parent during set_rate */
	return clk_byte2_set_rate(hw, rate, parent_rate);
}
608
/* Ops for byte2-clock RCGs (parent read back from hardware on set_rate). */
const struct clk_ops clk_byte2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte2_set_rate,
	.set_rate_and_parent = clk_byte2_set_rate_and_parent,
	.determine_rate = clk_byte2_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte2_ops);
619
/* Candidate m/n fractions tried for pixel clocks; zero-terminated. */
static const struct frac_entry frac_table_pixel[] = {
	{ 3, 8 },
	{ 2, 9 },
	{ 4, 9 },
	{ 1, 1 },
	{ }
};
627
628static int clk_pixel_determine_rate(struct clk_hw *hw,
629 struct clk_rate_request *req)
630{
631 unsigned long request, src_rate;
632 int delta = 100000;
633 const struct frac_entry *frac = frac_table_pixel;
634
635 for (; frac->num; frac++) {
636 request = (req->rate * frac->den) / frac->num;
637
638 src_rate = clk_hw_round_rate(req->best_parent_hw, request);
639 if ((src_rate < (request - delta)) ||
640 (src_rate > (request + delta)))
641 continue;
642
643 req->best_parent_rate = src_rate;
644 req->rate = (src_rate * frac->num) / frac->den;
645 return 0;
646 }
647
648 return -EINVAL;
649}
650
/*
 * set_rate for pixel clocks: keep the currently-selected source and
 * pre-divider, and pick the first m/n fraction for which @parent_rate
 * is within +/- 100 kHz of the rate the fraction would require.
 *
 * Returns -EINVAL if no fraction fits.  Note: if the hardware source
 * value is not found in parent_map, f.src is left at 0 and configuration
 * proceeds — presumably the table always covers the active source.
 */
static int clk_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	const struct frac_entry *frac = frac_table_pixel;
	unsigned long request;
	int delta = 100000;		/* match tolerance in Hz */
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div, cfg;
	int i, num_parents = clk_hw_get_num_parents(hw);

	/* Preserve the currently-programmed source selection. */
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++)
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			break;
		}

	for (; frac->num; frac++) {
		/* Parent rate this fraction would need to hit @rate. */
		request = (rate * frac->den) / frac->num;

		if ((parent_rate < (request - delta)) ||
		    (parent_rate > (request + delta)))
			continue;

		/* Keep the currently-programmed pre-divider. */
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}
	return -EINVAL;
}
692
/* clk_ops.set_rate_and_parent for pixel clocks: @index is ignored. */
static int clk_pixel_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate, u8 index)
{
	return clk_pixel_set_rate(hw, rate, parent_rate);
}
698
/* Ops for pixel-clock RCGs (fractional m/n matching against the parent). */
const struct clk_ops clk_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_pixel_set_rate,
	.set_rate_and_parent = clk_pixel_set_rate_and_parent,
	.determine_rate = clk_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_pixel_ops);
709
/*
 * determine_rate for the GFX3D RCG, which ping-pongs between two
 * programmable PLLs (p2/p8) so the GPU PLL can be reprogrammed glitch-
 * free, with a third fixed-rate PLL (p9) for its maximum rate and XO for
 * the minimum.
 *
 * Parent selection: XO if the request equals the XO rate; p9 if the
 * (capped) request equals p9's fixed rate; otherwise pick whichever of
 * p2/p8 is NOT currently in use — except that when coming from p9 we may
 * return to p8 directly if it already runs at the requested rate.
 *
 * NOTE(review): parent indices 0/2/3/4 are assumed to be XO/PLL9/PLL2/
 * PLL8 per the SoC's parent map — confirm against the clock data using
 * these ops.
 */
static int clk_gfx3d_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	struct clk_rate_request parent_req = { };
	struct clk_hw *p2, *p8, *p9, *xo;
	unsigned long p9_rate;
	int ret;

	xo = clk_hw_get_parent_by_index(hw, 0);
	if (req->rate == clk_hw_get_rate(xo)) {
		req->best_parent_hw = xo;
		return 0;
	}

	p9 = clk_hw_get_parent_by_index(hw, 2);
	p2 = clk_hw_get_parent_by_index(hw, 3);
	p8 = clk_hw_get_parent_by_index(hw, 4);

	/* PLL9 is a fixed rate PLL */
	p9_rate = clk_hw_get_rate(p9);

	parent_req.rate = req->rate = min(req->rate, p9_rate);
	if (req->rate == p9_rate) {
		req->rate = req->best_parent_rate = p9_rate;
		req->best_parent_hw = p9;
		return 0;
	}

	if (req->best_parent_hw == p9) {
		/* Are we going back to a previously used rate? */
		if (clk_hw_get_rate(p8) == req->rate)
			req->best_parent_hw = p8;
		else
			req->best_parent_hw = p2;
	} else if (req->best_parent_hw == p8) {
		req->best_parent_hw = p2;
	} else {
		req->best_parent_hw = p8;
	}

	ret = __clk_determine_rate(req->best_parent_hw, &parent_req);
	if (ret)
		return ret;

	req->rate = req->best_parent_rate = parent_req.rate;

	return 0;
}
758
759static int clk_gfx3d_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
760 unsigned long parent_rate, u8 index)
761{
762 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
763 u32 cfg;
764 int ret;
765
766 /* Just mux it, we don't use the division or m/n hardware */
767 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
768 ret = regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
769 if (ret)
770 return ret;
771
772 return update_config(rcg);
773}
774
/* clk_ops.set_rate stub for GFX3D; see comment below for why it's empty. */
static int clk_gfx3d_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	/*
	 * We should never get here; clk_gfx3d_determine_rate() should always
	 * make us use a different parent than what we're currently using, so
	 * clk_gfx3d_set_rate_and_parent() should always be called.
	 */
	return 0;
}
785
/* Ops for the GFX3D RCG (mux-only, PLL ping-pong via determine_rate). */
const struct clk_ops clk_gfx3d_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_gfx3d_set_rate,
	.set_rate_and_parent = clk_gfx3d_set_rate_and_parent,
	.determine_rate = clk_gfx3d_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_gfx3d_ops);
796
797static int clk_rcg2_set_force_enable(struct clk_hw *hw)
798{
799 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
800 const char *name = clk_hw_get_name(hw);
801 int ret, count;
802
803 ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
804 CMD_ROOT_EN, CMD_ROOT_EN);
805 if (ret)
806 return ret;
807
808 /* wait for RCG to turn ON */
809 for (count = 500; count > 0; count--) {
810 if (clk_rcg2_is_enabled(hw))
811 return 0;
812
813 udelay(1);
814 }
815
816 pr_err("%s: RCG did not turn on\n", name);
817 return -ETIMEDOUT;
818}
819
/* Drop the CMD_ROOT_EN software override set by clk_rcg2_set_force_enable(). */
static int clk_rcg2_clear_force_enable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
					CMD_ROOT_EN, 0);
}
827
/*
 * Reconfigure a shared RCG while it is guaranteed to be running: force
 * the root on, program @f (including the update cycle), then drop the
 * force-enable override.  Stops at the first failing step.
 */
static int
clk_rcg2_shared_force_enable_clear(struct clk_hw *hw, const struct freq_tbl *f)
{
	int ret = clk_rcg2_set_force_enable(hw);

	if (!ret)
		ret = clk_rcg2_configure(to_clk_rcg2(hw), f);
	if (!ret)
		ret = clk_rcg2_clear_force_enable(hw);

	return ret;
}
844
/*
 * set_rate for shared RCGs (RCGs also controlled by another master such
 * as a DSP).  While the clock is disabled only the shadow registers are
 * written; the update bit is deferred to clk_rcg2_shared_enable().
 */
static int clk_rcg2_shared_set_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f;

	f = qcom_find_freq(rcg->freq_tbl, rate);
	if (!f)
		return -EINVAL;

	/*
	 * In case clock is disabled, update the CFG, M, N and D registers
	 * and don't hit the update bit of CMD register.
	 */
	if (!__clk_is_enabled(hw->clk))
		return __clk_rcg2_configure(rcg, f);

	return clk_rcg2_shared_force_enable_clear(hw, f);
}
864
/* clk_ops.set_rate_and_parent for shared RCGs: @index is ignored. */
static int clk_rcg2_shared_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return clk_rcg2_shared_set_rate(hw, rate, parent_rate);
}
870
/*
 * enable for shared RCGs: force the root on, latch the configuration
 * that clk_rcg2_shared_set_rate() left in the shadow registers, then
 * drop the force-enable override.
 */
static int clk_rcg2_shared_enable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;

	/*
	 * Set the update bit because required configuration has already
	 * been written in clk_rcg2_shared_set_rate()
	 */
	ret = clk_rcg2_set_force_enable(hw);
	if (ret)
		return ret;

	ret = update_config(rcg);
	if (ret)
		return ret;

	return clk_rcg2_clear_force_enable(hw);
}
890
/*
 * disable for shared RCGs: park the RCG on its always-on safe source so
 * another master can keep using it, then restore the rate configuration
 * to the shadow CFG register for the next enable.
 */
static void clk_rcg2_shared_disable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg;

	/*
	 * Store current configuration as switching to safe source would clear
	 * the SRC and DIV of CFG register
	 */
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);

	/*
	 * Park the RCG at a safe configuration - sourced off of safe source.
	 * Force enable and disable the RCG while configuring it to safeguard
	 * against any update signal coming from the downstream clock.
	 * The current parent is still prepared and enabled at this point, and
	 * the safe source is always on while application processor subsystem
	 * is online. Therefore, the RCG can safely switch its parent.
	 */
	clk_rcg2_set_force_enable(hw);

	regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
		     rcg->safe_src_index << CFG_SRC_SEL_SHIFT);

	update_config(rcg);

	clk_rcg2_clear_force_enable(hw);

	/* Write back the stored configuration corresponding to current rate */
	regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
}
922
/* Ops for shared RCGs: deferred update on enable, safe-source parking. */
const struct clk_ops clk_rcg2_shared_ops = {
	.enable = clk_rcg2_shared_enable,
	.disable = clk_rcg2_shared_disable,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_shared_set_rate,
	.set_rate_and_parent = clk_rcg2_shared_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_shared_ops);