// SPDX-License-Identifier: (GPL-2.0 OR MIT)
/*
 * Copyright (c) 2018 BayLibre, SAS.
 * Author: Jerome Brunet <jbrunet@baylibre.com>
 *
 * Sample clock generator divider:
 * This HW divider gates with value 0 but is otherwise a zero based divider:
 *
 * val >= 1
 * divider = val + 1
 *
 * The duty cycle may also be set for the LR clock variant. The duty cycle
 * ratio is:
 *
 * hi = [0 - val]
 * duty_cycle = (1 + hi) / (1 + val)
 */
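/*
 * Worked example of the relations above: a register value of val = 7
 * gives a divider of 8; programming hi = 3 on the LR variant then gives
 * a duty cycle of (1 + 3) / (1 + 7) = 4/8 = 50%. Writing 0 does not
 * divide at all, it gates the clock.
 */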

#include <linux/clk-provider.h>
#include <linux/module.h>

#include "clk-regmap.h"
#include "sclk-div.h"

static inline struct meson_sclk_div_data *
meson_sclk_div_data(struct clk_regmap *clk)
{
	return (struct meson_sclk_div_data *)clk->data;
}

static int sclk_div_maxval(struct meson_sclk_div_data *sclk)
{
	return (1 << sclk->div.width) - 1;
}

static int sclk_div_maxdiv(struct meson_sclk_div_data *sclk)
{
	return sclk_div_maxval(sclk) + 1;
}

static int sclk_div_getdiv(struct clk_hw *hw, unsigned long rate,
			   unsigned long prate, int maxdiv)
{
	int div = DIV_ROUND_CLOSEST_ULL((u64)prate, rate);

	return clamp(div, 2, maxdiv);
}

static int sclk_div_bestdiv(struct clk_hw *hw, unsigned long rate,
			    unsigned long *prate,
			    struct meson_sclk_div_data *sclk)
{
	struct clk_hw *parent = clk_hw_get_parent(hw);
	int bestdiv = 0, i;
	unsigned long maxdiv, now, parent_now;
	unsigned long best = 0, best_parent = 0;

	if (!rate)
		rate = 1;

	maxdiv = sclk_div_maxdiv(sclk);

	if (!(clk_hw_get_flags(hw) & CLK_SET_RATE_PARENT))
		return sclk_div_getdiv(hw, rate, *prate, maxdiv);

	/*
	 * The maximum divider we can use without overflowing
	 * unsigned long in rate * i below
	 */
	maxdiv = min(ULONG_MAX / rate, maxdiv);

	for (i = 2; i <= maxdiv; i++) {
		/*
		 * It's the most ideal case if the requested rate can be
		 * divided from parent clock without needing to change
		 * parent rate, so return the divider immediately.
		 */
		if (rate * i == *prate)
			return i;

		parent_now = clk_hw_round_rate(parent, rate * i);
		now = DIV_ROUND_UP_ULL((u64)parent_now, i);

		if (abs(rate - now) < abs(rate - best)) {
			bestdiv = i;
			best = now;
			best_parent = parent_now;
		}
	}

	if (!bestdiv)
		bestdiv = sclk_div_maxdiv(sclk);
	else
		*prate = best_parent;

	return bestdiv;
}

static long sclk_div_round_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long *prate)
{
	struct clk_regmap *clk = to_clk_regmap(hw);
	struct meson_sclk_div_data *sclk = meson_sclk_div_data(clk);
	int div;

	div = sclk_div_bestdiv(hw, rate, prate, sclk);

	return DIV_ROUND_UP_ULL((u64)*prate, div);
}

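/*
 * Translate the cached duty cycle ratio into the 'hi' field. With the
 * zero based convention above, e.g. cached_div = 8 and a requested
 * ratio of 1/2, hi is round(8 * 1 / 2) - 1 = 3, which the LR variant
 * outputs as (1 + 3) / (1 + 7) = 50%.
 */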
static void sclk_apply_ratio(struct clk_regmap *clk,
			     struct meson_sclk_div_data *sclk)
{
	unsigned int hi = DIV_ROUND_CLOSEST(sclk->cached_div *
					    sclk->cached_duty.num,
					    sclk->cached_duty.den);

	if (hi)
		hi -= 1;

	meson_parm_write(clk->map, &sclk->hi, hi);
}

static int sclk_div_set_duty_cycle(struct clk_hw *hw,
				   struct clk_duty *duty)
{
	struct clk_regmap *clk = to_clk_regmap(hw);
	struct meson_sclk_div_data *sclk = meson_sclk_div_data(clk);

	if (MESON_PARM_APPLICABLE(&sclk->hi)) {
		memcpy(&sclk->cached_duty, duty, sizeof(*duty));
		sclk_apply_ratio(clk, sclk);
	}

	return 0;
}

static int sclk_div_get_duty_cycle(struct clk_hw *hw,
				   struct clk_duty *duty)
{
	struct clk_regmap *clk = to_clk_regmap(hw);
	struct meson_sclk_div_data *sclk = meson_sclk_div_data(clk);
	int hi;

	if (!MESON_PARM_APPLICABLE(&sclk->hi)) {
		duty->num = 1;
		duty->den = 2;
		return 0;
	}

	hi = meson_parm_read(clk->map, &sclk->hi);
	duty->num = hi + 1;
	duty->den = sclk->cached_div;
	return 0;
}

static void sclk_apply_divider(struct clk_regmap *clk,
			       struct meson_sclk_div_data *sclk)
{
	if (MESON_PARM_APPLICABLE(&sclk->hi))
		sclk_apply_ratio(clk, sclk);

	meson_parm_write(clk->map, &sclk->div, sclk->cached_div - 1);
}

static int sclk_div_set_rate(struct clk_hw *hw, unsigned long rate,
			     unsigned long prate)
{
	struct clk_regmap *clk = to_clk_regmap(hw);
	struct meson_sclk_div_data *sclk = meson_sclk_div_data(clk);
	unsigned long maxdiv = sclk_div_maxdiv(sclk);

	sclk->cached_div = sclk_div_getdiv(hw, rate, prate, maxdiv);

	if (clk_hw_is_enabled(hw))
		sclk_apply_divider(clk, sclk);

	return 0;
}

static unsigned long sclk_div_recalc_rate(struct clk_hw *hw,
					  unsigned long prate)
{
	struct clk_regmap *clk = to_clk_regmap(hw);
	struct meson_sclk_div_data *sclk = meson_sclk_div_data(clk);

	return DIV_ROUND_UP_ULL((u64)prate, sclk->cached_div);
}

static int sclk_div_enable(struct clk_hw *hw)
{
	struct clk_regmap *clk = to_clk_regmap(hw);
	struct meson_sclk_div_data *sclk = meson_sclk_div_data(clk);

	sclk_apply_divider(clk, sclk);

	return 0;
}

static void sclk_div_disable(struct clk_hw *hw)
{
	struct clk_regmap *clk = to_clk_regmap(hw);
	struct meson_sclk_div_data *sclk = meson_sclk_div_data(clk);

	meson_parm_write(clk->map, &sclk->div, 0);
}

static int sclk_div_is_enabled(struct clk_hw *hw)
{
	struct clk_regmap *clk = to_clk_regmap(hw);
	struct meson_sclk_div_data *sclk = meson_sclk_div_data(clk);

	if (meson_parm_read(clk->map, &sclk->div))
		return 1;

	return 0;
}

static int sclk_div_init(struct clk_hw *hw)
{
	struct clk_regmap *clk = to_clk_regmap(hw);
	struct meson_sclk_div_data *sclk = meson_sclk_div_data(clk);
	unsigned int val;

	val = meson_parm_read(clk->map, &sclk->div);

	/* if the divider is initially disabled, assume max */
	if (!val)
		sclk->cached_div = sclk_div_maxdiv(sclk);
	else
		sclk->cached_div = val + 1;

	sclk_div_get_duty_cycle(hw, &sclk->cached_duty);

	return 0;
}

const struct clk_ops meson_sclk_div_ops = {
	.recalc_rate = sclk_div_recalc_rate,
	.round_rate = sclk_div_round_rate,
	.set_rate = sclk_div_set_rate,
	.enable = sclk_div_enable,
	.disable = sclk_div_disable,
	.is_enabled = sclk_div_is_enabled,
	.get_duty_cycle = sclk_div_get_duty_cycle,
	.set_duty_cycle = sclk_div_set_duty_cycle,
	.init = sclk_div_init,
};
EXPORT_SYMBOL_GPL(meson_sclk_div_ops);
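/*
 * Illustrative sketch only, not part of this driver: a clock provider
 * would typically wrap meson_sclk_div_data in a clk_regmap and point
 * its init data at meson_sclk_div_ops. The clock name, parent and
 * register layout below are hypothetical.
 *
 * static struct clk_regmap sample_lrclk_div = {
 *	.data = &(struct meson_sclk_div_data){
 *		.div = { .reg_off = 0x0, .shift = 0, .width = 10 },
 *		.hi = { .reg_off = 0x0, .shift = 16, .width = 10 },
 *	},
 *	.hw.init = &(struct clk_init_data){
 *		.name = "sample_lrclk_div",
 *		.ops = &meson_sclk_div_ops,
 *		.parent_names = (const char *[]){ "sample_lrclk_sel" },
 *		.num_parents = 1,
 *		.flags = CLK_SET_RATE_PARENT,
 *	},
 * };
 */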

MODULE_DESCRIPTION("Amlogic Sample divider driver");
MODULE_AUTHOR("Jerome Brunet <jbrunet@baylibre.com>");
MODULE_LICENSE("GPL v2");