// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2011 Sascha Hauer, Pengutronix <s.hauer@pengutronix.de>
 * Copyright (C) 2011 Richard Zhao, Linaro <richard.zhao@linaro.org>
 * Copyright (C) 2011-2012 Mike Turquette, Linaro Ltd <mturquette@linaro.org>
 *
 * Adjustable divider clock implementation
 */
9
10#include <linux/clk-provider.h>
11#include <linux/module.h>
12#include <linux/slab.h>
13#include <linux/io.h>
14#include <linux/err.h>
15#include <linux/string.h>
James Hogan1a3cd182013-01-15 10:28:05 +000016#include <linux/log2.h>
Mike Turquette9d9f78e2012-03-15 23:11:20 -070017
/*
 * DOC: basic adjustable divider clock that cannot gate
 *
 * Traits of this clock:
 * prepare - clk_prepare only ensures that parents are prepared
 * enable - clk_enable only ensures that parents are enabled
 * rate - rate is adjustable.  clk->rate = ceiling(parent->rate / divisor)
 * parent - fixed parent.  No clk_set_parent support
 */
27
Jonas Gorski434d69f2019-04-18 13:12:04 +020028static inline u32 clk_div_readl(struct clk_divider *divider)
29{
30 if (divider->flags & CLK_DIVIDER_BIG_ENDIAN)
31 return ioread32be(divider->reg);
32
Jonas Gorski5834fd72019-04-18 13:12:11 +020033 return readl(divider->reg);
Jonas Gorski434d69f2019-04-18 13:12:04 +020034}
35
36static inline void clk_div_writel(struct clk_divider *divider, u32 val)
37{
38 if (divider->flags & CLK_DIVIDER_BIG_ENDIAN)
39 iowrite32be(val, divider->reg);
40 else
Jonas Gorski5834fd72019-04-18 13:12:11 +020041 writel(val, divider->reg);
Jonas Gorski434d69f2019-04-18 13:12:04 +020042}
43
Stephen Boydfab88ca2015-11-30 17:31:38 -080044static unsigned int _get_table_maxdiv(const struct clk_div_table *table,
45 u8 width)
Rajendra Nayak357c3f02012-06-29 19:06:32 +053046{
Jerome Brunete6d3cc72018-02-14 14:43:33 +010047 unsigned int maxdiv = 0, mask = clk_div_mask(width);
Rajendra Nayak357c3f02012-06-29 19:06:32 +053048 const struct clk_div_table *clkt;
49
50 for (clkt = table; clkt->div; clkt++)
Stephen Boydfab88ca2015-11-30 17:31:38 -080051 if (clkt->div > maxdiv && clkt->val <= mask)
Rajendra Nayak357c3f02012-06-29 19:06:32 +053052 maxdiv = clkt->div;
53 return maxdiv;
54}
55
Maxime COQUELIN774b5142014-01-29 17:24:07 +010056static unsigned int _get_table_mindiv(const struct clk_div_table *table)
57{
58 unsigned int mindiv = UINT_MAX;
59 const struct clk_div_table *clkt;
60
61 for (clkt = table; clkt->div; clkt++)
62 if (clkt->div < mindiv)
63 mindiv = clkt->div;
64 return mindiv;
65}
66
Stephen Boydbca96902015-01-19 18:05:29 -080067static unsigned int _get_maxdiv(const struct clk_div_table *table, u8 width,
68 unsigned long flags)
Rajendra Nayak6d9252b2012-05-17 15:52:13 +053069{
Stephen Boydbca96902015-01-19 18:05:29 -080070 if (flags & CLK_DIVIDER_ONE_BASED)
Jerome Brunete6d3cc72018-02-14 14:43:33 +010071 return clk_div_mask(width);
Stephen Boydbca96902015-01-19 18:05:29 -080072 if (flags & CLK_DIVIDER_POWER_OF_TWO)
Jerome Brunete6d3cc72018-02-14 14:43:33 +010073 return 1 << clk_div_mask(width);
Stephen Boydbca96902015-01-19 18:05:29 -080074 if (table)
Stephen Boydfab88ca2015-11-30 17:31:38 -080075 return _get_table_maxdiv(table, width);
Jerome Brunete6d3cc72018-02-14 14:43:33 +010076 return clk_div_mask(width) + 1;
Rajendra Nayak6d9252b2012-05-17 15:52:13 +053077}
78
Rajendra Nayak357c3f02012-06-29 19:06:32 +053079static unsigned int _get_table_div(const struct clk_div_table *table,
80 unsigned int val)
81{
82 const struct clk_div_table *clkt;
83
84 for (clkt = table; clkt->div; clkt++)
85 if (clkt->val == val)
86 return clkt->div;
87 return 0;
88}
89
Stephen Boydbca96902015-01-19 18:05:29 -080090static unsigned int _get_div(const struct clk_div_table *table,
Jim Quinlanafe76c8f2015-05-15 15:45:47 -040091 unsigned int val, unsigned long flags, u8 width)
Rajendra Nayak6d9252b2012-05-17 15:52:13 +053092{
Stephen Boydbca96902015-01-19 18:05:29 -080093 if (flags & CLK_DIVIDER_ONE_BASED)
Rajendra Nayak6d9252b2012-05-17 15:52:13 +053094 return val;
Stephen Boydbca96902015-01-19 18:05:29 -080095 if (flags & CLK_DIVIDER_POWER_OF_TWO)
Rajendra Nayak6d9252b2012-05-17 15:52:13 +053096 return 1 << val;
Jim Quinlanafe76c8f2015-05-15 15:45:47 -040097 if (flags & CLK_DIVIDER_MAX_AT_ZERO)
Jerome Brunete6d3cc72018-02-14 14:43:33 +010098 return val ? val : clk_div_mask(width) + 1;
Stephen Boydbca96902015-01-19 18:05:29 -080099 if (table)
100 return _get_table_div(table, val);
Rajendra Nayak6d9252b2012-05-17 15:52:13 +0530101 return val + 1;
102}
103
Rajendra Nayak357c3f02012-06-29 19:06:32 +0530104static unsigned int _get_table_val(const struct clk_div_table *table,
105 unsigned int div)
106{
107 const struct clk_div_table *clkt;
108
109 for (clkt = table; clkt->div; clkt++)
110 if (clkt->div == div)
111 return clkt->val;
112 return 0;
113}
114
Stephen Boydbca96902015-01-19 18:05:29 -0800115static unsigned int _get_val(const struct clk_div_table *table,
Jim Quinlanafe76c8f2015-05-15 15:45:47 -0400116 unsigned int div, unsigned long flags, u8 width)
Rajendra Nayak6d9252b2012-05-17 15:52:13 +0530117{
Stephen Boydbca96902015-01-19 18:05:29 -0800118 if (flags & CLK_DIVIDER_ONE_BASED)
Rajendra Nayak6d9252b2012-05-17 15:52:13 +0530119 return div;
Stephen Boydbca96902015-01-19 18:05:29 -0800120 if (flags & CLK_DIVIDER_POWER_OF_TWO)
Rajendra Nayak6d9252b2012-05-17 15:52:13 +0530121 return __ffs(div);
Jim Quinlanafe76c8f2015-05-15 15:45:47 -0400122 if (flags & CLK_DIVIDER_MAX_AT_ZERO)
Jerome Brunete6d3cc72018-02-14 14:43:33 +0100123 return (div == clk_div_mask(width) + 1) ? 0 : div;
Stephen Boydbca96902015-01-19 18:05:29 -0800124 if (table)
125 return _get_table_val(table, div);
Rajendra Nayak6d9252b2012-05-17 15:52:13 +0530126 return div - 1;
127}
Mike Turquette9d9f78e2012-03-15 23:11:20 -0700128
/*
 * divider_recalc_rate() - compute the output rate for a raw divider value
 * @hw: clock hardware for the divider (used for the warning message)
 * @parent_rate: rate of the parent clock
 * @val: raw field value as read from the divider register
 * @table: optional val<->div translation table
 * @flags: CLK_DIVIDER_* flags describing the field encoding
 * @width: width of the divider field, in bits
 *
 * Returns ceiling(parent_rate / div).  A decoded divider of zero is only
 * legitimate when CLK_DIVIDER_ALLOW_ZERO is set (hardware that bypasses
 * the divider); otherwise a warning is emitted.  In both zero cases the
 * parent rate is passed through unchanged.
 */
unsigned long divider_recalc_rate(struct clk_hw *hw, unsigned long parent_rate,
		unsigned int val,
		const struct clk_div_table *table,
		unsigned long flags, unsigned long width)
{
	unsigned int div;

	div = _get_div(table, val, flags, width);
	if (!div) {
		WARN(!(flags & CLK_DIVIDER_ALLOW_ZERO),
			"%s: Zero divisor and CLK_DIVIDER_ALLOW_ZERO not set\n",
			clk_hw_get_name(hw));
		return parent_rate;
	}

	/* 64-bit ceiling division: parent_rate may be close to ULONG_MAX */
	return DIV_ROUND_UP_ULL((u64)parent_rate, div);
}
EXPORT_SYMBOL_GPL(divider_recalc_rate);
147
148static unsigned long clk_divider_recalc_rate(struct clk_hw *hw,
149 unsigned long parent_rate)
150{
151 struct clk_divider *divider = to_clk_divider(hw);
152 unsigned int val;
153
Jonas Gorski434d69f2019-04-18 13:12:04 +0200154 val = clk_div_readl(divider) >> divider->shift;
Jerome Brunete6d3cc72018-02-14 14:43:33 +0100155 val &= clk_div_mask(divider->width);
Stephen Boydbca96902015-01-19 18:05:29 -0800156
157 return divider_recalc_rate(hw, parent_rate, val, divider->table,
Jerome Brunet12a26c22017-12-21 17:30:54 +0100158 divider->flags, divider->width);
Stephen Boydbca96902015-01-19 18:05:29 -0800159}
Mike Turquette9d9f78e2012-03-15 23:11:20 -0700160
Rajendra Nayak357c3f02012-06-29 19:06:32 +0530161static bool _is_valid_table_div(const struct clk_div_table *table,
162 unsigned int div)
163{
164 const struct clk_div_table *clkt;
165
166 for (clkt = table; clkt->div; clkt++)
167 if (clkt->div == div)
168 return true;
169 return false;
170}
171
Stephen Boydbca96902015-01-19 18:05:29 -0800172static bool _is_valid_div(const struct clk_div_table *table, unsigned int div,
173 unsigned long flags)
Rajendra Nayak357c3f02012-06-29 19:06:32 +0530174{
Stephen Boydbca96902015-01-19 18:05:29 -0800175 if (flags & CLK_DIVIDER_POWER_OF_TWO)
James Hogan1a3cd182013-01-15 10:28:05 +0000176 return is_power_of_2(div);
Stephen Boydbca96902015-01-19 18:05:29 -0800177 if (table)
178 return _is_valid_table_div(table, div);
Rajendra Nayak357c3f02012-06-29 19:06:32 +0530179 return true;
180}
181
Maxime COQUELINdd23c2c2014-01-29 17:24:06 +0100182static int _round_up_table(const struct clk_div_table *table, int div)
183{
184 const struct clk_div_table *clkt;
Maxime COQUELINfe52e752014-05-07 18:48:52 +0200185 int up = INT_MAX;
Maxime COQUELINdd23c2c2014-01-29 17:24:06 +0100186
187 for (clkt = table; clkt->div; clkt++) {
188 if (clkt->div == div)
189 return clkt->div;
190 else if (clkt->div < div)
191 continue;
192
193 if ((clkt->div - div) < (up - div))
194 up = clkt->div;
195 }
196
197 return up;
198}
199
Maxime COQUELIN774b5142014-01-29 17:24:07 +0100200static int _round_down_table(const struct clk_div_table *table, int div)
201{
202 const struct clk_div_table *clkt;
203 int down = _get_table_mindiv(table);
204
205 for (clkt = table; clkt->div; clkt++) {
206 if (clkt->div == div)
207 return clkt->div;
208 else if (clkt->div > div)
209 continue;
210
211 if ((div - clkt->div) < (div - down))
212 down = clkt->div;
213 }
214
215 return down;
216}
217
Stephen Boydbca96902015-01-19 18:05:29 -0800218static int _div_round_up(const struct clk_div_table *table,
219 unsigned long parent_rate, unsigned long rate,
220 unsigned long flags)
Maxime COQUELINdd23c2c2014-01-29 17:24:06 +0100221{
Brian Norris9556f9d2015-04-13 16:03:21 -0700222 int div = DIV_ROUND_UP_ULL((u64)parent_rate, rate);
Maxime COQUELINdd23c2c2014-01-29 17:24:06 +0100223
Stephen Boydbca96902015-01-19 18:05:29 -0800224 if (flags & CLK_DIVIDER_POWER_OF_TWO)
Maxime COQUELINdd23c2c2014-01-29 17:24:06 +0100225 div = __roundup_pow_of_two(div);
Stephen Boydbca96902015-01-19 18:05:29 -0800226 if (table)
227 div = _round_up_table(table, div);
Maxime COQUELINdd23c2c2014-01-29 17:24:06 +0100228
229 return div;
230}
231
/*
 * Pick the legal divider whose resulting rate is closest to @rate,
 * considering the candidates rounded up and rounded down from the
 * exact ratio.  On a tie the rounded-up divider (lower rate) wins.
 *
 * NOTE(review): when rate > parent_rate, `down` computes to 0 and the
 * down_rate division below would divide by zero -- presumably callers
 * only reach this with rate <= parent_rate; TODO confirm.
 */
static int _div_round_closest(const struct clk_div_table *table,
			      unsigned long parent_rate, unsigned long rate,
			      unsigned long flags)
{
	int up, down;
	unsigned long up_rate, down_rate;

	/* candidate dividers on either side of the exact ratio */
	up = DIV_ROUND_UP_ULL((u64)parent_rate, rate);
	down = parent_rate / rate;

	/* snap both candidates to values the hardware can encode */
	if (flags & CLK_DIVIDER_POWER_OF_TWO) {
		up = __roundup_pow_of_two(up);
		down = __rounddown_pow_of_two(down);
	} else if (table) {
		up = _round_up_table(table, up);
		down = _round_down_table(table, down);
	}

	up_rate = DIV_ROUND_UP_ULL((u64)parent_rate, up);
	down_rate = DIV_ROUND_UP_ULL((u64)parent_rate, down);

	return (rate - up_rate) <= (down_rate - rate) ? up : down;
}
255
Stephen Boydbca96902015-01-19 18:05:29 -0800256static int _div_round(const struct clk_div_table *table,
257 unsigned long parent_rate, unsigned long rate,
258 unsigned long flags)
Maxime COQUELIN774b5142014-01-29 17:24:07 +0100259{
Stephen Boydbca96902015-01-19 18:05:29 -0800260 if (flags & CLK_DIVIDER_ROUND_CLOSEST)
261 return _div_round_closest(table, parent_rate, rate, flags);
Maxime COQUELIN774b5142014-01-29 17:24:07 +0100262
Stephen Boydbca96902015-01-19 18:05:29 -0800263 return _div_round_up(table, parent_rate, rate, flags);
Maxime COQUELIN774b5142014-01-29 17:24:07 +0100264}
265
Stephen Boydbca96902015-01-19 18:05:29 -0800266static bool _is_best_div(unsigned long rate, unsigned long now,
267 unsigned long best, unsigned long flags)
Maxime COQUELIN774b5142014-01-29 17:24:07 +0100268{
Stephen Boydbca96902015-01-19 18:05:29 -0800269 if (flags & CLK_DIVIDER_ROUND_CLOSEST)
Maxime COQUELIN774b5142014-01-29 17:24:07 +0100270 return abs(rate - now) < abs(rate - best);
271
272 return now <= rate && now > best;
273}
274
Stephen Boydbca96902015-01-19 18:05:29 -0800275static int _next_div(const struct clk_div_table *table, int div,
276 unsigned long flags)
Maxime COQUELIN0e2de782014-01-29 17:24:08 +0100277{
278 div++;
279
Stephen Boydbca96902015-01-19 18:05:29 -0800280 if (flags & CLK_DIVIDER_POWER_OF_TWO)
Maxime COQUELIN0e2de782014-01-29 17:24:08 +0100281 return __roundup_pow_of_two(div);
Stephen Boydbca96902015-01-19 18:05:29 -0800282 if (table)
283 return _round_up_table(table, div);
Maxime COQUELIN0e2de782014-01-29 17:24:08 +0100284
285 return div;
286}
287
/*
 * Search for the divider that best realises @rate, optionally asking the
 * parent to change rate when CLK_SET_RATE_PARENT is set.  Updates
 * *best_parent_rate with the parent rate matching the returned divider.
 */
static int clk_divider_bestdiv(struct clk_hw *hw, struct clk_hw *parent,
			       unsigned long rate,
			       unsigned long *best_parent_rate,
			       const struct clk_div_table *table, u8 width,
			       unsigned long flags)
{
	int i, bestdiv = 0;
	unsigned long parent_rate, best = 0, now, maxdiv;
	unsigned long parent_rate_saved = *best_parent_rate;

	if (!rate)
		rate = 1;

	maxdiv = _get_maxdiv(table, width, flags);

	if (!(clk_hw_get_flags(hw) & CLK_SET_RATE_PARENT)) {
		/* parent rate is fixed: round the divider once and clamp */
		parent_rate = *best_parent_rate;
		bestdiv = _div_round(table, parent_rate, rate, flags);
		bestdiv = bestdiv == 0 ? 1 : bestdiv;
		bestdiv = bestdiv > maxdiv ? maxdiv : bestdiv;
		return bestdiv;
	}

	/*
	 * The maximum divider we can use without overflowing
	 * unsigned long in rate * i below
	 */
	maxdiv = min(ULONG_MAX / rate, maxdiv);

	/* walk every legal divider and try re-rating the parent for each */
	for (i = _next_div(table, 0, flags); i <= maxdiv;
					     i = _next_div(table, i, flags)) {
		if (rate * i == parent_rate_saved) {
			/*
			 * It's the most ideal case if the requested rate can be
			 * divided from parent clock without needing to change
			 * parent rate, so return the divider immediately.
			 */
			*best_parent_rate = parent_rate_saved;
			return i;
		}
		parent_rate = clk_hw_round_rate(parent, rate * i);
		now = DIV_ROUND_UP_ULL((u64)parent_rate, i);
		if (_is_best_div(rate, now, best, flags)) {
			bestdiv = i;
			best = now;
			*best_parent_rate = parent_rate;
		}
	}

	if (!bestdiv) {
		/* nothing matched: fall back to max divider, minimal parent */
		bestdiv = _get_maxdiv(table, width, flags);
		*best_parent_rate = clk_hw_round_rate(parent, 1);
	}

	return bestdiv;
}
344
Maxime Ripard22833a92017-05-17 09:40:30 +0200345long divider_round_rate_parent(struct clk_hw *hw, struct clk_hw *parent,
346 unsigned long rate, unsigned long *prate,
347 const struct clk_div_table *table,
348 u8 width, unsigned long flags)
Mike Turquette9d9f78e2012-03-15 23:11:20 -0700349{
350 int div;
Stephen Boydbca96902015-01-19 18:05:29 -0800351
Maxime Ripard22833a92017-05-17 09:40:30 +0200352 div = clk_divider_bestdiv(hw, parent, rate, prate, table, width, flags);
Mike Turquette9d9f78e2012-03-15 23:11:20 -0700353
Brian Norris9556f9d2015-04-13 16:03:21 -0700354 return DIV_ROUND_UP_ULL((u64)*prate, div);
Mike Turquette9d9f78e2012-03-15 23:11:20 -0700355}
Maxime Ripard22833a92017-05-17 09:40:30 +0200356EXPORT_SYMBOL_GPL(divider_round_rate_parent);
Stephen Boydbca96902015-01-19 18:05:29 -0800357
/*
 * Round @rate for a read-only divider: the divider value @val is fixed
 * in hardware, but the parent may still be re-rated when
 * CLK_SET_RATE_PARENT is set.  Returns -EINVAL if parent propagation is
 * requested without a parent.
 *
 * NOTE(review): _get_div() returns 0 for a table value with no entry;
 * presumably @val is always valid here, otherwise the final division
 * would be by zero -- TODO confirm with callers.
 */
long divider_ro_round_rate_parent(struct clk_hw *hw, struct clk_hw *parent,
				  unsigned long rate, unsigned long *prate,
				  const struct clk_div_table *table, u8 width,
				  unsigned long flags, unsigned int val)
{
	int div;

	div = _get_div(table, val, flags, width);

	/* Even a read-only clock can propagate a rate change */
	if (clk_hw_get_flags(hw) & CLK_SET_RATE_PARENT) {
		if (!parent)
			return -EINVAL;

		*prate = clk_hw_round_rate(parent, rate * div);
	}

	return DIV_ROUND_UP_ULL((u64)*prate, div);
}
EXPORT_SYMBOL_GPL(divider_ro_round_rate_parent);
378
379
Stephen Boydbca96902015-01-19 18:05:29 -0800380static long clk_divider_round_rate(struct clk_hw *hw, unsigned long rate,
381 unsigned long *prate)
382{
383 struct clk_divider *divider = to_clk_divider(hw);
Stephen Boydbca96902015-01-19 18:05:29 -0800384
385 /* if read only, just return current value */
386 if (divider->flags & CLK_DIVIDER_READ_ONLY) {
Jerome Brunetb15ee492018-02-14 14:43:39 +0100387 u32 val;
388
Jonas Gorski434d69f2019-04-18 13:12:04 +0200389 val = clk_div_readl(divider) >> divider->shift;
Jerome Brunetb15ee492018-02-14 14:43:39 +0100390 val &= clk_div_mask(divider->width);
391
392 return divider_ro_round_rate(hw, rate, prate, divider->table,
393 divider->width, divider->flags,
394 val);
Stephen Boydbca96902015-01-19 18:05:29 -0800395 }
396
397 return divider_round_rate(hw, rate, prate, divider->table,
398 divider->width, divider->flags);
399}
400
401int divider_get_val(unsigned long rate, unsigned long parent_rate,
402 const struct clk_div_table *table, u8 width,
403 unsigned long flags)
404{
405 unsigned int div, value;
406
Brian Norris9556f9d2015-04-13 16:03:21 -0700407 div = DIV_ROUND_UP_ULL((u64)parent_rate, rate);
Stephen Boydbca96902015-01-19 18:05:29 -0800408
409 if (!_is_valid_div(table, div, flags))
410 return -EINVAL;
411
Jim Quinlanafe76c8f2015-05-15 15:45:47 -0400412 value = _get_val(table, div, flags, width);
Stephen Boydbca96902015-01-19 18:05:29 -0800413
Jerome Brunete6d3cc72018-02-14 14:43:33 +0100414 return min_t(unsigned int, value, clk_div_mask(width));
Stephen Boydbca96902015-01-19 18:05:29 -0800415}
416EXPORT_SYMBOL_GPL(divider_get_val);
Mike Turquette9d9f78e2012-03-15 23:11:20 -0700417
/*
 * clk_ops callback: program the divider register for @rate.
 *
 * The register update is done under the optional spinlock; the
 * __acquire()/__release() calls on the lockless path keep sparse's
 * context-balance checking happy and generate no code.
 */
static int clk_divider_set_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long parent_rate)
{
	struct clk_divider *divider = to_clk_divider(hw);
	int value;
	unsigned long flags = 0;
	u32 val;

	value = divider_get_val(rate, parent_rate, divider->table,
				divider->width, divider->flags);
	if (value < 0)
		return value;

	if (divider->lock)
		spin_lock_irqsave(divider->lock, flags);
	else
		__acquire(divider->lock);

	if (divider->flags & CLK_DIVIDER_HIWORD_MASK) {
		/* hiword registers: write-enable mask in the upper 16 bits */
		val = clk_div_mask(divider->width) << (divider->shift + 16);
	} else {
		/* read-modify-write: clear only our field */
		val = clk_div_readl(divider);
		val &= ~(clk_div_mask(divider->width) << divider->shift);
	}
	val |= (u32)value << divider->shift;
	clk_div_writel(divider, val);

	if (divider->lock)
		spin_unlock_irqrestore(divider->lock, flags);
	else
		__release(divider->lock);

	return 0;
}
Mike Turquette9d9f78e2012-03-15 23:11:20 -0700452
/* Read/write divider: rate can be recalculated, rounded and set. */
const struct clk_ops clk_divider_ops = {
	.recalc_rate = clk_divider_recalc_rate,
	.round_rate = clk_divider_round_rate,
	.set_rate = clk_divider_set_rate,
};
EXPORT_SYMBOL_GPL(clk_divider_ops);
459
/* Read-only divider: no .set_rate, the register is never written. */
const struct clk_ops clk_divider_ro_ops = {
	.recalc_rate = clk_divider_recalc_rate,
	.round_rate = clk_divider_round_rate,
};
EXPORT_SYMBOL_GPL(clk_divider_ro_ops);
465
/*
 * __clk_hw_register_divider() - allocate and register a divider clock
 *
 * At most one of @parent_name, @parent_hw or @parent_data describes the
 * parent; with none given the clock is registered as a root (no parents).
 * Returns the registered clk_hw or an ERR_PTR; the allocation is freed
 * here on registration failure.
 */
struct clk_hw *__clk_hw_register_divider(struct device *dev,
		struct device_node *np, const char *name,
		const char *parent_name, const struct clk_hw *parent_hw,
		const struct clk_parent_data *parent_data, unsigned long flags,
		void __iomem *reg, u8 shift, u8 width, u8 clk_divider_flags,
		const struct clk_div_table *table, spinlock_t *lock)
{
	struct clk_divider *div;
	struct clk_hw *hw;
	struct clk_init_data init = {};
	int ret;

	if (clk_divider_flags & CLK_DIVIDER_HIWORD_MASK) {
		/* hiword mode needs field + write-enable bits in 32 bits */
		if (width + shift > 16) {
			pr_warn("divider value exceeds LOWORD field\n");
			return ERR_PTR(-EINVAL);
		}
	}

	/* allocate the divider */
	div = kzalloc(sizeof(*div), GFP_KERNEL);
	if (!div)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	if (clk_divider_flags & CLK_DIVIDER_READ_ONLY)
		init.ops = &clk_divider_ro_ops;
	else
		init.ops = &clk_divider_ops;
	init.flags = flags;
	init.parent_names = parent_name ? &parent_name : NULL;
	init.parent_hws = parent_hw ? &parent_hw : NULL;
	init.parent_data = parent_data;
	if (parent_name || parent_hw || parent_data)
		init.num_parents = 1;
	else
		init.num_parents = 0;

	/* struct clk_divider assignments */
	div->reg = reg;
	div->shift = shift;
	div->width = width;
	div->flags = clk_divider_flags;
	div->lock = lock;
	div->hw.init = &init;
	div->table = table;

	/* register the clock */
	hw = &div->hw;
	ret = clk_hw_register(dev, hw);
	if (ret) {
		kfree(div);
		hw = ERR_PTR(ret);
	}

	return hw;
}
EXPORT_SYMBOL_GPL(__clk_hw_register_divider);
Rajendra Nayak357c3f02012-06-29 19:06:32 +0530524
525/**
526 * clk_register_divider_table - register a table based divider clock with
527 * the clock framework
528 * @dev: device registering this clock
529 * @name: name of this clock
530 * @parent_name: name of clock's parent
531 * @flags: framework-specific flags
532 * @reg: register address to adjust divider
533 * @shift: number of bits to shift the bitfield
534 * @width: width of the bitfield
535 * @clk_divider_flags: divider-specific flags for this clock
536 * @table: array of divider/value pairs ending with a div set to 0
537 * @lock: shared register lock for this clock
538 */
539struct clk *clk_register_divider_table(struct device *dev, const char *name,
540 const char *parent_name, unsigned long flags,
541 void __iomem *reg, u8 shift, u8 width,
542 u8 clk_divider_flags, const struct clk_div_table *table,
543 spinlock_t *lock)
544{
Stephen Boydeb7d2642016-02-06 23:26:37 -0800545 struct clk_hw *hw;
546
Stephen Boydff258812019-08-30 08:09:23 -0700547 hw = __clk_hw_register_divider(dev, NULL, name, parent_name, NULL,
548 NULL, flags, reg, shift, width, clk_divider_flags,
549 table, lock);
Stephen Boydeb7d2642016-02-06 23:26:37 -0800550 if (IS_ERR(hw))
551 return ERR_CAST(hw);
552 return hw->clk;
553}
554EXPORT_SYMBOL_GPL(clk_register_divider_table);
555
/* Unregister a divider clk and free the clk_divider it embeds. */
void clk_unregister_divider(struct clk *clk)
{
	struct clk_hw *hw = __clk_get_hw(clk);
	struct clk_divider *div;

	if (!hw)
		return;

	div = to_clk_divider(hw);

	/* tear the clock down first, then release our allocation */
	clk_unregister(clk);
	kfree(div);
}
EXPORT_SYMBOL_GPL(clk_unregister_divider);
Stephen Boydeb7d2642016-02-06 23:26:37 -0800571
/**
 * clk_hw_unregister_divider - unregister a clk divider
 * @hw: hardware-specific clock data to unregister
 */
void clk_hw_unregister_divider(struct clk_hw *hw)
{
	struct clk_divider *div = to_clk_divider(hw);

	/* tear the clock down first, then release our allocation */
	clk_hw_unregister(hw);
	kfree(div);
}
EXPORT_SYMBOL_GPL(clk_hw_unregister_divider);