// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2014 Intel Corporation
 *
 * Adjustable fractional divider clock implementation.
 * Output rate = (m / n) * parent_rate.
 * Uses rational best approximation algorithm.
 */
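/*
 * Worked example (values are illustrative, not taken from any driver):
 * with parent_rate = 100 MHz, m = 3 and n = 8 the divider produces
 * 100 MHz * 3 / 8 = 37.5 MHz.
 */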

#include <linux/clk-provider.h>
#include <linux/io.h>
#include <linux/module.h>
#include <linux/device.h>
#include <linux/slab.h>
#include <linux/rational.h>

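/*
 * Register accessors: CLK_FRAC_DIVIDER_BIG_ENDIAN selects big-endian
 * MMIO accesses, otherwise native-endian readl()/writel() is used.
 */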
static inline u32 clk_fd_readl(struct clk_fractional_divider *fd)
{
	if (fd->flags & CLK_FRAC_DIVIDER_BIG_ENDIAN)
		return ioread32be(fd->reg);

	return readl(fd->reg);
}

static inline void clk_fd_writel(struct clk_fractional_divider *fd, u32 val)
{
	if (fd->flags & CLK_FRAC_DIVIDER_BIG_ENDIAN)
		iowrite32be(val, fd->reg);
	else
		writel(val, fd->reg);
}

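/*
 * Read the m and n bit fields back from the divider register and return
 * parent_rate * m / n. A field that reads back as zero passes the parent
 * rate through unchanged.
 */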
static unsigned long clk_fd_recalc_rate(struct clk_hw *hw,
					unsigned long parent_rate)
{
	struct clk_fractional_divider *fd = to_clk_fd(hw);
	unsigned long flags = 0;
	unsigned long m, n;
	u32 val;
	u64 ret;

	if (fd->lock)
		spin_lock_irqsave(fd->lock, flags);
	else
		__acquire(fd->lock);

	val = clk_fd_readl(fd);

	if (fd->lock)
		spin_unlock_irqrestore(fd->lock, flags);
	else
		__release(fd->lock);

	m = (val & fd->mmask) >> fd->mshift;
	n = (val & fd->nmask) >> fd->nshift;

	if (fd->flags & CLK_FRAC_DIVIDER_ZERO_BASED) {
		m++;
		n++;
	}

	if (!n || !m)
		return parent_rate;

	ret = (u64)parent_rate * m;
	do_div(ret, n);

	return ret;
}

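/*
 * Default m/n approximation. The requested rate is first scaled up so that
 * m and n cannot overflow their bit fields (see the comment below).
 * Illustrative example with hypothetical values: parent_rate = 600 MHz,
 * rate = 1 MHz and nwidth = 8 give scale = fls_long(599) = 10, so rate is
 * shifted left by 2 bits to 4 MHz; the best approximation is then
 * m/n = 1/150, i.e. 600 MHz / 150 = 4 MHz, the requested rate << 2.
 */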
static void clk_fd_general_approximation(struct clk_hw *hw, unsigned long rate,
					 unsigned long *parent_rate,
					 unsigned long *m, unsigned long *n)
{
	struct clk_fractional_divider *fd = to_clk_fd(hw);
	unsigned long scale;

	/*
	 * Get rate closer to *parent_rate to guarantee there is no overflow
	 * for m and n. As a result, it will be the nearest rate left shifted
	 * by (scale - fd->nwidth) bits.
	 */
	scale = fls_long(*parent_rate / rate - 1);
	if (scale > fd->nwidth)
		rate <<= scale - fd->nwidth;

	rational_best_approximation(rate, *parent_rate,
			GENMASK(fd->mwidth - 1, 0), GENMASK(fd->nwidth - 1, 0),
			m, n);
}

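/*
 * Report the rate the divider would actually produce for a requested rate.
 * A request of zero, or a request at or above the parent rate when the
 * parent cannot be re-rated, falls back to the parent rate; otherwise the
 * provider's ->approximation hook (if any) or the general helper above
 * chooses m and n.
 */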
static long clk_fd_round_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long *parent_rate)
{
	struct clk_fractional_divider *fd = to_clk_fd(hw);
	unsigned long m, n;
	u64 ret;

	if (!rate || (!clk_hw_can_set_rate_parent(hw) && rate >= *parent_rate))
		return *parent_rate;

	if (fd->approximation)
		fd->approximation(hw, rate, parent_rate, &m, &n);
	else
		clk_fd_general_approximation(hw, rate, parent_rate, &m, &n);

	ret = (u64)*parent_rate * m;
	do_div(ret, n);

	return ret;
}

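/*
 * Recompute m and n for the requested rate and program them into the
 * divider register under the optional spinlock. With
 * CLK_FRAC_DIVIDER_ZERO_BASED the hardware stores m - 1 and n - 1, hence
 * the decrement before writing (mirroring the increment on read-back).
 */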
static int clk_fd_set_rate(struct clk_hw *hw, unsigned long rate,
			   unsigned long parent_rate)
{
	struct clk_fractional_divider *fd = to_clk_fd(hw);
	unsigned long flags = 0;
	unsigned long m, n;
	u32 val;

	rational_best_approximation(rate, parent_rate,
			GENMASK(fd->mwidth - 1, 0), GENMASK(fd->nwidth - 1, 0),
			&m, &n);

	if (fd->flags & CLK_FRAC_DIVIDER_ZERO_BASED) {
		m--;
		n--;
	}

	if (fd->lock)
		spin_lock_irqsave(fd->lock, flags);
	else
		__acquire(fd->lock);

	val = clk_fd_readl(fd);
	val &= ~(fd->mmask | fd->nmask);
	val |= (m << fd->mshift) | (n << fd->nshift);
	clk_fd_writel(fd, val);

	if (fd->lock)
		spin_unlock_irqrestore(fd->lock, flags);
	else
		__release(fd->lock);

	return 0;
}

const struct clk_ops clk_fractional_divider_ops = {
	.recalc_rate = clk_fd_recalc_rate,
	.round_rate = clk_fd_round_rate,
	.set_rate = clk_fd_set_rate,
};
EXPORT_SYMBOL_GPL(clk_fractional_divider_ops);

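/*
 * Allocate a struct clk_fractional_divider describing the m (numerator)
 * and n (denominator) bit fields by shift and width within the register
 * at @reg, and register it with the common clock framework using
 * clk_fractional_divider_ops.
 *
 * Minimal usage sketch; every name and the field layout below (m in bits
 * [31:16], n in bits [15:0]) are hypothetical and only illustrate the
 * argument order:
 *
 *	hw = clk_hw_register_fractional_divider(dev, "uart_fracdiv",
 *			"uart_src", 0, base + 0x10,
 *			16, 16, 0, 16, 0, &my_clk_lock);
 *	if (IS_ERR(hw))
 *		return PTR_ERR(hw);
 */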
struct clk_hw *clk_hw_register_fractional_divider(struct device *dev,
		const char *name, const char *parent_name, unsigned long flags,
		void __iomem *reg, u8 mshift, u8 mwidth, u8 nshift, u8 nwidth,
		u8 clk_divider_flags, spinlock_t *lock)
{
	struct clk_fractional_divider *fd;
	struct clk_init_data init;
	struct clk_hw *hw;
	int ret;

	fd = kzalloc(sizeof(*fd), GFP_KERNEL);
	if (!fd)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.ops = &clk_fractional_divider_ops;
	init.flags = flags;
	init.parent_names = parent_name ? &parent_name : NULL;
	init.num_parents = parent_name ? 1 : 0;

	fd->reg = reg;
	fd->mshift = mshift;
	fd->mwidth = mwidth;
	fd->mmask = GENMASK(mwidth - 1, 0) << mshift;
	fd->nshift = nshift;
	fd->nwidth = nwidth;
	fd->nmask = GENMASK(nwidth - 1, 0) << nshift;
	fd->flags = clk_divider_flags;
	fd->lock = lock;
	fd->hw.init = &init;

	hw = &fd->hw;
	ret = clk_hw_register(dev, hw);
	if (ret) {
		kfree(fd);
		hw = ERR_PTR(ret);
	}

	return hw;
}
EXPORT_SYMBOL_GPL(clk_hw_register_fractional_divider);

struct clk *clk_register_fractional_divider(struct device *dev,
		const char *name, const char *parent_name, unsigned long flags,
		void __iomem *reg, u8 mshift, u8 mwidth, u8 nshift, u8 nwidth,
		u8 clk_divider_flags, spinlock_t *lock)
{
	struct clk_hw *hw;

	hw = clk_hw_register_fractional_divider(dev, name, parent_name, flags,
			reg, mshift, mwidth, nshift, nwidth, clk_divider_flags,
			lock);
	if (IS_ERR(hw))
		return ERR_CAST(hw);
	return hw->clk;
}
EXPORT_SYMBOL_GPL(clk_register_fractional_divider);

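/*
 * Counterpart to clk_hw_register_fractional_divider(): unregister the
 * clock and free the backing struct clk_fractional_divider.
 */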
void clk_hw_unregister_fractional_divider(struct clk_hw *hw)
{
	struct clk_fractional_divider *fd;

	fd = to_clk_fd(hw);

	clk_hw_unregister(hw);

	kfree(fd);
}