blob: e6bcc0a7170c826a9b3f68f8d20a601bb38858c5 [file] [log] [blame]
Thomas Gleixner2874c5f2019-05-27 08:55:01 +02001// SPDX-License-Identifier: GPL-2.0-or-later
Maxime Ripard6174a1e2016-06-29 21:05:31 +02002/*
3 * Copyright (C) 2016 Maxime Ripard
4 * Maxime Ripard <maxime.ripard@free-electrons.com>
Maxime Ripard6174a1e2016-06-29 21:05:31 +02005 */
6
7#include <linux/clk-provider.h>
Stephen Boyd62e59c42019-04-18 15:20:22 -07008#include <linux/io.h>
Maxime Ripard6174a1e2016-06-29 21:05:31 +02009
10#include "ccu_frac.h"
11#include "ccu_gate.h"
12#include "ccu_nm.h"
13
/*
 * Scratch state for the N/M factor search: the selected (n, m) pair
 * and the inclusive ranges ccu_nm_find_best() is allowed to pick from.
 */
struct _ccu_nm {
	unsigned long n, min_n, max_n;	/* multiplier and its search bounds */
	unsigned long m, min_m, max_m;	/* divider and its search bounds */
};
18
Jernej Skrabec65b66572018-11-04 19:26:40 +010019static unsigned long ccu_nm_calc_rate(unsigned long parent,
20 unsigned long n, unsigned long m)
21{
22 u64 rate = parent;
23
24 rate *= n;
25 do_div(rate, m);
26
27 return rate;
28}
29
Maxime Ripardee286482016-09-29 22:53:12 +020030static void ccu_nm_find_best(unsigned long parent, unsigned long rate,
31 struct _ccu_nm *nm)
32{
33 unsigned long best_rate = 0;
34 unsigned long best_n = 0, best_m = 0;
35 unsigned long _n, _m;
36
Maxime Ripard6e0d50d2016-09-29 22:57:26 +020037 for (_n = nm->min_n; _n <= nm->max_n; _n++) {
38 for (_m = nm->min_m; _m <= nm->max_m; _m++) {
Jernej Skrabec65b66572018-11-04 19:26:40 +010039 unsigned long tmp_rate = ccu_nm_calc_rate(parent,
40 _n, _m);
Maxime Ripardee286482016-09-29 22:53:12 +020041
42 if (tmp_rate > rate)
43 continue;
44
45 if ((rate - tmp_rate) < (rate - best_rate)) {
46 best_rate = tmp_rate;
47 best_n = _n;
48 best_m = _m;
49 }
50 }
51 }
52
53 nm->n = best_n;
54 nm->m = best_m;
55}
56
Maxime Ripard6174a1e2016-06-29 21:05:31 +020057static void ccu_nm_disable(struct clk_hw *hw)
58{
59 struct ccu_nm *nm = hw_to_ccu_nm(hw);
60
61 return ccu_gate_helper_disable(&nm->common, nm->enable);
62}
63
64static int ccu_nm_enable(struct clk_hw *hw)
65{
66 struct ccu_nm *nm = hw_to_ccu_nm(hw);
67
68 return ccu_gate_helper_enable(&nm->common, nm->enable);
69}
70
71static int ccu_nm_is_enabled(struct clk_hw *hw)
72{
73 struct ccu_nm *nm = hw_to_ccu_nm(hw);
74
75 return ccu_gate_helper_is_enabled(&nm->common, nm->enable);
76}
77
/*
 * clk_ops .recalc_rate hook: derive the current output rate from the
 * hardware, taking fractional mode, sigma-delta modulation and the
 * optional fixed post-divider into account.
 */
static unsigned long ccu_nm_recalc_rate(struct clk_hw *hw,
					unsigned long parent_rate)
{
	struct ccu_nm *nm = hw_to_ccu_nm(hw);
	unsigned long rate;
	unsigned long n, m;
	u32 reg;

	/*
	 * In fractional mode the rate comes straight from the frac
	 * helper; the N/M register fields are not consulted.
	 */
	if (ccu_frac_helper_is_enabled(&nm->common, &nm->frac)) {
		rate = ccu_frac_helper_read_rate(&nm->common, &nm->frac);

		if (nm->common.features & CCU_FEATURE_FIXED_POSTDIV)
			rate /= nm->fixed_post_div;

		return rate;
	}

	reg = readl(nm->common.base + nm->common.reg);

	/* Extract the N field, then apply the register-to-value offset */
	n = reg >> nm->n.shift;
	n &= (1 << nm->n.width) - 1;
	n += nm->n.offset;
	if (!n)		/* guard against a zero multiplier */
		n++;

	/* Extract the M field the same way; a zero divider would be UB */
	m = reg >> nm->m.shift;
	m &= (1 << nm->m.width) - 1;
	m += nm->m.offset;
	if (!m)
		m++;

	/*
	 * With sigma-delta modulation active the effective rate is
	 * computed by the SDM helper from the decoded factors (note it
	 * takes m before n); otherwise it is plain parent * N / M.
	 */
	if (ccu_sdm_helper_is_enabled(&nm->common, &nm->sdm))
		rate = ccu_sdm_helper_read_rate(&nm->common, &nm->sdm, m, n);
	else
		rate = ccu_nm_calc_rate(parent_rate, n, m);

	if (nm->common.features & CCU_FEATURE_FIXED_POSTDIV)
		rate /= nm->fixed_post_div;

	return rate;
}
119
120static long ccu_nm_round_rate(struct clk_hw *hw, unsigned long rate,
121 unsigned long *parent_rate)
122{
123 struct ccu_nm *nm = hw_to_ccu_nm(hw);
Maxime Ripardee286482016-09-29 22:53:12 +0200124 struct _ccu_nm _nm;
Maxime Ripard6174a1e2016-06-29 21:05:31 +0200125
Chen-Yu Tsai7d333ef2017-12-08 16:35:10 +0800126 if (nm->common.features & CCU_FEATURE_FIXED_POSTDIV)
127 rate *= nm->fixed_post_div;
Chen-Yu Tsai4cdbc402017-10-12 16:36:58 +0800128
Jernej Skrabec2d2b61c2018-03-01 22:34:27 +0100129 if (rate < nm->min_rate) {
130 rate = nm->min_rate;
131 if (nm->common.features & CCU_FEATURE_FIXED_POSTDIV)
132 rate /= nm->fixed_post_div;
133 return rate;
134 }
135
Jernej Skrabeccb54fbd2018-08-09 18:52:13 +0200136 if (nm->max_rate && rate > nm->max_rate) {
137 rate = nm->max_rate;
138 if (nm->common.features & CCU_FEATURE_FIXED_POSTDIV)
139 rate /= nm->fixed_post_div;
140 return rate;
141 }
142
Chen-Yu Tsai7d333ef2017-12-08 16:35:10 +0800143 if (ccu_frac_helper_has_rate(&nm->common, &nm->frac, rate)) {
144 if (nm->common.features & CCU_FEATURE_FIXED_POSTDIV)
145 rate /= nm->fixed_post_div;
Chen-Yu Tsai392ba5f2017-10-12 16:37:00 +0800146 return rate;
Chen-Yu Tsai7d333ef2017-12-08 16:35:10 +0800147 }
148
149 if (ccu_sdm_helper_has_rate(&nm->common, &nm->sdm, rate)) {
150 if (nm->common.features & CCU_FEATURE_FIXED_POSTDIV)
151 rate /= nm->fixed_post_div;
152 return rate;
153 }
Chen-Yu Tsai392ba5f2017-10-12 16:37:00 +0800154
Chen-Yu Tsai4162c5c2017-03-24 16:33:05 +0800155 _nm.min_n = nm->n.min ?: 1;
Maxime Ripard0c3c8e12016-10-14 12:08:19 +0200156 _nm.max_n = nm->n.max ?: 1 << nm->n.width;
Maxime Ripard6e0d50d2016-09-29 22:57:26 +0200157 _nm.min_m = 1;
Maxime Ripardee286482016-09-29 22:53:12 +0200158 _nm.max_m = nm->m.max ?: 1 << nm->m.width;
Maxime Ripard87ba9e52016-09-06 12:29:04 +0200159
Maxime Ripardee286482016-09-29 22:53:12 +0200160 ccu_nm_find_best(*parent_rate, rate, &_nm);
Jernej Skrabec65b66572018-11-04 19:26:40 +0100161 rate = ccu_nm_calc_rate(*parent_rate, _nm.n, _nm.m);
Maxime Ripard6174a1e2016-06-29 21:05:31 +0200162
Chen-Yu Tsai7d333ef2017-12-08 16:35:10 +0800163 if (nm->common.features & CCU_FEATURE_FIXED_POSTDIV)
164 rate /= nm->fixed_post_div;
165
166 return rate;
Maxime Ripard6174a1e2016-06-29 21:05:31 +0200167}
168
/*
 * clk_ops .set_rate hook: program the N and M factors for the requested
 * rate, or hand the change off to the fractional / sigma-delta helpers
 * when they can produce it exactly.
 */
static int ccu_nm_set_rate(struct clk_hw *hw, unsigned long rate,
			   unsigned long parent_rate)
{
	struct ccu_nm *nm = hw_to_ccu_nm(hw);
	struct _ccu_nm _nm;
	unsigned long flags;
	u32 reg;

	/* Adjust target rate according to post-dividers */
	if (nm->common.features & CCU_FEATURE_FIXED_POSTDIV)
		rate = rate * nm->fixed_post_div;

	if (ccu_frac_helper_has_rate(&nm->common, &nm->frac, rate)) {
		spin_lock_irqsave(nm->common.lock, flags);

		/* most SoCs require M to be 0 if fractional mode is used */
		reg = readl(nm->common.base + nm->common.reg);
		reg &= ~GENMASK(nm->m.width + nm->m.shift - 1, nm->m.shift);
		writel(reg, nm->common.base + nm->common.reg);

		spin_unlock_irqrestore(nm->common.lock, flags);

		ccu_frac_helper_enable(&nm->common, &nm->frac);

		/* The frac helper performs the whole rate change */
		return ccu_frac_helper_set_rate(&nm->common, &nm->frac,
						rate, nm->lock);
	} else {
		ccu_frac_helper_disable(&nm->common, &nm->frac);
	}

	/* Search bounds; field limits of 0 mean "full register field width" */
	_nm.min_n = nm->n.min ?: 1;
	_nm.max_n = nm->n.max ?: 1 << nm->n.width;
	_nm.min_m = 1;
	_nm.max_m = nm->m.max ?: 1 << nm->m.width;

	if (ccu_sdm_helper_has_rate(&nm->common, &nm->sdm, rate)) {
		ccu_sdm_helper_enable(&nm->common, &nm->sdm, rate);

		/* Sigma delta modulation requires specific N and M factors */
		ccu_sdm_helper_get_factors(&nm->common, &nm->sdm, rate,
					   &_nm.m, &_nm.n);
	} else {
		ccu_sdm_helper_disable(&nm->common, &nm->sdm);
		ccu_nm_find_best(parent_rate, rate, &_nm);
	}

	spin_lock_irqsave(nm->common.lock, flags);

	/* Clear both factor fields, then program the new values */
	reg = readl(nm->common.base + nm->common.reg);
	reg &= ~GENMASK(nm->n.width + nm->n.shift - 1, nm->n.shift);
	reg &= ~GENMASK(nm->m.width + nm->m.shift - 1, nm->m.shift);

	/* The register encodes each factor minus its offset */
	reg |= (_nm.n - nm->n.offset) << nm->n.shift;
	reg |= (_nm.m - nm->m.offset) << nm->m.shift;
	writel(reg, nm->common.base + nm->common.reg);

	spin_unlock_irqrestore(nm->common.lock, flags);

	/* Wait for the hardware lock indication before returning */
	ccu_helper_wait_for_lock(&nm->common, nm->lock);

	return 0;
}
231
/*
 * clk_ops for N*M-style clocks (rate = parent * N / M, with optional
 * fractional mode, sigma-delta modulation and fixed post-divider).
 */
const struct clk_ops ccu_nm_ops = {
	.disable	= ccu_nm_disable,
	.enable		= ccu_nm_enable,
	.is_enabled	= ccu_nm_is_enabled,

	.recalc_rate	= ccu_nm_recalc_rate,
	.round_rate	= ccu_nm_round_rate,
	.set_rate	= ccu_nm_set_rate,
};