blob: 39413cb0985cec3664c82b4a85c15a844a7d8c00 [file] [log] [blame]
Thomas Gleixner2874c5f2019-05-27 08:55:01 +02001// SPDX-License-Identifier: GPL-2.0-or-later
Maxime Ripard4f728b52016-06-29 21:05:33 +02002/*
3 * Copyright (C) 2016 Maxime Ripard
4 * Maxime Ripard <maxime.ripard@free-electrons.com>
Maxime Ripard4f728b52016-06-29 21:05:33 +02005 */
6
7#include <linux/clk-provider.h>
Stephen Boyd62e59c42019-04-18 15:20:22 -07008#include <linux/io.h>
Maxime Ripard4f728b52016-06-29 21:05:33 +02009
10#include "ccu_gate.h"
11#include "ccu_nkmp.h"
12
/*
 * Scratch structure for the factor search: on input each factor carries
 * its allowed [min, max] range; on output of ccu_nkmp_find_best() the
 * n/k/m/p members hold the best combination found.
 */
struct _ccu_nkmp {
	unsigned long	n, min_n, max_n;
	unsigned long	k, min_k, max_k;
	unsigned long	m, min_m, max_m;
	unsigned long	p, min_p, max_p;
};
19
Jernej Skrabeca5ebc332018-02-14 21:08:56 +010020static unsigned long ccu_nkmp_calc_rate(unsigned long parent,
21 unsigned long n, unsigned long k,
22 unsigned long m, unsigned long p)
23{
24 u64 rate = parent;
25
26 rate *= n * k;
27 do_div(rate, m * p);
28
29 return rate;
30}
31
Maxime Ripard4f728b52016-06-29 21:05:33 +020032static void ccu_nkmp_find_best(unsigned long parent, unsigned long rate,
33 struct _ccu_nkmp *nkmp)
34{
35 unsigned long best_rate = 0;
36 unsigned long best_n = 0, best_k = 0, best_m = 0, best_p = 0;
37 unsigned long _n, _k, _m, _p;
38
Maxime Ripard6e0d50d2016-09-29 22:57:26 +020039 for (_k = nkmp->min_k; _k <= nkmp->max_k; _k++) {
40 for (_n = nkmp->min_n; _n <= nkmp->max_n; _n++) {
41 for (_m = nkmp->min_m; _m <= nkmp->max_m; _m++) {
42 for (_p = nkmp->min_p; _p <= nkmp->max_p; _p <<= 1) {
Maxime Ripardee286482016-09-29 22:53:12 +020043 unsigned long tmp_rate;
Maxime Ripard4f728b52016-06-29 21:05:33 +020044
Jernej Skrabeca5ebc332018-02-14 21:08:56 +010045 tmp_rate = ccu_nkmp_calc_rate(parent,
46 _n, _k,
47 _m, _p);
Maxime Ripard4f728b52016-06-29 21:05:33 +020048
Maxime Ripardee286482016-09-29 22:53:12 +020049 if (tmp_rate > rate)
50 continue;
Maxime Ripard4f728b52016-06-29 21:05:33 +020051
Maxime Ripardee286482016-09-29 22:53:12 +020052 if ((rate - tmp_rate) < (rate - best_rate)) {
53 best_rate = tmp_rate;
54 best_n = _n;
55 best_k = _k;
56 best_m = _m;
57 best_p = _p;
58 }
59 }
Maxime Ripard4f728b52016-06-29 21:05:33 +020060 }
61 }
62 }
63
64 nkmp->n = best_n;
65 nkmp->k = best_k;
66 nkmp->m = best_m;
67 nkmp->p = best_p;
68}
69
70static void ccu_nkmp_disable(struct clk_hw *hw)
71{
72 struct ccu_nkmp *nkmp = hw_to_ccu_nkmp(hw);
73
74 return ccu_gate_helper_disable(&nkmp->common, nkmp->enable);
75}
76
77static int ccu_nkmp_enable(struct clk_hw *hw)
78{
79 struct ccu_nkmp *nkmp = hw_to_ccu_nkmp(hw);
80
81 return ccu_gate_helper_enable(&nkmp->common, nkmp->enable);
82}
83
84static int ccu_nkmp_is_enabled(struct clk_hw *hw)
85{
86 struct ccu_nkmp *nkmp = hw_to_ccu_nkmp(hw);
87
88 return ccu_gate_helper_is_enabled(&nkmp->common, nkmp->enable);
89}
90
91static unsigned long ccu_nkmp_recalc_rate(struct clk_hw *hw,
92 unsigned long parent_rate)
93{
94 struct ccu_nkmp *nkmp = hw_to_ccu_nkmp(hw);
Icenowy Zhenga910f252018-03-16 22:02:11 +080095 unsigned long n, m, k, p, rate;
Maxime Ripard4f728b52016-06-29 21:05:33 +020096 u32 reg;
97
98 reg = readl(nkmp->common.base + nkmp->common.reg);
99
100 n = reg >> nkmp->n.shift;
101 n &= (1 << nkmp->n.width) - 1;
Maxime Riparde66f81b2016-11-08 18:12:34 +0100102 n += nkmp->n.offset;
103 if (!n)
104 n++;
Maxime Ripard4f728b52016-06-29 21:05:33 +0200105
106 k = reg >> nkmp->k.shift;
107 k &= (1 << nkmp->k.width) - 1;
Maxime Riparde66f81b2016-11-08 18:12:34 +0100108 k += nkmp->k.offset;
109 if (!k)
110 k++;
Maxime Ripard4f728b52016-06-29 21:05:33 +0200111
112 m = reg >> nkmp->m.shift;
113 m &= (1 << nkmp->m.width) - 1;
Maxime Riparde66f81b2016-11-08 18:12:34 +0100114 m += nkmp->m.offset;
115 if (!m)
116 m++;
Maxime Ripard4f728b52016-06-29 21:05:33 +0200117
118 p = reg >> nkmp->p.shift;
119 p &= (1 << nkmp->p.width) - 1;
120
Icenowy Zhenga910f252018-03-16 22:02:11 +0800121 rate = ccu_nkmp_calc_rate(parent_rate, n, k, m, 1 << p);
122 if (nkmp->common.features & CCU_FEATURE_FIXED_POSTDIV)
123 rate /= nkmp->fixed_post_div;
124
125 return rate;
Maxime Ripard4f728b52016-06-29 21:05:33 +0200126}
127
128static long ccu_nkmp_round_rate(struct clk_hw *hw, unsigned long rate,
129 unsigned long *parent_rate)
130{
131 struct ccu_nkmp *nkmp = hw_to_ccu_nkmp(hw);
132 struct _ccu_nkmp _nkmp;
133
Icenowy Zhenga910f252018-03-16 22:02:11 +0800134 if (nkmp->common.features & CCU_FEATURE_FIXED_POSTDIV)
135 rate *= nkmp->fixed_post_div;
136
Jernej Skrabeca8e54332018-08-09 18:52:16 +0200137 if (nkmp->max_rate && rate > nkmp->max_rate) {
138 rate = nkmp->max_rate;
139 if (nkmp->common.features & CCU_FEATURE_FIXED_POSTDIV)
140 rate /= nkmp->fixed_post_div;
141 return rate;
142 }
143
Chen-Yu Tsai4162c5c2017-03-24 16:33:05 +0800144 _nkmp.min_n = nkmp->n.min ?: 1;
Maxime Ripard0c3c8e12016-10-14 12:08:19 +0200145 _nkmp.max_n = nkmp->n.max ?: 1 << nkmp->n.width;
Chen-Yu Tsai4162c5c2017-03-24 16:33:05 +0800146 _nkmp.min_k = nkmp->k.min ?: 1;
Maxime Ripard0c3c8e12016-10-14 12:08:19 +0200147 _nkmp.max_k = nkmp->k.max ?: 1 << nkmp->k.width;
Maxime Ripard6e0d50d2016-09-29 22:57:26 +0200148 _nkmp.min_m = 1;
Maxime Ripard87ba9e52016-09-06 12:29:04 +0200149 _nkmp.max_m = nkmp->m.max ?: 1 << nkmp->m.width;
Maxime Ripard6e0d50d2016-09-29 22:57:26 +0200150 _nkmp.min_p = 1;
Maxime Ripard87ba9e52016-09-06 12:29:04 +0200151 _nkmp.max_p = nkmp->p.max ?: 1 << ((1 << nkmp->p.width) - 1);
Maxime Ripard4f728b52016-06-29 21:05:33 +0200152
Maxime Ripard87ba9e52016-09-06 12:29:04 +0200153 ccu_nkmp_find_best(*parent_rate, rate, &_nkmp);
Maxime Ripard4f728b52016-06-29 21:05:33 +0200154
Icenowy Zhenga910f252018-03-16 22:02:11 +0800155 rate = ccu_nkmp_calc_rate(*parent_rate, _nkmp.n, _nkmp.k,
Jernej Skrabeca5ebc332018-02-14 21:08:56 +0100156 _nkmp.m, _nkmp.p);
Icenowy Zhenga910f252018-03-16 22:02:11 +0800157 if (nkmp->common.features & CCU_FEATURE_FIXED_POSTDIV)
158 rate = rate / nkmp->fixed_post_div;
159
160 return rate;
Maxime Ripard4f728b52016-06-29 21:05:33 +0200161}
162
/*
 * clk_ops::set_rate callback: find the best (n, k, m, p) combination for
 * the requested rate and program it into the factor register under the
 * CCU spinlock, then wait for the PLL to report lock.  Always returns 0.
 */
static int ccu_nkmp_set_rate(struct clk_hw *hw, unsigned long rate,
			   unsigned long parent_rate)
{
	struct ccu_nkmp *nkmp = hw_to_ccu_nkmp(hw);
	u32 n_mask = 0, k_mask = 0, m_mask = 0, p_mask = 0;
	struct _ccu_nkmp _nkmp;
	unsigned long flags;
	u32 reg;

	/* Search in the pre-postdiv domain when a fixed post-divider exists. */
	if (nkmp->common.features & CCU_FEATURE_FIXED_POSTDIV)
		rate = rate * nkmp->fixed_post_div;

	/* Unset factor limits default to the full span of each bitfield. */
	_nkmp.min_n = nkmp->n.min ?: 1;
	_nkmp.max_n = nkmp->n.max ?: 1 << nkmp->n.width;
	_nkmp.min_k = nkmp->k.min ?: 1;
	_nkmp.max_k = nkmp->k.max ?: 1 << nkmp->k.width;
	_nkmp.min_m = 1;
	_nkmp.max_m = nkmp->m.max ?: 1 << nkmp->m.width;
	_nkmp.min_p = 1;
	_nkmp.max_p = nkmp->p.max ?: 1 << ((1 << nkmp->p.width) - 1);

	ccu_nkmp_find_best(parent_rate, rate, &_nkmp);

	/*
	 * If width is 0, GENMASK() macro may not generate expected mask (0)
	 * as it falls under undefined behaviour by C standard due to shifts
	 * which are equal or greater than width of left operand. This can
	 * be easily avoided by explicitly checking if width is 0.
	 */
	if (nkmp->n.width)
		n_mask = GENMASK(nkmp->n.width + nkmp->n.shift - 1,
				 nkmp->n.shift);
	if (nkmp->k.width)
		k_mask = GENMASK(nkmp->k.width + nkmp->k.shift - 1,
				 nkmp->k.shift);
	if (nkmp->m.width)
		m_mask = GENMASK(nkmp->m.width + nkmp->m.shift - 1,
				 nkmp->m.shift);
	if (nkmp->p.width)
		p_mask = GENMASK(nkmp->p.width + nkmp->p.shift - 1,
				 nkmp->p.shift);

	spin_lock_irqsave(nkmp->common.lock, flags);

	/* Read-modify-write: only the four factor fields are touched. */
	reg = readl(nkmp->common.base + nkmp->common.reg);
	reg &= ~(n_mask | k_mask | m_mask | p_mask);

	/* n/k/m are stored minus their offset; p is stored as log2(p). */
	reg |= ((_nkmp.n - nkmp->n.offset) << nkmp->n.shift) & n_mask;
	reg |= ((_nkmp.k - nkmp->k.offset) << nkmp->k.shift) & k_mask;
	reg |= ((_nkmp.m - nkmp->m.offset) << nkmp->m.shift) & m_mask;
	reg |= (ilog2(_nkmp.p) << nkmp->p.shift) & p_mask;

	writel(reg, nkmp->common.base + nkmp->common.reg);

	spin_unlock_irqrestore(nkmp->common.lock, flags);

	ccu_helper_wait_for_lock(&nkmp->common, nkmp->lock);

	return 0;
}
223
/*
 * clk_ops for sunxi NKMP-style PLLs (rate = parent * n * k / (m * p)),
 * exported in the SUNXI_CCU namespace for the per-SoC CCU drivers.
 */
const struct clk_ops ccu_nkmp_ops = {
	.disable	= ccu_nkmp_disable,
	.enable		= ccu_nkmp_enable,
	.is_enabled	= ccu_nkmp_is_enabled,

	.recalc_rate	= ccu_nkmp_recalc_rate,
	.round_rate	= ccu_nkmp_round_rate,
	.set_rate	= ccu_nkmp_set_rate,
};
EXPORT_SYMBOL_NS_GPL(ccu_nkmp_ops, SUNXI_CCU);