// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2016 Maxime Ripard
 * Maxime Ripard <maxime.ripard@free-electrons.com>
 */

#include <linux/clk-provider.h>
#include <linux/io.h>

#include "ccu_gate.h"
#include "ccu_nkmp.h"

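/*
 * Scratch structure used while selecting the N/K/M/P factors: the
 * min_*/max_* fields bound the search space, and the n/k/m/p fields
 * receive the best combination found by ccu_nkmp_find_best().
 */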
struct _ccu_nkmp {
	unsigned long	n, min_n, max_n;
	unsigned long	k, min_k, max_k;
	unsigned long	m, min_m, max_m;
	unsigned long	p, min_p, max_p;
};

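/*
 * rate = parent * N * K / (M * P), computed on a 64-bit intermediate so
 * the multiplication cannot overflow on 32-bit platforms.
 */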
static unsigned long ccu_nkmp_calc_rate(unsigned long parent,
					unsigned long n, unsigned long k,
					unsigned long m, unsigned long p)
{
	u64 rate = parent;

	rate *= n * k;
	do_div(rate, m * p);

	return rate;
}

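/*
 * Exhaustively walk the allowed factor ranges and keep the combination
 * whose rate is closest to the target without exceeding it. P is a
 * power-of-two divider, so it is stepped by shifting instead of by
 * incrementing.
 */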
static void ccu_nkmp_find_best(unsigned long parent, unsigned long rate,
			       struct _ccu_nkmp *nkmp)
{
	unsigned long best_rate = 0;
	unsigned long best_n = 0, best_k = 0, best_m = 0, best_p = 0;
	unsigned long _n, _k, _m, _p;

	for (_k = nkmp->min_k; _k <= nkmp->max_k; _k++) {
		for (_n = nkmp->min_n; _n <= nkmp->max_n; _n++) {
			for (_m = nkmp->min_m; _m <= nkmp->max_m; _m++) {
				for (_p = nkmp->min_p; _p <= nkmp->max_p; _p <<= 1) {
					unsigned long tmp_rate;

					tmp_rate = ccu_nkmp_calc_rate(parent,
								      _n, _k,
								      _m, _p);

					if (tmp_rate > rate)
						continue;

					if ((rate - tmp_rate) < (rate - best_rate)) {
						best_rate = tmp_rate;
						best_n = _n;
						best_k = _k;
						best_m = _m;
						best_p = _p;
					}
				}
			}
		}
	}

	nkmp->n = best_n;
	nkmp->k = best_k;
	nkmp->m = best_m;
	nkmp->p = best_p;
}

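/*
 * Gating is delegated to the shared ccu_gate helpers, using this clock's
 * enable bit mask.
 */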
static void ccu_nkmp_disable(struct clk_hw *hw)
{
	struct ccu_nkmp *nkmp = hw_to_ccu_nkmp(hw);

	return ccu_gate_helper_disable(&nkmp->common, nkmp->enable);
}

static int ccu_nkmp_enable(struct clk_hw *hw)
{
	struct ccu_nkmp *nkmp = hw_to_ccu_nkmp(hw);

	return ccu_gate_helper_enable(&nkmp->common, nkmp->enable);
}

static int ccu_nkmp_is_enabled(struct clk_hw *hw)
{
	struct ccu_nkmp *nkmp = hw_to_ccu_nkmp(hw);

	return ccu_gate_helper_is_enabled(&nkmp->common, nkmp->enable);
}

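/*
 * Decode the current factors from the register: N, K and M get their
 * offset applied and a decoded value of 0 is treated as 1, while P is
 * stored as a power-of-two exponent, hence the 1 << p below.
 */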
static unsigned long ccu_nkmp_recalc_rate(struct clk_hw *hw,
					  unsigned long parent_rate)
{
	struct ccu_nkmp *nkmp = hw_to_ccu_nkmp(hw);
	unsigned long n, m, k, p, rate;
	u32 reg;

	reg = readl(nkmp->common.base + nkmp->common.reg);

	n = reg >> nkmp->n.shift;
	n &= (1 << nkmp->n.width) - 1;
	n += nkmp->n.offset;
	if (!n)
		n++;

	k = reg >> nkmp->k.shift;
	k &= (1 << nkmp->k.width) - 1;
	k += nkmp->k.offset;
	if (!k)
		k++;

	m = reg >> nkmp->m.shift;
	m &= (1 << nkmp->m.width) - 1;
	m += nkmp->m.offset;
	if (!m)
		m++;

	p = reg >> nkmp->p.shift;
	p &= (1 << nkmp->p.width) - 1;

	rate = ccu_nkmp_calc_rate(parent_rate, n, k, m, 1 << p);
	if (nkmp->common.features & CCU_FEATURE_FIXED_POSTDIV)
		rate /= nkmp->fixed_post_div;

	return rate;
}

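/*
 * Pick the closest achievable rate at or below the request. The request
 * is scaled up by the fixed post-divider (when present) before the
 * search and scaled back down afterwards, and anything above max_rate
 * is clamped to it.
 */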
static long ccu_nkmp_round_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long *parent_rate)
{
	struct ccu_nkmp *nkmp = hw_to_ccu_nkmp(hw);
	struct _ccu_nkmp _nkmp;

	if (nkmp->common.features & CCU_FEATURE_FIXED_POSTDIV)
		rate *= nkmp->fixed_post_div;

	if (nkmp->max_rate && rate > nkmp->max_rate) {
		rate = nkmp->max_rate;
		if (nkmp->common.features & CCU_FEATURE_FIXED_POSTDIV)
			rate /= nkmp->fixed_post_div;
		return rate;
	}

	_nkmp.min_n = nkmp->n.min ?: 1;
	_nkmp.max_n = nkmp->n.max ?: 1 << nkmp->n.width;
	_nkmp.min_k = nkmp->k.min ?: 1;
	_nkmp.max_k = nkmp->k.max ?: 1 << nkmp->k.width;
	_nkmp.min_m = 1;
	_nkmp.max_m = nkmp->m.max ?: 1 << nkmp->m.width;
	_nkmp.min_p = 1;
	_nkmp.max_p = nkmp->p.max ?: 1 << ((1 << nkmp->p.width) - 1);

	ccu_nkmp_find_best(*parent_rate, rate, &_nkmp);

	rate = ccu_nkmp_calc_rate(*parent_rate, _nkmp.n, _nkmp.k,
				  _nkmp.m, _nkmp.p);
	if (nkmp->common.features & CCU_FEATURE_FIXED_POSTDIV)
		rate = rate / nkmp->fixed_post_div;

	return rate;
}

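/*
 * Program the best N/K/M/P combination for the requested rate. The
 * factor fields are updated in a single read-modify-write under the
 * CCU spinlock, then we wait for the PLL to report lock.
 */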
static int ccu_nkmp_set_rate(struct clk_hw *hw, unsigned long rate,
			     unsigned long parent_rate)
{
	struct ccu_nkmp *nkmp = hw_to_ccu_nkmp(hw);
	u32 n_mask = 0, k_mask = 0, m_mask = 0, p_mask = 0;
	struct _ccu_nkmp _nkmp;
	unsigned long flags;
	u32 reg;

	if (nkmp->common.features & CCU_FEATURE_FIXED_POSTDIV)
		rate = rate * nkmp->fixed_post_div;

	_nkmp.min_n = nkmp->n.min ?: 1;
	_nkmp.max_n = nkmp->n.max ?: 1 << nkmp->n.width;
	_nkmp.min_k = nkmp->k.min ?: 1;
	_nkmp.max_k = nkmp->k.max ?: 1 << nkmp->k.width;
	_nkmp.min_m = 1;
	_nkmp.max_m = nkmp->m.max ?: 1 << nkmp->m.width;
	_nkmp.min_p = 1;
	_nkmp.max_p = nkmp->p.max ?: 1 << ((1 << nkmp->p.width) - 1);

	ccu_nkmp_find_best(parent_rate, rate, &_nkmp);

	/*
	 * If the width is 0, the GENMASK() macro may not generate the
	 * expected mask (0): shifts equal to or greater than the width of
	 * the left operand are undefined behaviour in the C standard. This
	 * is easily avoided by explicitly checking whether the width is 0.
	 */
	if (nkmp->n.width)
		n_mask = GENMASK(nkmp->n.width + nkmp->n.shift - 1,
				 nkmp->n.shift);
	if (nkmp->k.width)
		k_mask = GENMASK(nkmp->k.width + nkmp->k.shift - 1,
				 nkmp->k.shift);
	if (nkmp->m.width)
		m_mask = GENMASK(nkmp->m.width + nkmp->m.shift - 1,
				 nkmp->m.shift);
	if (nkmp->p.width)
		p_mask = GENMASK(nkmp->p.width + nkmp->p.shift - 1,
				 nkmp->p.shift);

	spin_lock_irqsave(nkmp->common.lock, flags);

	reg = readl(nkmp->common.base + nkmp->common.reg);
	reg &= ~(n_mask | k_mask | m_mask | p_mask);

	reg |= ((_nkmp.n - nkmp->n.offset) << nkmp->n.shift) & n_mask;
	reg |= ((_nkmp.k - nkmp->k.offset) << nkmp->k.shift) & k_mask;
	reg |= ((_nkmp.m - nkmp->m.offset) << nkmp->m.shift) & m_mask;
	reg |= (ilog2(_nkmp.p) << nkmp->p.shift) & p_mask;

	writel(reg, nkmp->common.base + nkmp->common.reg);

	spin_unlock_irqrestore(nkmp->common.lock, flags);

	ccu_helper_wait_for_lock(&nkmp->common, nkmp->lock);

	return 0;
}

const struct clk_ops ccu_nkmp_ops = {
	.disable	= ccu_nkmp_disable,
	.enable		= ccu_nkmp_enable,
	.is_enabled	= ccu_nkmp_is_enabled,

	.recalc_rate	= ccu_nkmp_recalc_rate,
	.round_rate	= ccu_nkmp_round_rate,
	.set_rate	= ccu_nkmp_set_rate,
};
EXPORT_SYMBOL_NS_GPL(ccu_nkmp_ops, SUNXI_CCU);