/*
 * Copyright (C) 2016 Maxime Ripard
 * Maxime Ripard <maxime.ripard@free-electrons.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 */

#include <linux/clk-provider.h>
#include <linux/io.h>

#include "ccu_gate.h"
#include "ccu_nkmp.h"

struct _ccu_nkmp {
        unsigned long   n, min_n, max_n;
        unsigned long   k, min_k, max_k;
        unsigned long   m, min_m, max_m;
        unsigned long   p, min_p, max_p;
};

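/*
 * An NKMP clock derives its rate from the parent as
 * parent * N * K / (M * P). For example, a 24 MHz parent with N = 25,
 * K = 1, M = 1 and P = 1 gives 600 MHz.
 */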
static unsigned long ccu_nkmp_calc_rate(unsigned long parent,
                                        unsigned long n, unsigned long k,
                                        unsigned long m, unsigned long p)
{
        u64 rate = parent;

        rate *= n * k;
        do_div(rate, m * p);

        return rate;
}

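/*
 * Exhaustively walk the allowed N, K, M and P ranges (P is a
 * power-of-two divider, so it is stepped by shifting) and keep the
 * combination whose rate is closest to the target without going over.
 */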
static void ccu_nkmp_find_best(unsigned long parent, unsigned long rate,
                               struct _ccu_nkmp *nkmp)
{
        unsigned long best_rate = 0;
        unsigned long best_n = 0, best_k = 0, best_m = 0, best_p = 0;
        unsigned long _n, _k, _m, _p;

        for (_k = nkmp->min_k; _k <= nkmp->max_k; _k++) {
                for (_n = nkmp->min_n; _n <= nkmp->max_n; _n++) {
                        for (_m = nkmp->min_m; _m <= nkmp->max_m; _m++) {
                                for (_p = nkmp->min_p; _p <= nkmp->max_p; _p <<= 1) {
                                        unsigned long tmp_rate;

                                        tmp_rate = ccu_nkmp_calc_rate(parent,
                                                                      _n, _k,
                                                                      _m, _p);

                                        if (tmp_rate > rate)
                                                continue;

                                        if ((rate - tmp_rate) < (rate - best_rate)) {
                                                best_rate = tmp_rate;
                                                best_n = _n;
                                                best_k = _k;
                                                best_m = _m;
                                                best_p = _p;
                                        }
                                }
                        }
                }
        }

        nkmp->n = best_n;
        nkmp->k = best_k;
        nkmp->m = best_m;
        nkmp->p = best_p;
}

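/* Gating is delegated to the shared gate helpers, using the enable bit mask. */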
static void ccu_nkmp_disable(struct clk_hw *hw)
{
        struct ccu_nkmp *nkmp = hw_to_ccu_nkmp(hw);

        return ccu_gate_helper_disable(&nkmp->common, nkmp->enable);
}

static int ccu_nkmp_enable(struct clk_hw *hw)
{
        struct ccu_nkmp *nkmp = hw_to_ccu_nkmp(hw);

        return ccu_gate_helper_enable(&nkmp->common, nkmp->enable);
}

static int ccu_nkmp_is_enabled(struct clk_hw *hw)
{
        struct ccu_nkmp *nkmp = hw_to_ccu_nkmp(hw);

        return ccu_gate_helper_is_enabled(&nkmp->common, nkmp->enable);
}

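/*
 * N, K and M are masked out of the register, adjusted by their
 * per-factor offset and treated as 1 when they decode to zero. The P
 * field holds a power-of-two exponent, hence the 1 << p passed to
 * ccu_nkmp_calc_rate(). An optional fixed post-divider is applied last.
 */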
static unsigned long ccu_nkmp_recalc_rate(struct clk_hw *hw,
                                          unsigned long parent_rate)
{
        struct ccu_nkmp *nkmp = hw_to_ccu_nkmp(hw);
        unsigned long n, m, k, p, rate;
        u32 reg;

        reg = readl(nkmp->common.base + nkmp->common.reg);

        n = reg >> nkmp->n.shift;
        n &= (1 << nkmp->n.width) - 1;
        n += nkmp->n.offset;
        if (!n)
                n++;

        k = reg >> nkmp->k.shift;
        k &= (1 << nkmp->k.width) - 1;
        k += nkmp->k.offset;
        if (!k)
                k++;

        m = reg >> nkmp->m.shift;
        m &= (1 << nkmp->m.width) - 1;
        m += nkmp->m.offset;
        if (!m)
                m++;

        p = reg >> nkmp->p.shift;
        p &= (1 << nkmp->p.width) - 1;

        rate = ccu_nkmp_calc_rate(parent_rate, n, k, m, 1 << p);
        if (nkmp->common.features & CCU_FEATURE_FIXED_POSTDIV)
                rate /= nkmp->fixed_post_div;

        return rate;
}

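/*
 * When a factor has no explicit limits, its range defaults to the full
 * register field: 1..2^width for N, K and M, and 1..2^(2^width - 1) for
 * P, since the hardware stores P as a log2 exponent. Requests above
 * max_rate are clamped to max_rate.
 */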
static long ccu_nkmp_round_rate(struct clk_hw *hw, unsigned long rate,
                                unsigned long *parent_rate)
{
        struct ccu_nkmp *nkmp = hw_to_ccu_nkmp(hw);
        struct _ccu_nkmp _nkmp;

        if (nkmp->common.features & CCU_FEATURE_FIXED_POSTDIV)
                rate *= nkmp->fixed_post_div;

        if (nkmp->max_rate && rate > nkmp->max_rate) {
                rate = nkmp->max_rate;
                if (nkmp->common.features & CCU_FEATURE_FIXED_POSTDIV)
                        rate /= nkmp->fixed_post_div;
                return rate;
        }

        _nkmp.min_n = nkmp->n.min ?: 1;
        _nkmp.max_n = nkmp->n.max ?: 1 << nkmp->n.width;
        _nkmp.min_k = nkmp->k.min ?: 1;
        _nkmp.max_k = nkmp->k.max ?: 1 << nkmp->k.width;
        _nkmp.min_m = 1;
        _nkmp.max_m = nkmp->m.max ?: 1 << nkmp->m.width;
        _nkmp.min_p = 1;
        _nkmp.max_p = nkmp->p.max ?: 1 << ((1 << nkmp->p.width) - 1);

        ccu_nkmp_find_best(*parent_rate, rate, &_nkmp);

        rate = ccu_nkmp_calc_rate(*parent_rate, _nkmp.n, _nkmp.k,
                                  _nkmp.m, _nkmp.p);
        if (nkmp->common.features & CCU_FEATURE_FIXED_POSTDIV)
                rate = rate / nkmp->fixed_post_div;

        return rate;
}

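/*
 * Recompute the best factors for the target rate and program them:
 * N, K and M are written with their offsets subtracted, P is written
 * as ilog2(P), and the helper then waits for the PLL lock bit before
 * returning.
 */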
static int ccu_nkmp_set_rate(struct clk_hw *hw, unsigned long rate,
                             unsigned long parent_rate)
{
        struct ccu_nkmp *nkmp = hw_to_ccu_nkmp(hw);
        u32 n_mask = 0, k_mask = 0, m_mask = 0, p_mask = 0;
        struct _ccu_nkmp _nkmp;
        unsigned long flags;
        u32 reg;

        if (nkmp->common.features & CCU_FEATURE_FIXED_POSTDIV)
                rate = rate * nkmp->fixed_post_div;

        _nkmp.min_n = nkmp->n.min ?: 1;
        _nkmp.max_n = nkmp->n.max ?: 1 << nkmp->n.width;
        _nkmp.min_k = nkmp->k.min ?: 1;
        _nkmp.max_k = nkmp->k.max ?: 1 << nkmp->k.width;
        _nkmp.min_m = 1;
        _nkmp.max_m = nkmp->m.max ?: 1 << nkmp->m.width;
        _nkmp.min_p = 1;
        _nkmp.max_p = nkmp->p.max ?: 1 << ((1 << nkmp->p.width) - 1);

        ccu_nkmp_find_best(parent_rate, rate, &_nkmp);

        /*
         * If the width is 0, the GENMASK() macro may not generate the
         * expected mask (0): shifting by a count equal to or greater
         * than the width of the left operand is undefined behaviour in
         * the C standard. This is easily avoided by explicitly checking
         * whether the width is 0.
         */
        if (nkmp->n.width)
                n_mask = GENMASK(nkmp->n.width + nkmp->n.shift - 1,
                                 nkmp->n.shift);
        if (nkmp->k.width)
                k_mask = GENMASK(nkmp->k.width + nkmp->k.shift - 1,
                                 nkmp->k.shift);
        if (nkmp->m.width)
                m_mask = GENMASK(nkmp->m.width + nkmp->m.shift - 1,
                                 nkmp->m.shift);
        if (nkmp->p.width)
                p_mask = GENMASK(nkmp->p.width + nkmp->p.shift - 1,
                                 nkmp->p.shift);

        spin_lock_irqsave(nkmp->common.lock, flags);

        reg = readl(nkmp->common.base + nkmp->common.reg);
        reg &= ~(n_mask | k_mask | m_mask | p_mask);

        reg |= ((_nkmp.n - nkmp->n.offset) << nkmp->n.shift) & n_mask;
        reg |= ((_nkmp.k - nkmp->k.offset) << nkmp->k.shift) & k_mask;
        reg |= ((_nkmp.m - nkmp->m.offset) << nkmp->m.shift) & m_mask;
        reg |= (ilog2(_nkmp.p) << nkmp->p.shift) & p_mask;

        writel(reg, nkmp->common.base + nkmp->common.reg);

        spin_unlock_irqrestore(nkmp->common.lock, flags);

        ccu_helper_wait_for_lock(&nkmp->common, nkmp->lock);

        return 0;
}

const struct clk_ops ccu_nkmp_ops = {
        .disable        = ccu_nkmp_disable,
        .enable         = ccu_nkmp_enable,
        .is_enabled     = ccu_nkmp_is_enabled,

        .recalc_rate    = ccu_nkmp_recalc_rate,
        .round_rate     = ccu_nkmp_round_rate,
        .set_rate       = ccu_nkmp_set_rate,
};