// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright 2012 Freescale Semiconductor, Inc.
 * Copyright 2012 Linaro Ltd.
 */

#include <linux/clk-provider.h>
#include <linux/delay.h>
#include <linux/io.h>
#include <linux/iopoll.h>
#include <linux/slab.h>
#include <linux/jiffies.h>
#include <linux/err.h>
#include "clk.h"

#define PLL_NUM_OFFSET		0x10
#define PLL_DENOM_OFFSET	0x20
#define PLL_IMX7_NUM_OFFSET	0x20
#define PLL_IMX7_DENOM_OFFSET	0x30

#define PLL_VF610_NUM_OFFSET	0x20
#define PLL_VF610_DENOM_OFFSET	0x30

#define BM_PLL_POWER		(0x1 << 12)
#define BM_PLL_LOCK		(0x1 << 31)
#define IMX7_ENET_PLL_POWER	(0x1 << 5)
#define IMX7_DDR_PLL_POWER	(0x1 << 20)

#define PLL_LOCK_TIMEOUT	10000

/**
 * struct clk_pllv3 - IMX PLL clock version 3
 * @hw:		clock source
 * @base:	base address of PLL registers
 * @power_bit:	pll power bit mask
 * @powerup_set: set power_bit to power up the PLL
 * @div_mask:	mask of divider bits
 * @div_shift:	shift of divider bits
 * @ref_clock:	reference clock rate
 * @num_offset:	num register offset
 * @denom_offset: denom register offset
 *
 * IMX PLL clock version 3, found on i.MX6 series. Divider for pllv3
 * is actually a multiplier, and always sits at bit 0.
 */
struct clk_pllv3 {
	struct clk_hw	hw;
	void __iomem	*base;
	u32		power_bit;
	bool		powerup_set;
	u32		div_mask;
	u32		div_shift;
	unsigned long	ref_clock;
	u32		num_offset;
	u32		denom_offset;
};

#define to_clk_pllv3(_hw) container_of(_hw, struct clk_pllv3, hw)

static int clk_pllv3_wait_lock(struct clk_pllv3 *pll)
{
	u32 val = readl_relaxed(pll->base) & pll->power_bit;

	/* No need to wait for lock when pll is not powered up */
	if ((pll->powerup_set && !val) || (!pll->powerup_set && val))
		return 0;

	return readl_relaxed_poll_timeout(pll->base, val, val & BM_PLL_LOCK,
					  500, PLL_LOCK_TIMEOUT);
}

static int clk_pllv3_prepare(struct clk_hw *hw)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);
	u32 val;

	val = readl_relaxed(pll->base);
	if (pll->powerup_set)
		val |= pll->power_bit;
	else
		val &= ~pll->power_bit;
	writel_relaxed(val, pll->base);

	return clk_pllv3_wait_lock(pll);
}

static void clk_pllv3_unprepare(struct clk_hw *hw)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);
	u32 val;

	val = readl_relaxed(pll->base);
	if (pll->powerup_set)
		val &= ~pll->power_bit;
	else
		val |= pll->power_bit;
	writel_relaxed(val, pll->base);
}

static int clk_pllv3_is_prepared(struct clk_hw *hw)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);

	if (readl_relaxed(pll->base) & BM_PLL_LOCK)
		return 1;

	return 0;
}

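/*
 * Generic/USB PLL: the single divider bit selects a fixed multiplier,
 * so Fout = Fref * 20 (bit clear) or Fref * 22 (bit set).
 */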
static unsigned long clk_pllv3_recalc_rate(struct clk_hw *hw,
					   unsigned long parent_rate)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);
	u32 div = (readl_relaxed(pll->base) >> pll->div_shift) & pll->div_mask;

	return (div == 1) ? parent_rate * 22 : parent_rate * 20;
}

static long clk_pllv3_round_rate(struct clk_hw *hw, unsigned long rate,
				 unsigned long *prate)
{
	unsigned long parent_rate = *prate;

	return (rate >= parent_rate * 22) ? parent_rate * 22 :
					    parent_rate * 20;
}

static int clk_pllv3_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);
	u32 val, div;

	if (rate == parent_rate * 22)
		div = 1;
	else if (rate == parent_rate * 20)
		div = 0;
	else
		return -EINVAL;

	val = readl_relaxed(pll->base);
	val &= ~(pll->div_mask << pll->div_shift);
	val |= (div << pll->div_shift);
	writel_relaxed(val, pll->base);

	return clk_pllv3_wait_lock(pll);
}

static const struct clk_ops clk_pllv3_ops = {
	.prepare	= clk_pllv3_prepare,
	.unprepare	= clk_pllv3_unprepare,
	.is_prepared	= clk_pllv3_is_prepared,
	.recalc_rate	= clk_pllv3_recalc_rate,
	.round_rate	= clk_pllv3_round_rate,
	.set_rate	= clk_pllv3_set_rate,
};

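/*
 * System PLL: Fout = Fref * div / 2, with div clamped to [54, 108]
 * (e.g. a 24 MHz reference gives 648 MHz to 1296 MHz).
 */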
static unsigned long clk_pllv3_sys_recalc_rate(struct clk_hw *hw,
					       unsigned long parent_rate)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);
	u32 div = readl_relaxed(pll->base) & pll->div_mask;

	return parent_rate * div / 2;
}

static long clk_pllv3_sys_round_rate(struct clk_hw *hw, unsigned long rate,
				     unsigned long *prate)
{
	unsigned long parent_rate = *prate;
	unsigned long min_rate = parent_rate * 54 / 2;
	unsigned long max_rate = parent_rate * 108 / 2;
	u32 div;

	if (rate > max_rate)
		rate = max_rate;
	else if (rate < min_rate)
		rate = min_rate;
	div = rate * 2 / parent_rate;

	return parent_rate * div / 2;
}

static int clk_pllv3_sys_set_rate(struct clk_hw *hw, unsigned long rate,
				  unsigned long parent_rate)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);
	unsigned long min_rate = parent_rate * 54 / 2;
	unsigned long max_rate = parent_rate * 108 / 2;
	u32 val, div;

	if (rate < min_rate || rate > max_rate)
		return -EINVAL;

	div = rate * 2 / parent_rate;
	val = readl_relaxed(pll->base);
	val &= ~pll->div_mask;
	val |= div;
	writel_relaxed(val, pll->base);

	return clk_pllv3_wait_lock(pll);
}

static const struct clk_ops clk_pllv3_sys_ops = {
	.prepare	= clk_pllv3_prepare,
	.unprepare	= clk_pllv3_unprepare,
	.is_prepared	= clk_pllv3_is_prepared,
	.recalc_rate	= clk_pllv3_sys_recalc_rate,
	.round_rate	= clk_pllv3_sys_round_rate,
	.set_rate	= clk_pllv3_sys_set_rate,
};

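/*
 * Audio/video PLL: fractional multiplier, Fout = Fref * (div + mfn/mfd),
 * with div clamped to [27, 54].
 */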
static unsigned long clk_pllv3_av_recalc_rate(struct clk_hw *hw,
					      unsigned long parent_rate)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);
	u32 mfn = readl_relaxed(pll->base + pll->num_offset);
	u32 mfd = readl_relaxed(pll->base + pll->denom_offset);
	u32 div = readl_relaxed(pll->base) & pll->div_mask;
	u64 temp64 = (u64)parent_rate;

	temp64 *= mfn;
	do_div(temp64, mfd);

	return parent_rate * div + (unsigned long)temp64;
}

static long clk_pllv3_av_round_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long *prate)
{
	unsigned long parent_rate = *prate;
	unsigned long min_rate = parent_rate * 27;
	unsigned long max_rate = parent_rate * 54;
	u32 div;
	u32 mfn, mfd = 1000000;
	u32 max_mfd = 0x3FFFFFFF;
	u64 temp64;

	if (rate > max_rate)
		rate = max_rate;
	else if (rate < min_rate)
		rate = min_rate;

	if (parent_rate <= max_mfd)
		mfd = parent_rate;

	div = rate / parent_rate;
	temp64 = (u64) (rate - div * parent_rate);
	temp64 *= mfd;
	do_div(temp64, parent_rate);
	mfn = temp64;

	temp64 = (u64)parent_rate;
	temp64 *= mfn;
	do_div(temp64, mfd);

	return parent_rate * div + (unsigned long)temp64;
}

static int clk_pllv3_av_set_rate(struct clk_hw *hw, unsigned long rate,
				 unsigned long parent_rate)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);
	unsigned long min_rate = parent_rate * 27;
	unsigned long max_rate = parent_rate * 54;
	u32 val, div;
	u32 mfn, mfd = 1000000;
	u32 max_mfd = 0x3FFFFFFF;
	u64 temp64;

	if (rate < min_rate || rate > max_rate)
		return -EINVAL;

	if (parent_rate <= max_mfd)
		mfd = parent_rate;

	div = rate / parent_rate;
	temp64 = (u64) (rate - div * parent_rate);
	temp64 *= mfd;
	do_div(temp64, parent_rate);
	mfn = temp64;

	val = readl_relaxed(pll->base);
	val &= ~pll->div_mask;
	val |= div;
	writel_relaxed(val, pll->base);
	writel_relaxed(mfn, pll->base + pll->num_offset);
	writel_relaxed(mfd, pll->base + pll->denom_offset);

	return clk_pllv3_wait_lock(pll);
}

static const struct clk_ops clk_pllv3_av_ops = {
	.prepare	= clk_pllv3_prepare,
	.unprepare	= clk_pllv3_unprepare,
	.is_prepared	= clk_pllv3_is_prepared,
	.recalc_rate	= clk_pllv3_av_recalc_rate,
	.round_rate	= clk_pllv3_av_round_rate,
	.set_rate	= clk_pllv3_av_set_rate,
};

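/*
 * VF610 system PLL: Fout = Fref * (mfi + mfn/mfd), where the single
 * divider bit selects mfi = 20 or 22 and mfn/mfd form the fractional part.
 */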
struct clk_pllv3_vf610_mf {
	u32 mfi;	/* integer part, can be 20 or 22 */
	u32 mfn;	/* numerator, 30-bit value */
	u32 mfd;	/* denominator, 30-bit value, must be greater than mfn */
};

static unsigned long clk_pllv3_vf610_mf_to_rate(unsigned long parent_rate,
						struct clk_pllv3_vf610_mf mf)
{
	u64 temp64;

	temp64 = parent_rate;
	temp64 *= mf.mfn;
	do_div(temp64, mf.mfd);

	return (parent_rate * mf.mfi) + temp64;
}

static struct clk_pllv3_vf610_mf clk_pllv3_vf610_rate_to_mf(
		unsigned long parent_rate, unsigned long rate)
{
	struct clk_pllv3_vf610_mf mf;
	u64 temp64;

	mf.mfi = (rate >= 22 * parent_rate) ? 22 : 20;
	mf.mfd = 0x3fffffff;	/* use max supported value for best accuracy */

	if (rate <= parent_rate * mf.mfi)
		mf.mfn = 0;
	else if (rate >= parent_rate * (mf.mfi + 1))
		mf.mfn = mf.mfd - 1;
	else {
		/* rate = parent_rate * (mfi + mfn/mfd) */
		temp64 = rate - parent_rate * mf.mfi;
		temp64 *= mf.mfd;
		do_div(temp64, parent_rate);
		mf.mfn = temp64;
	}

	return mf;
}

static unsigned long clk_pllv3_vf610_recalc_rate(struct clk_hw *hw,
						 unsigned long parent_rate)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);
	struct clk_pllv3_vf610_mf mf;

	mf.mfn = readl_relaxed(pll->base + pll->num_offset);
	mf.mfd = readl_relaxed(pll->base + pll->denom_offset);
	mf.mfi = (readl_relaxed(pll->base) & pll->div_mask) ? 22 : 20;

	return clk_pllv3_vf610_mf_to_rate(parent_rate, mf);
}

static long clk_pllv3_vf610_round_rate(struct clk_hw *hw, unsigned long rate,
				       unsigned long *prate)
{
	struct clk_pllv3_vf610_mf mf = clk_pllv3_vf610_rate_to_mf(*prate, rate);

	return clk_pllv3_vf610_mf_to_rate(*prate, mf);
}

static int clk_pllv3_vf610_set_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long parent_rate)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);
	struct clk_pllv3_vf610_mf mf =
			clk_pllv3_vf610_rate_to_mf(parent_rate, rate);
	u32 val;

	val = readl_relaxed(pll->base);
	if (mf.mfi == 20)
		val &= ~pll->div_mask;	/* clear bit for mfi=20 */
	else
		val |= pll->div_mask;	/* set bit for mfi=22 */
	writel_relaxed(val, pll->base);

	writel_relaxed(mf.mfn, pll->base + pll->num_offset);
	writel_relaxed(mf.mfd, pll->base + pll->denom_offset);

	return clk_pllv3_wait_lock(pll);
}

static const struct clk_ops clk_pllv3_vf610_ops = {
	.prepare	= clk_pllv3_prepare,
	.unprepare	= clk_pllv3_unprepare,
	.is_prepared	= clk_pllv3_is_prepared,
	.recalc_rate	= clk_pllv3_vf610_recalc_rate,
	.round_rate	= clk_pllv3_vf610_round_rate,
	.set_rate	= clk_pllv3_vf610_set_rate,
};

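/*
 * ENET PLL: the output rate is fixed (500 MHz, or 1 GHz on i.MX7), so
 * only power/lock handling is needed and recalc returns ref_clock as-is.
 */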
static unsigned long clk_pllv3_enet_recalc_rate(struct clk_hw *hw,
						unsigned long parent_rate)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);

	return pll->ref_clock;
}

static const struct clk_ops clk_pllv3_enet_ops = {
	.prepare	= clk_pllv3_prepare,
	.unprepare	= clk_pllv3_unprepare,
	.is_prepared	= clk_pllv3_is_prepared,
	.recalc_rate	= clk_pllv3_enet_recalc_rate,
};

struct clk_hw *imx_clk_hw_pllv3(enum imx_pllv3_type type, const char *name,
				const char *parent_name, void __iomem *base,
				u32 div_mask)
{
	struct clk_pllv3 *pll;
	const struct clk_ops *ops;
	struct clk_hw *hw;
	struct clk_init_data init;
	int ret;

	pll = kzalloc(sizeof(*pll), GFP_KERNEL);
	if (!pll)
		return ERR_PTR(-ENOMEM);

	pll->power_bit = BM_PLL_POWER;
	pll->num_offset = PLL_NUM_OFFSET;
	pll->denom_offset = PLL_DENOM_OFFSET;

	switch (type) {
	case IMX_PLLV3_SYS:
		ops = &clk_pllv3_sys_ops;
		break;
	case IMX_PLLV3_SYS_VF610:
		ops = &clk_pllv3_vf610_ops;
		pll->num_offset = PLL_VF610_NUM_OFFSET;
		pll->denom_offset = PLL_VF610_DENOM_OFFSET;
		break;
	case IMX_PLLV3_USB_VF610:
		pll->div_shift = 1;
		fallthrough;
	case IMX_PLLV3_USB:
		ops = &clk_pllv3_ops;
		pll->powerup_set = true;
		break;
	case IMX_PLLV3_AV_IMX7:
		pll->num_offset = PLL_IMX7_NUM_OFFSET;
		pll->denom_offset = PLL_IMX7_DENOM_OFFSET;
		fallthrough;
	case IMX_PLLV3_AV:
		ops = &clk_pllv3_av_ops;
		break;
	case IMX_PLLV3_ENET_IMX7:
		pll->power_bit = IMX7_ENET_PLL_POWER;
		pll->ref_clock = 1000000000;
		ops = &clk_pllv3_enet_ops;
		break;
	case IMX_PLLV3_ENET:
		pll->ref_clock = 500000000;
		ops = &clk_pllv3_enet_ops;
		break;
	case IMX_PLLV3_DDR_IMX7:
		pll->power_bit = IMX7_DDR_PLL_POWER;
		pll->num_offset = PLL_IMX7_NUM_OFFSET;
		pll->denom_offset = PLL_IMX7_DENOM_OFFSET;
		ops = &clk_pllv3_av_ops;
		break;
	default:
		ops = &clk_pllv3_ops;
	}
	pll->base = base;
	pll->div_mask = div_mask;

	init.name = name;
	init.ops = ops;
	init.flags = 0;
	init.parent_names = &parent_name;
	init.num_parents = 1;

	pll->hw.init = &init;
	hw = &pll->hw;

	ret = clk_hw_register(NULL, hw);
	if (ret) {
		kfree(pll);
		return ERR_PTR(ret);
	}

	return hw;
}
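
/*
 * Illustrative usage sketch (not part of this driver): an i.MX SoC clock
 * provider typically registers a pllv3 along these lines, where "osc" is
 * the 24 MHz reference and 0x7f is the DIV_SELECT field mask; exact index
 * and offsets depend on the SoC:
 *
 *	hws[IMX6QDL_CLK_PLL1_SYS] = imx_clk_hw_pllv3(IMX_PLLV3_SYS,
 *				"pll1_sys", "osc", base + 0x00, 0x7f);
 */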