Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2012 Freescale Semiconductor, Inc. |
| 3 | * Copyright 2012 Linaro Ltd. |
| 4 | * |
| 5 | * The code contained herein is licensed under the GNU General Public |
| 6 | * License. You may obtain a copy of the GNU General Public License |
| 7 | * Version 2 or later at the following locations: |
| 8 | * |
| 9 | * http://www.opensource.org/licenses/gpl-license.html |
| 10 | * http://www.gnu.org/copyleft/gpl.html |
| 11 | */ |
| 12 | |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 13 | #include <linux/clk-provider.h> |
Shawn Guo | 322503a | 2013-10-30 15:12:55 +0800 | [diff] [blame] | 14 | #include <linux/delay.h> |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 15 | #include <linux/io.h> |
| 16 | #include <linux/slab.h> |
| 17 | #include <linux/jiffies.h> |
| 18 | #include <linux/err.h> |
| 19 | #include "clk.h" |
| 20 | |
/* Offsets of the fractional-divider numerator/denominator registers */
#define PLL_NUM_OFFSET		0x10
#define PLL_DENOM_OFFSET	0x20

/* Vybrid (VF610) places the numerator/denominator at different offsets */
#define PLL_VF610_NUM_OFFSET	0x20
#define PLL_VF610_DENOM_OFFSET	0x30

#define BM_PLL_POWER		(0x1 << 12)
/*
 * Use an unsigned constant: (0x1 << 31) shifts into the sign bit of a
 * signed int, which is undefined behavior in C.
 */
#define BM_PLL_LOCK		(0x1u << 31)
/* i.MX7 ENET PLL uses a different bit position for power control */
#define IMX7_ENET_PLL_POWER	(0x1 << 5)
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 30 | |
/**
 * struct clk_pllv3 - IMX PLL clock version 3
 * @hw: clock source
 * @base: base address of PLL registers
 * @power_bit: pll power bit mask
 * @powerup_set: set power_bit to power up the PLL
 * @div_mask: mask of divider bits
 * @div_shift: shift of divider bits
 * @ref_clock: fixed output rate in Hz, used only by the ENET-type ops
 *
 * IMX PLL clock version 3, found on i.MX6 series. Divider for pllv3
 * is actually a multiplier, and always sits at bit 0.
 */
struct clk_pllv3 {
	struct clk_hw	hw;
	void __iomem	*base;
	u32		power_bit;
	bool		powerup_set;
	u32		div_mask;
	u32		div_shift;
	unsigned long	ref_clock;
};

/* Recover the driver struct from the clk framework's embedded clk_hw */
#define to_clk_pllv3(_hw) container_of(_hw, struct clk_pllv3, hw)
| 54 | |
Shawn Guo | bc3b84d | 2013-10-30 15:56:22 +0800 | [diff] [blame] | 55 | static int clk_pllv3_wait_lock(struct clk_pllv3 *pll) |
| 56 | { |
| 57 | unsigned long timeout = jiffies + msecs_to_jiffies(10); |
Dong Aisheng | c684766 | 2016-06-13 20:24:52 +0800 | [diff] [blame] | 58 | u32 val = readl_relaxed(pll->base) & pll->power_bit; |
Shawn Guo | bc3b84d | 2013-10-30 15:56:22 +0800 | [diff] [blame] | 59 | |
| 60 | /* No need to wait for lock when pll is not powered up */ |
| 61 | if ((pll->powerup_set && !val) || (!pll->powerup_set && val)) |
| 62 | return 0; |
| 63 | |
| 64 | /* Wait for PLL to lock */ |
| 65 | do { |
| 66 | if (readl_relaxed(pll->base) & BM_PLL_LOCK) |
| 67 | break; |
| 68 | if (time_after(jiffies, timeout)) |
| 69 | break; |
| 70 | usleep_range(50, 500); |
| 71 | } while (1); |
| 72 | |
| 73 | return readl_relaxed(pll->base) & BM_PLL_LOCK ? 0 : -ETIMEDOUT; |
| 74 | } |
| 75 | |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 76 | static int clk_pllv3_prepare(struct clk_hw *hw) |
| 77 | { |
| 78 | struct clk_pllv3 *pll = to_clk_pllv3(hw); |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 79 | u32 val; |
| 80 | |
| 81 | val = readl_relaxed(pll->base); |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 82 | if (pll->powerup_set) |
Dong Aisheng | c684766 | 2016-06-13 20:24:52 +0800 | [diff] [blame] | 83 | val |= pll->power_bit; |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 84 | else |
Dong Aisheng | c684766 | 2016-06-13 20:24:52 +0800 | [diff] [blame] | 85 | val &= ~pll->power_bit; |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 86 | writel_relaxed(val, pll->base); |
| 87 | |
Dmitry Voytik | c400f7a | 2014-11-06 22:49:32 +0400 | [diff] [blame] | 88 | return clk_pllv3_wait_lock(pll); |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 89 | } |
| 90 | |
| 91 | static void clk_pllv3_unprepare(struct clk_hw *hw) |
| 92 | { |
| 93 | struct clk_pllv3 *pll = to_clk_pllv3(hw); |
| 94 | u32 val; |
| 95 | |
| 96 | val = readl_relaxed(pll->base); |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 97 | if (pll->powerup_set) |
Dong Aisheng | c684766 | 2016-06-13 20:24:52 +0800 | [diff] [blame] | 98 | val &= ~pll->power_bit; |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 99 | else |
Dong Aisheng | c684766 | 2016-06-13 20:24:52 +0800 | [diff] [blame] | 100 | val |= pll->power_bit; |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 101 | writel_relaxed(val, pll->base); |
| 102 | } |
| 103 | |
Bai Ping | 4824b61 | 2015-11-25 00:06:53 +0800 | [diff] [blame] | 104 | static int clk_pllv3_is_prepared(struct clk_hw *hw) |
| 105 | { |
| 106 | struct clk_pllv3 *pll = to_clk_pllv3(hw); |
| 107 | |
| 108 | if (readl_relaxed(pll->base) & BM_PLL_LOCK) |
| 109 | return 1; |
| 110 | |
| 111 | return 0; |
| 112 | } |
| 113 | |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 114 | static unsigned long clk_pllv3_recalc_rate(struct clk_hw *hw, |
| 115 | unsigned long parent_rate) |
| 116 | { |
| 117 | struct clk_pllv3 *pll = to_clk_pllv3(hw); |
Stefan Agner | 60ad846 | 2014-12-02 17:59:42 +0100 | [diff] [blame] | 118 | u32 div = (readl_relaxed(pll->base) >> pll->div_shift) & pll->div_mask; |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 119 | |
| 120 | return (div == 1) ? parent_rate * 22 : parent_rate * 20; |
| 121 | } |
| 122 | |
/* Only two output rates exist: parent x20 and parent x22. */
static long clk_pllv3_round_rate(struct clk_hw *hw, unsigned long rate,
				 unsigned long *prate)
{
	unsigned long parent_rate = *prate;

	if (rate >= parent_rate * 22)
		return parent_rate * 22;
	return parent_rate * 20;
}
| 131 | |
| 132 | static int clk_pllv3_set_rate(struct clk_hw *hw, unsigned long rate, |
| 133 | unsigned long parent_rate) |
| 134 | { |
| 135 | struct clk_pllv3 *pll = to_clk_pllv3(hw); |
| 136 | u32 val, div; |
| 137 | |
| 138 | if (rate == parent_rate * 22) |
| 139 | div = 1; |
| 140 | else if (rate == parent_rate * 20) |
| 141 | div = 0; |
| 142 | else |
| 143 | return -EINVAL; |
| 144 | |
| 145 | val = readl_relaxed(pll->base); |
Stefan Agner | 60ad846 | 2014-12-02 17:59:42 +0100 | [diff] [blame] | 146 | val &= ~(pll->div_mask << pll->div_shift); |
| 147 | val |= (div << pll->div_shift); |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 148 | writel_relaxed(val, pll->base); |
| 149 | |
Shawn Guo | bc3b84d | 2013-10-30 15:56:22 +0800 | [diff] [blame] | 150 | return clk_pllv3_wait_lock(pll); |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 151 | } |
| 152 | |
/*
 * Generic pllv3 (USB-type) operations: fixed x20/x22 multiplier selected
 * by a single divider bit.
 */
static const struct clk_ops clk_pllv3_ops = {
	.prepare	= clk_pllv3_prepare,
	.unprepare	= clk_pllv3_unprepare,
	.is_prepared	= clk_pllv3_is_prepared,
	.recalc_rate	= clk_pllv3_recalc_rate,
	.round_rate	= clk_pllv3_round_rate,
	.set_rate	= clk_pllv3_set_rate,
};
| 161 | |
| 162 | static unsigned long clk_pllv3_sys_recalc_rate(struct clk_hw *hw, |
| 163 | unsigned long parent_rate) |
| 164 | { |
| 165 | struct clk_pllv3 *pll = to_clk_pllv3(hw); |
| 166 | u32 div = readl_relaxed(pll->base) & pll->div_mask; |
| 167 | |
| 168 | return parent_rate * div / 2; |
| 169 | } |
| 170 | |
| 171 | static long clk_pllv3_sys_round_rate(struct clk_hw *hw, unsigned long rate, |
| 172 | unsigned long *prate) |
| 173 | { |
| 174 | unsigned long parent_rate = *prate; |
| 175 | unsigned long min_rate = parent_rate * 54 / 2; |
| 176 | unsigned long max_rate = parent_rate * 108 / 2; |
| 177 | u32 div; |
| 178 | |
| 179 | if (rate > max_rate) |
| 180 | rate = max_rate; |
| 181 | else if (rate < min_rate) |
| 182 | rate = min_rate; |
| 183 | div = rate * 2 / parent_rate; |
| 184 | |
| 185 | return parent_rate * div / 2; |
| 186 | } |
| 187 | |
| 188 | static int clk_pllv3_sys_set_rate(struct clk_hw *hw, unsigned long rate, |
| 189 | unsigned long parent_rate) |
| 190 | { |
| 191 | struct clk_pllv3 *pll = to_clk_pllv3(hw); |
| 192 | unsigned long min_rate = parent_rate * 54 / 2; |
| 193 | unsigned long max_rate = parent_rate * 108 / 2; |
| 194 | u32 val, div; |
| 195 | |
| 196 | if (rate < min_rate || rate > max_rate) |
| 197 | return -EINVAL; |
| 198 | |
| 199 | div = rate * 2 / parent_rate; |
| 200 | val = readl_relaxed(pll->base); |
| 201 | val &= ~pll->div_mask; |
| 202 | val |= div; |
| 203 | writel_relaxed(val, pll->base); |
| 204 | |
Shawn Guo | bc3b84d | 2013-10-30 15:56:22 +0800 | [diff] [blame] | 205 | return clk_pllv3_wait_lock(pll); |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 206 | } |
| 207 | |
/* System PLL operations: parent * div / 2, div in [54, 108]. */
static const struct clk_ops clk_pllv3_sys_ops = {
	.prepare	= clk_pllv3_prepare,
	.unprepare	= clk_pllv3_unprepare,
	.is_prepared	= clk_pllv3_is_prepared,
	.recalc_rate	= clk_pllv3_sys_recalc_rate,
	.round_rate	= clk_pllv3_sys_round_rate,
	.set_rate	= clk_pllv3_sys_set_rate,
};
| 216 | |
| 217 | static unsigned long clk_pllv3_av_recalc_rate(struct clk_hw *hw, |
| 218 | unsigned long parent_rate) |
| 219 | { |
| 220 | struct clk_pllv3 *pll = to_clk_pllv3(hw); |
| 221 | u32 mfn = readl_relaxed(pll->base + PLL_NUM_OFFSET); |
| 222 | u32 mfd = readl_relaxed(pll->base + PLL_DENOM_OFFSET); |
| 223 | u32 div = readl_relaxed(pll->base) & pll->div_mask; |
Anson Huang | ba7f4f5 | 2016-06-08 22:33:31 +0800 | [diff] [blame] | 224 | u64 temp64 = (u64)parent_rate; |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 225 | |
Anson Huang | ba7f4f5 | 2016-06-08 22:33:31 +0800 | [diff] [blame] | 226 | temp64 *= mfn; |
| 227 | do_div(temp64, mfd); |
| 228 | |
Emil Lundmark | 5c2f117 | 2016-10-12 12:31:40 +0200 | [diff] [blame] | 229 | return parent_rate * div + (unsigned long)temp64; |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 230 | } |
| 231 | |
| 232 | static long clk_pllv3_av_round_rate(struct clk_hw *hw, unsigned long rate, |
| 233 | unsigned long *prate) |
| 234 | { |
| 235 | unsigned long parent_rate = *prate; |
| 236 | unsigned long min_rate = parent_rate * 27; |
| 237 | unsigned long max_rate = parent_rate * 54; |
| 238 | u32 div; |
| 239 | u32 mfn, mfd = 1000000; |
Emil Lundmark | c5a8045 | 2016-10-12 12:31:41 +0200 | [diff] [blame] | 240 | u32 max_mfd = 0x3FFFFFFF; |
Anson Huang | 7a5568c | 2015-05-08 00:16:51 +0800 | [diff] [blame] | 241 | u64 temp64; |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 242 | |
| 243 | if (rate > max_rate) |
| 244 | rate = max_rate; |
| 245 | else if (rate < min_rate) |
| 246 | rate = min_rate; |
| 247 | |
Emil Lundmark | c5a8045 | 2016-10-12 12:31:41 +0200 | [diff] [blame] | 248 | if (parent_rate <= max_mfd) |
| 249 | mfd = parent_rate; |
| 250 | |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 251 | div = rate / parent_rate; |
| 252 | temp64 = (u64) (rate - div * parent_rate); |
| 253 | temp64 *= mfd; |
| 254 | do_div(temp64, parent_rate); |
| 255 | mfn = temp64; |
| 256 | |
Emil Lundmark | 5c2f117 | 2016-10-12 12:31:40 +0200 | [diff] [blame] | 257 | temp64 = (u64)parent_rate; |
| 258 | temp64 *= mfn; |
| 259 | do_div(temp64, mfd); |
| 260 | |
| 261 | return parent_rate * div + (unsigned long)temp64; |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 262 | } |
| 263 | |
| 264 | static int clk_pllv3_av_set_rate(struct clk_hw *hw, unsigned long rate, |
| 265 | unsigned long parent_rate) |
| 266 | { |
| 267 | struct clk_pllv3 *pll = to_clk_pllv3(hw); |
| 268 | unsigned long min_rate = parent_rate * 27; |
| 269 | unsigned long max_rate = parent_rate * 54; |
| 270 | u32 val, div; |
| 271 | u32 mfn, mfd = 1000000; |
Emil Lundmark | c5a8045 | 2016-10-12 12:31:41 +0200 | [diff] [blame] | 272 | u32 max_mfd = 0x3FFFFFFF; |
Anson Huang | 7a5568c | 2015-05-08 00:16:51 +0800 | [diff] [blame] | 273 | u64 temp64; |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 274 | |
| 275 | if (rate < min_rate || rate > max_rate) |
| 276 | return -EINVAL; |
| 277 | |
Emil Lundmark | c5a8045 | 2016-10-12 12:31:41 +0200 | [diff] [blame] | 278 | if (parent_rate <= max_mfd) |
| 279 | mfd = parent_rate; |
| 280 | |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 281 | div = rate / parent_rate; |
| 282 | temp64 = (u64) (rate - div * parent_rate); |
| 283 | temp64 *= mfd; |
| 284 | do_div(temp64, parent_rate); |
| 285 | mfn = temp64; |
| 286 | |
| 287 | val = readl_relaxed(pll->base); |
| 288 | val &= ~pll->div_mask; |
| 289 | val |= div; |
| 290 | writel_relaxed(val, pll->base); |
| 291 | writel_relaxed(mfn, pll->base + PLL_NUM_OFFSET); |
| 292 | writel_relaxed(mfd, pll->base + PLL_DENOM_OFFSET); |
| 293 | |
Shawn Guo | bc3b84d | 2013-10-30 15:56:22 +0800 | [diff] [blame] | 294 | return clk_pllv3_wait_lock(pll); |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 295 | } |
| 296 | |
/* Audio/video PLL operations: integer div plus mfn/mfd fraction. */
static const struct clk_ops clk_pllv3_av_ops = {
	.prepare	= clk_pllv3_prepare,
	.unprepare	= clk_pllv3_unprepare,
	.is_prepared	= clk_pllv3_is_prepared,
	.recalc_rate	= clk_pllv3_av_recalc_rate,
	.round_rate	= clk_pllv3_av_round_rate,
	.set_rate	= clk_pllv3_av_set_rate,
};
| 305 | |
/*
 * VF610 multiplication factors: rate = parent_rate * (mfi + mfn / mfd).
 */
struct clk_pllv3_vf610_mf {
	u32 mfi;	/* integer part, can be 20 or 22 */
	u32 mfn;	/* numerator, 30-bit value */
	u32 mfd;	/* denominator, 30-bit value, must be greater than mfn */
};
| 311 | |
| 312 | static unsigned long clk_pllv3_vf610_mf_to_rate(unsigned long parent_rate, |
| 313 | struct clk_pllv3_vf610_mf mf) |
| 314 | { |
| 315 | u64 temp64; |
| 316 | |
| 317 | temp64 = parent_rate; |
| 318 | temp64 *= mf.mfn; |
| 319 | do_div(temp64, mf.mfd); |
| 320 | |
| 321 | return (parent_rate * mf.mfi) + temp64; |
| 322 | } |
| 323 | |
| 324 | static struct clk_pllv3_vf610_mf clk_pllv3_vf610_rate_to_mf( |
| 325 | unsigned long parent_rate, unsigned long rate) |
| 326 | { |
| 327 | struct clk_pllv3_vf610_mf mf; |
| 328 | u64 temp64; |
| 329 | |
| 330 | mf.mfi = (rate >= 22 * parent_rate) ? 22 : 20; |
| 331 | mf.mfd = 0x3fffffff; /* use max supported value for best accuracy */ |
| 332 | |
| 333 | if (rate <= parent_rate * mf.mfi) |
| 334 | mf.mfn = 0; |
| 335 | else if (rate >= parent_rate * (mf.mfi + 1)) |
| 336 | mf.mfn = mf.mfd - 1; |
| 337 | else { |
| 338 | /* rate = parent_rate * (mfi + mfn/mfd) */ |
| 339 | temp64 = rate - parent_rate * mf.mfi; |
| 340 | temp64 *= mf.mfd; |
| 341 | do_div(temp64, parent_rate); |
| 342 | mf.mfn = temp64; |
| 343 | } |
| 344 | |
| 345 | return mf; |
| 346 | } |
| 347 | |
| 348 | static unsigned long clk_pllv3_vf610_recalc_rate(struct clk_hw *hw, |
| 349 | unsigned long parent_rate) |
| 350 | { |
| 351 | struct clk_pllv3 *pll = to_clk_pllv3(hw); |
| 352 | struct clk_pllv3_vf610_mf mf; |
| 353 | |
| 354 | mf.mfn = readl_relaxed(pll->base + PLL_VF610_NUM_OFFSET); |
| 355 | mf.mfd = readl_relaxed(pll->base + PLL_VF610_DENOM_OFFSET); |
| 356 | mf.mfi = (readl_relaxed(pll->base) & pll->div_mask) ? 22 : 20; |
| 357 | |
| 358 | return clk_pllv3_vf610_mf_to_rate(parent_rate, mf); |
| 359 | } |
| 360 | |
| 361 | static long clk_pllv3_vf610_round_rate(struct clk_hw *hw, unsigned long rate, |
| 362 | unsigned long *prate) |
| 363 | { |
| 364 | struct clk_pllv3_vf610_mf mf = clk_pllv3_vf610_rate_to_mf(*prate, rate); |
| 365 | |
| 366 | return clk_pllv3_vf610_mf_to_rate(*prate, mf); |
| 367 | } |
| 368 | |
| 369 | static int clk_pllv3_vf610_set_rate(struct clk_hw *hw, unsigned long rate, |
| 370 | unsigned long parent_rate) |
| 371 | { |
| 372 | struct clk_pllv3 *pll = to_clk_pllv3(hw); |
| 373 | struct clk_pllv3_vf610_mf mf = |
| 374 | clk_pllv3_vf610_rate_to_mf(parent_rate, rate); |
| 375 | u32 val; |
| 376 | |
| 377 | val = readl_relaxed(pll->base); |
| 378 | if (mf.mfi == 20) |
| 379 | val &= ~pll->div_mask; /* clear bit for mfi=20 */ |
| 380 | else |
| 381 | val |= pll->div_mask; /* set bit for mfi=22 */ |
| 382 | writel_relaxed(val, pll->base); |
| 383 | |
| 384 | writel_relaxed(mf.mfn, pll->base + PLL_VF610_NUM_OFFSET); |
| 385 | writel_relaxed(mf.mfd, pll->base + PLL_VF610_DENOM_OFFSET); |
| 386 | |
| 387 | return clk_pllv3_wait_lock(pll); |
| 388 | } |
| 389 | |
/* VF610 PLL operations: integer part 20/22 plus mfn/mfd fraction. */
static const struct clk_ops clk_pllv3_vf610_ops = {
	.prepare	= clk_pllv3_prepare,
	.unprepare	= clk_pllv3_unprepare,
	.is_prepared	= clk_pllv3_is_prepared,
	.recalc_rate	= clk_pllv3_vf610_recalc_rate,
	.round_rate	= clk_pllv3_vf610_round_rate,
	.set_rate	= clk_pllv3_vf610_set_rate,
};
| 398 | |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 399 | static unsigned long clk_pllv3_enet_recalc_rate(struct clk_hw *hw, |
| 400 | unsigned long parent_rate) |
| 401 | { |
Stefan Agner | 585a60f | 2016-01-29 14:49:24 -0800 | [diff] [blame] | 402 | struct clk_pllv3 *pll = to_clk_pllv3(hw); |
| 403 | |
| 404 | return pll->ref_clock; |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 405 | } |
| 406 | |
/*
 * ENET PLL operations: fixed-rate output, so only power control and
 * rate readback are provided (no round_rate/set_rate).
 */
static const struct clk_ops clk_pllv3_enet_ops = {
	.prepare	= clk_pllv3_prepare,
	.unprepare	= clk_pllv3_unprepare,
	.is_prepared	= clk_pllv3_is_prepared,
	.recalc_rate	= clk_pllv3_enet_recalc_rate,
};
| 413 | |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 414 | struct clk *imx_clk_pllv3(enum imx_pllv3_type type, const char *name, |
| 415 | const char *parent_name, void __iomem *base, |
Sascha Hauer | 2b25469 | 2012-11-22 10:18:41 +0100 | [diff] [blame] | 416 | u32 div_mask) |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 417 | { |
| 418 | struct clk_pllv3 *pll; |
| 419 | const struct clk_ops *ops; |
| 420 | struct clk *clk; |
| 421 | struct clk_init_data init; |
| 422 | |
| 423 | pll = kzalloc(sizeof(*pll), GFP_KERNEL); |
| 424 | if (!pll) |
| 425 | return ERR_PTR(-ENOMEM); |
| 426 | |
Dong Aisheng | c684766 | 2016-06-13 20:24:52 +0800 | [diff] [blame] | 427 | pll->power_bit = BM_PLL_POWER; |
Frank Li | f539474 | 2015-05-19 02:45:02 +0800 | [diff] [blame] | 428 | |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 429 | switch (type) { |
| 430 | case IMX_PLLV3_SYS: |
| 431 | ops = &clk_pllv3_sys_ops; |
| 432 | break; |
Nikita Yushchenko | c77cbdd1 | 2016-12-19 11:12:09 +0300 | [diff] [blame] | 433 | case IMX_PLLV3_SYS_VF610: |
| 434 | ops = &clk_pllv3_vf610_ops; |
| 435 | break; |
Stefan Agner | 60ad846 | 2014-12-02 17:59:42 +0100 | [diff] [blame] | 436 | case IMX_PLLV3_USB_VF610: |
| 437 | pll->div_shift = 1; |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 438 | case IMX_PLLV3_USB: |
| 439 | ops = &clk_pllv3_ops; |
| 440 | pll->powerup_set = true; |
| 441 | break; |
| 442 | case IMX_PLLV3_AV: |
| 443 | ops = &clk_pllv3_av_ops; |
| 444 | break; |
Frank Li | f539474 | 2015-05-19 02:45:02 +0800 | [diff] [blame] | 445 | case IMX_PLLV3_ENET_IMX7: |
Dong Aisheng | c684766 | 2016-06-13 20:24:52 +0800 | [diff] [blame] | 446 | pll->power_bit = IMX7_ENET_PLL_POWER; |
Stefan Agner | 585a60f | 2016-01-29 14:49:24 -0800 | [diff] [blame] | 447 | pll->ref_clock = 1000000000; |
| 448 | ops = &clk_pllv3_enet_ops; |
| 449 | break; |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 450 | case IMX_PLLV3_ENET: |
Stefan Agner | 585a60f | 2016-01-29 14:49:24 -0800 | [diff] [blame] | 451 | pll->ref_clock = 500000000; |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 452 | ops = &clk_pllv3_enet_ops; |
| 453 | break; |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 454 | default: |
| 455 | ops = &clk_pllv3_ops; |
| 456 | } |
| 457 | pll->base = base; |
Shawn Guo | a3f6b9d | 2012-04-04 16:02:28 +0800 | [diff] [blame] | 458 | pll->div_mask = div_mask; |
| 459 | |
| 460 | init.name = name; |
| 461 | init.ops = ops; |
| 462 | init.flags = 0; |
| 463 | init.parent_names = &parent_name; |
| 464 | init.num_parents = 1; |
| 465 | |
| 466 | pll->hw.init = &init; |
| 467 | |
| 468 | clk = clk_register(NULL, &pll->hw); |
| 469 | if (IS_ERR(clk)) |
| 470 | kfree(pll); |
| 471 | |
| 472 | return clk; |
| 473 | } |