// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2011 Sascha Hauer, Pengutronix <s.hauer@pengutronix.de>
 * Copyright (C) 2011 Richard Zhao, Linaro <richard.zhao@linaro.org>
 * Copyright (C) 2011-2012 Mike Turquette, Linaro Ltd <mturquette@linaro.org>
 *
 * Simple multiplexer clock implementation
 */

#include <linux/clk-provider.h>
#include <linux/device.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/io.h>
#include <linux/err.h>

/*
 * DOC: basic adjustable multiplexer clock that cannot gate
 *
 * Traits of this clock:
 * prepare - clk_prepare only ensures that parents are prepared
 * enable - clk_enable only ensures that parents are enabled
 * rate - rate is only affected by parent switching. No clk_set_rate support
 * parent - parent is adjustable through clk_set_parent
 */
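
/*
 * Usage sketch (illustrative only, not part of the original file): a provider
 * driver with a 2-bit parent-select field at bit 8 of a control register
 * might register such a mux roughly as below.  "base", "sel_parents" and
 * "sel_lock" are hypothetical names; clk_hw_register_mux() is the
 * convenience wrapper declared in <linux/clk-provider.h>.
 *
 *	static const char * const sel_parents[] = {
 *		"pll_a", "pll_b", "osc", "rtc",
 *	};
 *	static DEFINE_SPINLOCK(sel_lock);
 *
 *	hw = clk_hw_register_mux(dev, "periph_sel", sel_parents,
 *				 ARRAY_SIZE(sel_parents), CLK_SET_RATE_PARENT,
 *				 base + 0x10, 8, 2, 0, &sel_lock);
 *	if (IS_ERR(hw))
 *		return PTR_ERR(hw);
 */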

static inline u32 clk_mux_readl(struct clk_mux *mux)
{
        if (mux->flags & CLK_MUX_BIG_ENDIAN)
                return ioread32be(mux->reg);

        return readl(mux->reg);
}

static inline void clk_mux_writel(struct clk_mux *mux, u32 val)
{
        if (mux->flags & CLK_MUX_BIG_ENDIAN)
                iowrite32be(val, mux->reg);
        else
                writel(val, mux->reg);
}

int clk_mux_val_to_index(struct clk_hw *hw, u32 *table, unsigned int flags,
                         unsigned int val)
{
        int num_parents = clk_hw_get_num_parents(hw);

        if (table) {
                int i;

                for (i = 0; i < num_parents; i++)
                        if (table[i] == val)
                                return i;
                return -EINVAL;
        }

        if (val && (flags & CLK_MUX_INDEX_BIT))
                val = ffs(val) - 1;

        if (val && (flags & CLK_MUX_INDEX_ONE))
                val--;

        if (val >= num_parents)
                return -EINVAL;

        return val;
}
EXPORT_SYMBOL_GPL(clk_mux_val_to_index);

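/*
 * Illustrative mapping (derived from the code below, not formal kernel-doc):
 * with CLK_MUX_INDEX_BIT, parent index 2 is encoded as the one-hot register
 * value BIT(2) == 0x4; with CLK_MUX_INDEX_ONE, index 2 is encoded as 3
 * (one-based).  clk_mux_val_to_index() above performs the reverse lookup,
 * optionally through a caller-supplied table.
 */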
unsigned int clk_mux_index_to_val(u32 *table, unsigned int flags, u8 index)
{
        unsigned int val = index;

        if (table) {
                val = table[index];
        } else {
                if (flags & CLK_MUX_INDEX_BIT)
                        val = 1 << index;

                if (flags & CLK_MUX_INDEX_ONE)
                        val++;
        }

        return val;
}
EXPORT_SYMBOL_GPL(clk_mux_index_to_val);

static u8 clk_mux_get_parent(struct clk_hw *hw)
{
        struct clk_mux *mux = to_clk_mux(hw);
        u32 val;

        val = clk_mux_readl(mux) >> mux->shift;
        val &= mux->mask;

        return clk_mux_val_to_index(hw, mux->table, mux->flags, val);
}

static int clk_mux_set_parent(struct clk_hw *hw, u8 index)
{
        struct clk_mux *mux = to_clk_mux(hw);
        u32 val = clk_mux_index_to_val(mux->table, mux->flags, index);
        unsigned long flags = 0;
        u32 reg;

        if (mux->lock)
                spin_lock_irqsave(mux->lock, flags);
        else
                __acquire(mux->lock);

        if (mux->flags & CLK_MUX_HIWORD_MASK) {
                reg = mux->mask << (mux->shift + 16);
        } else {
                reg = clk_mux_readl(mux);
                reg &= ~(mux->mask << mux->shift);
        }
        val = val << mux->shift;
        reg |= val;
        clk_mux_writel(mux, reg);

        if (mux->lock)
                spin_unlock_irqrestore(mux->lock, flags);
        else
                __release(mux->lock);

        return 0;
}

static int clk_mux_determine_rate(struct clk_hw *hw,
                                  struct clk_rate_request *req)
{
        struct clk_mux *mux = to_clk_mux(hw);

        return clk_mux_determine_rate_flags(hw, req, mux->flags);
}

const struct clk_ops clk_mux_ops = {
        .get_parent = clk_mux_get_parent,
        .set_parent = clk_mux_set_parent,
        .determine_rate = clk_mux_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_mux_ops);

const struct clk_ops clk_mux_ro_ops = {
        .get_parent = clk_mux_get_parent,
};
EXPORT_SYMBOL_GPL(clk_mux_ro_ops);

struct clk_hw *__clk_hw_register_mux(struct device *dev, struct device_node *np,
                const char *name, u8 num_parents,
                const char * const *parent_names,
                const struct clk_hw **parent_hws,
                const struct clk_parent_data *parent_data,
                unsigned long flags, void __iomem *reg, u8 shift, u32 mask,
                u8 clk_mux_flags, u32 *table, spinlock_t *lock)
{
        struct clk_mux *mux;
        struct clk_hw *hw;
        struct clk_init_data init = {};
        u8 width = 0;
        int ret = -EINVAL;

        /* hi-word mask muxes must keep the select field within the low 16 bits */
        if (clk_mux_flags & CLK_MUX_HIWORD_MASK) {
                width = fls(mask) - ffs(mask) + 1;
                if (width + shift > 16) {
                        pr_err("mux value exceeds LOWORD field\n");
                        return ERR_PTR(-EINVAL);
                }
        }

        /* allocate the mux */
        mux = kzalloc(sizeof(*mux), GFP_KERNEL);
        if (!mux)
                return ERR_PTR(-ENOMEM);

        init.name = name;
        if (clk_mux_flags & CLK_MUX_READ_ONLY)
                init.ops = &clk_mux_ro_ops;
        else
                init.ops = &clk_mux_ops;
        init.flags = flags;
        init.parent_names = parent_names;
        init.parent_data = parent_data;
        init.parent_hws = parent_hws;
        init.num_parents = num_parents;

        /* struct clk_mux assignments */
        mux->reg = reg;
        mux->shift = shift;
        mux->mask = mask;
        mux->flags = clk_mux_flags;
        mux->lock = lock;
        mux->table = table;
        mux->hw.init = &init;

        /* register via the device when one is given, otherwise via the OF node */
        hw = &mux->hw;
        if (dev || !np)
                ret = clk_hw_register(dev, hw);
        else if (np)
                ret = of_clk_hw_register(np, hw);
        if (ret) {
                kfree(mux);
                hw = ERR_PTR(ret);
        }

        return hw;
}
EXPORT_SYMBOL_GPL(__clk_hw_register_mux);

static void devm_clk_hw_release_mux(struct device *dev, void *res)
{
        clk_hw_unregister_mux(*(struct clk_hw **)res);
}

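/*
 * Illustrative note (assumption based on the matching header): drivers
 * typically reach this through the devm_clk_hw_register_mux() wrapper macro
 * declared alongside this helper, which ties the mux lifetime to the
 * providing struct device so no explicit clk_hw_unregister_mux() call is
 * needed on the driver's remove or error paths.
 */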
struct clk_hw *__devm_clk_hw_register_mux(struct device *dev, struct device_node *np,
                const char *name, u8 num_parents,
                const char * const *parent_names,
                const struct clk_hw **parent_hws,
                const struct clk_parent_data *parent_data,
                unsigned long flags, void __iomem *reg, u8 shift, u32 mask,
                u8 clk_mux_flags, u32 *table, spinlock_t *lock)
{
        struct clk_hw **ptr, *hw;

        ptr = devres_alloc(devm_clk_hw_release_mux, sizeof(*ptr), GFP_KERNEL);
        if (!ptr)
                return ERR_PTR(-ENOMEM);

        hw = __clk_hw_register_mux(dev, np, name, num_parents, parent_names, parent_hws,
                                   parent_data, flags, reg, shift, mask,
                                   clk_mux_flags, table, lock);

        if (!IS_ERR(hw)) {
                *ptr = hw;
                devres_add(dev, ptr);
        } else {
                devres_free(ptr);
        }

        return hw;
}
EXPORT_SYMBOL_GPL(__devm_clk_hw_register_mux);

struct clk *clk_register_mux_table(struct device *dev, const char *name,
                const char * const *parent_names, u8 num_parents,
                unsigned long flags, void __iomem *reg, u8 shift, u32 mask,
                u8 clk_mux_flags, u32 *table, spinlock_t *lock)
{
        struct clk_hw *hw;

        hw = clk_hw_register_mux_table(dev, name, parent_names,
                                       num_parents, flags, reg, shift, mask,
                                       clk_mux_flags, table, lock);
        if (IS_ERR(hw))
                return ERR_CAST(hw);
        return hw->clk;
}
EXPORT_SYMBOL_GPL(clk_register_mux_table);

void clk_unregister_mux(struct clk *clk)
{
        struct clk_mux *mux;
        struct clk_hw *hw;

        hw = __clk_get_hw(clk);
        if (!hw)
                return;

        mux = to_clk_mux(hw);

        clk_unregister(clk);
        kfree(mux);
}
EXPORT_SYMBOL_GPL(clk_unregister_mux);

void clk_hw_unregister_mux(struct clk_hw *hw)
{
        struct clk_mux *mux;

        mux = to_clk_mux(hw);

        clk_hw_unregister(hw);
        kfree(mux);
}
EXPORT_SYMBOL_GPL(clk_hw_unregister_mux);