// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author(s): M'boumba Cedric Madianga <cedric.madianga@gmail.com>
 *            Pierre-Yves Mordret <pierre-yves.mordret@st.com>
 *
 * Driver for STM32 MDMA controller
 *
 * Inspired by stm32-dma.c and dma-jz4780.c
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/iopoll.h>
#include <linux/jiffies.h>
#include <linux/list.h>
#include <linux/log2.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/of_dma.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <linux/slab.h>

#include "virt-dma.h"

/* MDMA Generic getter/setter */
#define STM32_MDMA_SHIFT(n)		(ffs(n) - 1)
#define STM32_MDMA_SET(n, mask)		(((n) << STM32_MDMA_SHIFT(mask)) & \
					 (mask))
#define STM32_MDMA_GET(n, mask)		(((n) & (mask)) >> \
					 STM32_MDMA_SHIFT(mask))
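
/*
 * Example: with mask = GENMASK(7, 6) (0xC0), STM32_MDMA_SHIFT(mask) is 6,
 * so STM32_MDMA_SET(2, mask) yields (2 << 6) & 0xC0 = 0x80 and
 * STM32_MDMA_GET(0x80, mask) recovers 2.
 */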

#define STM32_MDMA_GISR0		0x0000 /* MDMA Int Status Reg 0 */
#define STM32_MDMA_GISR1		0x0004 /* MDMA Int Status Reg 1 */

/* MDMA Channel x interrupt/status register */
#define STM32_MDMA_CISR(x)		(0x40 + 0x40 * (x)) /* x = 0..62 */
#define STM32_MDMA_CISR_CRQA		BIT(16)
#define STM32_MDMA_CISR_TCIF		BIT(4)
#define STM32_MDMA_CISR_BTIF		BIT(3)
#define STM32_MDMA_CISR_BRTIF		BIT(2)
#define STM32_MDMA_CISR_CTCIF		BIT(1)
#define STM32_MDMA_CISR_TEIF		BIT(0)

/* MDMA Channel x interrupt flag clear register */
#define STM32_MDMA_CIFCR(x)		(0x44 + 0x40 * (x))
#define STM32_MDMA_CIFCR_CLTCIF		BIT(4)
#define STM32_MDMA_CIFCR_CBTIF		BIT(3)
#define STM32_MDMA_CIFCR_CBRTIF		BIT(2)
#define STM32_MDMA_CIFCR_CCTCIF		BIT(1)
#define STM32_MDMA_CIFCR_CTEIF		BIT(0)
#define STM32_MDMA_CIFCR_CLEAR_ALL	(STM32_MDMA_CIFCR_CLTCIF \
					 | STM32_MDMA_CIFCR_CBTIF \
					 | STM32_MDMA_CIFCR_CBRTIF \
					 | STM32_MDMA_CIFCR_CCTCIF \
					 | STM32_MDMA_CIFCR_CTEIF)

/* MDMA Channel x error status register */
#define STM32_MDMA_CESR(x)		(0x48 + 0x40 * (x))
#define STM32_MDMA_CESR_BSE		BIT(11)
#define STM32_MDMA_CESR_ASR		BIT(10)
#define STM32_MDMA_CESR_TEMD		BIT(9)
#define STM32_MDMA_CESR_TELD		BIT(8)
#define STM32_MDMA_CESR_TED		BIT(7)
#define STM32_MDMA_CESR_TEA_MASK	GENMASK(6, 0)

/* MDMA Channel x control register */
#define STM32_MDMA_CCR(x)		(0x4C + 0x40 * (x))
#define STM32_MDMA_CCR_SWRQ		BIT(16)
#define STM32_MDMA_CCR_WEX		BIT(14)
#define STM32_MDMA_CCR_HEX		BIT(13)
#define STM32_MDMA_CCR_BEX		BIT(12)
#define STM32_MDMA_CCR_PL_MASK		GENMASK(7, 6)
#define STM32_MDMA_CCR_PL(n)		STM32_MDMA_SET(n, \
						       STM32_MDMA_CCR_PL_MASK)
#define STM32_MDMA_CCR_TCIE		BIT(5)
#define STM32_MDMA_CCR_BTIE		BIT(4)
#define STM32_MDMA_CCR_BRTIE		BIT(3)
#define STM32_MDMA_CCR_CTCIE		BIT(2)
#define STM32_MDMA_CCR_TEIE		BIT(1)
#define STM32_MDMA_CCR_EN		BIT(0)
#define STM32_MDMA_CCR_IRQ_MASK		(STM32_MDMA_CCR_TCIE \
					 | STM32_MDMA_CCR_BTIE \
					 | STM32_MDMA_CCR_BRTIE \
					 | STM32_MDMA_CCR_CTCIE \
					 | STM32_MDMA_CCR_TEIE)

/* MDMA Channel x transfer configuration register */
#define STM32_MDMA_CTCR(x)		(0x50 + 0x40 * (x))
#define STM32_MDMA_CTCR_BWM		BIT(31)
#define STM32_MDMA_CTCR_SWRM		BIT(30)
#define STM32_MDMA_CTCR_TRGM_MSK	GENMASK(29, 28)
#define STM32_MDMA_CTCR_TRGM(n)		STM32_MDMA_SET((n), \
						       STM32_MDMA_CTCR_TRGM_MSK)
#define STM32_MDMA_CTCR_TRGM_GET(n)	STM32_MDMA_GET((n), \
						       STM32_MDMA_CTCR_TRGM_MSK)
#define STM32_MDMA_CTCR_PAM_MASK	GENMASK(27, 26)
#define STM32_MDMA_CTCR_PAM(n)		STM32_MDMA_SET(n, \
						       STM32_MDMA_CTCR_PAM_MASK)
#define STM32_MDMA_CTCR_PKE		BIT(25)
#define STM32_MDMA_CTCR_TLEN_MSK	GENMASK(24, 18)
#define STM32_MDMA_CTCR_TLEN(n)		STM32_MDMA_SET((n), \
						       STM32_MDMA_CTCR_TLEN_MSK)
#define STM32_MDMA_CTCR_TLEN_GET(n)	STM32_MDMA_GET((n), \
						       STM32_MDMA_CTCR_TLEN_MSK)
#define STM32_MDMA_CTCR_LEN2_MSK	GENMASK(25, 18)
#define STM32_MDMA_CTCR_LEN2(n)		STM32_MDMA_SET((n), \
						       STM32_MDMA_CTCR_LEN2_MSK)
#define STM32_MDMA_CTCR_LEN2_GET(n)	STM32_MDMA_GET((n), \
						       STM32_MDMA_CTCR_LEN2_MSK)
#define STM32_MDMA_CTCR_DBURST_MASK	GENMASK(17, 15)
#define STM32_MDMA_CTCR_DBURST(n)	STM32_MDMA_SET(n, \
						       STM32_MDMA_CTCR_DBURST_MASK)
#define STM32_MDMA_CTCR_SBURST_MASK	GENMASK(14, 12)
#define STM32_MDMA_CTCR_SBURST(n)	STM32_MDMA_SET(n, \
						       STM32_MDMA_CTCR_SBURST_MASK)
#define STM32_MDMA_CTCR_DINCOS_MASK	GENMASK(11, 10)
#define STM32_MDMA_CTCR_DINCOS(n)	STM32_MDMA_SET((n), \
						       STM32_MDMA_CTCR_DINCOS_MASK)
#define STM32_MDMA_CTCR_SINCOS_MASK	GENMASK(9, 8)
#define STM32_MDMA_CTCR_SINCOS(n)	STM32_MDMA_SET((n), \
						       STM32_MDMA_CTCR_SINCOS_MASK)
#define STM32_MDMA_CTCR_DSIZE_MASK	GENMASK(7, 6)
#define STM32_MDMA_CTCR_DSIZE(n)	STM32_MDMA_SET(n, \
						       STM32_MDMA_CTCR_DSIZE_MASK)
#define STM32_MDMA_CTCR_SSIZE_MASK	GENMASK(5, 4)
#define STM32_MDMA_CTCR_SSIZE(n)	STM32_MDMA_SET(n, \
						       STM32_MDMA_CTCR_SSIZE_MASK)
#define STM32_MDMA_CTCR_DINC_MASK	GENMASK(3, 2)
#define STM32_MDMA_CTCR_DINC(n)		STM32_MDMA_SET((n), \
						       STM32_MDMA_CTCR_DINC_MASK)
#define STM32_MDMA_CTCR_SINC_MASK	GENMASK(1, 0)
#define STM32_MDMA_CTCR_SINC(n)		STM32_MDMA_SET((n), \
						       STM32_MDMA_CTCR_SINC_MASK)
#define STM32_MDMA_CTCR_CFG_MASK	(STM32_MDMA_CTCR_SINC_MASK \
					 | STM32_MDMA_CTCR_DINC_MASK \
					 | STM32_MDMA_CTCR_SINCOS_MASK \
					 | STM32_MDMA_CTCR_DINCOS_MASK \
					 | STM32_MDMA_CTCR_LEN2_MSK \
					 | STM32_MDMA_CTCR_TRGM_MSK)

/* MDMA Channel x block number of data register */
#define STM32_MDMA_CBNDTR(x)		(0x54 + 0x40 * (x))
#define STM32_MDMA_CBNDTR_BRC_MK	GENMASK(31, 20)
#define STM32_MDMA_CBNDTR_BRC(n)	STM32_MDMA_SET(n, \
						       STM32_MDMA_CBNDTR_BRC_MK)
#define STM32_MDMA_CBNDTR_BRC_GET(n)	STM32_MDMA_GET((n), \
						       STM32_MDMA_CBNDTR_BRC_MK)

#define STM32_MDMA_CBNDTR_BRDUM		BIT(19)
#define STM32_MDMA_CBNDTR_BRSUM		BIT(18)
#define STM32_MDMA_CBNDTR_BNDT_MASK	GENMASK(16, 0)
#define STM32_MDMA_CBNDTR_BNDT(n)	STM32_MDMA_SET(n, \
						       STM32_MDMA_CBNDTR_BNDT_MASK)

/* MDMA Channel x source address register */
#define STM32_MDMA_CSAR(x)		(0x58 + 0x40 * (x))

/* MDMA Channel x destination address register */
#define STM32_MDMA_CDAR(x)		(0x5C + 0x40 * (x))

/* MDMA Channel x block repeat address update register */
#define STM32_MDMA_CBRUR(x)		(0x60 + 0x40 * (x))
#define STM32_MDMA_CBRUR_DUV_MASK	GENMASK(31, 16)
#define STM32_MDMA_CBRUR_DUV(n)		STM32_MDMA_SET(n, \
						       STM32_MDMA_CBRUR_DUV_MASK)
#define STM32_MDMA_CBRUR_SUV_MASK	GENMASK(15, 0)
#define STM32_MDMA_CBRUR_SUV(n)		STM32_MDMA_SET(n, \
						       STM32_MDMA_CBRUR_SUV_MASK)

/* MDMA Channel x link address register */
#define STM32_MDMA_CLAR(x)		(0x64 + 0x40 * (x))

/* MDMA Channel x trigger and bus selection register */
#define STM32_MDMA_CTBR(x)		(0x68 + 0x40 * (x))
#define STM32_MDMA_CTBR_DBUS		BIT(17)
#define STM32_MDMA_CTBR_SBUS		BIT(16)
#define STM32_MDMA_CTBR_TSEL_MASK	GENMASK(7, 0)
#define STM32_MDMA_CTBR_TSEL(n)		STM32_MDMA_SET(n, \
						       STM32_MDMA_CTBR_TSEL_MASK)

/* MDMA Channel x mask address register */
#define STM32_MDMA_CMAR(x)		(0x70 + 0x40 * (x))

/* MDMA Channel x mask data register */
#define STM32_MDMA_CMDR(x)		(0x74 + 0x40 * (x))

#define STM32_MDMA_MAX_BUF_LEN		128
#define STM32_MDMA_MAX_BLOCK_LEN	65536
#define STM32_MDMA_MAX_CHANNELS		63
#define STM32_MDMA_MAX_REQUESTS		256
#define STM32_MDMA_MAX_BURST		128
#define STM32_MDMA_VERY_HIGH_PRIORITY	0x11

enum stm32_mdma_trigger_mode {
	STM32_MDMA_BUFFER,
	STM32_MDMA_BLOCK,
	STM32_MDMA_BLOCK_REP,
	STM32_MDMA_LINKED_LIST,
};
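
/*
 * These values are programmed into the CTCR TRGM field (see
 * STM32_MDMA_CTCR_TRGM() above); each trigger then moves one buffer,
 * one block, one repeated-block sequence or a full linked list,
 * respectively.
 */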

enum stm32_mdma_width {
	STM32_MDMA_BYTE,
	STM32_MDMA_HALF_WORD,
	STM32_MDMA_WORD,
	STM32_MDMA_DOUBLE_WORD,
};

enum stm32_mdma_inc_mode {
	STM32_MDMA_FIXED = 0,
	STM32_MDMA_INC = 2,
	STM32_MDMA_DEC = 3,
};

struct stm32_mdma_chan_config {
	u32 request;
	u32 priority_level;
	u32 transfer_config;
	u32 mask_addr;
	u32 mask_data;
};

struct stm32_mdma_hwdesc {
	u32 ctcr;
	u32 cbndtr;
	u32 csar;
	u32 cdar;
	u32 cbrur;
	u32 clar;
	u32 ctbr;
	u32 dummy;
	u32 cmar;
	u32 cmdr;
} __aligned(64);
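
/*
 * This layout mirrors the per-channel CTCR..CMDR register block, with
 * "dummy" standing in for the reserved word at offset 0x6C, so that in
 * linked-list mode the controller can reload a complete channel context
 * from the address programmed in CLAR (presumably also why the struct
 * is padded and kept 64-byte aligned in the descriptor pool).
 */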

struct stm32_mdma_desc_node {
	struct stm32_mdma_hwdesc *hwdesc;
	dma_addr_t hwdesc_phys;
};

struct stm32_mdma_desc {
	struct virt_dma_desc vdesc;
	u32 ccr;
	bool cyclic;
	u32 count;
	struct stm32_mdma_desc_node node[];
};

struct stm32_mdma_chan {
	struct virt_dma_chan vchan;
	struct dma_pool *desc_pool;
	u32 id;
	struct stm32_mdma_desc *desc;
	u32 curr_hwdesc;
	struct dma_slave_config dma_config;
	struct stm32_mdma_chan_config chan_config;
	bool busy;
	u32 mem_burst;
	u32 mem_width;
};

struct stm32_mdma_device {
	struct dma_device ddev;
	void __iomem *base;
	struct clk *clk;
	int irq;
	u32 nr_channels;
	u32 nr_requests;
	u32 nr_ahb_addr_masks;
	struct stm32_mdma_chan chan[STM32_MDMA_MAX_CHANNELS];
	u32 ahb_addr_masks[];
};

static struct stm32_mdma_device *stm32_mdma_get_dev(
	struct stm32_mdma_chan *chan)
{
	return container_of(chan->vchan.chan.device, struct stm32_mdma_device,
			    ddev);
}

static struct stm32_mdma_chan *to_stm32_mdma_chan(struct dma_chan *c)
{
	return container_of(c, struct stm32_mdma_chan, vchan.chan);
}

static struct stm32_mdma_desc *to_stm32_mdma_desc(struct virt_dma_desc *vdesc)
{
	return container_of(vdesc, struct stm32_mdma_desc, vdesc);
}

static struct device *chan2dev(struct stm32_mdma_chan *chan)
{
	return &chan->vchan.chan.dev->device;
}

static struct device *mdma2dev(struct stm32_mdma_device *mdma_dev)
{
	return mdma_dev->ddev.dev;
}

static u32 stm32_mdma_read(struct stm32_mdma_device *dmadev, u32 reg)
{
	return readl_relaxed(dmadev->base + reg);
}

static void stm32_mdma_write(struct stm32_mdma_device *dmadev, u32 reg, u32 val)
{
	writel_relaxed(val, dmadev->base + reg);
}

static void stm32_mdma_set_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	void __iomem *addr = dmadev->base + reg;

	writel_relaxed(readl_relaxed(addr) | mask, addr);
}

static void stm32_mdma_clr_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	void __iomem *addr = dmadev->base + reg;

	writel_relaxed(readl_relaxed(addr) & ~mask, addr);
}

static struct stm32_mdma_desc *stm32_mdma_alloc_desc(
		struct stm32_mdma_chan *chan, u32 count)
{
	struct stm32_mdma_desc *desc;
	int i;

	desc = kzalloc(struct_size(desc, node, count), GFP_NOWAIT);
	if (!desc)
		return NULL;

	for (i = 0; i < count; i++) {
		desc->node[i].hwdesc =
			dma_pool_alloc(chan->desc_pool, GFP_NOWAIT,
				       &desc->node[i].hwdesc_phys);
		if (!desc->node[i].hwdesc)
			goto err;
	}

	desc->count = count;

	return desc;

err:
	dev_err(chan2dev(chan), "Failed to allocate descriptor\n");
	while (--i >= 0)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

static void stm32_mdma_desc_free(struct virt_dma_desc *vdesc)
{
	struct stm32_mdma_desc *desc = to_stm32_mdma_desc(vdesc);
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan);
	int i;

	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
}

static int stm32_mdma_get_width(struct stm32_mdma_chan *chan,
				enum dma_slave_buswidth width)
{
	switch (width) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
	case DMA_SLAVE_BUSWIDTH_8_BYTES:
		return ffs(width) - 1;
	default:
		dev_err(chan2dev(chan), "Dma bus width %i not supported\n",
			width);
		return -EINVAL;
	}
}

static enum dma_slave_buswidth stm32_mdma_get_max_width(dma_addr_t addr,
							u32 buf_len, u32 tlen)
{
	enum dma_slave_buswidth max_width;

	for (max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
	     max_width > DMA_SLAVE_BUSWIDTH_1_BYTE;
	     max_width >>= 1) {
		/*
		 * Address and buffer length both have to be aligned on
		 * bus width
		 */
		if ((((buf_len | addr) & (max_width - 1)) == 0) &&
		    tlen >= max_width)
			break;
	}

	return max_width;
}
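
/*
 * Example: addr = 0x...2 with buf_len = 6 fails the alignment check for
 * 8- and 4-byte widths ((6 | 0x2) & 0x3 != 0) but passes for 2 bytes,
 * so DMA_SLAVE_BUSWIDTH_2_BYTES is returned (provided tlen >= 2).
 */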

static u32 stm32_mdma_get_best_burst(u32 buf_len, u32 tlen, u32 max_burst,
				     enum dma_slave_buswidth width)
{
	u32 best_burst;

	best_burst = min((u32)1 << __ffs(tlen | buf_len),
			 max_burst * width) / width;

	return (best_burst > 0) ? best_burst : 1;
}
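
/*
 * Example: buf_len = 96, tlen = 128, width = 4, max_burst = 8:
 * 1 << __ffs(128 | 96) = 32 bytes, capped at max_burst * width = 32,
 * giving a best burst of 32 / 4 = 8 beats.
 */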

static int stm32_mdma_disable_chan(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	u32 ccr, cisr, id, reg;
	int ret;

	id = chan->id;
	reg = STM32_MDMA_CCR(id);

	/* Disable interrupts */
	stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_IRQ_MASK);

	ccr = stm32_mdma_read(dmadev, reg);
	if (ccr & STM32_MDMA_CCR_EN) {
		stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_EN);

		/* Ensure that any ongoing transfer has been completed */
		ret = readl_relaxed_poll_timeout_atomic(
				dmadev->base + STM32_MDMA_CISR(id), cisr,
				(cisr & STM32_MDMA_CISR_CTCIF), 10, 1000);
		if (ret) {
			dev_err(chan2dev(chan), "%s: timeout!\n", __func__);
			return -EBUSY;
		}
	}

	return 0;
}

static void stm32_mdma_stop(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	u32 status;
	int ret;

	/* Disable DMA */
	ret = stm32_mdma_disable_chan(chan);
	if (ret < 0)
		return;

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
	if (status) {
		dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n",
			__func__, status);
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
	}

	chan->busy = false;
}

static void stm32_mdma_set_bus(struct stm32_mdma_device *dmadev, u32 *ctbr,
			       u32 ctbr_mask, u32 src_addr)
{
	u32 mask;
	int i;

	/* Check if memory device is on AHB or AXI */
	*ctbr &= ~ctbr_mask;
	mask = src_addr & 0xF0000000;
	for (i = 0; i < dmadev->nr_ahb_addr_masks; i++) {
		if (mask == dmadev->ahb_addr_masks[i]) {
			*ctbr |= ctbr_mask;
			break;
		}
	}
}
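
/*
 * ahb_addr_masks is expected to hold the high-order address nibbles of
 * the AHB-reachable RAMs (filled in at probe time); an address that
 * matches none of them is left routed on the AXI bus, i.e. ctbr_mask
 * stays cleared.
 */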

static int stm32_mdma_set_xfer_param(struct stm32_mdma_chan *chan,
				     enum dma_transfer_direction direction,
				     u32 *mdma_ccr, u32 *mdma_ctcr,
				     u32 *mdma_ctbr, dma_addr_t addr,
				     u32 buf_len)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
	enum dma_slave_buswidth src_addr_width, dst_addr_width;
	phys_addr_t src_addr, dst_addr;
	int src_bus_width, dst_bus_width;
	u32 src_maxburst, dst_maxburst, src_best_burst, dst_best_burst;
	u32 ccr, ctcr, ctbr, tlen;

	src_addr_width = chan->dma_config.src_addr_width;
	dst_addr_width = chan->dma_config.dst_addr_width;
	src_maxburst = chan->dma_config.src_maxburst;
	dst_maxburst = chan->dma_config.dst_maxburst;

	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));

	/* Enable HW request mode */
	ctcr &= ~STM32_MDMA_CTCR_SWRM;

	/* Set DINC, SINC, DINCOS, SINCOS, TRGM and TLEN retrieved from DT */
	ctcr &= ~STM32_MDMA_CTCR_CFG_MASK;
	ctcr |= chan_config->transfer_config & STM32_MDMA_CTCR_CFG_MASK;

	/*
	 * For buffer transfer length (TLEN) we have to set
	 * the number of bytes - 1 in the CTCR register
	 */
	tlen = STM32_MDMA_CTCR_LEN2_GET(ctcr);
	ctcr &= ~STM32_MDMA_CTCR_LEN2_MSK;
	ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

	/* Disable Pack Enable */
	ctcr &= ~STM32_MDMA_CTCR_PKE;

	/* Check burst size constraints */
	if (src_maxburst * src_addr_width > STM32_MDMA_MAX_BURST ||
	    dst_maxburst * dst_addr_width > STM32_MDMA_MAX_BURST) {
		dev_err(chan2dev(chan),
			"burst size * bus width higher than %d bytes\n",
			STM32_MDMA_MAX_BURST);
		return -EINVAL;
	}

	if ((!is_power_of_2(src_maxburst) && src_maxburst > 0) ||
	    (!is_power_of_2(dst_maxburst) && dst_maxburst > 0)) {
		dev_err(chan2dev(chan), "burst size must be a power of 2\n");
		return -EINVAL;
	}

	/*
	 * Configure channel control:
	 * - Clear SW request, as in this case the request comes from HW
	 * - Clear WEX, HEX and BEX bits
	 * - Set priority level
	 */
	ccr &= ~(STM32_MDMA_CCR_SWRQ | STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK);
	ccr |= STM32_MDMA_CCR_PL(chan_config->priority_level);

	/* Configure Trigger selection */
	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
	ctbr |= STM32_MDMA_CTBR_TSEL(chan_config->request);

	switch (direction) {
	case DMA_MEM_TO_DEV:
		dst_addr = chan->dma_config.dst_addr;

		/* Set device data size */
		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;
		ctcr &= ~STM32_MDMA_CTCR_DSIZE_MASK;
		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width);

		/* Set device burst value */
		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   dst_maxburst,
							   dst_addr_width);
		chan->mem_burst = dst_best_burst;
		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));

		/* Set memory data size */
		src_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
		chan->mem_width = src_addr_width;
		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;
		ctcr &= ~(STM32_MDMA_CTCR_SSIZE_MASK |
			  STM32_MDMA_CTCR_SINCOS_MASK);
		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width) |
			STM32_MDMA_CTCR_SINCOS(src_bus_width);

		/* Set memory burst value */
		src_maxburst = STM32_MDMA_MAX_BUF_LEN / src_addr_width;
		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   src_maxburst,
							   src_addr_width);
		chan->mem_burst = src_best_burst;
		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));

		/* Select bus */
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
				   dst_addr);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Set destination address */
		stm32_mdma_write(dmadev, STM32_MDMA_CDAR(chan->id), dst_addr);
		break;

	case DMA_DEV_TO_MEM:
		src_addr = chan->dma_config.src_addr;

		/* Set device data size */
		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;
		ctcr &= ~STM32_MDMA_CTCR_SSIZE_MASK;
		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width);

		/* Set device burst value */
		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   src_maxburst,
							   src_addr_width);
		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));

		/* Set memory data size */
		dst_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
		chan->mem_width = dst_addr_width;
		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;
		ctcr &= ~(STM32_MDMA_CTCR_DSIZE_MASK |
			  STM32_MDMA_CTCR_DINCOS_MASK);
		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
			STM32_MDMA_CTCR_DINCOS(dst_bus_width);

		/* Set memory burst value */
		dst_maxburst = STM32_MDMA_MAX_BUF_LEN / dst_addr_width;
		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   dst_maxburst,
							   dst_addr_width);
		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));

		/* Select bus */
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
				   src_addr);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Set source address */
		stm32_mdma_write(dmadev, STM32_MDMA_CSAR(chan->id), src_addr);
		break;

	default:
		dev_err(chan2dev(chan), "Dma direction is not supported\n");
		return -EINVAL;
	}

	*mdma_ccr = ccr;
	*mdma_ctcr = ctcr;
	*mdma_ctbr = ctbr;

	return 0;
}

static void stm32_mdma_dump_hwdesc(struct stm32_mdma_chan *chan,
				   struct stm32_mdma_desc_node *node)
{
	dev_dbg(chan2dev(chan), "hwdesc: %pad\n", &node->hwdesc_phys);
	dev_dbg(chan2dev(chan), "CTCR: 0x%08x\n", node->hwdesc->ctcr);
	dev_dbg(chan2dev(chan), "CBNDTR: 0x%08x\n", node->hwdesc->cbndtr);
	dev_dbg(chan2dev(chan), "CSAR: 0x%08x\n", node->hwdesc->csar);
	dev_dbg(chan2dev(chan), "CDAR: 0x%08x\n", node->hwdesc->cdar);
	dev_dbg(chan2dev(chan), "CBRUR: 0x%08x\n", node->hwdesc->cbrur);
	dev_dbg(chan2dev(chan), "CLAR: 0x%08x\n", node->hwdesc->clar);
	dev_dbg(chan2dev(chan), "CTBR: 0x%08x\n", node->hwdesc->ctbr);
	dev_dbg(chan2dev(chan), "CMAR: 0x%08x\n", node->hwdesc->cmar);
	dev_dbg(chan2dev(chan), "CMDR: 0x%08x\n\n", node->hwdesc->cmdr);
}

static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan,
				    struct stm32_mdma_desc *desc,
				    enum dma_transfer_direction dir, u32 count,
				    dma_addr_t src_addr, dma_addr_t dst_addr,
				    u32 len, u32 ctcr, u32 ctbr, bool is_last,
				    bool is_first, bool is_cyclic)
{
	struct stm32_mdma_chan_config *config = &chan->chan_config;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 next = count + 1;

	hwdesc = desc->node[count].hwdesc;
	hwdesc->ctcr = ctcr;
	hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK |
			    STM32_MDMA_CBNDTR_BRDUM |
			    STM32_MDMA_CBNDTR_BRSUM |
			    STM32_MDMA_CBNDTR_BNDT_MASK);
	hwdesc->cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
	hwdesc->csar = src_addr;
	hwdesc->cdar = dst_addr;
	hwdesc->cbrur = 0;
	hwdesc->ctbr = ctbr;
	hwdesc->cmar = config->mask_addr;
	hwdesc->cmdr = config->mask_data;

	if (is_last) {
		if (is_cyclic)
			hwdesc->clar = desc->node[0].hwdesc_phys;
		else
			hwdesc->clar = 0;
	} else {
		hwdesc->clar = desc->node[next].hwdesc_phys;
	}

	stm32_mdma_dump_hwdesc(chan, &desc->node[count]);
}
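
/*
 * Each node's CLAR holds the physical address of the next node, so the
 * controller walks the list by itself once started: a cyclic transfer
 * closes the ring back to node[0], while CLAR == 0 marks the final node.
 */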

static int stm32_mdma_setup_xfer(struct stm32_mdma_chan *chan,
				 struct stm32_mdma_desc *desc,
				 struct scatterlist *sgl, u32 sg_len,
				 enum dma_transfer_direction direction)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct dma_slave_config *dma_config = &chan->dma_config;
	struct scatterlist *sg;
	dma_addr_t src_addr, dst_addr;
	u32 ccr, ctcr, ctbr;
	int i, ret = 0;

	for_each_sg(sgl, sg, sg_len, i) {
		if (sg_dma_len(sg) > STM32_MDMA_MAX_BLOCK_LEN) {
			dev_err(chan2dev(chan), "Invalid block len\n");
			return -EINVAL;
		}

		if (direction == DMA_MEM_TO_DEV) {
			src_addr = sg_dma_address(sg);
			dst_addr = dma_config->dst_addr;
			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
							&ctcr, &ctbr, src_addr,
							sg_dma_len(sg));
			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
					   src_addr);
		} else {
			src_addr = dma_config->src_addr;
			dst_addr = sg_dma_address(sg);
			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
							&ctcr, &ctbr, dst_addr,
							sg_dma_len(sg));
			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
					   dst_addr);
		}

		if (ret < 0)
			return ret;

		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
					dst_addr, sg_dma_len(sg), ctcr, ctbr,
					i == sg_len - 1, i == 0, false);
	}

	/* Enable interrupts */
	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE;
	if (sg_len > 1)
		ccr |= STM32_MDMA_CCR_BTIE;
	desc->ccr = ccr;

	return 0;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl,
			 u32 sg_len, enum dma_transfer_direction direction,
			 unsigned long flags, void *context)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_desc *desc;
	int i, ret;

	/*
	 * Once the channel is set up in cyclic mode, we cannot assign it
	 * another transfer. The channel needs to be aborted or terminated
	 * first to allow another request.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	desc = stm32_mdma_alloc_desc(chan, sg_len);
	if (!desc)
		return NULL;

	ret = stm32_mdma_setup_xfer(chan, desc, sgl, sg_len, direction);
	if (ret < 0)
		goto xfer_setup_err;

	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

xfer_setup_err:
	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_cyclic(struct dma_chan *c, dma_addr_t buf_addr,
			   size_t buf_len, size_t period_len,
			   enum dma_transfer_direction direction,
			   unsigned long flags)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct dma_slave_config *dma_config = &chan->dma_config;
	struct stm32_mdma_desc *desc;
	dma_addr_t src_addr, dst_addr;
	u32 ccr, ctcr, ctbr, count;
	int i, ret;

	/*
	 * Once the channel is set up in cyclic mode, we cannot assign it
	 * another transfer. The channel needs to be aborted or terminated
	 * first to allow another request.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	if (!buf_len || !period_len || period_len > STM32_MDMA_MAX_BLOCK_LEN) {
		dev_err(chan2dev(chan), "Invalid buffer/period len\n");
		return NULL;
	}

	if (buf_len % period_len) {
		dev_err(chan2dev(chan), "buf_len not multiple of period_len\n");
		return NULL;
	}

	count = buf_len / period_len;

	desc = stm32_mdma_alloc_desc(chan, count);
	if (!desc)
		return NULL;

	/* Select bus */
	if (direction == DMA_MEM_TO_DEV) {
		src_addr = buf_addr;
		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
						&ctbr, src_addr, period_len);
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
				   src_addr);
	} else {
		dst_addr = buf_addr;
		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
						&ctbr, dst_addr, period_len);
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
				   dst_addr);
	}

	if (ret < 0)
		goto xfer_setup_err;

	/* Enable interrupts */
	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE | STM32_MDMA_CCR_BTIE;
	desc->ccr = ccr;

	/* Configure hwdesc list */
	for (i = 0; i < count; i++) {
		if (direction == DMA_MEM_TO_DEV) {
			src_addr = buf_addr + i * period_len;
			dst_addr = dma_config->dst_addr;
		} else {
			src_addr = dma_config->src_addr;
			dst_addr = buf_addr + i * period_len;
		}

		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
					dst_addr, period_len, ctcr, ctbr,
					i == count - 1, i == 0, true);
	}

	desc->cyclic = true;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

xfer_setup_err:
	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dest, dma_addr_t src,
			   size_t len, unsigned long flags)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	enum dma_slave_buswidth max_width;
	struct stm32_mdma_desc *desc;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 ccr, ctcr, ctbr, cbndtr, count, max_burst, mdma_burst;
	u32 best_burst, tlen;
	size_t xfer_count, offset;
	int src_bus_width, dst_bus_width;
	int i;

	/*
	 * Once the channel is set up in cyclic mode, we cannot assign it
	 * another transfer. The channel needs to be aborted or terminated
	 * first to allow another request.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	count = DIV_ROUND_UP(len, STM32_MDMA_MAX_BLOCK_LEN);
	desc = stm32_mdma_alloc_desc(chan, count);
	if (!desc)
		return NULL;

	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));

	/* Enable sw req, some interrupts and clear other bits */
	ccr &= ~(STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK |
		 STM32_MDMA_CCR_IRQ_MASK);
	ccr |= STM32_MDMA_CCR_TEIE;

	/* Enable SW request mode, dest/src inc and clear other bits */
	ctcr &= ~(STM32_MDMA_CTCR_BWM | STM32_MDMA_CTCR_TRGM_MSK |
		  STM32_MDMA_CTCR_PAM_MASK | STM32_MDMA_CTCR_PKE |
		  STM32_MDMA_CTCR_TLEN_MSK | STM32_MDMA_CTCR_DBURST_MASK |
		  STM32_MDMA_CTCR_SBURST_MASK | STM32_MDMA_CTCR_DINCOS_MASK |
		  STM32_MDMA_CTCR_SINCOS_MASK | STM32_MDMA_CTCR_DSIZE_MASK |
		  STM32_MDMA_CTCR_SSIZE_MASK | STM32_MDMA_CTCR_DINC_MASK |
		  STM32_MDMA_CTCR_SINC_MASK);
	ctcr |= STM32_MDMA_CTCR_SWRM | STM32_MDMA_CTCR_SINC(STM32_MDMA_INC) |
		STM32_MDMA_CTCR_DINC(STM32_MDMA_INC);

	/* Reset HW request */
	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;

	/* Select bus */
	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS, src);
	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS, dest);

	/* Clear CBNDTR registers */
	cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK | STM32_MDMA_CBNDTR_BRDUM |
		    STM32_MDMA_CBNDTR_BRSUM | STM32_MDMA_CBNDTR_BNDT_MASK);

	if (len <= STM32_MDMA_MAX_BLOCK_LEN) {
		cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
		if (len <= STM32_MDMA_MAX_BUF_LEN) {
			/* Setup a buffer transfer */
			ccr |= STM32_MDMA_CCR_TCIE | STM32_MDMA_CCR_CTCIE;
			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BUFFER);
		} else {
			/* Setup a block transfer */
			ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BLOCK);
		}

		tlen = STM32_MDMA_MAX_BUF_LEN;
		ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

		/* Set source best burst size */
		max_width = stm32_mdma_get_max_width(src, len, tlen);
		src_bus_width = stm32_mdma_get_width(chan, max_width);

		max_burst = tlen / max_width;
		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
						       max_width);
		mdma_burst = ilog2(best_burst);

		ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
			STM32_MDMA_CTCR_SSIZE(src_bus_width) |
			STM32_MDMA_CTCR_SINCOS(src_bus_width);

		/* Set destination best burst size */
		max_width = stm32_mdma_get_max_width(dest, len, tlen);
		dst_bus_width = stm32_mdma_get_width(chan, max_width);

		max_burst = tlen / max_width;
		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
						       max_width);
		mdma_burst = ilog2(best_burst);

		ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
			STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
			STM32_MDMA_CTCR_DINCOS(dst_bus_width);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Prepare hardware descriptor */
		hwdesc = desc->node[0].hwdesc;
		hwdesc->ctcr = ctcr;
		hwdesc->cbndtr = cbndtr;
		hwdesc->csar = src;
		hwdesc->cdar = dest;
		hwdesc->cbrur = 0;
		hwdesc->clar = 0;
		hwdesc->ctbr = ctbr;
		hwdesc->cmar = 0;
		hwdesc->cmdr = 0;

		stm32_mdma_dump_hwdesc(chan, &desc->node[0]);
	} else {
		/* Setup a LLI transfer */
		ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_LINKED_LIST) |
			STM32_MDMA_CTCR_TLEN((STM32_MDMA_MAX_BUF_LEN - 1));
		ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
		tlen = STM32_MDMA_MAX_BUF_LEN;

		for (i = 0, offset = 0; offset < len;
		     i++, offset += xfer_count) {
			xfer_count = min_t(size_t, len - offset,
					   STM32_MDMA_MAX_BLOCK_LEN);

			/* Set source best burst size */
			max_width = stm32_mdma_get_max_width(src, len, tlen);
			src_bus_width = stm32_mdma_get_width(chan, max_width);

			max_burst = tlen / max_width;
			best_burst = stm32_mdma_get_best_burst(len, tlen,
							       max_burst,
							       max_width);
			mdma_burst = ilog2(best_burst);

			ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
				STM32_MDMA_CTCR_SSIZE(src_bus_width) |
				STM32_MDMA_CTCR_SINCOS(src_bus_width);

			/* Set destination best burst size */
			max_width = stm32_mdma_get_max_width(dest, len, tlen);
			dst_bus_width = stm32_mdma_get_width(chan, max_width);

			max_burst = tlen / max_width;
			best_burst = stm32_mdma_get_best_burst(len, tlen,
							       max_burst,
							       max_width);
			mdma_burst = ilog2(best_burst);

			ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
				STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
				STM32_MDMA_CTCR_DINCOS(dst_bus_width);

			if (dst_bus_width != src_bus_width)
				ctcr |= STM32_MDMA_CTCR_PKE;

			/* Prepare hardware descriptor */
			stm32_mdma_setup_hwdesc(chan, desc, DMA_MEM_TO_MEM, i,
						src + offset, dest + offset,
						xfer_count, ctcr, ctbr,
						i == count - 1, i == 0, false);
		}
	}

	desc->ccr = ccr;

	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
}

static void stm32_mdma_dump_reg(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);

	dev_dbg(chan2dev(chan), "CCR: 0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)));
	dev_dbg(chan2dev(chan), "CTCR: 0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)));
	dev_dbg(chan2dev(chan), "CBNDTR: 0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)));
	dev_dbg(chan2dev(chan), "CSAR: 0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CSAR(chan->id)));
	dev_dbg(chan2dev(chan), "CDAR: 0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CDAR(chan->id)));
	dev_dbg(chan2dev(chan), "CBRUR: 0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CBRUR(chan->id)));
	dev_dbg(chan2dev(chan), "CLAR: 0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id)));
	dev_dbg(chan2dev(chan), "CTBR: 0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id)));
	dev_dbg(chan2dev(chan), "CMAR: 0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CMAR(chan->id)));
	dev_dbg(chan2dev(chan), "CMDR: 0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CMDR(chan->id)));
}

static void stm32_mdma_start_transfer(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct virt_dma_desc *vdesc;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 id = chan->id;
	u32 status, reg;

	vdesc = vchan_next_desc(&chan->vchan);
	if (!vdesc) {
		chan->desc = NULL;
		return;
	}

	list_del(&vdesc->node);

	chan->desc = to_stm32_mdma_desc(vdesc);
	hwdesc = chan->desc->node[0].hwdesc;
	chan->curr_hwdesc = 0;

	stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr);
	stm32_mdma_write(dmadev, STM32_MDMA_CTCR(id), hwdesc->ctcr);
	stm32_mdma_write(dmadev, STM32_MDMA_CBNDTR(id), hwdesc->cbndtr);
	stm32_mdma_write(dmadev, STM32_MDMA_CSAR(id), hwdesc->csar);
	stm32_mdma_write(dmadev, STM32_MDMA_CDAR(id), hwdesc->cdar);
	stm32_mdma_write(dmadev, STM32_MDMA_CBRUR(id), hwdesc->cbrur);
	stm32_mdma_write(dmadev, STM32_MDMA_CLAR(id), hwdesc->clar);
	stm32_mdma_write(dmadev, STM32_MDMA_CTBR(id), hwdesc->ctbr);
	stm32_mdma_write(dmadev, STM32_MDMA_CMAR(id), hwdesc->cmar);
	stm32_mdma_write(dmadev, STM32_MDMA_CMDR(id), hwdesc->cmdr);

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
	if (status)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(id), status);

	stm32_mdma_dump_reg(chan);

	/* Start DMA */
	stm32_mdma_set_bits(dmadev, STM32_MDMA_CCR(id), STM32_MDMA_CCR_EN);

	/* Set SW request in case of MEM2MEM transfer */
	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM) {
		reg = STM32_MDMA_CCR(id);
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
	}

	chan->busy = true;

	dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan);
}

static void stm32_mdma_issue_pending(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	if (!vchan_issue_pending(&chan->vchan))
		goto end;

	dev_dbg(chan2dev(chan), "vchan %pK: issued\n", &chan->vchan);

	if (!chan->desc && !chan->busy)
		stm32_mdma_start_transfer(chan);

end:
	spin_unlock_irqrestore(&chan->vchan.lock, flags);
}

static int stm32_mdma_pause(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&chan->vchan.lock, flags);
	ret = stm32_mdma_disable_chan(chan);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	if (!ret)
		dev_dbg(chan2dev(chan), "vchan %pK: pause\n", &chan->vchan);

	return ret;
}

static int stm32_mdma_resume(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_hwdesc *hwdesc;
	unsigned long flags;
	u32 status, reg;

	hwdesc = chan->desc->node[chan->curr_hwdesc].hwdesc;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	/* Re-configure control register */
	stm32_mdma_write(dmadev, STM32_MDMA_CCR(chan->id), chan->desc->ccr);

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
	if (status)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);

	stm32_mdma_dump_reg(chan);

	/* Re-start DMA */
	reg = STM32_MDMA_CCR(chan->id);
	stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_EN);

	/* Set SW request in case of MEM2MEM transfer */
	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM)
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	dev_dbg(chan2dev(chan), "vchan %pK: resume\n", &chan->vchan);

	return 0;
}

static int stm32_mdma_terminate_all(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&chan->vchan.lock, flags);
	if (chan->desc) {
		vchan_terminate_vdesc(&chan->desc->vdesc);
		if (chan->busy)
			stm32_mdma_stop(chan);
		chan->desc = NULL;
	}
	vchan_get_all_descriptors(&chan->vchan, &head);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	vchan_dma_desc_free_list(&chan->vchan, &head);

	return 0;
}

static void stm32_mdma_synchronize(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);

	vchan_synchronize(&chan->vchan);
}

static int stm32_mdma_slave_config(struct dma_chan *c,
				   struct dma_slave_config *config)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);

	memcpy(&chan->dma_config, config, sizeof(*config));

	return 0;
}

static size_t stm32_mdma_desc_residue(struct stm32_mdma_chan *chan,
				      struct stm32_mdma_desc *desc,
				      u32 curr_hwdesc)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_hwdesc *hwdesc = desc->node[0].hwdesc;
	u32 cbndtr, residue, modulo, burst_size;
	int i;

	residue = 0;
	for (i = curr_hwdesc + 1; i < desc->count; i++) {
		hwdesc = desc->node[i].hwdesc;
		residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr);
	}
	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
	residue += cbndtr & STM32_MDMA_CBNDTR_BNDT_MASK;

	if (!chan->mem_burst)
		return residue;

	burst_size = chan->mem_burst * chan->mem_width;
	modulo = residue % burst_size;
	if (modulo)
		residue = residue - modulo + burst_size;

	return residue;
}
| 1304 | |
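/*
 * Worked example for the rounding above (illustrative numbers): with
 * mem_burst = 8 beats of mem_width = 4 bytes, burst_size is 32. A raw
 * residue of 70 bytes gives modulo = 70 % 32 = 6, so the reported
 * residue becomes 70 - 6 + 32 = 96, the next burst boundary.
 */
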
static enum dma_status stm32_mdma_tx_status(struct dma_chan *c,
					    dma_cookie_t cookie,
					    struct dma_tx_state *state)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct virt_dma_desc *vdesc;
	enum dma_status status;
	unsigned long flags;
	u32 residue = 0;

	status = dma_cookie_status(c, cookie, state);
	if (status == DMA_COMPLETE || !state)
		return status;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	vdesc = vchan_find_desc(&chan->vchan, cookie);
	if (chan->desc && cookie == chan->desc->vdesc.tx.cookie)
		residue = stm32_mdma_desc_residue(chan, chan->desc,
						  chan->curr_hwdesc);
	else if (vdesc)
		residue = stm32_mdma_desc_residue(chan,
						  to_stm32_mdma_desc(vdesc), 0);
	dma_set_residue(state, residue);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	return status;
}

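/*
 * Illustrative polling sketch (hypothetical client code) showing how the
 * residue computed above is consumed:
 *
 *	struct dma_tx_state state;
 *	enum dma_status status;
 *
 *	status = dmaengine_tx_status(chan, cookie, &state);
 *	if (status == DMA_IN_PROGRESS)
 *		pr_debug("%u bytes left\n", state.residue);
 */
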
static void stm32_mdma_xfer_end(struct stm32_mdma_chan *chan)
{
	vchan_cookie_complete(&chan->desc->vdesc);
	chan->desc = NULL;
	chan->busy = false;

	/* Start the next transfer if a descriptor is pending */
	stm32_mdma_start_transfer(chan);
}

static irqreturn_t stm32_mdma_irq_handler(int irq, void *devid)
{
	struct stm32_mdma_device *dmadev = devid;
	struct stm32_mdma_chan *chan;
	u32 reg, id, ccr, ien, status;

	/* Find out which channel generated the interrupt */
	status = readl_relaxed(dmadev->base + STM32_MDMA_GISR0);
	if (status) {
		id = __ffs(status);
	} else {
		status = readl_relaxed(dmadev->base + STM32_MDMA_GISR1);
		if (!status) {
			dev_dbg(mdma2dev(dmadev), "spurious interrupt\n");
			return IRQ_NONE;
		}
		id = __ffs(status);
		/*
		 * GISR0 reports channels 0..31 and GISR1 channels 32..62,
		 * so a bit found in GISR1 maps to channel id + 32
		 * (e.g. GISR1 = 0x2 -> __ffs() = 1 -> channel 33).
		 */
		id += 32;
	}

	chan = &dmadev->chan[id];
	if (!chan) {
		dev_warn(mdma2dev(dmadev), "MDMA channel not initialized\n");
		return IRQ_NONE;
	}

	/* Handle interrupt for the channel */
	spin_lock(&chan->vchan.lock);
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
	/* Mask Channel ReQuest Active bit which can be set in case of MEM2MEM */
	status &= ~STM32_MDMA_CISR_CRQA;
	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
	/* The CCR enable bits mirror the CISR status bits, shifted left by one */
	ien = (ccr & STM32_MDMA_CCR_IRQ_MASK) >> 1;

	if (!(status & ien)) {
		spin_unlock(&chan->vchan.lock);
		dev_warn(chan2dev(chan),
			 "spurious interrupt (status=0x%04x, ien=0x%04x)\n",
			 status, ien);
		return IRQ_NONE;
	}

	reg = STM32_MDMA_CIFCR(id);

	if (status & STM32_MDMA_CISR_TEIF) {
		dev_err(chan2dev(chan), "Transfer Err: stat=0x%08x\n",
			readl_relaxed(dmadev->base + STM32_MDMA_CESR(id)));
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CTEIF);
		status &= ~STM32_MDMA_CISR_TEIF;
	}

	if (status & STM32_MDMA_CISR_CTCIF) {
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CCTCIF);
		status &= ~STM32_MDMA_CISR_CTCIF;
		stm32_mdma_xfer_end(chan);
	}

	if (status & STM32_MDMA_CISR_BRTIF) {
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBRTIF);
		status &= ~STM32_MDMA_CISR_BRTIF;
	}

	if (status & STM32_MDMA_CISR_BTIF) {
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBTIF);
		status &= ~STM32_MDMA_CISR_BTIF;
		chan->curr_hwdesc++;
		if (chan->desc && chan->desc->cyclic) {
			if (chan->curr_hwdesc == chan->desc->count)
				chan->curr_hwdesc = 0;
			vchan_cyclic_callback(&chan->desc->vdesc);
		}
	}

	if (status & STM32_MDMA_CISR_TCIF) {
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CLTCIF);
		status &= ~STM32_MDMA_CISR_TCIF;
	}

	if (status) {
		stm32_mdma_set_bits(dmadev, reg, status);
		dev_err(chan2dev(chan), "DMA error: status=0x%08x\n", status);
		if (!(ccr & STM32_MDMA_CCR_EN))
			dev_err(chan2dev(chan), "chan disabled by HW\n");
	}

	spin_unlock(&chan->vchan.lock);

	return IRQ_HANDLED;
}

static int stm32_mdma_alloc_chan_resources(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	int ret;

	chan->desc_pool = dmam_pool_create(dev_name(&c->dev->device),
					   c->device->dev,
					   sizeof(struct stm32_mdma_hwdesc),
					   __alignof__(struct stm32_mdma_hwdesc),
					   0);
	if (!chan->desc_pool) {
		dev_err(chan2dev(chan), "failed to allocate descriptor pool\n");
		return -ENOMEM;
	}

	/*
	 * pm_runtime_resume_and_get() drops the usage count by itself on
	 * failure, unlike pm_runtime_get_sync(), so no rollback is needed.
	 */
	ret = pm_runtime_resume_and_get(dmadev->ddev.dev);
	if (ret < 0)
		return ret;

	ret = stm32_mdma_disable_chan(chan);
	if (ret < 0)
		pm_runtime_put(dmadev->ddev.dev);

	return ret;
}

static void stm32_mdma_free_chan_resources(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	unsigned long flags;

	dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id);

	if (chan->busy) {
		spin_lock_irqsave(&chan->vchan.lock, flags);
		stm32_mdma_stop(chan);
		chan->desc = NULL;
		spin_unlock_irqrestore(&chan->vchan.lock, flags);
	}

	pm_runtime_put(dmadev->ddev.dev);
	vchan_free_chan_resources(to_virt_chan(c));
	dmam_pool_destroy(chan->desc_pool);
	chan->desc_pool = NULL;
}

static struct dma_chan *stm32_mdma_of_xlate(struct of_phandle_args *dma_spec,
					    struct of_dma *ofdma)
{
	struct stm32_mdma_device *dmadev = ofdma->of_dma_data;
	struct stm32_mdma_chan *chan;
	struct dma_chan *c;
	struct stm32_mdma_chan_config config;

	if (dma_spec->args_count < 5) {
		dev_err(mdma2dev(dmadev), "Bad number of args\n");
		return NULL;
	}

	config.request = dma_spec->args[0];
	config.priority_level = dma_spec->args[1];
	config.transfer_config = dma_spec->args[2];
	config.mask_addr = dma_spec->args[3];
	config.mask_data = dma_spec->args[4];

	if (config.request >= dmadev->nr_requests) {
		dev_err(mdma2dev(dmadev), "Bad request line\n");
		return NULL;
	}

	if (config.priority_level > STM32_MDMA_VERY_HIGH_PRIORITY) {
		dev_err(mdma2dev(dmadev), "Priority level not supported\n");
		return NULL;
	}

	c = dma_get_any_slave_channel(&dmadev->ddev);
	if (!c) {
		dev_err(mdma2dev(dmadev), "No more channels available\n");
		return NULL;
	}

	chan = to_stm32_mdma_chan(c);
	chan->chan_config = config;

	return c;
}

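/*
 * Illustrative device tree sketch matching the five cells decoded above
 * (request line, priority level, channel transfer configuration, mask
 * address, mask data); node names and cell values are hypothetical:
 *
 *	mdma1: dma-controller@52000000 {
 *		compatible = "st,stm32h7-mdma";
 *		...
 *		#dma-cells = <5>;
 *	};
 *
 *	client {
 *		dmas = <&mdma1 22 0 0 0 0>;
 *		dma-names = "tx";
 *	};
 */
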
static const struct of_device_id stm32_mdma_of_match[] = {
	{ .compatible = "st,stm32h7-mdma", },
	{ /* sentinel */ },
};
MODULE_DEVICE_TABLE(of, stm32_mdma_of_match);

static int stm32_mdma_probe(struct platform_device *pdev)
{
	struct stm32_mdma_chan *chan;
	struct stm32_mdma_device *dmadev;
	struct dma_device *dd;
	struct device_node *of_node;
	struct resource *res;
	struct reset_control *rst;
	u32 nr_channels, nr_requests;
	int i, count, ret;

	of_node = pdev->dev.of_node;
	if (!of_node)
		return -ENODEV;

	ret = device_property_read_u32(&pdev->dev, "dma-channels",
				       &nr_channels);
	if (ret) {
		nr_channels = STM32_MDMA_MAX_CHANNELS;
		dev_warn(&pdev->dev, "MDMA defaulting to %i channels\n",
			 nr_channels);
	}

	ret = device_property_read_u32(&pdev->dev, "dma-requests",
				       &nr_requests);
	if (ret) {
		nr_requests = STM32_MDMA_MAX_REQUESTS;
		dev_warn(&pdev->dev, "MDMA defaulting to %i request lines\n",
			 nr_requests);
	}

	count = device_property_count_u32(&pdev->dev, "st,ahb-addr-masks");
	if (count < 0)
		count = 0;

	dmadev = devm_kzalloc(&pdev->dev, sizeof(*dmadev) + sizeof(u32) * count,
			      GFP_KERNEL);
	if (!dmadev)
		return -ENOMEM;

	dmadev->nr_channels = nr_channels;
	dmadev->nr_requests = nr_requests;
	device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
				       dmadev->ahb_addr_masks,
				       count);
	dmadev->nr_ahb_addr_masks = count;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	dmadev->base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(dmadev->base))
		return PTR_ERR(dmadev->base);

	dmadev->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(dmadev->clk))
		return dev_err_probe(&pdev->dev, PTR_ERR(dmadev->clk),
				     "Missing clock controller\n");

	ret = clk_prepare_enable(dmadev->clk);
	if (ret < 0) {
		dev_err(&pdev->dev, "clk_prepare_enable error: %d\n", ret);
		return ret;
	}

	/* The reset line is optional: only defer on -EPROBE_DEFER */
	rst = devm_reset_control_get(&pdev->dev, NULL);
	if (IS_ERR(rst)) {
		ret = PTR_ERR(rst);
		if (ret == -EPROBE_DEFER)
			goto err_clk;
	} else {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	dd = &dmadev->ddev;
	dma_cap_set(DMA_SLAVE, dd->cap_mask);
	dma_cap_set(DMA_PRIVATE, dd->cap_mask);
	dma_cap_set(DMA_CYCLIC, dd->cap_mask);
	dma_cap_set(DMA_MEMCPY, dd->cap_mask);
	dd->device_alloc_chan_resources = stm32_mdma_alloc_chan_resources;
	dd->device_free_chan_resources = stm32_mdma_free_chan_resources;
	dd->device_tx_status = stm32_mdma_tx_status;
	dd->device_issue_pending = stm32_mdma_issue_pending;
	dd->device_prep_slave_sg = stm32_mdma_prep_slave_sg;
	dd->device_prep_dma_cyclic = stm32_mdma_prep_dma_cyclic;
	dd->device_prep_dma_memcpy = stm32_mdma_prep_dma_memcpy;
	dd->device_config = stm32_mdma_slave_config;
	dd->device_pause = stm32_mdma_pause;
	dd->device_resume = stm32_mdma_resume;
	dd->device_terminate_all = stm32_mdma_terminate_all;
	dd->device_synchronize = stm32_mdma_synchronize;
	dd->descriptor_reuse = true;

	dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
	dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
	dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
		BIT(DMA_MEM_TO_MEM);
	dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
	dd->max_burst = STM32_MDMA_MAX_BURST;
	dd->dev = &pdev->dev;
	INIT_LIST_HEAD(&dd->channels);

	for (i = 0; i < dmadev->nr_channels; i++) {
		chan = &dmadev->chan[i];
		chan->id = i;
		chan->vchan.desc_free = stm32_mdma_desc_free;
		vchan_init(&chan->vchan, dd);
	}

	dmadev->irq = platform_get_irq(pdev, 0);
	if (dmadev->irq < 0) {
		ret = dmadev->irq;
		goto err_clk;
	}

	ret = devm_request_irq(&pdev->dev, dmadev->irq, stm32_mdma_irq_handler,
			       0, dev_name(&pdev->dev), dmadev);
	if (ret) {
		dev_err(&pdev->dev, "failed to request IRQ\n");
		goto err_clk;
	}

	ret = dmaenginem_async_device_register(dd);
	if (ret)
		goto err_clk;

	ret = of_dma_controller_register(of_node, stm32_mdma_of_xlate, dmadev);
	if (ret < 0) {
		dev_err(&pdev->dev,
			"STM32 MDMA OF registration failed %d\n", ret);
		goto err_clk;
	}

	platform_set_drvdata(pdev, dmadev);
	/*
	 * Hand the clock over to runtime PM: mark the device active while
	 * the clock is on, then drop the initial reference so the device
	 * can be runtime suspended once it goes idle.
	 */
	pm_runtime_set_active(&pdev->dev);
	pm_runtime_enable(&pdev->dev);
	pm_runtime_get_noresume(&pdev->dev);
	pm_runtime_put(&pdev->dev);

	dev_info(&pdev->dev, "STM32 MDMA driver registered\n");

	return 0;

err_clk:
	clk_disable_unprepare(dmadev->clk);

	return ret;
}

#ifdef CONFIG_PM
static int stm32_mdma_runtime_suspend(struct device *dev)
{
	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);

	clk_disable_unprepare(dmadev->clk);

	return 0;
}

static int stm32_mdma_runtime_resume(struct device *dev)
{
	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(dmadev->clk);
	if (ret) {
		dev_err(dev, "failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif

#ifdef CONFIG_PM_SLEEP
static int stm32_mdma_pm_suspend(struct device *dev)
{
	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
	u32 ccr, id;
	int ret;

	ret = pm_runtime_resume_and_get(dev);
	if (ret < 0)
		return ret;

	/* Refuse to suspend while any channel is still enabled */
	for (id = 0; id < dmadev->nr_channels; id++) {
		ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
		if (ccr & STM32_MDMA_CCR_EN) {
			dev_warn(dev, "Suspend is prevented by Chan %i\n", id);
			pm_runtime_put_sync(dev);
			return -EBUSY;
		}
	}

	pm_runtime_put_sync(dev);

	return pm_runtime_force_suspend(dev);
}

static int stm32_mdma_pm_resume(struct device *dev)
{
	return pm_runtime_force_resume(dev);
}
#endif

static const struct dev_pm_ops stm32_mdma_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(stm32_mdma_pm_suspend, stm32_mdma_pm_resume)
	SET_RUNTIME_PM_OPS(stm32_mdma_runtime_suspend,
			   stm32_mdma_runtime_resume, NULL)
};

static struct platform_driver stm32_mdma_driver = {
	.probe = stm32_mdma_probe,
	.driver = {
		.name = "stm32-mdma",
		.of_match_table = stm32_mdma_of_match,
		.pm = &stm32_mdma_pm_ops,
	},
};

static int __init stm32_mdma_init(void)
{
	return platform_driver_register(&stm32_mdma_driver);
}

subsys_initcall(stm32_mdma_init);

MODULE_DESCRIPTION("Driver for STM32 MDMA controller");
MODULE_AUTHOR("M'boumba Cedric Madianga <cedric.madianga@gmail.com>");
MODULE_AUTHOR("Pierre-Yves Mordret <pierre-yves.mordret@st.com>");
MODULE_LICENSE("GPL v2");