Thomas Gleixner | ea2305f | 2019-05-20 19:08:05 +0200 | [diff] [blame] | 1 | /* SPDX-License-Identifier: GPL-2.0-or-later */ |
Zhang Wei | 173acc7 | 2008-03-01 07:42:48 -0700 | [diff] [blame] | 2 | /* |
Forrest Shi | f3c677b | 2010-12-09 16:14:04 +0800 | [diff] [blame] | 3 | * Copyright (C) 2007-2010 Freescale Semiconductor, Inc. All rights reserved. |
Zhang Wei | 173acc7 | 2008-03-01 07:42:48 -0700 | [diff] [blame] | 4 | * |
| 5 | * Author: |
| 6 | * Zhang Wei <wei.zhang@freescale.com>, Jul 2007 |
| 7 | * Ebony Zhu <ebony.zhu@freescale.com>, May 2007 |
Zhang Wei | 173acc7 | 2008-03-01 07:42:48 -0700 | [diff] [blame] | 8 | */ |
| 9 | #ifndef __DMA_FSLDMA_H |
| 10 | #define __DMA_FSLDMA_H |
| 11 | |
| 12 | #include <linux/device.h> |
| 13 | #include <linux/dmapool.h> |
| 14 | #include <linux/dmaengine.h> |
| 15 | |
| 16 | /* Define data structures needed by Freescale |
| 17 | * MPC8540 and MPC8349 DMA controller. |
| 18 | */ |
| 19 | #define FSL_DMA_MR_CS 0x00000001 |
| 20 | #define FSL_DMA_MR_CC 0x00000002 |
| 21 | #define FSL_DMA_MR_CA 0x00000008 |
| 22 | #define FSL_DMA_MR_EIE 0x00000040 |
| 23 | #define FSL_DMA_MR_XFE 0x00000020 |
| 24 | #define FSL_DMA_MR_EOLNIE 0x00000100 |
| 25 | #define FSL_DMA_MR_EOLSIE 0x00000080 |
| 26 | #define FSL_DMA_MR_EOSIE 0x00000200 |
| 27 | #define FSL_DMA_MR_CDSM 0x00000010 |
| 28 | #define FSL_DMA_MR_CTM 0x00000004 |
| 29 | #define FSL_DMA_MR_EMP_EN 0x00200000 |
| 30 | #define FSL_DMA_MR_EMS_EN 0x00040000 |
| 31 | #define FSL_DMA_MR_DAHE 0x00002000 |
| 32 | #define FSL_DMA_MR_SAHE 0x00001000 |
| 33 | |
Thomas Breitung | ccc0772 | 2017-06-19 16:40:04 +0200 | [diff] [blame] | 34 | #define FSL_DMA_MR_SAHTS_MASK 0x0000C000 |
| 35 | #define FSL_DMA_MR_DAHTS_MASK 0x00030000 |
| 36 | #define FSL_DMA_MR_BWC_MASK 0x0f000000 |
| 37 | |
Forrest Shi | f3c677b | 2010-12-09 16:14:04 +0800 | [diff] [blame] | 38 | /* |
| 39 | * Bandwidth/pause control determines how many bytes a given |
| 40 | * channel is allowed to transfer before the DMA engine pauses |
| 41 | * the current channel and switches to the next channel |
| 42 | */ |
Hongbo Zhang | 0ca583a | 2014-01-16 14:10:53 +0800 | [diff] [blame] | 43 | #define FSL_DMA_MR_BWC 0x0A000000 |
Forrest Shi | f3c677b | 2010-12-09 16:14:04 +0800 | [diff] [blame] | 44 | |
Zhang Wei | 173acc7 | 2008-03-01 07:42:48 -0700 | [diff] [blame] | 45 | /* Special MR definition for MPC8349 */ |
| 46 | #define FSL_DMA_MR_EOTIE 0x00000080 |
Ira W. Snyder | a7aea37 | 2009-04-23 16:17:54 -0700 | [diff] [blame] | 47 | #define FSL_DMA_MR_PRC_RM 0x00000800 |
Zhang Wei | 173acc7 | 2008-03-01 07:42:48 -0700 | [diff] [blame] | 48 | |
| 49 | #define FSL_DMA_SR_CH 0x00000020 |
Zhang Wei | f79abb6 | 2008-03-18 18:45:00 -0700 | [diff] [blame] | 50 | #define FSL_DMA_SR_PE 0x00000010 |
Zhang Wei | 173acc7 | 2008-03-01 07:42:48 -0700 | [diff] [blame] | 51 | #define FSL_DMA_SR_CB 0x00000004 |
| 52 | #define FSL_DMA_SR_TE 0x00000080 |
| 53 | #define FSL_DMA_SR_EOSI 0x00000002 |
| 54 | #define FSL_DMA_SR_EOLSI 0x00000001 |
| 55 | #define FSL_DMA_SR_EOCDI 0x00000001 |
| 56 | #define FSL_DMA_SR_EOLNI 0x00000008 |
| 57 | |
| 58 | #define FSL_DMA_SATR_SBPATMU 0x20000000 |
| 59 | #define FSL_DMA_SATR_STRANSINT_RIO 0x00c00000 |
| 60 | #define FSL_DMA_SATR_SREADTYPE_SNOOP_READ 0x00050000 |
| 61 | #define FSL_DMA_SATR_SREADTYPE_BP_IORH 0x00020000 |
| 62 | #define FSL_DMA_SATR_SREADTYPE_BP_NREAD 0x00040000 |
| 63 | #define FSL_DMA_SATR_SREADTYPE_BP_MREAD 0x00070000 |
| 64 | |
| 65 | #define FSL_DMA_DATR_DBPATMU 0x20000000 |
| 66 | #define FSL_DMA_DATR_DTRANSINT_RIO 0x00c00000 |
| 67 | #define FSL_DMA_DATR_DWRITETYPE_SNOOP_WRITE 0x00050000 |
| 68 | #define FSL_DMA_DATR_DWRITETYPE_BP_FLUSH 0x00010000 |
| 69 | |
| 70 | #define FSL_DMA_EOL ((u64)0x1) |
| 71 | #define FSL_DMA_SNEN ((u64)0x10) |
| 72 | #define FSL_DMA_EOSIE 0x8 |
| 73 | #define FSL_DMA_NLDA_MASK (~(u64)0x1f) |
| 74 | |
| 75 | #define FSL_DMA_BCR_MAX_CNT 0x03ffffffu |
| 76 | |
| 77 | #define FSL_DMA_DGSR_TE 0x80 |
| 78 | #define FSL_DMA_DGSR_CH 0x20 |
| 79 | #define FSL_DMA_DGSR_PE 0x10 |
| 80 | #define FSL_DMA_DGSR_EOLNI 0x08 |
| 81 | #define FSL_DMA_DGSR_CB 0x04 |
| 82 | #define FSL_DMA_DGSR_EOSI 0x02 |
| 83 | #define FSL_DMA_DGSR_EOLSI 0x01 |
| 84 | |
Kevin Hao | 75dc177 | 2015-01-08 18:38:16 +0800 | [diff] [blame] | 85 | #define FSL_DMA_BUSWIDTHS (BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) | \ |
| 86 | BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) | \ |
| 87 | BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) | \ |
| 88 | BIT(DMA_SLAVE_BUSWIDTH_8_BYTES)) |
Al Viro | a4e6d5d | 2008-03-29 03:10:18 +0000 | [diff] [blame] | 89 | typedef u64 __bitwise v64; |
| 90 | typedef u32 __bitwise v32; |
| 91 | |
/*
 * In-memory link descriptor laid out for the DMA hardware (32-byte
 * aligned). Fields use the __bitwise v64/v32 types: their stored byte
 * order depends on the channel's FSL_DMA_BIG_ENDIAN feature bit and is
 * converted with the CPU_TO_DMA()/DMA_TO_CPU() macros below.
 */
struct fsl_dma_ld_hw {
	v64 src_addr;		/* transfer source address */
	v64 dst_addr;		/* transfer destination address */
	v64 next_ln_addr;	/* address of the next link descriptor */
	v32 count;		/* byte count for this descriptor */
	v32 reserve;		/* pad to the 32-byte descriptor size */
} __attribute__((aligned(32)));
| 99 | |
/*
 * Software descriptor: one hardware link descriptor plus the
 * bookkeeping the driver and the dmaengine core need to track it.
 */
struct fsl_desc_sw {
	struct fsl_dma_ld_hw hw;	/* hardware view; kept first so the
					 * 32-byte alignment carries over */
	struct list_head node;		/* entry on a fsldma_chan ld_* list
					 * (see to_fsl_desc() below) */
	struct list_head tx_list;	/* descriptors chained into one transaction */
	struct dma_async_tx_descriptor async_tx; /* dmaengine cookie/flags state
						  * (see tx_to_fsl_desc() below) */
} __attribute__((aligned(32)));
| 106 | |
/*
 * Per-channel memory-mapped register block. The offsets in the field
 * comments are relative to the channel's register base; field order and
 * sizes mirror the hardware layout and must not be changed.
 */
struct fsldma_chan_regs {
	u32 mr;		/* 0x00 - Mode Register */
	u32 sr;		/* 0x04 - Status Register */
	u64 cdar;	/* 0x08 - Current descriptor address register */
	u64 sar;	/* 0x10 - Source Address Register */
	u64 dar;	/* 0x18 - Destination Address Register */
	u32 bcr;	/* 0x20 - Byte Count Register */
	u64 ndar;	/* 0x24 - Next Descriptor Address Register */
};
| 116 | |
Ira Snyder | a4f56d4 | 2010-01-06 13:34:01 +0000 | [diff] [blame] | 117 | struct fsldma_chan; |
Hongbo Zhang | 8de7a7d | 2013-09-26 17:33:43 +0800 | [diff] [blame] | 118 | #define FSL_DMA_MAX_CHANS_PER_DEVICE 8 |
Zhang Wei | 173acc7 | 2008-03-01 07:42:48 -0700 | [diff] [blame] | 119 | |
/*
 * Per-controller state: the shared register window plus up to
 * FSL_DMA_MAX_CHANS_PER_DEVICE channels.
 */
struct fsldma_device {
	void __iomem *regs;	/* DGSR register base */
	struct device *dev;	/* the platform device we were probed from */
	struct dma_device common;	/* dmaengine core's view of this controller */
	struct fsldma_chan *chan[FSL_DMA_MAX_CHANS_PER_DEVICE];	/* indexed by channel id */
	u32 feature;		/* The same as DMA channels */
	int irq;		/* Channel IRQ */
};
| 128 | |
Ira Snyder | a4f56d4 | 2010-01-06 13:34:01 +0000 | [diff] [blame] | 129 | /* Define macros for fsldma_chan->feature property */ |
Zhang Wei | 173acc7 | 2008-03-01 07:42:48 -0700 | [diff] [blame] | 130 | #define FSL_DMA_LITTLE_ENDIAN 0x00000000 |
| 131 | #define FSL_DMA_BIG_ENDIAN 0x00000001 |
| 132 | |
| 133 | #define FSL_DMA_IP_MASK 0x00000ff0 |
| 134 | #define FSL_DMA_IP_85XX 0x00000010 |
| 135 | #define FSL_DMA_IP_83XX 0x00000020 |
| 136 | |
| 137 | #define FSL_DMA_CHAN_PAUSE_EXT 0x00001000 |
| 138 | #define FSL_DMA_CHAN_START_EXT 0x00002000 |
| 139 | |
Hongbo Zhang | 14c6a33 | 2014-05-21 16:03:02 +0800 | [diff] [blame] | 140 | #ifdef CONFIG_PM |
/* Channel registers preserved across suspend/resume (only the mode register). */
struct fsldma_chan_regs_save {
	u32 mr;
};

/* Channel power-management state, tracked in fsldma_chan::pm_state. */
enum fsldma_pm_state {
	RUNNING = 0,
	SUSPENDED,
};
| 149 | #endif |
| 150 | |
/*
 * Per-channel driver state. Descriptors flow through the three ld_*
 * lists (pending -> running -> completed) under desc_lock.
 */
struct fsldma_chan {
	char name[8];			/* Channel name */
	struct fsldma_chan_regs __iomem *regs;	/* mapped channel register block */
	spinlock_t desc_lock;		/* Descriptor operation lock */
	/*
	 * Descriptors which are queued to run, but have not yet been
	 * submitted to the hardware for execution
	 */
	struct list_head ld_pending;
	/*
	 * Descriptors which are currently being executed by the hardware
	 */
	struct list_head ld_running;
	/*
	 * Descriptors which have finished execution by the hardware. These
	 * descriptors have already had their cleanup actions run. They are
	 * waiting for the ACK bit to be set by the async_tx API.
	 */
	struct list_head ld_completed;	/* Link descriptors queue */
	struct dma_chan common;		/* DMA common channel */
	struct dma_pool *desc_pool;	/* Descriptors pool */
	struct device *dev;		/* Channel device */
	int irq;			/* Channel IRQ */
	int id;				/* Raw id of this channel */
	struct tasklet_struct tasklet;	/* deferred per-channel work */
	u32 feature;			/* FSL_DMA_* feature flags for this channel */
	bool idle;			/* DMA controller is idle */
#ifdef CONFIG_PM
	struct fsldma_chan_regs_save regs_save;	/* registers saved on suspend */
	enum fsldma_pm_state pm_state;		/* RUNNING or SUSPENDED */
#endif

	/*
	 * Feature-dependent register-programming hooks.
	 * NOTE(review): presumably installed at probe time based on the
	 * controller IP (85xx vs 83xx) — confirm against fsldma.c.
	 */
	void (*toggle_ext_pause)(struct fsldma_chan *fsl_chan, int enable);
	void (*toggle_ext_start)(struct fsldma_chan *fsl_chan, int enable);
	void (*set_src_loop_size)(struct fsldma_chan *fsl_chan, int size);
	void (*set_dst_loop_size)(struct fsldma_chan *fsl_chan, int size);
	void (*set_request_count)(struct fsldma_chan *fsl_chan, int size);
};
| 189 | |
Ira Snyder | a4f56d4 | 2010-01-06 13:34:01 +0000 | [diff] [blame] | 190 | #define to_fsl_chan(chan) container_of(chan, struct fsldma_chan, common) |
Zhang Wei | 173acc7 | 2008-03-01 07:42:48 -0700 | [diff] [blame] | 191 | #define to_fsl_desc(lh) container_of(lh, struct fsl_desc_sw, node) |
| 192 | #define tx_to_fsl_desc(tx) container_of(tx, struct fsl_desc_sw, async_tx) |
| 193 | |
Peng Ma | a1ff82a | 2018-10-30 10:35:59 +0800 | [diff] [blame] | 194 | #ifdef CONFIG_PPC |
| 195 | #define fsl_ioread32(p) in_le32(p) |
| 196 | #define fsl_ioread32be(p) in_be32(p) |
| 197 | #define fsl_iowrite32(v, p) out_le32(p, v) |
| 198 | #define fsl_iowrite32be(v, p) out_be32(p, v) |
| 199 | |
Scott Wood | 6175f6a | 2018-12-21 22:34:45 -0600 | [diff] [blame] | 200 | #ifdef __powerpc64__ |
| 201 | #define fsl_ioread64(p) in_le64(p) |
| 202 | #define fsl_ioread64be(p) in_be64(p) |
| 203 | #define fsl_iowrite64(v, p) out_le64(p, v) |
| 204 | #define fsl_iowrite64be(v, p) out_be64(p, v) |
| 205 | #else |
Peng Ma | a1ff82a | 2018-10-30 10:35:59 +0800 | [diff] [blame] | 206 | static u64 fsl_ioread64(const u64 __iomem *addr) |
Zhang Wei | 173acc7 | 2008-03-01 07:42:48 -0700 | [diff] [blame] | 207 | { |
Linus Torvalds | 0a4c56c | 2020-08-29 13:50:56 -0700 | [diff] [blame] | 208 | u32 val_lo = in_le32((u32 __iomem *)addr); |
| 209 | u32 val_hi = in_le32((u32 __iomem *)addr + 1); |
Peng Ma | a1ff82a | 2018-10-30 10:35:59 +0800 | [diff] [blame] | 210 | |
Linus Torvalds | 0a4c56c | 2020-08-29 13:50:56 -0700 | [diff] [blame] | 211 | return ((u64)val_hi << 32) + val_lo; |
Zhang Wei | 173acc7 | 2008-03-01 07:42:48 -0700 | [diff] [blame] | 212 | } |
| 213 | |
Peng Ma | a1ff82a | 2018-10-30 10:35:59 +0800 | [diff] [blame] | 214 | static void fsl_iowrite64(u64 val, u64 __iomem *addr) |
Zhang Wei | 173acc7 | 2008-03-01 07:42:48 -0700 | [diff] [blame] | 215 | { |
Al Viro | a4e6d5d | 2008-03-29 03:10:18 +0000 | [diff] [blame] | 216 | out_le32((u32 __iomem *)addr + 1, val >> 32); |
| 217 | out_le32((u32 __iomem *)addr, (u32)val); |
Zhang Wei | 173acc7 | 2008-03-01 07:42:48 -0700 | [diff] [blame] | 218 | } |
Peng Ma | a1ff82a | 2018-10-30 10:35:59 +0800 | [diff] [blame] | 219 | |
| 220 | static u64 fsl_ioread64be(const u64 __iomem *addr) |
| 221 | { |
Linus Torvalds | 0a4c56c | 2020-08-29 13:50:56 -0700 | [diff] [blame] | 222 | u32 val_hi = in_be32((u32 __iomem *)addr); |
| 223 | u32 val_lo = in_be32((u32 __iomem *)addr + 1); |
Peng Ma | a1ff82a | 2018-10-30 10:35:59 +0800 | [diff] [blame] | 224 | |
Linus Torvalds | 0a4c56c | 2020-08-29 13:50:56 -0700 | [diff] [blame] | 225 | return ((u64)val_hi << 32) + val_lo; |
Peng Ma | a1ff82a | 2018-10-30 10:35:59 +0800 | [diff] [blame] | 226 | } |
| 227 | |
| 228 | static void fsl_iowrite64be(u64 val, u64 __iomem *addr) |
| 229 | { |
| 230 | out_be32((u32 __iomem *)addr, val >> 32); |
| 231 | out_be32((u32 __iomem *)addr + 1, (u32)val); |
| 232 | } |
| 233 | #endif |
Zhang Wei | 173acc7 | 2008-03-01 07:42:48 -0700 | [diff] [blame] | 234 | #endif |
| 235 | |
Peng Ma | a1ff82a | 2018-10-30 10:35:59 +0800 | [diff] [blame] | 236 | #if defined(CONFIG_ARM64) || defined(CONFIG_ARM) |
| 237 | #define fsl_ioread32(p) ioread32(p) |
| 238 | #define fsl_ioread32be(p) ioread32be(p) |
| 239 | #define fsl_iowrite32(v, p) iowrite32(v, p) |
| 240 | #define fsl_iowrite32be(v, p) iowrite32be(v, p) |
| 241 | #define fsl_ioread64(p) ioread64(p) |
| 242 | #define fsl_ioread64be(p) ioread64be(p) |
| 243 | #define fsl_iowrite64(v, p) iowrite64(v, p) |
| 244 | #define fsl_iowrite64be(v, p) iowrite64be(v, p) |
| 245 | #endif |
| 246 | |
/*
 * Endianness-dispatching register accessors: select the big- or
 * little-endian fsl_io{read,write} variant at runtime from the
 * controller's FSL_DMA_BIG_ENDIAN feature bit. 'width' is 32 or 64 and
 * is token-pasted into the accessor name.
 */
#define FSL_DMA_IN(fsl_dma, addr, width)				\
		(((fsl_dma)->feature & FSL_DMA_BIG_ENDIAN) ?		\
			fsl_ioread##width##be(addr) : fsl_ioread##width(addr))

#define FSL_DMA_OUT(fsl_dma, addr, val, width)				\
		(((fsl_dma)->feature & FSL_DMA_BIG_ENDIAN) ?		\
			fsl_iowrite##width##be(val, addr) : fsl_iowrite	\
							##width(val, addr))
Zhang Wei | 173acc7 | 2008-03-01 07:42:48 -0700 | [diff] [blame] | 255 | |
/*
 * Convert hardware descriptor fields (the __bitwise v32/v64 types in
 * struct fsl_dma_ld_hw) to and from CPU byte order, again keyed off the
 * channel's FSL_DMA_BIG_ENDIAN feature bit. The __force casts tell
 * sparse the endianness reinterpretation is intentional.
 */
#define DMA_TO_CPU(fsl_chan, d, width)					\
		(((fsl_chan)->feature & FSL_DMA_BIG_ENDIAN) ?		\
			be##width##_to_cpu((__force __be##width)(v##width)d) :	\
			le##width##_to_cpu((__force __le##width)(v##width)d))
#define CPU_TO_DMA(fsl_chan, c, width)					\
		(((fsl_chan)->feature & FSL_DMA_BIG_ENDIAN) ?		\
			(__force v##width)cpu_to_be##width(c) :		\
			(__force v##width)cpu_to_le##width(c))
Zhang Wei | 173acc7 | 2008-03-01 07:42:48 -0700 | [diff] [blame] | 264 | |
| 265 | #endif /* __DMA_FSLDMA_H */ |