Greg Kroah-Hartman | b244131 | 2017-11-01 15:07:57 +0100 | [diff] [blame] | 1 | /* SPDX-License-Identifier: GPL-2.0 */ |
Feng Tang | e24c745 | 2009-12-14 14:20:22 -0800 | [diff] [blame] | 2 | #ifndef DW_SPI_HEADER_H |
| 3 | #define DW_SPI_HEADER_H |
Feng Tang | 7063c0d | 2010-12-24 13:59:11 +0800 | [diff] [blame] | 4 | |
Serge Semin | bdbdf0f | 2020-05-29 16:11:52 +0300 | [diff] [blame] | 5 | #include <linux/completion.h> |
Serge Semin | 8378449 | 2020-05-29 16:12:04 +0300 | [diff] [blame] | 6 | #include <linux/debugfs.h> |
Andy Shevchenko | e62a15d | 2020-05-06 18:30:21 +0300 | [diff] [blame] | 7 | #include <linux/irqreturn.h> |
Feng Tang | e24c745 | 2009-12-14 14:20:22 -0800 | [diff] [blame] | 8 | #include <linux/io.h> |
Jiri Slaby | 46165a3d | 2011-03-18 10:41:17 +0100 | [diff] [blame] | 9 | #include <linux/scatterlist.h> |
Feng Tang | e24c745 | 2009-12-14 14:20:22 -0800 | [diff] [blame] | 10 | |
H Hartley Sweeten | 7eb187b | 2011-09-20 11:06:17 -0700 | [diff] [blame] | 11 | /* Register offsets */ |
Wan Ahmad Zainie | 299cb65 | 2020-05-05 21:06:12 +0800 | [diff] [blame] | 12 | #define DW_SPI_CTRLR0 0x00 |
| 13 | #define DW_SPI_CTRLR1 0x04 |
H Hartley Sweeten | 7eb187b | 2011-09-20 11:06:17 -0700 | [diff] [blame] | 14 | #define DW_SPI_SSIENR 0x08 |
| 15 | #define DW_SPI_MWCR 0x0c |
| 16 | #define DW_SPI_SER 0x10 |
| 17 | #define DW_SPI_BAUDR 0x14 |
Wan Ahmad Zainie | 299cb65 | 2020-05-05 21:06:12 +0800 | [diff] [blame] | 18 | #define DW_SPI_TXFTLR 0x18 |
| 19 | #define DW_SPI_RXFTLR 0x1c |
H Hartley Sweeten | 7eb187b | 2011-09-20 11:06:17 -0700 | [diff] [blame] | 20 | #define DW_SPI_TXFLR 0x20 |
| 21 | #define DW_SPI_RXFLR 0x24 |
| 22 | #define DW_SPI_SR 0x28 |
| 23 | #define DW_SPI_IMR 0x2c |
| 24 | #define DW_SPI_ISR 0x30 |
| 25 | #define DW_SPI_RISR 0x34 |
| 26 | #define DW_SPI_TXOICR 0x38 |
| 27 | #define DW_SPI_RXOICR 0x3c |
| 28 | #define DW_SPI_RXUICR 0x40 |
| 29 | #define DW_SPI_MSTICR 0x44 |
| 30 | #define DW_SPI_ICR 0x48 |
| 31 | #define DW_SPI_DMACR 0x4c |
| 32 | #define DW_SPI_DMATDLR 0x50 |
| 33 | #define DW_SPI_DMARDLR 0x54 |
| 34 | #define DW_SPI_IDR 0x58 |
| 35 | #define DW_SPI_VERSION 0x5c |
| 36 | #define DW_SPI_DR 0x60 |
Lars Povlsen | bac70b5 | 2020-08-24 22:30:05 +0200 | [diff] [blame^] | 37 | #define DW_SPI_RX_SAMPLE_DLY 0xf0 |
Talel Shenhar | f2d7047 | 2018-10-11 14:20:07 +0300 | [diff] [blame] | 38 | #define DW_SPI_CS_OVERRIDE 0xf4 |
H Hartley Sweeten | 7eb187b | 2011-09-20 11:06:17 -0700 | [diff] [blame] | 39 | |
Feng Tang | e24c745 | 2009-12-14 14:20:22 -0800 | [diff] [blame] | 40 | /* Bit fields in CTRLR0 */ |
| 41 | #define SPI_DFS_OFFSET 0 |
| 42 | |
| 43 | #define SPI_FRF_OFFSET 4 |
| 44 | #define SPI_FRF_SPI 0x0 |
| 45 | #define SPI_FRF_SSP 0x1 |
| 46 | #define SPI_FRF_MICROWIRE 0x2 |
| 47 | #define SPI_FRF_RESV 0x3 |
| 48 | |
| 49 | #define SPI_MODE_OFFSET 6 |
| 50 | #define SPI_SCPH_OFFSET 6 |
| 51 | #define SPI_SCOL_OFFSET 7 |
Feng Tang | e3e55ff | 2010-09-07 15:52:06 +0800 | [diff] [blame] | 52 | |
Feng Tang | e24c745 | 2009-12-14 14:20:22 -0800 | [diff] [blame] | 53 | #define SPI_TMOD_OFFSET 8 |
Feng Tang | e3e55ff | 2010-09-07 15:52:06 +0800 | [diff] [blame] | 54 | #define SPI_TMOD_MASK (0x3 << SPI_TMOD_OFFSET) |
Feng Tang | e24c745 | 2009-12-14 14:20:22 -0800 | [diff] [blame] | 55 | #define SPI_TMOD_TR 0x0 /* xmit & recv */ |
| 56 | #define SPI_TMOD_TO 0x1 /* xmit only */ |
| 57 | #define SPI_TMOD_RO 0x2 /* recv only */ |
| 58 | #define SPI_TMOD_EPROMREAD 0x3 /* eeprom read mode */ |
| 59 | |
| 60 | #define SPI_SLVOE_OFFSET 10 |
| 61 | #define SPI_SRL_OFFSET 11 |
| 62 | #define SPI_CFS_OFFSET 12 |
| 63 | |
Wan Ahmad Zainie | e539f43 | 2020-05-05 21:06:14 +0800 | [diff] [blame] | 64 | /* Bit fields in CTRLR0 based on DWC_ssi_databook.pdf v1.01a */ |
| 65 | #define DWC_SSI_CTRLR0_SRL_OFFSET 13 |
| 66 | #define DWC_SSI_CTRLR0_TMOD_OFFSET 10 |
| 67 | #define DWC_SSI_CTRLR0_TMOD_MASK GENMASK(11, 10) |
| 68 | #define DWC_SSI_CTRLR0_SCPOL_OFFSET 9 |
| 69 | #define DWC_SSI_CTRLR0_SCPH_OFFSET 8 |
| 70 | #define DWC_SSI_CTRLR0_FRF_OFFSET 6 |
| 71 | #define DWC_SSI_CTRLR0_DFS_OFFSET 0 |
| 72 | |
Feng Tang | e24c745 | 2009-12-14 14:20:22 -0800 | [diff] [blame] | 73 | /* Bit fields in SR, 7 bits */ |
| 74 | #define SR_MASK 0x7f /* cover 7 bits */ |
| 75 | #define SR_BUSY (1 << 0) |
| 76 | #define SR_TF_NOT_FULL (1 << 1) |
| 77 | #define SR_TF_EMPT (1 << 2) |
| 78 | #define SR_RF_NOT_EMPT (1 << 3) |
| 79 | #define SR_RF_FULL (1 << 4) |
| 80 | #define SR_TX_ERR (1 << 5) |
| 81 | #define SR_DCOL (1 << 6) |
| 82 | |
| 83 | /* Bit fields in ISR, IMR, RISR, 7 bits */ |
| 84 | #define SPI_INT_TXEI (1 << 0) |
| 85 | #define SPI_INT_TXOI (1 << 1) |
| 86 | #define SPI_INT_RXUI (1 << 2) |
| 87 | #define SPI_INT_RXOI (1 << 3) |
| 88 | #define SPI_INT_RXFI (1 << 4) |
| 89 | #define SPI_INT_MSTI (1 << 5) |
| 90 | |
Andy Shevchenko | 15ee3be | 2014-10-02 16:31:07 +0300 | [diff] [blame] | 91 | /* Bit fields in DMACR */ |
| 92 | #define SPI_DMA_RDMAE (1 << 0) |
| 93 | #define SPI_DMA_TDMAE (1 << 1) |
| 94 | |
Lucas De Marchi | 25985ed | 2011-03-30 22:57:33 -0300 | [diff] [blame] | 95 | /* TX RX interrupt level threshold, max can be 256 */ |
Feng Tang | e24c745 | 2009-12-14 14:20:22 -0800 | [diff] [blame] | 96 | #define SPI_INT_THRESHOLD 32 |
| 97 | |
/*
 * Serial protocol the controller speaks; values match the FRF field of
 * CTRLR0 (see SPI_FRF_* above).
 */
enum dw_ssi_type {
	SSI_MOTO_SPI = 0,	/* Motorola SPI */
	SSI_TI_SSP,		/* Texas Instruments SSP */
	SSI_NS_MICROWIRE,	/* National Semiconductor Microwire */
};
| 103 | |
struct dw_spi;

/*
 * Hooks a platform glue driver supplies to give the core DMA-based
 * transfer support. All callbacks operate on the owning struct dw_spi.
 */
struct dw_spi_dma_ops {
	/* Acquire DMA channels/resources; returns 0 or a negative errno */
	int (*dma_init)(struct device *dev, struct dw_spi *dws);
	/* Release everything dma_init() acquired */
	void (*dma_exit)(struct dw_spi *dws);
	/* Per-transfer DMA preparation (thresholds, DMACR, etc.) */
	int (*dma_setup)(struct dw_spi *dws, struct spi_transfer *xfer);
	/* Tell the SPI core whether @xfer should be DMA-mapped */
	bool (*can_dma)(struct spi_controller *master, struct spi_device *spi,
			struct spi_transfer *xfer);
	/* Kick off (and/or complete) the DMA transfer for @xfer */
	int (*dma_transfer)(struct dw_spi *dws, struct spi_transfer *xfer);
	/* Abort/quiesce any in-flight DMA activity */
	void (*dma_stop)(struct dw_spi *dws);
};
| 114 | |
/*
 * Runtime state of one DesignWare SPI controller instance.
 * Shared by the core (spi-dw-core) and the platform/PCI/DMA glue.
 */
struct dw_spi {
	struct spi_controller	*master;	/* SPI core controller handle */
	enum dw_ssi_type	type;		/* frame format (CTRLR0.FRF) */

	void __iomem		*regs;		/* mapped register space */
	unsigned long		paddr;		/* physical base of registers */
	int			irq;
	u32			fifo_len;	/* depth of the FIFO buffer */
	u32			max_freq;	/* max bus freq supported */

	/* NOTE(review): presumably non-zero when the DW_SPI_CS_OVERRIDE
	 * register must be used to drive CS — confirm against probe code */
	int			cs_override;
	u32			reg_io_width;	/* DR I/O width in bytes */
	u16			bus_num;
	u16			num_cs;		/* supported slave numbers */
	void (*set_cs)(struct spi_device *spi, bool enable);
	/* Computes the CTRLR0 value for a transfer (IP-version specific) */
	u32 (*update_cr0)(struct spi_controller *master, struct spi_device *spi,
			  struct spi_transfer *transfer);

	/* Current message transfer state info */
	size_t			len;		/* bytes left in the transfer */
	void			*tx;		/* next TX byte to push */
	void			*tx_end;	/* end of the TX buffer */
	spinlock_t		buf_lock;	/* protects the buffer pointers */
	void			*rx;		/* next RX byte to store */
	void			*rx_end;	/* end of the RX buffer */
	int			dma_mapped;	/* non-zero if xfer is DMA-mapped */
	u8			n_bytes;	/* current is a 1/2 bytes op */
	irqreturn_t (*transfer_handler)(struct dw_spi *dws);
	u32			current_freq;	/* frequency in hz */
	u32			cur_rx_sample_dly;	/* currently programmed RX_SAMPLE_DLY */
	/* assumed to be the default RX sample delay in ns — TODO confirm */
	u32			def_rx_sample_dly_ns;

	/* DMA info */
	struct dma_chan		*txchan;
	u32			txburst;	/* TX burst length in words */
	struct dma_chan		*rxchan;
	u32			rxburst;	/* RX burst length in words */
	unsigned long		dma_chan_busy;	/* busy bits for tx/rx channels */
	dma_addr_t		dma_addr; /* phy address of the Data register */
	const struct dw_spi_dma_ops *dma_ops;
	struct completion	dma_completion;	/* signalled when DMA finishes */

#ifdef CONFIG_DEBUG_FS
	struct dentry *debugfs;
	struct debugfs_regset32 regset;	/* register dump for debugfs */
#endif
};
| 162 | |
H Hartley Sweeten | 7eb187b | 2011-09-20 11:06:17 -0700 | [diff] [blame] | 163 | static inline u32 dw_readl(struct dw_spi *dws, u32 offset) |
| 164 | { |
| 165 | return __raw_readl(dws->regs + offset); |
| 166 | } |
| 167 | |
Michael van der Westhuizen | c4fe57f | 2015-08-18 22:21:53 +0200 | [diff] [blame] | 168 | static inline u16 dw_readw(struct dw_spi *dws, u32 offset) |
| 169 | { |
| 170 | return __raw_readw(dws->regs + offset); |
| 171 | } |
| 172 | |
H Hartley Sweeten | 7eb187b | 2011-09-20 11:06:17 -0700 | [diff] [blame] | 173 | static inline void dw_writel(struct dw_spi *dws, u32 offset, u32 val) |
| 174 | { |
| 175 | __raw_writel(val, dws->regs + offset); |
| 176 | } |
| 177 | |
Michael van der Westhuizen | c4fe57f | 2015-08-18 22:21:53 +0200 | [diff] [blame] | 178 | static inline void dw_writew(struct dw_spi *dws, u32 offset, u16 val) |
| 179 | { |
| 180 | __raw_writew(val, dws->regs + offset); |
| 181 | } |
| 182 | |
| 183 | static inline u32 dw_read_io_reg(struct dw_spi *dws, u32 offset) |
| 184 | { |
| 185 | switch (dws->reg_io_width) { |
| 186 | case 2: |
| 187 | return dw_readw(dws, offset); |
| 188 | case 4: |
| 189 | default: |
| 190 | return dw_readl(dws, offset); |
| 191 | } |
| 192 | } |
| 193 | |
| 194 | static inline void dw_write_io_reg(struct dw_spi *dws, u32 offset, u32 val) |
| 195 | { |
| 196 | switch (dws->reg_io_width) { |
| 197 | case 2: |
| 198 | dw_writew(dws, offset, val); |
| 199 | break; |
| 200 | case 4: |
| 201 | default: |
| 202 | dw_writel(dws, offset, val); |
| 203 | break; |
| 204 | } |
| 205 | } |
| 206 | |
Feng Tang | e24c745 | 2009-12-14 14:20:22 -0800 | [diff] [blame] | 207 | static inline void spi_enable_chip(struct dw_spi *dws, int enable) |
| 208 | { |
H Hartley Sweeten | 7eb187b | 2011-09-20 11:06:17 -0700 | [diff] [blame] | 209 | dw_writel(dws, DW_SPI_SSIENR, (enable ? 1 : 0)); |
Feng Tang | e24c745 | 2009-12-14 14:20:22 -0800 | [diff] [blame] | 210 | } |
| 211 | |
| 212 | static inline void spi_set_clk(struct dw_spi *dws, u16 div) |
| 213 | { |
H Hartley Sweeten | 7eb187b | 2011-09-20 11:06:17 -0700 | [diff] [blame] | 214 | dw_writel(dws, DW_SPI_BAUDR, div); |
Feng Tang | e24c745 | 2009-12-14 14:20:22 -0800 | [diff] [blame] | 215 | } |
| 216 | |
Feng Tang | e24c745 | 2009-12-14 14:20:22 -0800 | [diff] [blame] | 217 | /* Disable IRQ bits */ |
| 218 | static inline void spi_mask_intr(struct dw_spi *dws, u32 mask) |
| 219 | { |
| 220 | u32 new_mask; |
| 221 | |
H Hartley Sweeten | 7eb187b | 2011-09-20 11:06:17 -0700 | [diff] [blame] | 222 | new_mask = dw_readl(dws, DW_SPI_IMR) & ~mask; |
| 223 | dw_writel(dws, DW_SPI_IMR, new_mask); |
Feng Tang | e24c745 | 2009-12-14 14:20:22 -0800 | [diff] [blame] | 224 | } |
| 225 | |
| 226 | /* Enable IRQ bits */ |
| 227 | static inline void spi_umask_intr(struct dw_spi *dws, u32 mask) |
| 228 | { |
| 229 | u32 new_mask; |
| 230 | |
H Hartley Sweeten | 7eb187b | 2011-09-20 11:06:17 -0700 | [diff] [blame] | 231 | new_mask = dw_readl(dws, DW_SPI_IMR) | mask; |
| 232 | dw_writel(dws, DW_SPI_IMR, new_mask); |
Feng Tang | e24c745 | 2009-12-14 14:20:22 -0800 | [diff] [blame] | 233 | } |
| 234 | |
/*
 * Reset the controller: disable it, mask every interrupt source, then
 * enable it again. Disabling the device also clears the TX and RX FIFOs,
 * so the controller comes back in a pristine state.
 */
static inline void spi_reset_chip(struct dw_spi *dws)
{
	spi_enable_chip(dws, 0);
	spi_mask_intr(dws, 0xff);
	spi_enable_chip(dws, 1);
}
| 246 | |
/* Quiesce the controller: switch it off and gate the serial clock. */
static inline void spi_shutdown_chip(struct dw_spi *dws)
{
	spi_enable_chip(dws, 0);
	spi_set_clk(dws, 0);
}
| 252 | |
Alexandre Belloni | c79bdbb | 2018-07-27 21:53:54 +0200 | [diff] [blame] | 253 | extern void dw_spi_set_cs(struct spi_device *spi, bool enable); |
Baruch Siach | 04f421e | 2013-12-30 20:30:44 +0200 | [diff] [blame] | 254 | extern int dw_spi_add_host(struct device *dev, struct dw_spi *dws); |
Feng Tang | e24c745 | 2009-12-14 14:20:22 -0800 | [diff] [blame] | 255 | extern void dw_spi_remove_host(struct dw_spi *dws); |
| 256 | extern int dw_spi_suspend_host(struct dw_spi *dws); |
| 257 | extern int dw_spi_resume_host(struct dw_spi *dws); |
Wan Ahmad Zainie | c4eadee | 2020-05-05 21:06:13 +0800 | [diff] [blame] | 258 | extern u32 dw_spi_update_cr0(struct spi_controller *master, |
| 259 | struct spi_device *spi, |
| 260 | struct spi_transfer *transfer); |
Wan Ahmad Zainie | e539f43 | 2020-05-05 21:06:14 +0800 | [diff] [blame] | 261 | extern u32 dw_spi_update_cr0_v1_01a(struct spi_controller *master, |
| 262 | struct spi_device *spi, |
| 263 | struct spi_transfer *transfer); |
Feng Tang | 7063c0d | 2010-12-24 13:59:11 +0800 | [diff] [blame] | 264 | |
#ifdef CONFIG_SPI_DW_DMA

extern void dw_spi_dma_setup_mfld(struct dw_spi *dws);
extern void dw_spi_dma_setup_generic(struct dw_spi *dws);

#else

/* No-op stubs so callers need no #ifdefs when DMA support is compiled out */
static inline void dw_spi_dma_setup_mfld(struct dw_spi *dws) {}
static inline void dw_spi_dma_setup_generic(struct dw_spi *dws) {}

#endif /* !CONFIG_SPI_DW_DMA */
Andy Shevchenko | 37aa8aa | 2020-05-06 18:30:23 +0300 | [diff] [blame] | 276 | |
Feng Tang | e24c745 | 2009-12-14 14:20:22 -0800 | [diff] [blame] | 277 | #endif /* DW_SPI_HEADER_H */ |