// SPDX-License-Identifier: GPL-2.0-only
/*
 * DesignWare SPI core controller driver (refer to pxa2xx_spi.c)
 *
 * Copyright (c) 2009, Intel Corporation.
 */

#include <linux/dma-mapping.h>
#include <linux/interrupt.h>
#include <linux/module.h>
#include <linux/highmem.h>
#include <linux/delay.h>
#include <linux/slab.h>
#include <linux/spi/spi.h>
#include <linux/of.h>

#include "spi-dw.h"

#ifdef CONFIG_DEBUG_FS
#include <linux/debugfs.h>
#endif

/* Slave spi_dev related */
struct chip_data {
	u8 tmode;		/* TR/TO/RO/EEPROM */
	u8 type;		/* SPI/SSP/MicroWire */

	u16 clk_div;		/* baud rate divider */
	u32 speed_hz;		/* baud rate */

	u32 rx_sample_dly;	/* RX sample delay */
};

#ifdef CONFIG_DEBUG_FS

#define DW_SPI_DBGFS_REG(_name, _off)	\
{					\
	.name = _name,			\
	.offset = _off,			\
}

static const struct debugfs_reg32 dw_spi_dbgfs_regs[] = {
	DW_SPI_DBGFS_REG("CTRLR0", DW_SPI_CTRLR0),
	DW_SPI_DBGFS_REG("CTRLR1", DW_SPI_CTRLR1),
	DW_SPI_DBGFS_REG("SSIENR", DW_SPI_SSIENR),
	DW_SPI_DBGFS_REG("SER", DW_SPI_SER),
	DW_SPI_DBGFS_REG("BAUDR", DW_SPI_BAUDR),
	DW_SPI_DBGFS_REG("TXFTLR", DW_SPI_TXFTLR),
	DW_SPI_DBGFS_REG("RXFTLR", DW_SPI_RXFTLR),
	DW_SPI_DBGFS_REG("TXFLR", DW_SPI_TXFLR),
	DW_SPI_DBGFS_REG("RXFLR", DW_SPI_RXFLR),
	DW_SPI_DBGFS_REG("SR", DW_SPI_SR),
	DW_SPI_DBGFS_REG("IMR", DW_SPI_IMR),
	DW_SPI_DBGFS_REG("ISR", DW_SPI_ISR),
	DW_SPI_DBGFS_REG("DMACR", DW_SPI_DMACR),
	DW_SPI_DBGFS_REG("DMATDLR", DW_SPI_DMATDLR),
	DW_SPI_DBGFS_REG("DMARDLR", DW_SPI_DMARDLR),
	DW_SPI_DBGFS_REG("RX_SAMPLE_DLY", DW_SPI_RX_SAMPLE_DLY),
};

static int dw_spi_debugfs_init(struct dw_spi *dws)
{
	char name[32];

	snprintf(name, 32, "dw_spi%d", dws->master->bus_num);
	dws->debugfs = debugfs_create_dir(name, NULL);
	if (!dws->debugfs)
		return -ENOMEM;

	dws->regset.regs = dw_spi_dbgfs_regs;
	dws->regset.nregs = ARRAY_SIZE(dw_spi_dbgfs_regs);
	dws->regset.base = dws->regs;
	debugfs_create_regset32("registers", 0400, dws->debugfs, &dws->regset);

	return 0;
}

static void dw_spi_debugfs_remove(struct dw_spi *dws)
{
	debugfs_remove_recursive(dws->debugfs);
}

#else
static inline int dw_spi_debugfs_init(struct dw_spi *dws)
{
	return 0;
}

static inline void dw_spi_debugfs_remove(struct dw_spi *dws)
{
}
#endif /* CONFIG_DEBUG_FS */

void dw_spi_set_cs(struct spi_device *spi, bool enable)
{
	struct dw_spi *dws = spi_controller_get_devdata(spi->controller);
	bool cs_high = !!(spi->mode & SPI_CS_HIGH);

	/*
	 * DW SPI controller demands any native CS being set in order to
	 * proceed with data transfer. So in order to activate the SPI
	 * communications we must set a corresponding bit in the Slave
	 * Enable register no matter whether the SPI core is configured to
	 * support active-high or active-low CS level.
	 */
	if (cs_high == enable)
		dw_writel(dws, DW_SPI_SER, BIT(spi->chip_select));
	else if (dws->cs_override)
		dw_writel(dws, DW_SPI_SER, 0);
}
EXPORT_SYMBOL_GPL(dw_spi_set_cs);

/* Return the max entries we can fill into tx fifo */
static inline u32 tx_max(struct dw_spi *dws)
{
	u32 tx_left, tx_room, rxtx_gap;

	tx_left = (dws->tx_end - dws->tx) / dws->n_bytes;
	tx_room = dws->fifo_len - dw_readl(dws, DW_SPI_TXFLR);

	/*
	 * Another concern is the tx/rx mismatch: we could use
	 * (dws->fifo_len - rxflr - txflr) as the upper bound for tx,
	 * but it doesn't cover the data in flight between the tx/rx
	 * FIFOs, i.e. inside the shift registers. So the gap is
	 * limited from the software point of view instead.
	 */
	rxtx_gap = ((dws->rx_end - dws->rx) - (dws->tx_end - dws->tx))
			/ dws->n_bytes;

	return min3(tx_left, tx_room, (u32) (dws->fifo_len - rxtx_gap));
}

/* Return the max entries we should read out of rx fifo */
static inline u32 rx_max(struct dw_spi *dws)
{
	u32 rx_left = (dws->rx_end - dws->rx) / dws->n_bytes;

	return min_t(u32, rx_left, dw_readl(dws, DW_SPI_RXFLR));
}

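/* Push as many data words into the TX FIFO as tx_max() currently allows */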
static void dw_writer(struct dw_spi *dws)
{
	u32 max;
	u16 txw = 0;

	spin_lock(&dws->buf_lock);
	max = tx_max(dws);
	while (max--) {
		/* Set the tx word if the transfer's original "tx" is not null */
		if (dws->tx_end - dws->len) {
			if (dws->n_bytes == 1)
				txw = *(u8 *)(dws->tx);
			else
				txw = *(u16 *)(dws->tx);
		}
		dw_write_io_reg(dws, DW_SPI_DR, txw);
		dws->tx += dws->n_bytes;
	}
	spin_unlock(&dws->buf_lock);
}

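/* Drain whatever rx_max() reports as available from the RX FIFO */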
static void dw_reader(struct dw_spi *dws)
{
	u32 max;
	u16 rxw;

	spin_lock(&dws->buf_lock);
	max = rx_max(dws);
	while (max--) {
		rxw = dw_read_io_reg(dws, DW_SPI_DR);
		/* Only care about rx if the transfer's original "rx" is not null */
		if (dws->rx_end - dws->len) {
			if (dws->n_bytes == 1)
				*(u8 *)(dws->rx) = rxw;
			else
				*(u16 *)(dws->rx) = rxw;
		}
		dws->rx += dws->n_bytes;
	}
	spin_unlock(&dws->buf_lock);
}

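/* Reset the controller and fail the current message with -EIO */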
static void int_error_stop(struct dw_spi *dws, const char *msg)
{
	spi_reset_chip(dws);

	dev_err(&dws->master->dev, "%s\n", msg);
	dws->master->cur_msg->status = -EIO;
	spi_finalize_current_transfer(dws->master);
}

static irqreturn_t interrupt_transfer(struct dw_spi *dws)
{
	u16 irq_status = dw_readl(dws, DW_SPI_ISR);

	/* Error handling */
	if (irq_status & (SPI_INT_TXOI | SPI_INT_RXOI | SPI_INT_RXUI)) {
		dw_readl(dws, DW_SPI_ICR);
		int_error_stop(dws, "interrupt_transfer: fifo overrun/underrun");
		return IRQ_HANDLED;
	}

	dw_reader(dws);
	if (dws->rx_end == dws->rx) {
		spi_mask_intr(dws, SPI_INT_TXEI);
		spi_finalize_current_transfer(dws->master);
		return IRQ_HANDLED;
	}
	if (irq_status & SPI_INT_TXEI) {
		spi_mask_intr(dws, SPI_INT_TXEI);
		dw_writer(dws);
		/* Re-enable the TX IRQ; it is disabled once RX has finished */
		spi_umask_intr(dws, SPI_INT_TXEI);
	}

	return IRQ_HANDLED;
}

static irqreturn_t dw_spi_irq(int irq, void *dev_id)
{
	struct spi_controller *master = dev_id;
	struct dw_spi *dws = spi_controller_get_devdata(master);
	u16 irq_status = dw_readl(dws, DW_SPI_ISR) & 0x3f;

	if (!irq_status)
		return IRQ_NONE;

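	/* No message in flight: just mask the TX-empty IRQ and bail out */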
	if (!master->cur_msg) {
		spi_mask_intr(dws, SPI_INT_TXEI);
		return IRQ_HANDLED;
	}

	return dws->transfer_handler(dws);
}

/* Configure CTRLR0 for DW_apb_ssi */
u32 dw_spi_update_cr0(struct spi_controller *master, struct spi_device *spi,
		      struct spi_transfer *transfer)
{
	struct chip_data *chip = spi_get_ctldata(spi);
	u32 cr0;

	/* Default SPI mode is SCPOL = 0, SCPH = 0 */
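	/*
	 * CTRLR0 bit layout (offsets per SPI_*_OFFSET in spi-dw.h):
	 * [ 3: 0] Data Frame Size
	 * [ 5: 4] Frame Format
	 * [    6] Serial Clock Phase
	 * [    7] Serial Clock Polarity
	 * [ 9: 8] Transfer Mode
	 * [   11] Shift Register Loop
	 */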
	cr0 = (transfer->bits_per_word - 1)
		| (chip->type << SPI_FRF_OFFSET)
		| ((((spi->mode & SPI_CPOL) ? 1 : 0) << SPI_SCOL_OFFSET) |
		   (((spi->mode & SPI_CPHA) ? 1 : 0) << SPI_SCPH_OFFSET) |
		   (((spi->mode & SPI_LOOP) ? 1 : 0) << SPI_SRL_OFFSET))
		| (chip->tmode << SPI_TMOD_OFFSET);

	return cr0;
}
EXPORT_SYMBOL_GPL(dw_spi_update_cr0);

/* Configure CTRLR0 for DWC_ssi */
u32 dw_spi_update_cr0_v1_01a(struct spi_controller *master,
			     struct spi_device *spi,
			     struct spi_transfer *transfer)
{
	struct chip_data *chip = spi_get_ctldata(spi);
	u32 cr0;

	/* CTRLR0[ 4: 0] Data Frame Size */
	cr0 = (transfer->bits_per_word - 1);

	/* CTRLR0[ 7: 6] Frame Format */
	cr0 |= chip->type << DWC_SSI_CTRLR0_FRF_OFFSET;

	/*
	 * SPI mode (SCPOL|SCPH)
	 * CTRLR0[ 8] Serial Clock Phase
	 * CTRLR0[ 9] Serial Clock Polarity
	 */
	cr0 |= ((spi->mode & SPI_CPOL) ? 1 : 0) << DWC_SSI_CTRLR0_SCPOL_OFFSET;
	cr0 |= ((spi->mode & SPI_CPHA) ? 1 : 0) << DWC_SSI_CTRLR0_SCPH_OFFSET;

	/* CTRLR0[11:10] Transfer Mode */
	cr0 |= chip->tmode << DWC_SSI_CTRLR0_TMOD_OFFSET;

	/* CTRLR0[13] Shift Register Loop */
	cr0 |= ((spi->mode & SPI_LOOP) ? 1 : 0) << DWC_SSI_CTRLR0_SRL_OFFSET;

	return cr0;
}
EXPORT_SYMBOL_GPL(dw_spi_update_cr0_v1_01a);

static int dw_spi_transfer_one(struct spi_controller *master,
		struct spi_device *spi, struct spi_transfer *transfer)
{
	struct dw_spi *dws = spi_controller_get_devdata(master);
	struct chip_data *chip = spi_get_ctldata(spi);
	unsigned long flags;
	u8 imask = 0;
	u16 txlevel = 0;
	u32 cr0;
	int ret;

	dws->dma_mapped = 0;
	spin_lock_irqsave(&dws->buf_lock, flags);
	dws->n_bytes = DIV_ROUND_UP(transfer->bits_per_word, BITS_PER_BYTE);
	dws->tx = (void *)transfer->tx_buf;
	dws->tx_end = dws->tx + transfer->len;
	dws->rx = transfer->rx_buf;
	dws->rx_end = dws->rx + transfer->len;
	dws->len = transfer->len;
	spin_unlock_irqrestore(&dws->buf_lock, flags);

	/* Ensure dw->rx and dw->rx_end are visible */
	smp_mb();

	spi_enable_chip(dws, 0);

	/* Handle per transfer options for bpw and speed */
	if (transfer->speed_hz != dws->current_freq) {
		if (transfer->speed_hz != chip->speed_hz) {
			/* clk_div doesn't support odd number */
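			/* Round up to an even divider so the rate never exceeds speed_hz */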
			chip->clk_div = (DIV_ROUND_UP(dws->max_freq, transfer->speed_hz) + 1) & 0xfffe;
			chip->speed_hz = transfer->speed_hz;
		}
		dws->current_freq = transfer->speed_hz;
		spi_set_clk(dws, chip->clk_div);
	}

	transfer->effective_speed_hz = dws->max_freq / chip->clk_div;

	cr0 = dws->update_cr0(master, spi, transfer);
	dw_writel(dws, DW_SPI_CTRLR0, cr0);

	/* Check if current transfer is a DMA transaction */
	if (master->can_dma && master->can_dma(master, spi, transfer))
		dws->dma_mapped = master->cur_msg_mapped;

	/* Update RX sample delay if required */
	if (dws->cur_rx_sample_dly != chip->rx_sample_dly) {
		dw_writel(dws, DW_SPI_RX_SAMPLE_DLY, chip->rx_sample_dly);
		dws->cur_rx_sample_dly = chip->rx_sample_dly;
	}

	/* For poll mode just disable all interrupts */
	spi_mask_intr(dws, 0xff);

	/*
	 * Interrupt mode:
	 * we only need to set the TXEI IRQ, as TX and RX always happen
	 * synchronously.
	 */
	if (dws->dma_mapped) {
		ret = dws->dma_ops->dma_setup(dws, transfer);
		if (ret < 0) {
			spi_enable_chip(dws, 1);
			return ret;
		}
	} else {
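		/* TX FIFO threshold: half the FIFO depth, capped at the transfer length in words */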
		txlevel = min_t(u16, dws->fifo_len / 2, dws->len / dws->n_bytes);
		dw_writel(dws, DW_SPI_TXFTLR, txlevel);

		/* Set the interrupt mask */
		imask |= SPI_INT_TXEI | SPI_INT_TXOI |
			 SPI_INT_RXUI | SPI_INT_RXOI;
		spi_umask_intr(dws, imask);

		dws->transfer_handler = interrupt_transfer;
	}

	spi_enable_chip(dws, 1);

	if (dws->dma_mapped)
		return dws->dma_ops->dma_transfer(dws, transfer);

	return 1;
}

static void dw_spi_handle_err(struct spi_controller *master,
		struct spi_message *msg)
{
	struct dw_spi *dws = spi_controller_get_devdata(master);

	if (dws->dma_mapped)
		dws->dma_ops->dma_stop(dws);

	spi_reset_chip(dws);
}

/* This may be called twice for each spi dev */
static int dw_spi_setup(struct spi_device *spi)
{
	struct chip_data *chip;

	/* Only alloc on first setup */
	chip = spi_get_ctldata(spi);
	if (!chip) {
		struct dw_spi *dws = spi_controller_get_devdata(spi->controller);
		u32 rx_sample_dly_ns;

		chip = kzalloc(sizeof(struct chip_data), GFP_KERNEL);
		if (!chip)
			return -ENOMEM;
		spi_set_ctldata(spi, chip);
		/* Get specific / default rx-sample-delay */
		if (device_property_read_u32(&spi->dev,
					     "rx-sample-delay-ns",
					     &rx_sample_dly_ns) != 0)
			/* Use default controller value */
			rx_sample_dly_ns = dws->def_rx_sample_dly_ns;
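		/* Convert the delay from ns to SSI clock cycles */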
		chip->rx_sample_dly = DIV_ROUND_CLOSEST(rx_sample_dly_ns,
							NSEC_PER_SEC /
							dws->max_freq);
	}

	chip->tmode = SPI_TMOD_TR;

	return 0;
}

static void dw_spi_cleanup(struct spi_device *spi)
{
	struct chip_data *chip = spi_get_ctldata(spi);

	kfree(chip);
	spi_set_ctldata(spi, NULL);
}

/* Restart the controller, disable all interrupts, clean rx fifo */
static void spi_hw_init(struct device *dev, struct dw_spi *dws)
{
	spi_reset_chip(dws);

	/*
	 * Try to detect the FIFO depth if not set by the interface driver;
	 * per the HW spec the depth can range from 2 to 256.
	 */
	if (!dws->fifo_len) {
		u32 fifo;

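		/*
		 * TXFTLR only latches values below the FIFO depth, so the
		 * first threshold that fails to read back marks the depth.
		 */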
		for (fifo = 1; fifo < 256; fifo++) {
			dw_writel(dws, DW_SPI_TXFTLR, fifo);
			if (fifo != dw_readl(dws, DW_SPI_TXFTLR))
				break;
		}
		dw_writel(dws, DW_SPI_TXFTLR, 0);

		dws->fifo_len = (fifo == 1) ? 0 : fifo;
		dev_dbg(dev, "Detected FIFO size: %u bytes\n", dws->fifo_len);
	}

	/* Enable the HW fixup for explicit CS deselect on Amazon's Alpine chip */
	if (dws->cs_override)
		dw_writel(dws, DW_SPI_CS_OVERRIDE, 0xF);
}

int dw_spi_add_host(struct device *dev, struct dw_spi *dws)
{
	struct spi_controller *master;
	int ret;

	if (!dws)
		return -EINVAL;

	master = spi_alloc_master(dev, 0);
	if (!master)
		return -ENOMEM;

	dws->master = master;
	dws->type = SSI_MOTO_SPI;
	dws->dma_addr = (dma_addr_t)(dws->paddr + DW_SPI_DR);
	spin_lock_init(&dws->buf_lock);

	spi_controller_set_devdata(master, dws);

	ret = request_irq(dws->irq, dw_spi_irq, IRQF_SHARED, dev_name(dev),
			  master);
	if (ret < 0) {
		dev_err(dev, "can not get IRQ\n");
		goto err_free_master;
	}

	master->use_gpio_descriptors = true;
	master->mode_bits = SPI_CPOL | SPI_CPHA | SPI_LOOP;
	master->bits_per_word_mask = SPI_BPW_RANGE_MASK(4, 16);
	master->bus_num = dws->bus_num;
	master->num_chipselect = dws->num_cs;
	master->setup = dw_spi_setup;
	master->cleanup = dw_spi_cleanup;
	master->set_cs = dw_spi_set_cs;
	master->transfer_one = dw_spi_transfer_one;
	master->handle_err = dw_spi_handle_err;
	master->max_speed_hz = dws->max_freq;
	master->dev.of_node = dev->of_node;
	master->dev.fwnode = dev->fwnode;
	master->flags = SPI_MASTER_GPIO_SS;
	master->auto_runtime_pm = true;

	if (dws->set_cs)
		master->set_cs = dws->set_cs;

	/* Get default rx sample delay */
	device_property_read_u32(dev, "rx-sample-delay-ns",
				 &dws->def_rx_sample_dly_ns);

	/* Basic HW init */
	spi_hw_init(dev, dws);

	if (dws->dma_ops && dws->dma_ops->dma_init) {
		ret = dws->dma_ops->dma_init(dev, dws);
		if (ret) {
			dev_warn(dev, "DMA init failed\n");
		} else {
			master->can_dma = dws->dma_ops->can_dma;
			master->flags |= SPI_CONTROLLER_MUST_TX;
		}
	}

	ret = spi_register_controller(master);
	if (ret) {
		dev_err(&master->dev, "problem registering spi master\n");
		goto err_dma_exit;
	}

	dw_spi_debugfs_init(dws);
	return 0;

err_dma_exit:
	if (dws->dma_ops && dws->dma_ops->dma_exit)
		dws->dma_ops->dma_exit(dws);
	spi_enable_chip(dws, 0);
	free_irq(dws->irq, master);
err_free_master:
	spi_controller_put(master);
	return ret;
}
EXPORT_SYMBOL_GPL(dw_spi_add_host);

void dw_spi_remove_host(struct dw_spi *dws)
{
	dw_spi_debugfs_remove(dws);

	spi_unregister_controller(dws->master);

	if (dws->dma_ops && dws->dma_ops->dma_exit)
		dws->dma_ops->dma_exit(dws);

	spi_shutdown_chip(dws);

	free_irq(dws->irq, dws->master);
}
EXPORT_SYMBOL_GPL(dw_spi_remove_host);

int dw_spi_suspend_host(struct dw_spi *dws)
{
	int ret;

	ret = spi_controller_suspend(dws->master);
	if (ret)
		return ret;

	spi_shutdown_chip(dws);
	return 0;
}
EXPORT_SYMBOL_GPL(dw_spi_suspend_host);

int dw_spi_resume_host(struct dw_spi *dws)
{
	spi_hw_init(&dws->master->dev, dws);
	return spi_controller_resume(dws->master);
}
EXPORT_SYMBOL_GPL(dw_spi_resume_host);

MODULE_AUTHOR("Feng Tang <feng.tang@intel.com>");
MODULE_DESCRIPTION("Driver for DesignWare SPI controller core");
MODULE_LICENSE("GPL v2");