// SPDX-License-Identifier: GPL-2.0-only
/*
 * Designware SPI core controller driver (refer to pxa2xx_spi.c)
 *
 * Copyright (c) 2009, Intel Corporation.
 */

#include <linux/dma-mapping.h>
#include <linux/interrupt.h>
#include <linux/module.h>
#include <linux/highmem.h>
#include <linux/delay.h>
#include <linux/slab.h>
#include <linux/spi/spi.h>
#include <linux/of.h>

#include "spi-dw.h"

#ifdef CONFIG_DEBUG_FS
#include <linux/debugfs.h>
#endif

/* Slave spi_device related */
struct chip_data {
        u32 cr0;
        u32 rx_sample_dly;      /* RX sample delay */
};

#ifdef CONFIG_DEBUG_FS

#define DW_SPI_DBGFS_REG(_name, _off) \
{ \
        .name = _name, \
        .offset = _off, \
}

static const struct debugfs_reg32 dw_spi_dbgfs_regs[] = {
        DW_SPI_DBGFS_REG("CTRLR0", DW_SPI_CTRLR0),
        DW_SPI_DBGFS_REG("CTRLR1", DW_SPI_CTRLR1),
        DW_SPI_DBGFS_REG("SSIENR", DW_SPI_SSIENR),
        DW_SPI_DBGFS_REG("SER", DW_SPI_SER),
        DW_SPI_DBGFS_REG("BAUDR", DW_SPI_BAUDR),
        DW_SPI_DBGFS_REG("TXFTLR", DW_SPI_TXFTLR),
        DW_SPI_DBGFS_REG("RXFTLR", DW_SPI_RXFTLR),
        DW_SPI_DBGFS_REG("TXFLR", DW_SPI_TXFLR),
        DW_SPI_DBGFS_REG("RXFLR", DW_SPI_RXFLR),
        DW_SPI_DBGFS_REG("SR", DW_SPI_SR),
        DW_SPI_DBGFS_REG("IMR", DW_SPI_IMR),
        DW_SPI_DBGFS_REG("ISR", DW_SPI_ISR),
        DW_SPI_DBGFS_REG("DMACR", DW_SPI_DMACR),
        DW_SPI_DBGFS_REG("DMATDLR", DW_SPI_DMATDLR),
        DW_SPI_DBGFS_REG("DMARDLR", DW_SPI_DMARDLR),
        DW_SPI_DBGFS_REG("RX_SAMPLE_DLY", DW_SPI_RX_SAMPLE_DLY),
};

static int dw_spi_debugfs_init(struct dw_spi *dws)
{
        char name[32];

        snprintf(name, 32, "dw_spi%d", dws->master->bus_num);
        dws->debugfs = debugfs_create_dir(name, NULL);
        if (!dws->debugfs)
                return -ENOMEM;

        dws->regset.regs = dw_spi_dbgfs_regs;
        dws->regset.nregs = ARRAY_SIZE(dw_spi_dbgfs_regs);
        dws->regset.base = dws->regs;
        debugfs_create_regset32("registers", 0400, dws->debugfs, &dws->regset);

        return 0;
}

static void dw_spi_debugfs_remove(struct dw_spi *dws)
{
        debugfs_remove_recursive(dws->debugfs);
}

#else
static inline int dw_spi_debugfs_init(struct dw_spi *dws)
{
        return 0;
}

static inline void dw_spi_debugfs_remove(struct dw_spi *dws)
{
}
#endif /* CONFIG_DEBUG_FS */

void dw_spi_set_cs(struct spi_device *spi, bool enable)
{
        struct dw_spi *dws = spi_controller_get_devdata(spi->controller);
        bool cs_high = !!(spi->mode & SPI_CS_HIGH);

        /*
         * The DW SPI controller demands that a native CS bit be set in the
         * Slave Enable register for a data transfer to proceed. So to
         * activate SPI communications we must set the corresponding bit
         * there regardless of whether the SPI core is configured for an
         * active-high or active-low CS level.
         */
        if (cs_high == enable)
                dw_writel(dws, DW_SPI_SER, BIT(spi->chip_select));
        else if (dws->caps & DW_SPI_CAP_CS_OVERRIDE)
                dw_writel(dws, DW_SPI_SER, 0);
}
EXPORT_SYMBOL_GPL(dw_spi_set_cs);

/* Return the max entries we can fill into tx fifo */
static inline u32 tx_max(struct dw_spi *dws)
{
        u32 tx_left, tx_room, rxtx_gap;

        tx_left = (dws->tx_end - dws->tx) / dws->n_bytes;
        tx_room = dws->fifo_len - dw_readl(dws, DW_SPI_TXFLR);

        /*
         * Another concern is the TX/RX mismatch: we thought of using
         * (dws->fifo_len - rxflr - txflr) as the upper bound for TX, but
         * it doesn't cover the data that has already left the TX/RX FIFOs
         * and sits in the shift registers. So the limit is enforced from
         * the software point of view instead.
         */
        rxtx_gap = ((dws->rx_end - dws->rx) - (dws->tx_end - dws->tx))
                        / dws->n_bytes;

        return min3(tx_left, tx_room, (u32) (dws->fifo_len - rxtx_gap));
}

/* Return the max entries we should read out of rx fifo */
static inline u32 rx_max(struct dw_spi *dws)
{
        u32 rx_left = (dws->rx_end - dws->rx) / dws->n_bytes;

        return min_t(u32, rx_left, dw_readl(dws, DW_SPI_RXFLR));
}

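/*
 * Push as many data words into the TX FIFO as tx_max() allows, taking the
 * bytes from the transfer buffer or sending dummy zeroes when no TX buffer
 * was supplied.
 */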
static void dw_writer(struct dw_spi *dws)
{
        u32 max = tx_max(dws);
        u16 txw = 0;

        while (max--) {
                /* Set the tx word if the transfer's original "tx" is not null */
                if (dws->tx_end - dws->len) {
                        if (dws->n_bytes == 1)
                                txw = *(u8 *)(dws->tx);
                        else
                                txw = *(u16 *)(dws->tx);
                }
                dw_write_io_reg(dws, DW_SPI_DR, txw);
                dws->tx += dws->n_bytes;
        }
}

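/*
 * Drain as many data words from the RX FIFO as rx_max() allows, storing them
 * into the transfer buffer when one was supplied.
 */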
static void dw_reader(struct dw_spi *dws)
{
        u32 max = rx_max(dws);
        u16 rxw;

        while (max--) {
                rxw = dw_read_io_reg(dws, DW_SPI_DR);
                /* Care about rx only if the transfer's original "rx" is not null */
                if (dws->rx_end - dws->len) {
                        if (dws->n_bytes == 1)
                                *(u8 *)(dws->rx) = rxw;
                        else
                                *(u16 *)(dws->rx) = rxw;
                }
                dws->rx += dws->n_bytes;
        }
}

static void int_error_stop(struct dw_spi *dws, const char *msg)
{
        spi_reset_chip(dws);

        dev_err(&dws->master->dev, "%s\n", msg);
        dws->master->cur_msg->status = -EIO;
        spi_finalize_current_transfer(dws->master);
}

static irqreturn_t interrupt_transfer(struct dw_spi *dws)
{
        u16 irq_status = dw_readl(dws, DW_SPI_ISR);

        /* Error handling */
        if (irq_status & (SPI_INT_TXOI | SPI_INT_RXOI | SPI_INT_RXUI)) {
                dw_readl(dws, DW_SPI_ICR);
                int_error_stop(dws, "interrupt_transfer: fifo overrun/underrun");
                return IRQ_HANDLED;
        }

        dw_reader(dws);
        if (dws->rx_end == dws->rx) {
                spi_mask_intr(dws, 0xff);
                spi_finalize_current_transfer(dws->master);
                return IRQ_HANDLED;
        }
        if (irq_status & SPI_INT_TXEI) {
                spi_mask_intr(dws, SPI_INT_TXEI);
                dw_writer(dws);
                /* Enable the TX IRQ always; it will be disabled when RX finishes */
                spi_umask_intr(dws, SPI_INT_TXEI);
        }

        return IRQ_HANDLED;
}

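/*
 * The IRQ line may be shared (it is requested with IRQF_SHARED), so return
 * IRQ_NONE when none of the controller's status bits are set, and just mask
 * the interrupts if one fires without a message in flight.
 */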
static irqreturn_t dw_spi_irq(int irq, void *dev_id)
{
        struct spi_controller *master = dev_id;
        struct dw_spi *dws = spi_controller_get_devdata(master);
        u16 irq_status = dw_readl(dws, DW_SPI_ISR) & 0x3f;

        if (!irq_status)
                return IRQ_NONE;

        if (!master->cur_msg) {
                spi_mask_intr(dws, 0xff);
                return IRQ_HANDLED;
        }

        return dws->transfer_handler(dws);
}

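/*
 * Build the static part of CTRLR0 (frame format, clock phase/polarity,
 * loopback) from the SPI device mode bits. The bit layout differs between
 * the DW APB SSI and the enhanced DWC SSI variants of the controller.
 */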
static u32 dw_spi_prepare_cr0(struct dw_spi *dws, struct spi_device *spi)
{
        u32 cr0 = 0;

        if (!(dws->caps & DW_SPI_CAP_DWC_SSI)) {
                /* CTRLR0[ 5: 4] Frame Format */
                cr0 |= SSI_MOTO_SPI << SPI_FRF_OFFSET;

                /*
                 * SPI mode (SCPOL|SCPH)
                 * CTRLR0[ 6] Serial Clock Phase
                 * CTRLR0[ 7] Serial Clock Polarity
                 */
                cr0 |= ((spi->mode & SPI_CPOL) ? 1 : 0) << SPI_SCOL_OFFSET;
                cr0 |= ((spi->mode & SPI_CPHA) ? 1 : 0) << SPI_SCPH_OFFSET;

                /* CTRLR0[11] Shift Register Loop */
                cr0 |= ((spi->mode & SPI_LOOP) ? 1 : 0) << SPI_SRL_OFFSET;
        } else {
                /* CTRLR0[ 7: 6] Frame Format */
                cr0 |= SSI_MOTO_SPI << DWC_SSI_CTRLR0_FRF_OFFSET;

                /*
                 * SPI mode (SCPOL|SCPH)
                 * CTRLR0[ 8] Serial Clock Phase
                 * CTRLR0[ 9] Serial Clock Polarity
                 */
                cr0 |= ((spi->mode & SPI_CPOL) ? 1 : 0) << DWC_SSI_CTRLR0_SCPOL_OFFSET;
                cr0 |= ((spi->mode & SPI_CPHA) ? 1 : 0) << DWC_SSI_CTRLR0_SCPH_OFFSET;

                /* CTRLR0[13] Shift Register Loop */
                cr0 |= ((spi->mode & SPI_LOOP) ? 1 : 0) << DWC_SSI_CTRLR0_SRL_OFFSET;

                if (dws->caps & DW_SPI_CAP_KEEMBAY_MST)
                        cr0 |= DWC_SSI_CTRLR0_KEEMBAY_MST;
        }

        return cr0;
}

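/*
 * Apply the per-transfer configuration: merge the data frame size and
 * transfer mode into the cached CR0 value, program CTRLR1 for receive-only
 * and EEPROM-read transfers, and set the baud-rate divider and RX sample
 * delay, touching the hardware only when those values actually change.
 */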
void dw_spi_update_config(struct dw_spi *dws, struct spi_device *spi,
                          struct dw_spi_cfg *cfg)
{
        struct chip_data *chip = spi_get_ctldata(spi);
        u32 cr0 = chip->cr0;
        u32 speed_hz;
        u16 clk_div;

        /* CTRLR0[ 4/3: 0] Data Frame Size */
        cr0 |= (cfg->dfs - 1);

        if (!(dws->caps & DW_SPI_CAP_DWC_SSI))
                /* CTRLR0[ 9:8] Transfer Mode */
                cr0 |= cfg->tmode << SPI_TMOD_OFFSET;
        else
                /* CTRLR0[11:10] Transfer Mode */
                cr0 |= cfg->tmode << DWC_SSI_CTRLR0_TMOD_OFFSET;

        dw_writel(dws, DW_SPI_CTRLR0, cr0);

        if (cfg->tmode == SPI_TMOD_EPROMREAD || cfg->tmode == SPI_TMOD_RO)
                dw_writel(dws, DW_SPI_CTRLR1, cfg->ndf ? cfg->ndf - 1 : 0);

        /* Note DW APB SSI clock divider doesn't support odd numbers */
        clk_div = (DIV_ROUND_UP(dws->max_freq, cfg->freq) + 1) & 0xfffe;
        speed_hz = dws->max_freq / clk_div;

        if (dws->current_freq != speed_hz) {
                spi_set_clk(dws, clk_div);
                dws->current_freq = speed_hz;
        }

        /* Update RX sample delay if required */
        if (dws->cur_rx_sample_dly != chip->rx_sample_dly) {
                dw_writel(dws, DW_SPI_RX_SAMPLE_DLY, chip->rx_sample_dly);
                dws->cur_rx_sample_dly = chip->rx_sample_dly;
        }
}
EXPORT_SYMBOL_GPL(dw_spi_update_config);

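/*
 * Prepare the controller for a single transfer and kick it off either via
 * DMA (when the SPI core has mapped the buffers) or via interrupt-driven
 * FIFO accesses.
 */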
static int dw_spi_transfer_one(struct spi_controller *master,
                struct spi_device *spi, struct spi_transfer *transfer)
{
        struct dw_spi *dws = spi_controller_get_devdata(master);
        struct dw_spi_cfg cfg = {
                .tmode = SPI_TMOD_TR,
                .dfs = transfer->bits_per_word,
                .freq = transfer->speed_hz,
        };
        u8 imask = 0;
        u16 txlevel = 0;
        int ret;

        dws->dma_mapped = 0;
        dws->n_bytes = DIV_ROUND_UP(transfer->bits_per_word, BITS_PER_BYTE);
        dws->tx = (void *)transfer->tx_buf;
        dws->tx_end = dws->tx + transfer->len;
        dws->rx = transfer->rx_buf;
        dws->rx_end = dws->rx + transfer->len;
        dws->len = transfer->len;

        /* Ensure dw->rx and dw->rx_end are visible */
        smp_mb();

        spi_enable_chip(dws, 0);

        dw_spi_update_config(dws, spi, &cfg);

        transfer->effective_speed_hz = dws->current_freq;

        /* Check if current transfer is a DMA transaction */
        if (master->can_dma && master->can_dma(master, spi, transfer))
                dws->dma_mapped = master->cur_msg_mapped;

        /* For poll mode just disable all interrupts */
        spi_mask_intr(dws, 0xff);

        /*
         * Interrupt mode:
         * we only need to set the TXEI IRQ, as TX/RX always happen synchronously.
         */
        if (dws->dma_mapped) {
                ret = dws->dma_ops->dma_setup(dws, transfer);
                if (ret < 0) {
                        spi_enable_chip(dws, 1);
                        return ret;
                }
        } else {
                txlevel = min_t(u16, dws->fifo_len / 2, dws->len / dws->n_bytes);
                dw_writel(dws, DW_SPI_TXFTLR, txlevel);

                /* Set the interrupt mask */
                imask |= SPI_INT_TXEI | SPI_INT_TXOI |
                         SPI_INT_RXUI | SPI_INT_RXOI;
                spi_umask_intr(dws, imask);

                dws->transfer_handler = interrupt_transfer;
        }

        spi_enable_chip(dws, 1);

        if (dws->dma_mapped)
                return dws->dma_ops->dma_transfer(dws, transfer);

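        /*
         * Returning a positive value tells the SPI core that the transfer is
         * still in progress and will be finalized from the interrupt handler
         * via spi_finalize_current_transfer().
         */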
        return 1;
}

static void dw_spi_handle_err(struct spi_controller *master,
                struct spi_message *msg)
{
        struct dw_spi *dws = spi_controller_get_devdata(master);

        if (dws->dma_mapped)
                dws->dma_ops->dma_stop(dws);

        spi_reset_chip(dws);
}

/* This may be called twice for each spi dev */
static int dw_spi_setup(struct spi_device *spi)
{
        struct dw_spi *dws = spi_controller_get_devdata(spi->controller);
        struct chip_data *chip;

        /* Only alloc on first setup */
        chip = spi_get_ctldata(spi);
        if (!chip) {
                struct dw_spi *dws = spi_controller_get_devdata(spi->controller);
                u32 rx_sample_dly_ns;

                chip = kzalloc(sizeof(struct chip_data), GFP_KERNEL);
                if (!chip)
                        return -ENOMEM;
                spi_set_ctldata(spi, chip);
                /* Get specific / default rx-sample-delay */
                if (device_property_read_u32(&spi->dev,
                                             "rx-sample-delay-ns",
                                             &rx_sample_dly_ns) != 0)
                        /* Use default controller value */
                        rx_sample_dly_ns = dws->def_rx_sample_dly_ns;
                chip->rx_sample_dly = DIV_ROUND_CLOSEST(rx_sample_dly_ns,
                                                        NSEC_PER_SEC /
                                                        dws->max_freq);
        }

        /*
         * Update CR0 data each time the setup callback is invoked since
         * the device parameters could have been changed, for instance, by
         * the MMC SPI driver or something else.
         */
        chip->cr0 = dw_spi_prepare_cr0(dws, spi);

        return 0;
}

static void dw_spi_cleanup(struct spi_device *spi)
{
        struct chip_data *chip = spi_get_ctldata(spi);

        kfree(chip);
        spi_set_ctldata(spi, NULL);
}

/* Restart the controller, disable all interrupts, clean rx fifo */
static void spi_hw_init(struct device *dev, struct dw_spi *dws)
{
        spi_reset_chip(dws);

        /*
         * Try to detect the FIFO depth if not set by the interface driver;
         * per the HW spec the depth can range from 2 to 256.
         */
        if (!dws->fifo_len) {
                u32 fifo;

                for (fifo = 1; fifo < 256; fifo++) {
                        dw_writel(dws, DW_SPI_TXFTLR, fifo);
                        if (fifo != dw_readl(dws, DW_SPI_TXFTLR))
                                break;
                }
                dw_writel(dws, DW_SPI_TXFTLR, 0);

                dws->fifo_len = (fifo == 1) ? 0 : fifo;
                dev_dbg(dev, "Detected FIFO size: %u bytes\n", dws->fifo_len);
        }

        /* enable HW fixup for explicit CS deselect for Amazon's alpine chip */
        if (dws->caps & DW_SPI_CAP_CS_OVERRIDE)
                dw_writel(dws, DW_SPI_CS_OVERRIDE, 0xF);
}

int dw_spi_add_host(struct device *dev, struct dw_spi *dws)
{
        struct spi_controller *master;
        int ret;

        if (!dws)
                return -EINVAL;

        master = spi_alloc_master(dev, 0);
        if (!master)
                return -ENOMEM;

        dws->master = master;
        dws->dma_addr = (dma_addr_t)(dws->paddr + DW_SPI_DR);

        spi_controller_set_devdata(master, dws);

        /* Basic HW init */
        spi_hw_init(dev, dws);

        ret = request_irq(dws->irq, dw_spi_irq, IRQF_SHARED, dev_name(dev),
                          master);
        if (ret < 0) {
                dev_err(dev, "can not get IRQ\n");
                goto err_free_master;
        }

        master->use_gpio_descriptors = true;
        master->mode_bits = SPI_CPOL | SPI_CPHA | SPI_LOOP;
        master->bits_per_word_mask = SPI_BPW_RANGE_MASK(4, 16);
        master->bus_num = dws->bus_num;
        master->num_chipselect = dws->num_cs;
        master->setup = dw_spi_setup;
        master->cleanup = dw_spi_cleanup;
        if (dws->set_cs)
                master->set_cs = dws->set_cs;
        else
                master->set_cs = dw_spi_set_cs;
        master->transfer_one = dw_spi_transfer_one;
        master->handle_err = dw_spi_handle_err;
        master->max_speed_hz = dws->max_freq;
        master->dev.of_node = dev->of_node;
        master->dev.fwnode = dev->fwnode;
        master->flags = SPI_MASTER_GPIO_SS;
        master->auto_runtime_pm = true;

        /* Get default rx sample delay */
        device_property_read_u32(dev, "rx-sample-delay-ns",
                                 &dws->def_rx_sample_dly_ns);

        if (dws->dma_ops && dws->dma_ops->dma_init) {
                ret = dws->dma_ops->dma_init(dev, dws);
                if (ret) {
                        dev_warn(dev, "DMA init failed\n");
                } else {
                        master->can_dma = dws->dma_ops->can_dma;
                        master->flags |= SPI_CONTROLLER_MUST_TX;
                }
        }

        ret = spi_register_controller(master);
        if (ret) {
                dev_err(&master->dev, "problem registering spi master\n");
                goto err_dma_exit;
        }

        dw_spi_debugfs_init(dws);
        return 0;

err_dma_exit:
        if (dws->dma_ops && dws->dma_ops->dma_exit)
                dws->dma_ops->dma_exit(dws);
        spi_enable_chip(dws, 0);
        free_irq(dws->irq, master);
err_free_master:
        spi_controller_put(master);
        return ret;
}
EXPORT_SYMBOL_GPL(dw_spi_add_host);

void dw_spi_remove_host(struct dw_spi *dws)
{
        dw_spi_debugfs_remove(dws);

        spi_unregister_controller(dws->master);

        if (dws->dma_ops && dws->dma_ops->dma_exit)
                dws->dma_ops->dma_exit(dws);

        spi_shutdown_chip(dws);

        free_irq(dws->irq, dws->master);
}
EXPORT_SYMBOL_GPL(dw_spi_remove_host);

int dw_spi_suspend_host(struct dw_spi *dws)
{
        int ret;

        ret = spi_controller_suspend(dws->master);
        if (ret)
                return ret;

        spi_shutdown_chip(dws);
        return 0;
}
EXPORT_SYMBOL_GPL(dw_spi_suspend_host);

int dw_spi_resume_host(struct dw_spi *dws)
{
        spi_hw_init(&dws->master->dev, dws);
        return spi_controller_resume(dws->master);
}
EXPORT_SYMBOL_GPL(dw_spi_resume_host);

MODULE_AUTHOR("Feng Tang <feng.tang@intel.com>");
MODULE_DESCRIPTION("Driver for DesignWare SPI controller core");
MODULE_LICENSE("GPL v2");