// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Freescale eSPI controller driver.
 *
 * Copyright 2010 Freescale Semiconductor, Inc.
 */
#include <linux/delay.h>
#include <linux/err.h>
#include <linux/fsl_devices.h>
#include <linux/interrupt.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/of.h>
#include <linux/of_address.h>
#include <linux/of_irq.h>
#include <linux/of_platform.h>
#include <linux/platform_device.h>
#include <linux/spi/spi.h>
#include <linux/pm_runtime.h>
#include <sysdev/fsl_soc.h>

/* eSPI Controller registers */
#define ESPI_SPMODE     0x00    /* eSPI mode register */
#define ESPI_SPIE       0x04    /* eSPI event register */
#define ESPI_SPIM       0x08    /* eSPI mask register */
#define ESPI_SPCOM      0x0c    /* eSPI command register */
#define ESPI_SPITF      0x10    /* eSPI transmit FIFO access register */
#define ESPI_SPIRF      0x14    /* eSPI receive FIFO access register */
#define ESPI_SPMODE0    0x20    /* eSPI cs0 mode register */

#define ESPI_SPMODEx(x) (ESPI_SPMODE0 + (x) * 4)

/* eSPI Controller mode register definitions */
#define SPMODE_ENABLE           BIT(31)
#define SPMODE_LOOP             BIT(30)
#define SPMODE_TXTHR(x)         ((x) << 8)
#define SPMODE_RXTHR(x)         ((x) << 0)

/* eSPI Controller CS mode register definitions */
#define CSMODE_CI_INACTIVEHIGH  BIT(31)
#define CSMODE_CP_BEGIN_EDGECLK BIT(30)
#define CSMODE_REV              BIT(29)
#define CSMODE_DIV16            BIT(28)
#define CSMODE_PM(x)            ((x) << 24)
#define CSMODE_POL_1            BIT(20)
#define CSMODE_LEN(x)           ((x) << 16)
#define CSMODE_BEF(x)           ((x) << 12)
#define CSMODE_AFT(x)           ((x) << 8)
#define CSMODE_CG(x)            ((x) << 3)

#define FSL_ESPI_FIFO_SIZE      32
#define FSL_ESPI_RXTHR          15

/* Default mode/csmode for eSPI controller */
#define SPMODE_INIT_VAL (SPMODE_TXTHR(4) | SPMODE_RXTHR(FSL_ESPI_RXTHR))
#define CSMODE_INIT_VAL (CSMODE_POL_1 | CSMODE_BEF(0) \
                | CSMODE_AFT(0) | CSMODE_CG(1))

/* SPIE register values */
#define SPIE_RXCNT(reg) ((reg >> 24) & 0x3F)
#define SPIE_TXCNT(reg) ((reg >> 16) & 0x3F)
#define SPIE_TXE        BIT(15) /* TX FIFO empty */
#define SPIE_DON        BIT(14) /* TX done */
#define SPIE_RXT        BIT(13) /* RX FIFO threshold */
#define SPIE_RXF        BIT(12) /* RX FIFO full */
#define SPIE_TXT        BIT(11) /* TX FIFO threshold */
#define SPIE_RNE        BIT(9)  /* RX FIFO not empty */
#define SPIE_TNF        BIT(8)  /* TX FIFO not full */

/* SPIM register values */
#define SPIM_TXE        BIT(15) /* TX FIFO empty */
#define SPIM_DON        BIT(14) /* TX done */
#define SPIM_RXT        BIT(13) /* RX FIFO threshold */
#define SPIM_RXF        BIT(12) /* RX FIFO full */
#define SPIM_TXT        BIT(11) /* TX FIFO threshold */
#define SPIM_RNE        BIT(9)  /* RX FIFO not empty */
#define SPIM_TNF        BIT(8)  /* TX FIFO not full */

/* SPCOM register values */
#define SPCOM_CS(x)             ((x) << 30)
#define SPCOM_DO                BIT(28) /* Dual output */
#define SPCOM_TO                BIT(27) /* TX only */
#define SPCOM_RXSKIP(x)         ((x) << 16)
#define SPCOM_TRANLEN(x)        ((x) << 0)

#define SPCOM_TRANLEN_MAX       0x10000 /* Max transaction length */

#define AUTOSUSPEND_TIMEOUT     2000

struct fsl_espi {
        struct device *dev;
        void __iomem *reg_base;

        struct list_head *m_transfers;
        struct spi_transfer *tx_t;
        unsigned int tx_pos;
        bool tx_done;
        struct spi_transfer *rx_t;
        unsigned int rx_pos;
        bool rx_done;

        bool swab;
        unsigned int rxskip;

        spinlock_t lock;

        u32 spibrg;             /* SPIBRG input clock */

        struct completion done;
};

struct fsl_espi_cs {
        u32 hw_mode;
};

static inline u32 fsl_espi_read_reg(struct fsl_espi *espi, int offset)
{
        return ioread32be(espi->reg_base + offset);
}

static inline u16 fsl_espi_read_reg16(struct fsl_espi *espi, int offset)
{
        return ioread16be(espi->reg_base + offset);
}

static inline u8 fsl_espi_read_reg8(struct fsl_espi *espi, int offset)
{
        return ioread8(espi->reg_base + offset);
}

static inline void fsl_espi_write_reg(struct fsl_espi *espi, int offset,
                                      u32 val)
{
        iowrite32be(val, espi->reg_base + offset);
}

static inline void fsl_espi_write_reg16(struct fsl_espi *espi, int offset,
                                        u16 val)
{
        iowrite16be(val, espi->reg_base + offset);
}

static inline void fsl_espi_write_reg8(struct fsl_espi *espi, int offset,
                                       u8 val)
{
        iowrite8(val, espi->reg_base + offset);
}

static int fsl_espi_check_message(struct spi_message *m)
{
        struct fsl_espi *espi = spi_master_get_devdata(m->spi->master);
        struct spi_transfer *t, *first;

        if (m->frame_length > SPCOM_TRANLEN_MAX) {
                dev_err(espi->dev, "message too long, size is %u bytes\n",
                        m->frame_length);
                return -EMSGSIZE;
        }

        first = list_first_entry(&m->transfers, struct spi_transfer,
                                 transfer_list);

        list_for_each_entry(t, &m->transfers, transfer_list) {
                if (first->bits_per_word != t->bits_per_word ||
                    first->speed_hz != t->speed_hz) {
                        dev_err(espi->dev, "bits_per_word/speed_hz should be the same for all transfers\n");
                        return -EINVAL;
                }
        }

        /* ESPI supports MSB-first transfers for word size 8 / 16 only */
        if (!(m->spi->mode & SPI_LSB_FIRST) && first->bits_per_word != 8 &&
            first->bits_per_word != 16) {
                dev_err(espi->dev,
                        "MSB-first transfer not supported for wordsize %u\n",
                        first->bits_per_word);
                return -EINVAL;
        }

        return 0;
}

static unsigned int fsl_espi_check_rxskip_mode(struct spi_message *m)
{
        struct spi_transfer *t;
        unsigned int i = 0, rxskip = 0;

        /*
         * Prerequisites for ESPI rxskip mode:
         * - message has two transfers
         * - first transfer is a write and second is a read
         *
         * In addition the current low-level transfer mechanism requires
         * that the rxskip bytes fit into the TX FIFO. Otherwise the
         * transfer would hang, because the TX FIFO isn't refilled after
         * the first FSL_ESPI_FIFO_SIZE bytes.
         */
        list_for_each_entry(t, &m->transfers, transfer_list) {
                if (i == 0) {
                        if (!t->tx_buf || t->rx_buf ||
                            t->len > FSL_ESPI_FIFO_SIZE)
                                return 0;
                        rxskip = t->len;
                } else if (i == 1) {
                        if (t->tx_buf || !t->rx_buf)
                                return 0;
                }
                i++;
        }

        return i == 2 ? rxskip : 0;
}

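/*
 * Fill the TX FIFO with as much outstanding TX data as the free space
 * reported in SPIE[TXCNT] allows (or the full FIFO size if the transfer
 * hasn't started yet). Data is written in 32-bit chunks where possible,
 * with a 16-bit access for byte-swapped trailing half-words and 8-bit
 * accesses for the rest, and the function advances to the next
 * spi_transfer of the message once the current one has been consumed.
 */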
static void fsl_espi_fill_tx_fifo(struct fsl_espi *espi, u32 events)
{
        u32 tx_fifo_avail;
        unsigned int tx_left;
        const void *tx_buf;

        /* if events is zero transfer has not started and tx fifo is empty */
        tx_fifo_avail = events ? SPIE_TXCNT(events) : FSL_ESPI_FIFO_SIZE;
start:
        tx_left = espi->tx_t->len - espi->tx_pos;
        tx_buf = espi->tx_t->tx_buf;
        while (tx_fifo_avail >= min(4U, tx_left) && tx_left) {
                if (tx_left >= 4) {
                        if (!tx_buf)
                                fsl_espi_write_reg(espi, ESPI_SPITF, 0);
                        else if (espi->swab)
                                fsl_espi_write_reg(espi, ESPI_SPITF,
                                        swahb32p(tx_buf + espi->tx_pos));
                        else
                                fsl_espi_write_reg(espi, ESPI_SPITF,
                                        *(u32 *)(tx_buf + espi->tx_pos));
                        espi->tx_pos += 4;
                        tx_left -= 4;
                        tx_fifo_avail -= 4;
                } else if (tx_left >= 2 && tx_buf && espi->swab) {
                        fsl_espi_write_reg16(espi, ESPI_SPITF,
                                             swab16p(tx_buf + espi->tx_pos));
                        espi->tx_pos += 2;
                        tx_left -= 2;
                        tx_fifo_avail -= 2;
                } else {
                        if (!tx_buf)
                                fsl_espi_write_reg8(espi, ESPI_SPITF, 0);
                        else
                                fsl_espi_write_reg8(espi, ESPI_SPITF,
                                        *(u8 *)(tx_buf + espi->tx_pos));
                        espi->tx_pos += 1;
                        tx_left -= 1;
                        tx_fifo_avail -= 1;
                }
        }

        if (!tx_left) {
                /* Last transfer finished, in rxskip mode only one is needed */
                if (list_is_last(&espi->tx_t->transfer_list,
                                 espi->m_transfers) || espi->rxskip) {
                        espi->tx_done = true;
                        return;
                }
                espi->tx_t = list_next_entry(espi->tx_t, transfer_list);
                espi->tx_pos = 0;
                /* continue with next transfer if tx fifo is not full */
                if (tx_fifo_avail)
                        goto start;
        }
}

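/*
 * Drain the RX FIFO according to the fill level reported in SPIE[RXCNT],
 * mirroring fsl_espi_fill_tx_fifo(): 32-bit reads where possible, a
 * 16-bit read for byte-swapped trailing half-words and 8-bit reads for
 * the rest, moving on to the next spi_transfer of the message once the
 * current one is complete.
 */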
static void fsl_espi_read_rx_fifo(struct fsl_espi *espi, u32 events)
{
        u32 rx_fifo_avail = SPIE_RXCNT(events);
        unsigned int rx_left;
        void *rx_buf;

start:
        rx_left = espi->rx_t->len - espi->rx_pos;
        rx_buf = espi->rx_t->rx_buf;
        while (rx_fifo_avail >= min(4U, rx_left) && rx_left) {
                if (rx_left >= 4) {
                        u32 val = fsl_espi_read_reg(espi, ESPI_SPIRF);

                        if (rx_buf && espi->swab)
                                *(u32 *)(rx_buf + espi->rx_pos) = swahb32(val);
                        else if (rx_buf)
                                *(u32 *)(rx_buf + espi->rx_pos) = val;
                        espi->rx_pos += 4;
                        rx_left -= 4;
                        rx_fifo_avail -= 4;
                } else if (rx_left >= 2 && rx_buf && espi->swab) {
                        u16 val = fsl_espi_read_reg16(espi, ESPI_SPIRF);

                        *(u16 *)(rx_buf + espi->rx_pos) = swab16(val);
                        espi->rx_pos += 2;
                        rx_left -= 2;
                        rx_fifo_avail -= 2;
                } else {
                        u8 val = fsl_espi_read_reg8(espi, ESPI_SPIRF);

                        if (rx_buf)
                                *(u8 *)(rx_buf + espi->rx_pos) = val;
                        espi->rx_pos += 1;
                        rx_left -= 1;
                        rx_fifo_avail -= 1;
                }
        }

        if (!rx_left) {
                if (list_is_last(&espi->rx_t->transfer_list,
                                 espi->m_transfers)) {
                        espi->rx_done = true;
                        return;
                }
                espi->rx_t = list_next_entry(espi->rx_t, transfer_list);
                espi->rx_pos = 0;
                /* continue with next transfer if rx fifo is not empty */
                if (rx_fifo_avail)
                        goto start;
        }
}

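/*
 * Program word length and clock prescaler in the chip select's SPMODEx
 * register. Per the divider computation below (and the min/max speed
 * limits set in fsl_espi_probe()), the resulting SPI clock is roughly
 * spibrg / (4 * (pm + 1)), with the extra DIV16 divider engaged once pm
 * would exceed 15. Illustrative example: spibrg = 400 MHz and a requested
 * 10 MHz give pm = 9.
 */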
static void fsl_espi_setup_transfer(struct spi_device *spi,
                                    struct spi_transfer *t)
{
        struct fsl_espi *espi = spi_master_get_devdata(spi->master);
        int bits_per_word = t ? t->bits_per_word : spi->bits_per_word;
        u32 pm, hz = t ? t->speed_hz : spi->max_speed_hz;
        struct fsl_espi_cs *cs = spi_get_ctldata(spi);
        u32 hw_mode_old = cs->hw_mode;

        /* mask out bits we are going to set */
        cs->hw_mode &= ~(CSMODE_LEN(0xF) | CSMODE_DIV16 | CSMODE_PM(0xF));

        cs->hw_mode |= CSMODE_LEN(bits_per_word - 1);

        pm = DIV_ROUND_UP(espi->spibrg, hz * 4) - 1;

        if (pm > 15) {
                cs->hw_mode |= CSMODE_DIV16;
                pm = DIV_ROUND_UP(espi->spibrg, hz * 16 * 4) - 1;
        }

        cs->hw_mode |= CSMODE_PM(pm);

        /* don't write the mode register if the mode doesn't change */
        if (cs->hw_mode != hw_mode_old)
                fsl_espi_write_reg(espi, ESPI_SPMODEx(spi->chip_select),
                                   cs->hw_mode);
}

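/*
 * Run the combined transfer: program chip select, transfer length and,
 * if applicable, RXSKIP/dual-output mode in SPCOM, unmask the "done"
 * (and, for larger reads, RX threshold) interrupts, pre-fill the TX FIFO
 * with the lock held so the fill isn't interrupted, then wait up to two
 * seconds for the completion signalled from the interrupt handler.
 */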
static int fsl_espi_bufs(struct spi_device *spi, struct spi_transfer *t)
{
        struct fsl_espi *espi = spi_master_get_devdata(spi->master);
        unsigned int rx_len = t->len;
        u32 mask, spcom;
        int ret;

        reinit_completion(&espi->done);

        /* Set SPCOM[CS] and SPCOM[TRANLEN] field */
        spcom = SPCOM_CS(spi->chip_select);
        spcom |= SPCOM_TRANLEN(t->len - 1);

        /* configure RXSKIP mode */
        if (espi->rxskip) {
                spcom |= SPCOM_RXSKIP(espi->rxskip);
                rx_len = t->len - espi->rxskip;
                if (t->rx_nbits == SPI_NBITS_DUAL)
                        spcom |= SPCOM_DO;
        }

        fsl_espi_write_reg(espi, ESPI_SPCOM, spcom);

        /* enable interrupts */
        mask = SPIM_DON;
        if (rx_len > FSL_ESPI_FIFO_SIZE)
                mask |= SPIM_RXT;
        fsl_espi_write_reg(espi, ESPI_SPIM, mask);

        /* Prevent filling the fifo from getting interrupted */
        spin_lock_irq(&espi->lock);
        fsl_espi_fill_tx_fifo(espi, 0);
        spin_unlock_irq(&espi->lock);

        /* Won't hang forever; the SPI bus sometimes loses interrupts... */
        ret = wait_for_completion_timeout(&espi->done, 2 * HZ);
        if (ret == 0)
                dev_err(espi->dev, "Transfer timed out!\n");

        /* disable interrupts */
        fsl_espi_write_reg(espi, ESPI_SPIM, 0);

        return ret == 0 ? -ETIMEDOUT : 0;
}

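/*
 * Set up the software bookkeeping for one combined transfer: byte-swap
 * flag, TX/RX cursors into the message's transfer list, and RXSKIP mode
 * (dual output is only allowed together with RXSKIP). In RXSKIP mode the
 * RX cursor starts at the second transfer, since the first one is
 * write-only.
 */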
static int fsl_espi_trans(struct spi_message *m, struct spi_transfer *trans)
{
        struct fsl_espi *espi = spi_master_get_devdata(m->spi->master);
        struct spi_device *spi = m->spi;
        int ret;

        /* In case of LSB-first and bits_per_word > 8 byte-swap all words */
        espi->swab = spi->mode & SPI_LSB_FIRST && trans->bits_per_word > 8;

        espi->m_transfers = &m->transfers;
        espi->tx_t = list_first_entry(&m->transfers, struct spi_transfer,
                                      transfer_list);
        espi->tx_pos = 0;
        espi->tx_done = false;
        espi->rx_t = list_first_entry(&m->transfers, struct spi_transfer,
                                      transfer_list);
        espi->rx_pos = 0;
        espi->rx_done = false;

        espi->rxskip = fsl_espi_check_rxskip_mode(m);
        if (trans->rx_nbits == SPI_NBITS_DUAL && !espi->rxskip) {
                dev_err(espi->dev, "Dual output mode requires RXSKIP mode!\n");
                return -EINVAL;
        }

        /* In RXSKIP mode skip first transfer for reads */
        if (espi->rxskip)
                espi->rx_t = list_next_entry(espi->rx_t, transfer_list);

        fsl_espi_setup_transfer(spi, trans);

        ret = fsl_espi_bufs(spi, trans);

        spi_transfer_delay_exec(trans);

        return ret;
}

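/*
 * The controller handles a whole spi_message as one hardware transaction:
 * fsl_espi_check_message() has already ensured that speed and word size
 * are uniform, so the individual transfers are folded into a single dummy
 * spi_transfer covering frame_length bytes, using the largest per-transfer
 * delay and rx_nbits found in the message.
 */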
static int fsl_espi_do_one_msg(struct spi_master *master,
                               struct spi_message *m)
{
        unsigned int rx_nbits = 0, delay_nsecs = 0;
        struct spi_transfer *t, trans = {};
        int ret;

        ret = fsl_espi_check_message(m);
        if (ret)
                goto out;

        list_for_each_entry(t, &m->transfers, transfer_list) {
                unsigned int delay = spi_delay_to_ns(&t->delay, t);

                if (delay > delay_nsecs)
                        delay_nsecs = delay;
                if (t->rx_nbits > rx_nbits)
                        rx_nbits = t->rx_nbits;
        }

        t = list_first_entry(&m->transfers, struct spi_transfer,
                             transfer_list);

        trans.len = m->frame_length;
        trans.speed_hz = t->speed_hz;
        trans.bits_per_word = t->bits_per_word;
        trans.delay.value = delay_nsecs;
        trans.delay.unit = SPI_DELAY_UNIT_NSECS;
        trans.rx_nbits = rx_nbits;

        if (trans.len)
                ret = fsl_espi_trans(m, &trans);

        m->actual_length = ret ? 0 : trans.len;
out:
        if (m->status == -EINPROGRESS)
                m->status = ret;

        spi_finalize_current_message(master);

        return ret;
}

static int fsl_espi_setup(struct spi_device *spi)
{
        struct fsl_espi *espi;
        u32 loop_mode;
        struct fsl_espi_cs *cs = spi_get_ctldata(spi);

        if (!cs) {
                cs = kzalloc(sizeof(*cs), GFP_KERNEL);
                if (!cs)
                        return -ENOMEM;
                spi_set_ctldata(spi, cs);
        }

        espi = spi_master_get_devdata(spi->master);

        pm_runtime_get_sync(espi->dev);

        cs->hw_mode = fsl_espi_read_reg(espi, ESPI_SPMODEx(spi->chip_select));
        /* mask out bits we are going to set */
        cs->hw_mode &= ~(CSMODE_CP_BEGIN_EDGECLK | CSMODE_CI_INACTIVEHIGH
                         | CSMODE_REV);

        if (spi->mode & SPI_CPHA)
                cs->hw_mode |= CSMODE_CP_BEGIN_EDGECLK;
        if (spi->mode & SPI_CPOL)
                cs->hw_mode |= CSMODE_CI_INACTIVEHIGH;
        if (!(spi->mode & SPI_LSB_FIRST))
                cs->hw_mode |= CSMODE_REV;

        /* Handle the loop mode */
        loop_mode = fsl_espi_read_reg(espi, ESPI_SPMODE);
        loop_mode &= ~SPMODE_LOOP;
        if (spi->mode & SPI_LOOP)
                loop_mode |= SPMODE_LOOP;
        fsl_espi_write_reg(espi, ESPI_SPMODE, loop_mode);

        fsl_espi_setup_transfer(spi, NULL);

        pm_runtime_mark_last_busy(espi->dev);
        pm_runtime_put_autosuspend(espi->dev);

        return 0;
}

static void fsl_espi_cleanup(struct spi_device *spi)
{
        struct fsl_espi_cs *cs = spi_get_ctldata(spi);

        kfree(cs);
        spi_set_ctldata(spi, NULL);
}

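/*
 * Called from the interrupt handler: drain the RX FIFO and top up the TX
 * FIFO until both sides of the message are finished, then sanity-check
 * that the hardware reports "done" with empty FIFOs before completing
 * espi->done.
 */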
static void fsl_espi_cpu_irq(struct fsl_espi *espi, u32 events)
{
        if (!espi->rx_done)
                fsl_espi_read_rx_fifo(espi, events);

        if (!espi->tx_done)
                fsl_espi_fill_tx_fifo(espi, events);

        if (!espi->tx_done || !espi->rx_done)
                return;

        /* we're done, but check for errors before returning */
        events = fsl_espi_read_reg(espi, ESPI_SPIE);

        if (!(events & SPIE_DON))
                dev_err(espi->dev,
                        "Transfer done but SPIE_DON isn't set!\n");

        if (SPIE_RXCNT(events) || SPIE_TXCNT(events) != FSL_ESPI_FIFO_SIZE) {
                dev_err(espi->dev, "Transfer done but rx/tx FIFOs aren't empty!\n");
                dev_err(espi->dev, "SPIE_RXCNT = %d, SPIE_TXCNT = %d\n",
                        SPIE_RXCNT(events), SPIE_TXCNT(events));
        }

        complete(&espi->done);
}

static irqreturn_t fsl_espi_irq(s32 irq, void *context_data)
{
        struct fsl_espi *espi = context_data;
        u32 events, mask;

        spin_lock(&espi->lock);

        /* Get interrupt events (tx/rx) */
        events = fsl_espi_read_reg(espi, ESPI_SPIE);
        mask = fsl_espi_read_reg(espi, ESPI_SPIM);
        if (!(events & mask)) {
                spin_unlock(&espi->lock);
                return IRQ_NONE;
        }

        dev_vdbg(espi->dev, "%s: events %x\n", __func__, events);

        fsl_espi_cpu_irq(espi, events);

        /* Clear the events */
        fsl_espi_write_reg(espi, ESPI_SPIE, events);

        spin_unlock(&espi->lock);

        return IRQ_HANDLED;
}

#ifdef CONFIG_PM
static int fsl_espi_runtime_suspend(struct device *dev)
{
        struct spi_master *master = dev_get_drvdata(dev);
        struct fsl_espi *espi = spi_master_get_devdata(master);
        u32 regval;

        regval = fsl_espi_read_reg(espi, ESPI_SPMODE);
        regval &= ~SPMODE_ENABLE;
        fsl_espi_write_reg(espi, ESPI_SPMODE, regval);

        return 0;
}

static int fsl_espi_runtime_resume(struct device *dev)
{
        struct spi_master *master = dev_get_drvdata(dev);
        struct fsl_espi *espi = spi_master_get_devdata(master);
        u32 regval;

        regval = fsl_espi_read_reg(espi, ESPI_SPMODE);
        regval |= SPMODE_ENABLE;
        fsl_espi_write_reg(espi, ESPI_SPMODE, regval);

        return 0;
}
#endif

static size_t fsl_espi_max_message_size(struct spi_device *spi)
{
        return SPCOM_TRANLEN_MAX;
}

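/*
 * Reset and (re)initialize the controller: clear mode, mask and command
 * registers, acknowledge all pending events, program each child node's
 * chip-select mode register (honouring optional fsl,csbef/fsl,csaft
 * properties from the device tree) and finally enable the interface with
 * the default FIFO thresholds. Used both at probe time and on resume.
 */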
static void fsl_espi_init_regs(struct device *dev, bool initial)
{
        struct spi_master *master = dev_get_drvdata(dev);
        struct fsl_espi *espi = spi_master_get_devdata(master);
        struct device_node *nc;
        u32 csmode, cs, prop;
        int ret;

        /* SPI controller initializations */
        fsl_espi_write_reg(espi, ESPI_SPMODE, 0);
        fsl_espi_write_reg(espi, ESPI_SPIM, 0);
        fsl_espi_write_reg(espi, ESPI_SPCOM, 0);
        fsl_espi_write_reg(espi, ESPI_SPIE, 0xffffffff);

        /* Init eSPI CS mode register */
        for_each_available_child_of_node(master->dev.of_node, nc) {
                /* get chip select */
                ret = of_property_read_u32(nc, "reg", &cs);
                if (ret || cs >= master->num_chipselect)
                        continue;

                csmode = CSMODE_INIT_VAL;

                /* check if CSBEF is set in device tree */
                ret = of_property_read_u32(nc, "fsl,csbef", &prop);
                if (!ret) {
                        csmode &= ~(CSMODE_BEF(0xf));
                        csmode |= CSMODE_BEF(prop);
                }

                /* check if CSAFT is set in device tree */
                ret = of_property_read_u32(nc, "fsl,csaft", &prop);
                if (!ret) {
                        csmode &= ~(CSMODE_AFT(0xf));
                        csmode |= CSMODE_AFT(prop);
                }

                fsl_espi_write_reg(espi, ESPI_SPMODEx(cs), csmode);

                if (initial)
                        dev_info(dev, "cs=%u, init_csmode=0x%x\n", cs, csmode);
        }

        /* Enable SPI interface */
        fsl_espi_write_reg(espi, ESPI_SPMODE, SPMODE_INIT_VAL | SPMODE_ENABLE);
}

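/*
 * Common probe path for the OF front-end: allocate the SPI master, derive
 * the supported SPI clock range from the SPIBRG system frequency (spibrg/4
 * down to spibrg/(4 * 16 * 16), matching the DIV16/PM divider range), map
 * the registers, request the IRQ, initialize the hardware and enable
 * runtime PM with autosuspend before registering the master.
 */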
static int fsl_espi_probe(struct device *dev, struct resource *mem,
                          unsigned int irq, unsigned int num_cs)
{
        struct spi_master *master;
        struct fsl_espi *espi;
        int ret;

        master = spi_alloc_master(dev, sizeof(struct fsl_espi));
        if (!master)
                return -ENOMEM;

        dev_set_drvdata(dev, master);

        master->mode_bits = SPI_RX_DUAL | SPI_CPOL | SPI_CPHA | SPI_CS_HIGH |
                            SPI_LSB_FIRST | SPI_LOOP;
        master->dev.of_node = dev->of_node;
        master->bits_per_word_mask = SPI_BPW_RANGE_MASK(4, 16);
        master->setup = fsl_espi_setup;
        master->cleanup = fsl_espi_cleanup;
        master->transfer_one_message = fsl_espi_do_one_msg;
        master->auto_runtime_pm = true;
        master->max_message_size = fsl_espi_max_message_size;
        master->num_chipselect = num_cs;

        espi = spi_master_get_devdata(master);
        spin_lock_init(&espi->lock);

        espi->dev = dev;
        espi->spibrg = fsl_get_sys_freq();
        if (espi->spibrg == -1) {
                dev_err(dev, "Can't get sys frequency!\n");
                ret = -EINVAL;
                goto err_probe;
        }
        /* determined by clock divider fields DIV16/PM in register SPMODEx */
        master->min_speed_hz = DIV_ROUND_UP(espi->spibrg, 4 * 16 * 16);
        master->max_speed_hz = DIV_ROUND_UP(espi->spibrg, 4);

        init_completion(&espi->done);

        espi->reg_base = devm_ioremap_resource(dev, mem);
        if (IS_ERR(espi->reg_base)) {
                ret = PTR_ERR(espi->reg_base);
                goto err_probe;
        }

        /* Register for SPI Interrupt */
        ret = devm_request_irq(dev, irq, fsl_espi_irq, 0, "fsl_espi", espi);
        if (ret)
                goto err_probe;

        fsl_espi_init_regs(dev, true);

        pm_runtime_set_autosuspend_delay(dev, AUTOSUSPEND_TIMEOUT);
        pm_runtime_use_autosuspend(dev);
        pm_runtime_set_active(dev);
        pm_runtime_enable(dev);
        pm_runtime_get_sync(dev);

        ret = devm_spi_register_master(dev, master);
        if (ret < 0)
                goto err_pm;

        dev_info(dev, "irq = %u\n", irq);

        pm_runtime_mark_last_busy(dev);
        pm_runtime_put_autosuspend(dev);

        return 0;

err_pm:
        pm_runtime_put_noidle(dev);
        pm_runtime_disable(dev);
        pm_runtime_set_suspended(dev);
err_probe:
        spi_master_put(master);
        return ret;
}

static int of_fsl_espi_get_chipselects(struct device *dev)
{
        struct device_node *np = dev->of_node;
        u32 num_cs;
        int ret;

        ret = of_property_read_u32(np, "fsl,espi-num-chipselects", &num_cs);
        if (ret) {
                dev_err(dev, "No 'fsl,espi-num-chipselects' property\n");
                return 0;
        }

        return num_cs;
}

static int of_fsl_espi_probe(struct platform_device *ofdev)
{
        struct device *dev = &ofdev->dev;
        struct device_node *np = ofdev->dev.of_node;
        struct resource mem;
        unsigned int irq, num_cs;
        int ret;

        if (of_property_read_bool(np, "mode")) {
                dev_err(dev, "mode property is not supported on ESPI!\n");
                return -EINVAL;
        }

        num_cs = of_fsl_espi_get_chipselects(dev);
        if (!num_cs)
                return -EINVAL;

        ret = of_address_to_resource(np, 0, &mem);
        if (ret)
                return ret;

        irq = irq_of_parse_and_map(np, 0);
        if (!irq)
                return -EINVAL;

        return fsl_espi_probe(dev, &mem, irq, num_cs);
}

static int of_fsl_espi_remove(struct platform_device *dev)
{
        pm_runtime_disable(&dev->dev);

        return 0;
}

#ifdef CONFIG_PM_SLEEP
static int of_fsl_espi_suspend(struct device *dev)
{
        struct spi_master *master = dev_get_drvdata(dev);
        int ret;

        ret = spi_master_suspend(master);
        if (ret)
                return ret;

        return pm_runtime_force_suspend(dev);
}

static int of_fsl_espi_resume(struct device *dev)
{
        struct spi_master *master = dev_get_drvdata(dev);
        int ret;

        fsl_espi_init_regs(dev, false);

        ret = pm_runtime_force_resume(dev);
        if (ret < 0)
                return ret;

        return spi_master_resume(master);
}
#endif /* CONFIG_PM_SLEEP */

static const struct dev_pm_ops espi_pm = {
        SET_RUNTIME_PM_OPS(fsl_espi_runtime_suspend,
                           fsl_espi_runtime_resume, NULL)
        SET_SYSTEM_SLEEP_PM_OPS(of_fsl_espi_suspend, of_fsl_espi_resume)
};

static const struct of_device_id of_fsl_espi_match[] = {
        { .compatible = "fsl,mpc8536-espi" },
        {}
};
MODULE_DEVICE_TABLE(of, of_fsl_espi_match);

static struct platform_driver fsl_espi_driver = {
        .driver = {
                .name = "fsl_espi",
                .of_match_table = of_fsl_espi_match,
                .pm = &espi_pm,
        },
        .probe = of_fsl_espi_probe,
        .remove = of_fsl_espi_remove,
};
module_platform_driver(fsl_espi_driver);

MODULE_AUTHOR("Mingkai Hu");
MODULE_DESCRIPTION("Enhanced Freescale SPI Driver");
MODULE_LICENSE("GPL");