#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/sizes.h>
#include <linux/platform_device.h>
#include <linux/of.h>

#include "cppi_dma.h"
#include "musb_core.h"
#include "musb_trace.h"

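/*
 * One 32-bit RNDIS size register per DMA port, starting at offset 0x80;
 * it is programmed with the transfer length in generic RNDIS mode.
 */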
#define RNDIS_REG(x) (0x80 + ((x - 1) * 4))

#define EP_MODE_AUTOREQ_NONE		0
#define EP_MODE_AUTOREQ_ALL_NEOP	1
#define EP_MODE_AUTOREQ_ALWAYS		3

#define EP_MODE_DMA_TRANSPARENT		0
#define EP_MODE_DMA_RNDIS		1
#define EP_MODE_DMA_GEN_RNDIS		3

#define USB_CTRL_TX_MODE	0x70
#define USB_CTRL_RX_MODE	0x74
#define USB_CTRL_AUTOREQ	0xd0
#define USB_TDOWN		0xd8

#define MUSB_DMA_NUM_CHANNELS	15

struct cppi41_dma_controller {
	struct dma_controller controller;
	struct cppi41_dma_channel rx_channel[MUSB_DMA_NUM_CHANNELS];
	struct cppi41_dma_channel tx_channel[MUSB_DMA_NUM_CHANNELS];
	struct hrtimer early_tx;
	struct list_head early_tx_list;
	u32 rx_mode;
	u32 tx_mode;
	u32 auto_req;
};

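/*
 * Remember the host-side RX data toggle before a transfer is started so
 * update_rx_toggle() can detect a spurious toggle reset afterwards.
 */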
static void save_rx_toggle(struct cppi41_dma_channel *cppi41_channel)
{
	u16 csr;
	u8 toggle;

	if (cppi41_channel->is_tx)
		return;
	if (!is_host_active(cppi41_channel->controller->controller.musb))
		return;

	csr = musb_readw(cppi41_channel->hw_ep->regs, MUSB_RXCSR);
	toggle = csr & MUSB_RXCSR_H_DATATOGGLE ? 1 : 0;

	cppi41_channel->usb_toggle = toggle;
}

static void update_rx_toggle(struct cppi41_dma_channel *cppi41_channel)
{
	struct musb_hw_ep *hw_ep = cppi41_channel->hw_ep;
	struct musb *musb = hw_ep->musb;
	u16 csr;
	u8 toggle;

	if (cppi41_channel->is_tx)
		return;
	if (!is_host_active(musb))
		return;

	musb_ep_select(musb->mregs, hw_ep->epnum);
	csr = musb_readw(hw_ep->regs, MUSB_RXCSR);
	toggle = csr & MUSB_RXCSR_H_DATATOGGLE ? 1 : 0;

	/*
	 * AM335x Advisory 1.0.13: due to an internal synchronisation error,
	 * the data toggle may reset from DATA1 to DATA0 when receiving data
	 * from more than one endpoint.
	 */
	if (!toggle && toggle == cppi41_channel->usb_toggle) {
		csr |= MUSB_RXCSR_H_DATATOGGLE | MUSB_RXCSR_H_WR_DATATOGGLE;
		musb_writew(cppi41_channel->hw_ep->regs, MUSB_RXCSR, csr);
		musb_dbg(musb, "Restoring DATA1 toggle.");
	}

	cppi41_channel->usb_toggle = toggle;
}

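/* True once the endpoint's TX FIFO has no packet pending. */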
static bool musb_is_tx_fifo_empty(struct musb_hw_ep *hw_ep)
{
	u8 epnum = hw_ep->epnum;
	struct musb *musb = hw_ep->musb;
	void __iomem *epio = musb->endpoints[epnum].regs;
	u16 csr;

	musb_ep_select(musb->mregs, hw_ep->epnum);
	csr = musb_readw(epio, MUSB_TXCSR);
	if (csr & MUSB_TXCSR_TXPKTRDY)
		return false;
	return true;
}

static void cppi41_dma_callback(void *private_data,
				const struct dmaengine_result *result);

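/*
 * Complete the current transfer and notify the MUSB core, or - if data
 * remains and the transfer is driven one packet at a time - reload the
 * DMA channel with the next chunk and re-request an RX packet.
 */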
static void cppi41_trans_done(struct cppi41_dma_channel *cppi41_channel)
{
	struct musb_hw_ep *hw_ep = cppi41_channel->hw_ep;
	struct musb *musb = hw_ep->musb;
	void __iomem *epio = hw_ep->regs;
	u16 csr;

	if (!cppi41_channel->prog_len ||
	    (cppi41_channel->channel.status == MUSB_DMA_STATUS_FREE)) {

		/* done, complete */
		cppi41_channel->channel.actual_len =
			cppi41_channel->transferred;
		cppi41_channel->channel.status = MUSB_DMA_STATUS_FREE;
		cppi41_channel->channel.rx_packet_done = true;

		/*
		 * Transmit the ZLP using PIO mode for transfers whose size
		 * is a multiple of the EP packet size.
		 */
		if (cppi41_channel->tx_zlp && (cppi41_channel->transferred %
					cppi41_channel->packet_sz) == 0) {
			musb_ep_select(musb->mregs, hw_ep->epnum);
			csr = MUSB_TXCSR_MODE | MUSB_TXCSR_TXPKTRDY;
			musb_writew(epio, MUSB_TXCSR, csr);
		}

		trace_musb_cppi41_done(cppi41_channel);
		musb_dma_completion(musb, hw_ep->epnum, cppi41_channel->is_tx);
	} else {
		/* next iteration, reload */
		struct dma_chan *dc = cppi41_channel->dc;
		struct dma_async_tx_descriptor *dma_desc;
		enum dma_transfer_direction direction;
		u32 remain_bytes;

		cppi41_channel->buf_addr += cppi41_channel->packet_sz;

		remain_bytes = cppi41_channel->total_len;
		remain_bytes -= cppi41_channel->transferred;
		remain_bytes = min(remain_bytes, cppi41_channel->packet_sz);
		cppi41_channel->prog_len = remain_bytes;

		direction = cppi41_channel->is_tx ? DMA_MEM_TO_DEV
						  : DMA_DEV_TO_MEM;
		dma_desc = dmaengine_prep_slave_single(dc,
				cppi41_channel->buf_addr,
				remain_bytes,
				direction,
				DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
		if (WARN_ON(!dma_desc))
			return;

		dma_desc->callback_result = cppi41_dma_callback;
		dma_desc->callback_param = &cppi41_channel->channel;
		cppi41_channel->cookie = dma_desc->tx_submit(dma_desc);
		trace_musb_cppi41_cont(cppi41_channel);
		dma_async_issue_pending(dc);

		if (!cppi41_channel->is_tx) {
			musb_ep_select(musb->mregs, hw_ep->epnum);
			csr = musb_readw(epio, MUSB_RXCSR);
			csr |= MUSB_RXCSR_H_REQPKT;
			musb_writew(epio, MUSB_RXCSR, csr);
		}
	}
}

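/*
 * hrtimer callback: complete every early-TX channel whose TX FIFO has
 * drained by now; restart the timer while any channel is still waiting.
 */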
static enum hrtimer_restart cppi41_recheck_tx_req(struct hrtimer *timer)
{
	struct cppi41_dma_controller *controller;
	struct cppi41_dma_channel *cppi41_channel, *n;
	struct musb *musb;
	unsigned long flags;
	enum hrtimer_restart ret = HRTIMER_NORESTART;

	controller = container_of(timer, struct cppi41_dma_controller,
			early_tx);
	musb = controller->controller.musb;

	spin_lock_irqsave(&musb->lock, flags);
	list_for_each_entry_safe(cppi41_channel, n, &controller->early_tx_list,
			tx_check) {
		bool empty;
		struct musb_hw_ep *hw_ep = cppi41_channel->hw_ep;

		empty = musb_is_tx_fifo_empty(hw_ep);
		if (empty) {
			list_del_init(&cppi41_channel->tx_check);
			cppi41_trans_done(cppi41_channel);
		}
	}

	if (!list_empty(&controller->early_tx_list) &&
	    !hrtimer_is_queued(&controller->early_tx)) {
		ret = HRTIMER_RESTART;
		hrtimer_forward_now(&controller->early_tx, 20 * NSEC_PER_USEC);
	}

	spin_unlock_irqrestore(&musb->lock, flags);
	return ret;
}

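/*
 * dmaengine completion callback. The DMA engine considers the transfer
 * done here, but for TX the FIFO may still be draining; that case is
 * handled by the spin loop and hrtimer below.
 */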
static void cppi41_dma_callback(void *private_data,
				const struct dmaengine_result *result)
{
	struct dma_channel *channel = private_data;
	struct cppi41_dma_channel *cppi41_channel = channel->private_data;
	struct musb_hw_ep *hw_ep = cppi41_channel->hw_ep;
	struct cppi41_dma_controller *controller;
	struct musb *musb = hw_ep->musb;
	unsigned long flags;
	struct dma_tx_state txstate;
	u32 transferred;
	int is_hs = 0;
	bool empty;

	controller = cppi41_channel->controller;
	if (controller->controller.dma_callback)
		controller->controller.dma_callback(&controller->controller);

	if (result->result == DMA_TRANS_ABORTED)
		return;

	spin_lock_irqsave(&musb->lock, flags);

	dmaengine_tx_status(cppi41_channel->dc, cppi41_channel->cookie,
			&txstate);
	transferred = cppi41_channel->prog_len - txstate.residue;
	cppi41_channel->transferred += transferred;

	trace_musb_cppi41_gb(cppi41_channel);
	update_rx_toggle(cppi41_channel);

	if (cppi41_channel->transferred == cppi41_channel->total_len ||
			transferred < cppi41_channel->packet_sz)
		cppi41_channel->prog_len = 0;

	if (cppi41_channel->is_tx)
		empty = musb_is_tx_fifo_empty(hw_ep);

	if (!cppi41_channel->is_tx || empty) {
		cppi41_trans_done(cppi41_channel);
		goto out;
	}

	/*
	 * On AM335x it has been observed that the TX interrupt fires too
	 * early, i.e. the TX FIFO is not yet empty although the DMA engine
	 * reports that it is done with the transfer. We don't receive a
	 * FIFO-empty interrupt, so the only thing we can do is poll for the
	 * bit. On HS it usually takes 2us, on FS around 110us - 150us
	 * depending on the transfer size. We spin on HS (no longer than
	 * 25us) and set up a timer on FS to check for the bit and complete
	 * the transfer.
	 */
	if (is_host_active(musb)) {
		if (musb->port1_status & USB_PORT_STAT_HIGH_SPEED)
			is_hs = 1;
	} else {
		if (musb->g.speed == USB_SPEED_HIGH)
			is_hs = 1;
	}
	if (is_hs) {
		unsigned wait = 25;

		do {
			empty = musb_is_tx_fifo_empty(hw_ep);
			if (empty) {
				cppi41_trans_done(cppi41_channel);
				goto out;
			}
			wait--;
			if (!wait)
				break;
			cpu_relax();
		} while (1);
	}
	list_add_tail(&cppi41_channel->tx_check,
			&controller->early_tx_list);
	if (!hrtimer_is_queued(&controller->early_tx)) {
		unsigned long usecs = cppi41_channel->total_len / 10;

		hrtimer_start_range_ns(&controller->early_tx,
				usecs * NSEC_PER_USEC,
				20 * NSEC_PER_USEC,
				HRTIMER_MODE_REL);
	}

out:
	spin_unlock_irqrestore(&musb->lock, flags);
}

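/* Replace the 2-bit mode field of one endpoint in a packed mode word. */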
static u32 update_ep_mode(unsigned ep, unsigned mode, u32 old)
{
	unsigned shift;

	shift = (ep - 1) * 2;
	old &= ~(3 << shift);
	old |= mode << shift;
	return old;
}

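/*
 * Program USB_CTRL_TX_MODE/USB_CTRL_RX_MODE for this channel's port; the
 * last written value is cached so redundant register writes are skipped.
 */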
static void cppi41_set_dma_mode(struct cppi41_dma_channel *cppi41_channel,
		unsigned mode)
{
	struct cppi41_dma_controller *controller = cppi41_channel->controller;
	struct musb *musb = controller->controller.musb;
	u32 port;
	u32 new_mode;
	u32 old_mode;

	if (cppi41_channel->is_tx)
		old_mode = controller->tx_mode;
	else
		old_mode = controller->rx_mode;
	port = cppi41_channel->port_num;
	new_mode = update_ep_mode(port, mode, old_mode);

	if (new_mode == old_mode)
		return;
	if (cppi41_channel->is_tx) {
		controller->tx_mode = new_mode;
		musb_writel(musb->ctrl_base, USB_CTRL_TX_MODE, new_mode);
	} else {
		controller->rx_mode = new_mode;
		musb_writel(musb->ctrl_base, USB_CTRL_RX_MODE, new_mode);
	}
}

static void cppi41_set_autoreq_mode(struct cppi41_dma_channel *cppi41_channel,
		unsigned mode)
{
	struct cppi41_dma_controller *controller = cppi41_channel->controller;
	u32 port;
	u32 new_mode;
	u32 old_mode;

	old_mode = controller->auto_req;
	port = cppi41_channel->port_num;
	new_mode = update_ep_mode(port, mode, old_mode);

	if (new_mode == old_mode)
		return;
	controller->auto_req = new_mode;
	musb_writel(controller->controller.musb->ctrl_base, USB_CTRL_AUTOREQ,
		    new_mode);
}

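/*
 * Set up one transfer: choose between transparent and generic RNDIS DMA
 * mode, program the auto-request behaviour, then prepare and submit the
 * dmaengine descriptor.
 */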
static bool cppi41_configure_channel(struct dma_channel *channel,
				u16 packet_sz, u8 mode,
				dma_addr_t dma_addr, u32 len)
{
	struct cppi41_dma_channel *cppi41_channel = channel->private_data;
	struct dma_chan *dc = cppi41_channel->dc;
	struct dma_async_tx_descriptor *dma_desc;
	enum dma_transfer_direction direction;
	struct musb *musb = cppi41_channel->controller->controller.musb;
	unsigned use_gen_rndis = 0;

	cppi41_channel->buf_addr = dma_addr;
	cppi41_channel->total_len = len;
	cppi41_channel->transferred = 0;
	cppi41_channel->packet_sz = packet_sz;
	cppi41_channel->tx_zlp = (cppi41_channel->is_tx && mode) ? 1 : 0;

	/*
	 * Due to AM335x Advisory 1.0.13 we are not allowed to transfer more
	 * than the max packet size at a time.
	 */
	if (cppi41_channel->is_tx)
		use_gen_rndis = 1;

	if (use_gen_rndis) {
		/* RNDIS mode */
		if (len > packet_sz) {
			musb_writel(musb->ctrl_base,
				RNDIS_REG(cppi41_channel->port_num), len);
			/* gen rndis */
			cppi41_set_dma_mode(cppi41_channel,
					EP_MODE_DMA_GEN_RNDIS);

			/* auto req */
			cppi41_set_autoreq_mode(cppi41_channel,
					EP_MODE_AUTOREQ_ALL_NEOP);
		} else {
			musb_writel(musb->ctrl_base,
					RNDIS_REG(cppi41_channel->port_num), 0);
			cppi41_set_dma_mode(cppi41_channel,
					EP_MODE_DMA_TRANSPARENT);
			cppi41_set_autoreq_mode(cppi41_channel,
					EP_MODE_AUTOREQ_NONE);
		}
	} else {
		/* fallback mode */
		cppi41_set_dma_mode(cppi41_channel, EP_MODE_DMA_TRANSPARENT);
		cppi41_set_autoreq_mode(cppi41_channel, EP_MODE_AUTOREQ_NONE);
		len = min_t(u32, packet_sz, len);
	}
	cppi41_channel->prog_len = len;
	direction = cppi41_channel->is_tx ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM;
	dma_desc = dmaengine_prep_slave_single(dc, dma_addr, len, direction,
			DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!dma_desc)
		return false;

	dma_desc->callback_result = cppi41_dma_callback;
	dma_desc->callback_param = channel;
	cppi41_channel->cookie = dma_desc->tx_submit(dma_desc);
	cppi41_channel->channel.rx_packet_done = false;

	trace_musb_cppi41_config(cppi41_channel);

	save_rx_toggle(cppi41_channel);
	dma_async_issue_pending(dc);
	return true;
}

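/* dma_controller hook: hand out the pre-requested channel for this hw_ep. */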
static struct dma_channel *cppi41_dma_channel_allocate(struct dma_controller *c,
				struct musb_hw_ep *hw_ep, u8 is_tx)
{
	struct cppi41_dma_controller *controller = container_of(c,
			struct cppi41_dma_controller, controller);
	struct cppi41_dma_channel *cppi41_channel = NULL;
	u8 ch_num = hw_ep->epnum - 1;

	if (ch_num >= MUSB_DMA_NUM_CHANNELS)
		return NULL;

	if (is_tx)
		cppi41_channel = &controller->tx_channel[ch_num];
	else
		cppi41_channel = &controller->rx_channel[ch_num];

	if (!cppi41_channel->dc)
		return NULL;

	if (cppi41_channel->is_allocated)
		return NULL;

	cppi41_channel->hw_ep = hw_ep;
	cppi41_channel->is_allocated = 1;

	trace_musb_cppi41_alloc(cppi41_channel);
	return &cppi41_channel->channel;
}

static void cppi41_dma_channel_release(struct dma_channel *channel)
{
	struct cppi41_dma_channel *cppi41_channel = channel->private_data;

	trace_musb_cppi41_free(cppi41_channel);
	if (cppi41_channel->is_allocated) {
		cppi41_channel->is_allocated = 0;
		channel->status = MUSB_DMA_STATUS_FREE;
		channel->actual_len = 0;
	}
}

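/*
 * dma_controller hook: start a transfer. For high-bandwidth host endpoints
 * the packet size is scaled by hb_mult before the channel is configured.
 */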
static int cppi41_dma_channel_program(struct dma_channel *channel,
				u16 packet_sz, u8 mode,
				dma_addr_t dma_addr, u32 len)
{
	int ret;
	struct cppi41_dma_channel *cppi41_channel = channel->private_data;
	int hb_mult = 0;

	BUG_ON(channel->status == MUSB_DMA_STATUS_UNKNOWN ||
		channel->status == MUSB_DMA_STATUS_BUSY);

	if (is_host_active(cppi41_channel->controller->controller.musb)) {
		if (cppi41_channel->is_tx)
			hb_mult = cppi41_channel->hw_ep->out_qh->hb_mult;
		else
			hb_mult = cppi41_channel->hw_ep->in_qh->hb_mult;
	}

	channel->status = MUSB_DMA_STATUS_BUSY;
	channel->actual_len = 0;

	if (hb_mult)
		packet_sz = hb_mult * (packet_sz & 0x7FF);

	ret = cppi41_configure_channel(channel, packet_sz, mode, dma_addr, len);
	if (!ret)
		channel->status = MUSB_DMA_STATUS_FREE;

	return ret;
}

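/*
 * dma_controller hook: in peripheral mode only bulk TX is offloaded to
 * DMA; bulk RX stays in PIO because of AM335x Advisory 1.0.13.
 */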
static int cppi41_is_compatible(struct dma_channel *channel, u16 maxpacket,
		void *buf, u32 length)
{
	struct cppi41_dma_channel *cppi41_channel = channel->private_data;
	struct cppi41_dma_controller *controller = cppi41_channel->controller;
	struct musb *musb = controller->controller.musb;

	if (is_host_active(musb)) {
		WARN_ON(1);
		return 1;
	}
	if (cppi41_channel->hw_ep->ep_in.type != USB_ENDPOINT_XFER_BULK)
		return 0;
	if (cppi41_channel->is_tx)
		return 1;
	/* AM335x Advisory 1.0.13. No workaround for device RX mode */
	return 0;
}

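/*
 * dma_controller hook: abort an in-flight transfer - disable DMA at the
 * MUSB endpoint, flush the FIFO if needed and issue the CPPI teardown
 * until the dmaengine stops returning -EAGAIN.
 */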
static int cppi41_dma_channel_abort(struct dma_channel *channel)
{
	struct cppi41_dma_channel *cppi41_channel = channel->private_data;
	struct cppi41_dma_controller *controller = cppi41_channel->controller;
	struct musb *musb = controller->controller.musb;
	void __iomem *epio = cppi41_channel->hw_ep->regs;
	int tdbit;
	int ret;
	unsigned is_tx;
	u16 csr;

	is_tx = cppi41_channel->is_tx;
	trace_musb_cppi41_abort(cppi41_channel);

	if (cppi41_channel->channel.status == MUSB_DMA_STATUS_FREE)
		return 0;

	list_del_init(&cppi41_channel->tx_check);
	if (is_tx) {
		csr = musb_readw(epio, MUSB_TXCSR);
		csr &= ~MUSB_TXCSR_DMAENAB;
		musb_writew(epio, MUSB_TXCSR, csr);
	} else {
		cppi41_set_autoreq_mode(cppi41_channel, EP_MODE_AUTOREQ_NONE);

		/* delay to drain the cppi dma pipeline for isoch */
		udelay(250);

		csr = musb_readw(epio, MUSB_RXCSR);
		csr &= ~(MUSB_RXCSR_H_REQPKT | MUSB_RXCSR_DMAENAB);
		musb_writew(epio, MUSB_RXCSR, csr);

		/* wait to drain the cppi dma pipeline */
		udelay(50);

		csr = musb_readw(epio, MUSB_RXCSR);
		if (csr & MUSB_RXCSR_RXPKTRDY) {
			csr |= MUSB_RXCSR_FLUSHFIFO;
			musb_writew(epio, MUSB_RXCSR, csr);
			musb_writew(epio, MUSB_RXCSR, csr);
		}
	}

	tdbit = 1 << cppi41_channel->port_num;
	if (is_tx)
		tdbit <<= 16;

	do {
		if (is_tx)
			musb_writel(musb->ctrl_base, USB_TDOWN, tdbit);
		ret = dmaengine_terminate_all(cppi41_channel->dc);
	} while (ret == -EAGAIN);

	if (is_tx) {
		musb_writel(musb->ctrl_base, USB_TDOWN, tdbit);

		csr = musb_readw(epio, MUSB_TXCSR);
		if (csr & MUSB_TXCSR_TXPKTRDY) {
			csr |= MUSB_TXCSR_FLUSHFIFO;
			musb_writew(epio, MUSB_TXCSR, csr);
		}
	}

	cppi41_channel->channel.status = MUSB_DMA_STATUS_FREE;
	return 0;
}

static void cppi41_release_all_dma_chans(struct cppi41_dma_controller *ctrl)
{
	struct dma_chan *dc;
	int i;

	for (i = 0; i < MUSB_DMA_NUM_CHANNELS; i++) {
		dc = ctrl->tx_channel[i].dc;
		if (dc)
			dma_release_channel(dc);
		dc = ctrl->rx_channel[i].dc;
		if (dc)
			dma_release_channel(dc);
	}
}

static void cppi41_dma_controller_stop(struct cppi41_dma_controller *controller)
{
	cppi41_release_all_dma_chans(controller);
}

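/*
 * Parse the "dma-names" DT property ("tx1", "rx1", ...) and request one
 * dmaengine slave channel per entry, binding it to the matching port.
 */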
static int cppi41_dma_controller_start(struct cppi41_dma_controller *controller)
{
	struct musb *musb = controller->controller.musb;
	struct device *dev = musb->controller;
	struct device_node *np = dev->parent->of_node;
	struct cppi41_dma_channel *cppi41_channel;
	int count;
	int i;
	int ret;

	count = of_property_count_strings(np, "dma-names");
	if (count < 0)
		return count;

	for (i = 0; i < count; i++) {
		struct dma_chan *dc;
		struct dma_channel *musb_dma;
		const char *str;
		unsigned is_tx;
		unsigned int port;

		ret = of_property_read_string_index(np, "dma-names", i, &str);
		if (ret)
			goto err;
		if (strstarts(str, "tx")) {
			is_tx = 1;
		} else if (strstarts(str, "rx")) {
			is_tx = 0;
		} else {
			dev_err(dev, "Wrong dmatype %s\n", str);
			ret = -EINVAL;
			goto err;
		}
		ret = kstrtouint(str + 2, 0, &port);
		if (ret)
			goto err;

		ret = -EINVAL;
		if (port > MUSB_DMA_NUM_CHANNELS || !port)
			goto err;
		if (is_tx)
			cppi41_channel = &controller->tx_channel[port - 1];
		else
			cppi41_channel = &controller->rx_channel[port - 1];

		cppi41_channel->controller = controller;
		cppi41_channel->port_num = port;
		cppi41_channel->is_tx = is_tx;
		INIT_LIST_HEAD(&cppi41_channel->tx_check);

		musb_dma = &cppi41_channel->channel;
		musb_dma->private_data = cppi41_channel;
		musb_dma->status = MUSB_DMA_STATUS_FREE;
		musb_dma->max_len = SZ_4M;

		dc = dma_request_slave_channel(dev->parent, str);
		if (!dc) {
			dev_err(dev, "Failed to request %s.\n", str);
			ret = -EPROBE_DEFER;
			goto err;
		}
		cppi41_channel->dc = dc;
	}
	return 0;
err:
	cppi41_release_all_dma_chans(controller);
	return ret;
}

void cppi41_dma_controller_destroy(struct dma_controller *c)
{
	struct cppi41_dma_controller *controller = container_of(c,
			struct cppi41_dma_controller, controller);

	hrtimer_cancel(&controller->early_tx);
	cppi41_dma_controller_stop(controller);
	kfree(controller);
}
EXPORT_SYMBOL_GPL(cppi41_dma_controller_destroy);

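/*
 * Allocate and initialize the controller: set up the dma_controller hooks
 * and the early-TX hrtimer, then acquire the DMA channels (DT required).
 */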
struct dma_controller *
cppi41_dma_controller_create(struct musb *musb, void __iomem *base)
{
	struct cppi41_dma_controller *controller;
	int ret = 0;

	if (!musb->controller->parent->of_node) {
		dev_err(musb->controller, "Need DT for the DMA engine.\n");
		return NULL;
	}

	controller = kzalloc(sizeof(*controller), GFP_KERNEL);
	if (!controller)
		goto kzalloc_fail;

	hrtimer_init(&controller->early_tx, CLOCK_MONOTONIC, HRTIMER_MODE_REL);
	controller->early_tx.function = cppi41_recheck_tx_req;
	INIT_LIST_HEAD(&controller->early_tx_list);

	controller->controller.channel_alloc = cppi41_dma_channel_allocate;
	controller->controller.channel_release = cppi41_dma_channel_release;
	controller->controller.channel_program = cppi41_dma_channel_program;
	controller->controller.channel_abort = cppi41_dma_channel_abort;
	controller->controller.is_compatible = cppi41_is_compatible;
	controller->controller.musb = musb;

	ret = cppi41_dma_controller_start(controller);
	if (ret)
		goto plat_get_fail;
	return &controller->controller;

plat_get_fail:
	kfree(controller);
kzalloc_fail:
	if (ret == -EPROBE_DEFER)
		return ERR_PTR(ret);
	return NULL;
}
EXPORT_SYMBOL_GPL(cppi41_dma_controller_create);