// SPDX-License-Identifier: GPL-2.0-only
/*
 * Generic TXx9 ACLC platform driver
 *
 * Copyright (C) 2009 Atsushi Nemoto
 *
 * Based on RBTX49xx patch from CELF patch archive.
 * (C) Copyright TOSHIBA CORPORATION 2004-2006
 */

#include <linux/module.h>
#include <linux/init.h>
#include <linux/platform_device.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/dmaengine.h>
#include <sound/core.h>
#include <sound/pcm.h>
#include <sound/pcm_params.h>
#include <sound/soc.h>
#include "txx9aclc.h"

#define DRV_NAME "txx9aclc"

static struct txx9aclc_soc_device {
	struct txx9aclc_dmadata dmadata[2];
} txx9aclc_soc_device;

/* REVISIT: How to find txx9aclc_drvdata from snd_ac97? */
static struct txx9aclc_plat_drvdata *txx9aclc_drvdata;

static int txx9aclc_dma_init(struct txx9aclc_soc_device *dev,
			     struct txx9aclc_dmadata *dmadata);

static const struct snd_pcm_hardware txx9aclc_pcm_hardware = {
	/*
	 * REVISIT: SNDRV_PCM_INFO_MMAP | SNDRV_PCM_INFO_MMAP_VALID
	 * needs more work for noncoherent MIPS.
	 */
	.info		  = SNDRV_PCM_INFO_INTERLEAVED |
			    SNDRV_PCM_INFO_BATCH |
			    SNDRV_PCM_INFO_PAUSE,
	.period_bytes_min = 1024,
	.period_bytes_max = 8 * 1024,
	.periods_min	  = 2,
	.periods_max	  = 4096,
	.buffer_bytes_max = 32 * 1024,
};

static int txx9aclc_pcm_hw_params(struct snd_soc_component *component,
				  struct snd_pcm_substream *substream,
				  struct snd_pcm_hw_params *params)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct txx9aclc_dmadata *dmadata = runtime->private_data;

	dev_dbg(component->dev,
		"runtime->dma_area = %#lx dma_addr = %#lx dma_bytes = %zd "
		"runtime->min_align %ld\n",
		(unsigned long)runtime->dma_area,
		(unsigned long)runtime->dma_addr, runtime->dma_bytes,
		runtime->min_align);
	dev_dbg(component->dev,
		"periods %d period_bytes %d stream %d\n",
		params_periods(params), params_period_bytes(params),
		substream->stream);

	dmadata->substream = substream;
	dmadata->pos = 0;
	return 0;
}

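/*
 * Carve the runtime buffer into equal DMA fragments.  The tasklet
 * below always keeps NR_DMA_CHAIN (two) fragments queued, so a buffer
 * that consists of a single period is split in half; otherwise each
 * fragment covers exactly one period.
 */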
static int txx9aclc_pcm_prepare(struct snd_soc_component *component,
				struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct txx9aclc_dmadata *dmadata = runtime->private_data;

	dmadata->dma_addr = runtime->dma_addr;
	dmadata->buffer_bytes = snd_pcm_lib_buffer_bytes(substream);
	dmadata->period_bytes = snd_pcm_lib_period_bytes(substream);

	if (dmadata->buffer_bytes == dmadata->period_bytes) {
		dmadata->frag_bytes = dmadata->period_bytes >> 1;
		dmadata->frags = 2;
	} else {
		dmadata->frag_bytes = dmadata->period_bytes;
		dmadata->frags = dmadata->buffer_bytes / dmadata->period_bytes;
	}
	dmadata->frag_count = 0;
	dmadata->pos = 0;
	return 0;
}

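/*
 * dmaengine completion callback.  It runs in a context that must not
 * submit new descriptors, so it only drops the in-flight count and
 * schedules the tasklet to queue the next fragment; a negative
 * frag_count means the stream has been stopped and the event is ignored.
 */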
static void txx9aclc_dma_complete(void *arg)
{
	struct txx9aclc_dmadata *dmadata = arg;
	unsigned long flags;

	/* dma completion handler cannot submit new operations */
	spin_lock_irqsave(&dmadata->dma_lock, flags);
	if (dmadata->frag_count >= 0) {
		dmadata->dmacount--;
		if (!WARN_ON(dmadata->dmacount < 0))
			tasklet_schedule(&dmadata->tasklet);
	}
	spin_unlock_irqrestore(&dmadata->dma_lock, flags);
}

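/*
 * Map one fragment into a single-entry scatterlist and queue it on the
 * slave channel, with txx9aclc_dma_complete() as the descriptor callback.
 */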
static struct dma_async_tx_descriptor *
txx9aclc_dma_submit(struct txx9aclc_dmadata *dmadata, dma_addr_t buf_dma_addr)
{
	struct dma_chan *chan = dmadata->dma_chan;
	struct dma_async_tx_descriptor *desc;
	struct scatterlist sg;

	sg_init_table(&sg, 1);
	sg_set_page(&sg, pfn_to_page(PFN_DOWN(buf_dma_addr)),
		    dmadata->frag_bytes, buf_dma_addr & (PAGE_SIZE - 1));
	sg_dma_address(&sg) = buf_dma_addr;
	desc = dmaengine_prep_slave_sg(chan, &sg, 1,
		dmadata->substream->stream == SNDRV_PCM_STREAM_PLAYBACK ?
		DMA_MEM_TO_DEV : DMA_DEV_TO_MEM,
		DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc) {
		dev_err(&chan->dev->device, "cannot prepare slave dma\n");
		return NULL;
	}
	desc->callback = txx9aclc_dma_complete;
	desc->callback_param = dmadata;
	dmaengine_submit(desc);
	return desc;
}

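/* Number of fragment descriptors kept in flight on the DMA channel. */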
#define NR_DMA_CHAIN		2

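/*
 * Refill the DMA chain.  A negative frag_count marks a freshly started
 * stream: any stale descriptors are terminated, NR_DMA_CHAIN fragments
 * are primed and the FIFO DMA is enabled.  On subsequent runs the chain
 * is topped back up to NR_DMA_CHAIN descriptors, advancing the position
 * reported by the pointer callback and signalling elapsed periods.
 */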
static void txx9aclc_dma_tasklet(unsigned long data)
{
	struct txx9aclc_dmadata *dmadata = (struct txx9aclc_dmadata *)data;
	struct dma_chan *chan = dmadata->dma_chan;
	struct dma_async_tx_descriptor *desc;
	struct snd_pcm_substream *substream = dmadata->substream;
	u32 ctlbit = substream->stream == SNDRV_PCM_STREAM_PLAYBACK ?
		ACCTL_AUDODMA : ACCTL_AUDIDMA;
	int i;
	unsigned long flags;

	spin_lock_irqsave(&dmadata->dma_lock, flags);
	if (dmadata->frag_count < 0) {
		struct txx9aclc_plat_drvdata *drvdata = txx9aclc_drvdata;
		void __iomem *base = drvdata->base;

		spin_unlock_irqrestore(&dmadata->dma_lock, flags);
		dmaengine_terminate_all(chan);
		/* first time */
		for (i = 0; i < NR_DMA_CHAIN; i++) {
			desc = txx9aclc_dma_submit(dmadata,
				dmadata->dma_addr + i * dmadata->frag_bytes);
			if (!desc)
				return;
		}
		dmadata->dmacount = NR_DMA_CHAIN;
		dma_async_issue_pending(chan);
		spin_lock_irqsave(&dmadata->dma_lock, flags);
		__raw_writel(ctlbit, base + ACCTLEN);
		dmadata->frag_count = NR_DMA_CHAIN % dmadata->frags;
		spin_unlock_irqrestore(&dmadata->dma_lock, flags);
		return;
	}
	if (WARN_ON(dmadata->dmacount >= NR_DMA_CHAIN)) {
		spin_unlock_irqrestore(&dmadata->dma_lock, flags);
		return;
	}
	while (dmadata->dmacount < NR_DMA_CHAIN) {
		dmadata->dmacount++;
		spin_unlock_irqrestore(&dmadata->dma_lock, flags);
		desc = txx9aclc_dma_submit(dmadata,
			dmadata->dma_addr +
			dmadata->frag_count * dmadata->frag_bytes);
		if (!desc)
			return;
		dma_async_issue_pending(chan);

		spin_lock_irqsave(&dmadata->dma_lock, flags);
		dmadata->frag_count++;
		dmadata->frag_count %= dmadata->frags;
		dmadata->pos += dmadata->frag_bytes;
		dmadata->pos %= dmadata->buffer_bytes;
		if ((dmadata->frag_count * dmadata->frag_bytes) %
		    dmadata->period_bytes == 0)
			snd_pcm_period_elapsed(substream);
	}
	spin_unlock_irqrestore(&dmadata->dma_lock, flags);
}

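/*
 * START resets frag_count to -1 and lets the tasklet (re)prime the DMA
 * chain; the stop, pause and resume cases only disable or re-enable the
 * FIFO DMA for the stream direction via ACCTLDIS/ACCTLEN.
 */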
static int txx9aclc_pcm_trigger(struct snd_soc_component *component,
				struct snd_pcm_substream *substream, int cmd)
{
	struct txx9aclc_dmadata *dmadata = substream->runtime->private_data;
	struct txx9aclc_plat_drvdata *drvdata = txx9aclc_drvdata;
	void __iomem *base = drvdata->base;
	unsigned long flags;
	int ret = 0;
	u32 ctlbit = substream->stream == SNDRV_PCM_STREAM_PLAYBACK ?
		ACCTL_AUDODMA : ACCTL_AUDIDMA;

	spin_lock_irqsave(&dmadata->dma_lock, flags);
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
		dmadata->frag_count = -1;
		tasklet_schedule(&dmadata->tasklet);
		break;
	case SNDRV_PCM_TRIGGER_STOP:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
	case SNDRV_PCM_TRIGGER_SUSPEND:
		__raw_writel(ctlbit, base + ACCTLDIS);
		break;
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
	case SNDRV_PCM_TRIGGER_RESUME:
		__raw_writel(ctlbit, base + ACCTLEN);
		break;
	default:
		ret = -EINVAL;
	}
	spin_unlock_irqrestore(&dmadata->dma_lock, flags);
	return ret;
}

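/* Report the byte position maintained by the tasklet, in frames. */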
static snd_pcm_uframes_t
txx9aclc_pcm_pointer(struct snd_soc_component *component,
		     struct snd_pcm_substream *substream)
{
	struct txx9aclc_dmadata *dmadata = substream->runtime->private_data;

	return bytes_to_frames(substream->runtime, dmadata->pos);
}

static int txx9aclc_pcm_open(struct snd_soc_component *component,
			     struct snd_pcm_substream *substream)
{
	struct txx9aclc_soc_device *dev = &txx9aclc_soc_device;
	struct txx9aclc_dmadata *dmadata = &dev->dmadata[substream->stream];
	int ret;

	ret = snd_soc_set_runtime_hwparams(substream, &txx9aclc_pcm_hardware);
	if (ret)
		return ret;
	/* ensure that buffer size is a multiple of period size */
	ret = snd_pcm_hw_constraint_integer(substream->runtime,
					    SNDRV_PCM_HW_PARAM_PERIODS);
	if (ret < 0)
		return ret;
	substream->runtime->private_data = dmadata;
	return 0;
}

static int txx9aclc_pcm_close(struct snd_soc_component *component,
			      struct snd_pcm_substream *substream)
{
	struct txx9aclc_dmadata *dmadata = substream->runtime->private_data;
	struct dma_chan *chan = dmadata->dma_chan;

	dmadata->frag_count = -1;
	dmaengine_terminate_all(chan);
	return 0;
}

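/*
 * pcm_construct: look up the platform DMA resources for playback and
 * capture, set up the per-direction dmadata and preallocate the managed
 * DMA buffers (64 KiB up front, growable to 4 MiB).
 */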
static int txx9aclc_pcm_new(struct snd_soc_component *component,
			    struct snd_soc_pcm_runtime *rtd)
{
	struct snd_card *card = rtd->card->snd_card;
	struct snd_soc_dai *dai = asoc_rtd_to_cpu(rtd, 0);
	struct snd_pcm *pcm = rtd->pcm;
	struct platform_device *pdev = to_platform_device(component->dev);
	struct txx9aclc_soc_device *dev;
	struct resource *r;
	int i;
	int ret;

	/* by this point the AC97 component has probed, so this is valid */
	dev = snd_soc_dai_get_drvdata(dai);

	dev->dmadata[0].stream = SNDRV_PCM_STREAM_PLAYBACK;
	dev->dmadata[1].stream = SNDRV_PCM_STREAM_CAPTURE;
	for (i = 0; i < 2; i++) {
		r = platform_get_resource(pdev, IORESOURCE_DMA, i);
		if (!r) {
			ret = -EBUSY;
			goto exit;
		}
		dev->dmadata[i].dma_res = r;
		ret = txx9aclc_dma_init(dev, &dev->dmadata[i]);
		if (ret)
			goto exit;
	}

	snd_pcm_set_managed_buffer_all(pcm, SNDRV_DMA_TYPE_DEV,
				       card->dev, 64 * 1024, 4 * 1024 * 1024);
	return 0;

exit:
	for (i = 0; i < 2; i++) {
		if (dev->dmadata[i].dma_chan)
			dma_release_channel(dev->dmadata[i].dma_chan);
		dev->dmadata[i].dma_chan = NULL;
	}
	return ret;
}

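/*
 * dma_request_channel() filter: accept the channel whose device name
 * matches "<dma resource name>.<resource start>" and attach the
 * txx9dmac slave data to it.
 */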
static bool filter(struct dma_chan *chan, void *param)
{
	struct txx9aclc_dmadata *dmadata = param;
	char *devname;
	bool found = false;

	devname = kasprintf(GFP_KERNEL, "%s.%d", dmadata->dma_res->name,
			    (int)dmadata->dma_res->start);
	if (strcmp(dev_name(chan->device->dev), devname) == 0) {
		chan->private = &dmadata->dma_slave;
		found = true;
	}
	kfree(devname);
	return found;
}

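/*
 * Point the txx9dmac slave at the ACLC audio data register for this
 * direction and reserve a matching DMA channel.
 */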
static int txx9aclc_dma_init(struct txx9aclc_soc_device *dev,
			     struct txx9aclc_dmadata *dmadata)
{
	struct txx9aclc_plat_drvdata *drvdata = txx9aclc_drvdata;
	struct txx9dmac_slave *ds = &dmadata->dma_slave;
	dma_cap_mask_t mask;

	spin_lock_init(&dmadata->dma_lock);

	ds->reg_width = sizeof(u32);
	if (dmadata->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		ds->tx_reg = drvdata->physbase + ACAUDODAT;
		ds->rx_reg = 0;
	} else {
		ds->tx_reg = 0;
		ds->rx_reg = drvdata->physbase + ACAUDIDAT;
	}

	/* Try to grab a DMA channel */
	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);
	dmadata->dma_chan = dma_request_channel(mask, filter, dmadata);
	if (!dmadata->dma_chan) {
		printk(KERN_ERR
			"DMA channel for %s is not available\n",
			dmadata->stream == SNDRV_PCM_STREAM_PLAYBACK ?
			"playback" : "capture");
		return -EBUSY;
	}
	tasklet_init(&dmadata->tasklet, txx9aclc_dma_tasklet,
		     (unsigned long)dmadata);
	return 0;
}

static int txx9aclc_pcm_probe(struct snd_soc_component *component)
{
	snd_soc_component_set_drvdata(component, &txx9aclc_soc_device);
	return 0;
}

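/*
 * Component removal: disable both FIFO DMAs at the controller, flush
 * any pending DMA request and release the channels acquired in
 * txx9aclc_pcm_new().
 */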
static void txx9aclc_pcm_remove(struct snd_soc_component *component)
{
	struct txx9aclc_soc_device *dev = snd_soc_component_get_drvdata(component);
	struct txx9aclc_plat_drvdata *drvdata = txx9aclc_drvdata;
	void __iomem *base = drvdata->base;
	int i;

	/* disable all FIFO DMAs */
	__raw_writel(ACCTL_AUDODMA | ACCTL_AUDIDMA, base + ACCTLDIS);
	/* dummy R/W to clear pending DMAREQ if any */
	__raw_writel(__raw_readl(base + ACAUDIDAT), base + ACAUDODAT);

	for (i = 0; i < 2; i++) {
		struct txx9aclc_dmadata *dmadata = &dev->dmadata[i];
		struct dma_chan *chan = dmadata->dma_chan;

		if (chan) {
			dmadata->frag_count = -1;
			dmaengine_terminate_all(chan);
			dma_release_channel(chan);
		}
		dev->dmadata[i].dma_chan = NULL;
	}
}

static const struct snd_soc_component_driver txx9aclc_soc_component = {
	.name		= DRV_NAME,
	.probe		= txx9aclc_pcm_probe,
	.remove		= txx9aclc_pcm_remove,
	.open		= txx9aclc_pcm_open,
	.close		= txx9aclc_pcm_close,
	.hw_params	= txx9aclc_pcm_hw_params,
	.prepare	= txx9aclc_pcm_prepare,
	.trigger	= txx9aclc_pcm_trigger,
	.pointer	= txx9aclc_pcm_pointer,
	.pcm_construct	= txx9aclc_pcm_new,
};

static int txx9aclc_soc_platform_probe(struct platform_device *pdev)
{
	return devm_snd_soc_register_component(&pdev->dev,
					       &txx9aclc_soc_component,
					       NULL, 0);
}

static struct platform_driver txx9aclc_pcm_driver = {
	.driver = {
		.name = "txx9aclc-pcm-audio",
	},

	.probe = txx9aclc_soc_platform_probe,
};

module_platform_driver(txx9aclc_pcm_driver);

MODULE_AUTHOR("Atsushi Nemoto <anemo@mba.ocn.ne.jp>");
MODULE_DESCRIPTION("TXx9 ACLC Audio DMA driver");
MODULE_LICENSE("GPL");