/*
 * sound/soc/samsung/idma.c
 *
 * Copyright (c) 2011 Samsung Electronics Co., Ltd.
 *		http://www.samsung.com
 *
 * I2S0's Internal DMA driver
 *
 * This program is free software; you can redistribute  it and/or modify it
 * under  the terms of  the GNU General  Public License as published by the
 * Free Software Foundation;  either version 2 of the  License, or (at your
 * option) any later version.
 */
#include <linux/interrupt.h>
#include <linux/platform_device.h>
#include <linux/dma-mapping.h>
#include <linux/slab.h>
#include <linux/module.h>
#include <sound/pcm.h>
#include <sound/pcm_params.h>
#include <sound/soc.h>

#include "i2s.h"
#include "idma.h"
#include "i2s-regs.h"

#define ST_RUNNING		(1 << 0)
#define ST_OPENED		(1 << 1)

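/*
 * Capabilities of the internal DMA path (playback only). Only the
 * single level-0 compare address is used, which is presumably why no
 * more than two periods are advertised.
 */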
static const struct snd_pcm_hardware idma_hardware = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		    SNDRV_PCM_INFO_BLOCK_TRANSFER |
		    SNDRV_PCM_INFO_MMAP |
		    SNDRV_PCM_INFO_MMAP_VALID |
		    SNDRV_PCM_INFO_PAUSE |
		    SNDRV_PCM_INFO_RESUME,
	.buffer_bytes_max = MAX_IDMA_BUFFER,
	.period_bytes_min = 128,
	.period_bytes_max = MAX_IDMA_PERIOD,
	.periods_min = 1,
	.periods_max = 2,
};

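/*
 * Per-substream state: buffer boundaries, current position, period
 * geometry and the period-elapsed callback invoked from the IRQ handler.
 */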
struct idma_ctrl {
	spinlock_t	lock;
	int		state;
	dma_addr_t	start;
	dma_addr_t	pos;
	dma_addr_t	end;
	dma_addr_t	period;
	dma_addr_t	periodsz;
	void		*token;
	void		(*cb)(void *dt, int bytes_xfer);
};

static struct idma_info {
	spinlock_t	lock;
	void __iomem	*regs;
	dma_addr_t	lp_tx_addr;
} idma;

static int idma_irq;

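/*
 * Current playback position: I2STRNCNT holds the number of 32-bit words
 * transferred so far (24-bit counter), converted here into a bus address
 * within the low-power buffer.
 */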
static void idma_getpos(dma_addr_t *src)
{
	*src = idma.lp_tx_addr +
		(readl(idma.regs + I2STRNCNT) & 0xffffff) * 4;
}

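/*
 * Program the internal DMA for the runtime buffer: the level-0 compare
 * address is set one period past the buffer base, the start address to
 * the base itself, and the transfer size to the whole buffer in 32-bit
 * words, before the level-0 interrupt is enabled.
 */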
static int idma_enqueue(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct idma_ctrl *prtd = substream->runtime->private_data;
	u32 val;

	spin_lock(&prtd->lock);
	prtd->token = (void *) substream;
	spin_unlock(&prtd->lock);

	/* Internal DMA Level0 Interrupt Address */
	val = idma.lp_tx_addr + prtd->periodsz;
	writel(val, idma.regs + I2SLVL0ADDR);

	/* Start address0 of I2S internal DMA operation. */
	val = idma.lp_tx_addr;
	writel(val, idma.regs + I2SSTR0);

	/*
	 * Transfer block size for I2S internal DMA.
	 * The transfer size must be set before the DMA operation is started.
	 */
	val = readl(idma.regs + I2SSIZE);
	val &= ~(I2SSIZE_TRNMSK << I2SSIZE_SHIFT);
	val |= (((runtime->dma_bytes >> 2) &
			I2SSIZE_TRNMSK) << I2SSIZE_SHIFT);
	writel(val, idma.regs + I2SSIZE);

	val = readl(idma.regs + I2SAHB);
	val |= AHB_INTENLVL0;
	writel(val, idma.regs + I2SAHB);

	return 0;
}

static void idma_setcallbk(struct snd_pcm_substream *substream,
				void (*cb)(void *, int))
{
	struct idma_ctrl *prtd = substream->runtime->private_data;

	spin_lock(&prtd->lock);
	prtd->cb = cb;
	spin_unlock(&prtd->lock);
}

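/* Start or stop the internal DMA and its level-0 interrupt. */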
static void idma_control(int op)
{
	u32 val;

	spin_lock(&idma.lock);

	val = readl(idma.regs + I2SAHB);

	switch (op) {
	case LPAM_DMA_START:
		val |= (AHB_INTENLVL0 | AHB_DMAEN);
		break;
	case LPAM_DMA_STOP:
		val &= ~(AHB_INTENLVL0 | AHB_DMAEN);
		break;
	default:
		spin_unlock(&idma.lock);
		return;
	}

	writel(val, idma.regs + I2SAHB);
	spin_unlock(&idma.lock);
}

static void idma_done(void *id, int bytes_xfer)
{
	struct snd_pcm_substream *substream = id;
	struct idma_ctrl *prtd = substream->runtime->private_data;

	if (prtd && (prtd->state & ST_RUNNING))
		snd_pcm_period_elapsed(substream);
}

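/*
 * Route the I2S TX path to the internal DMA (MOD_TXS_IDMA), set the
 * AHB-side reload and interrupt control bits, and record the buffer and
 * period geometry used later by the IRQ handler.
 */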
static int idma_hw_params(struct snd_pcm_substream *substream,
				struct snd_pcm_hw_params *params)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct idma_ctrl *prtd = substream->runtime->private_data;
	u32 mod = readl(idma.regs + I2SMOD);
	u32 ahb = readl(idma.regs + I2SAHB);

	ahb |= (AHB_DMARLD | AHB_INTMASK);
	mod |= MOD_TXS_IDMA;
	writel(ahb, idma.regs + I2SAHB);
	writel(mod, idma.regs + I2SMOD);

	snd_pcm_set_runtime_buffer(substream, &substream->dma_buffer);
	runtime->dma_bytes = params_buffer_bytes(params);

	prtd->start = prtd->pos = runtime->dma_addr;
	prtd->period = params_periods(params);
	prtd->periodsz = params_period_bytes(params);
	prtd->end = runtime->dma_addr + runtime->dma_bytes;

	idma_setcallbk(substream, idma_done);

	return 0;
}

static int idma_hw_free(struct snd_pcm_substream *substream)
{
	snd_pcm_set_runtime_buffer(substream, NULL);

	return 0;
}

static int idma_prepare(struct snd_pcm_substream *substream)
{
	struct idma_ctrl *prtd = substream->runtime->private_data;

	prtd->pos = prtd->start;

	/* flush the DMA channel */
	idma_control(LPAM_DMA_STOP);
	idma_enqueue(substream);

	return 0;
}

static int idma_trigger(struct snd_pcm_substream *substream, int cmd)
{
	struct idma_ctrl *prtd = substream->runtime->private_data;
	int ret = 0;

	spin_lock(&prtd->lock);

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_START:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		prtd->state |= ST_RUNNING;
		idma_control(LPAM_DMA_START);
		break;

	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_STOP:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		prtd->state &= ~ST_RUNNING;
		idma_control(LPAM_DMA_STOP);
		break;

	default:
		ret = -EINVAL;
		break;
	}

	spin_unlock(&prtd->lock);

	return ret;
}

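/*
 * The position is derived from the hardware transfer counter via
 * idma_getpos() rather than from a software cursor.
 */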
static snd_pcm_uframes_t
	idma_pointer(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct idma_ctrl *prtd = runtime->private_data;
	dma_addr_t src;
	unsigned long res;

	spin_lock(&prtd->lock);

	idma_getpos(&src);
	res = src - prtd->start;

	spin_unlock(&prtd->lock);

	return bytes_to_frames(substream->runtime, res);
}

static int idma_mmap(struct snd_pcm_substream *substream,
	struct vm_area_struct *vma)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	unsigned long size, offset;
	int ret;

	/* From snd_pcm_lib_mmap_iomem */
	vma->vm_page_prot = pgprot_noncached(vma->vm_page_prot);
	size = vma->vm_end - vma->vm_start;
	offset = vma->vm_pgoff << PAGE_SHIFT;
	ret = io_remap_pfn_range(vma, vma->vm_start,
			(runtime->dma_addr + offset) >> PAGE_SHIFT,
			size, vma->vm_page_prot);

	return ret;
}

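/*
 * Level-0 interrupt handler: acknowledge the interrupt, advance the
 * level-0 compare address by one period (wrapping at the end of the
 * buffer) and report the elapsed period through the registered callback.
 */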
static irqreturn_t iis_irq(int irqno, void *dev_id)
{
	struct idma_ctrl *prtd = (struct idma_ctrl *)dev_id;
	u32 iisahb, val, addr;

	iisahb = readl(idma.regs + I2SAHB);

	val = (iisahb & AHB_LVL0INT) ? AHB_CLRLVL0INT : 0;

	if (val) {
		iisahb |= val;
		writel(iisahb, idma.regs + I2SAHB);

		addr = readl(idma.regs + I2SLVL0ADDR) - idma.lp_tx_addr;
		addr += prtd->periodsz;
		addr %= (u32)(prtd->end - prtd->start);
		addr += idma.lp_tx_addr;

		writel(addr, idma.regs + I2SLVL0ADDR);

		if (prtd->cb)
			prtd->cb(prtd->token, prtd->period);
	}

	return IRQ_HANDLED;
}

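/*
 * Allocate the per-substream state and claim the internal DMA interrupt;
 * both are released again in idma_close().
 */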
static int idma_open(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct idma_ctrl *prtd;
	int ret;

	snd_soc_set_runtime_hwparams(substream, &idma_hardware);

	prtd = kzalloc(sizeof(struct idma_ctrl), GFP_KERNEL);
	if (prtd == NULL)
		return -ENOMEM;

	ret = request_irq(idma_irq, iis_irq, 0, "i2s", prtd);
	if (ret < 0) {
		pr_err("failed to claim i2s irq, ret = %d\n", ret);
		kfree(prtd);
		return ret;
	}

	spin_lock_init(&prtd->lock);

	runtime->private_data = prtd;

	return 0;
}


static int idma_close(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct idma_ctrl *prtd = runtime->private_data;

	if (!prtd) {
		pr_err("idma_close called with prtd == NULL\n");
		return 0;
	}

	free_irq(idma_irq, prtd);
	kfree(prtd);

	return 0;
}

static const struct snd_pcm_ops idma_ops = {
	.open		= idma_open,
	.close		= idma_close,
	.ioctl		= snd_pcm_lib_ioctl,
	.trigger	= idma_trigger,
	.pointer	= idma_pointer,
	.mmap		= idma_mmap,
	.hw_params	= idma_hw_params,
	.hw_free	= idma_hw_free,
	.prepare	= idma_prepare,
};

static void idma_free(struct snd_pcm *pcm)
{
	struct snd_pcm_substream *substream;
	struct snd_dma_buffer *buf;

	substream = pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream;
	if (!substream)
		return;

	buf = &substream->dma_buffer;
	if (!buf->area)
		return;

	iounmap((void __iomem *)buf->area);

	buf->area = NULL;
	buf->addr = 0;
}

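/*
 * No memory is allocated here: the playback buffer is the fixed low-power
 * region handed over via idma_reg_addr_init() (typically on-chip audio
 * SRAM), which is simply ioremap()ed for CPU access.
 */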
static int preallocate_idma_buffer(struct snd_pcm *pcm, int stream)
{
	struct snd_pcm_substream *substream = pcm->streams[stream].substream;
	struct snd_dma_buffer *buf = &substream->dma_buffer;

	buf->dev.dev = pcm->card->dev;
	buf->private_data = NULL;

	/* Assign PCM buffer pointers */
	buf->dev.type = SNDRV_DMA_TYPE_CONTINUOUS;
	buf->addr = idma.lp_tx_addr;
	buf->bytes = idma_hardware.buffer_bytes_max;
	buf->area = (unsigned char * __force)ioremap(buf->addr, buf->bytes);
	if (!buf->area)
		return -ENOMEM;

	return 0;
}

static int idma_new(struct snd_soc_pcm_runtime *rtd)
{
	struct snd_card *card = rtd->card->snd_card;
	struct snd_pcm *pcm = rtd->pcm;
	int ret;

	ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32));
	if (ret)
		return ret;

	if (pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream)
		ret = preallocate_idma_buffer(pcm,
				SNDRV_PCM_STREAM_PLAYBACK);

	return ret;
}

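/*
 * Handover from the Samsung I2S CPU DAI driver (sound/soc/samsung/i2s.c):
 * it passes its mapped register base and the address of the low-power
 * playback buffer before this platform component is used, along the
 * lines of
 *
 *	idma_reg_addr_init(regs, lp_buffer_addr);
 *
 * (illustrative call; the actual arguments come from the I2S driver's
 * own resources)
 */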
void idma_reg_addr_init(void __iomem *regs, dma_addr_t addr)
{
	spin_lock_init(&idma.lock);
	idma.regs = regs;
	idma.lp_tx_addr = addr;
}
EXPORT_SYMBOL_GPL(idma_reg_addr_init);

static const struct snd_soc_component_driver asoc_idma_platform = {
	.ops = &idma_ops,
	.pcm_new = idma_new,
	.pcm_free = idma_free,
};

static int asoc_idma_platform_probe(struct platform_device *pdev)
{
	idma_irq = platform_get_irq(pdev, 0);
	if (idma_irq < 0)
		return idma_irq;

	return devm_snd_soc_register_component(&pdev->dev, &asoc_idma_platform,
					       NULL, 0);
}

static struct platform_driver asoc_idma_driver = {
	.driver = {
		.name = "samsung-idma",
	},

	.probe = asoc_idma_platform_probe,
};

module_platform_driver(asoc_idma_driver);

MODULE_AUTHOR("Jaswinder Singh, <jassisinghbrar@gmail.com>");
MODULE_DESCRIPTION("Samsung ASoC IDMA Driver");
MODULE_LICENSE("GPL");