// SPDX-License-Identifier: GPL-2.0+
//
// idma.c - I2S0 internal DMA driver
//
// Copyright (c) 2011 Samsung Electronics Co., Ltd.
//		http://www.samsung.com

#include <linux/interrupt.h>
#include <linux/platform_device.h>
#include <linux/dma-mapping.h>
#include <linux/slab.h>
#include <linux/module.h>
#include <sound/pcm.h>
#include <sound/pcm_params.h>
#include <sound/soc.h>

#include "i2s.h"
#include "idma.h"
#include "i2s-regs.h"

#define ST_RUNNING		(1<<0)
#define ST_OPENED		(1<<1)

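/*
 * Hardware capabilities advertised to ALSA: interleaved, mmap-able
 * playback with at most two periods, since period boundaries are
 * signalled through the single level-0 address-match interrupt.
 */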
static const struct snd_pcm_hardware idma_hardware = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER |
		SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID |
		SNDRV_PCM_INFO_PAUSE |
		SNDRV_PCM_INFO_RESUME,
	.buffer_bytes_max = MAX_IDMA_BUFFER,
	.period_bytes_min = 128,
	.period_bytes_max = MAX_IDMA_PERIOD,
	.periods_min = 1,
	.periods_max = 2,
};

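/*
 * Per-substream DMA state: buffer bounds, current position, period
 * geometry and the period-elapsed callback installed by hw_params.
 */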
struct idma_ctrl {
	spinlock_t	lock;
	int		state;
	dma_addr_t	start;
	dma_addr_t	pos;
	dma_addr_t	end;
	dma_addr_t	period;
	dma_addr_t	periodsz;
	void		*token;
	void		(*cb)(void *dt, int bytes_xfer);
};

static struct idma_info {
	spinlock_t	lock;
	void __iomem	*regs;
	dma_addr_t	lp_tx_addr;
} idma;

static int idma_irq;

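/*
 * Translate the hardware transfer counter (I2STRNCNT counts 32-bit
 * words) into an absolute address within the low-power TX buffer.
 */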
static void idma_getpos(dma_addr_t *src)
{
	*src = idma.lp_tx_addr +
		(readl(idma.regs + I2STRNCNT) & 0xffffff) * 4;
}

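/*
 * Program the internal DMA for one playback buffer: set the level-0
 * interrupt address to the end of the first period, the start address
 * to the beginning of the buffer and the transfer size (in words),
 * then enable the level-0 interrupt.
 */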
static int idma_enqueue(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct idma_ctrl *prtd = substream->runtime->private_data;
	u32 val;

	spin_lock(&prtd->lock);
	prtd->token = (void *) substream;
	spin_unlock(&prtd->lock);

	/* Internal DMA Level0 Interrupt Address */
	val = idma.lp_tx_addr + prtd->periodsz;
	writel(val, idma.regs + I2SLVL0ADDR);

	/* Start address0 of I2S internal DMA operation. */
	val = idma.lp_tx_addr;
	writel(val, idma.regs + I2SSTR0);

	/*
	 * Transfer block size for the I2S internal DMA.
	 * The transfer size must be set before the DMA operation starts.
	 */
	val = readl(idma.regs + I2SSIZE);
	val &= ~(I2SSIZE_TRNMSK << I2SSIZE_SHIFT);
	val |= (((runtime->dma_bytes >> 2) &
			I2SSIZE_TRNMSK) << I2SSIZE_SHIFT);
	writel(val, idma.regs + I2SSIZE);

	val = readl(idma.regs + I2SAHB);
	val |= AHB_INTENLVL0;
	writel(val, idma.regs + I2SAHB);

	return 0;
}

static void idma_setcallbk(struct snd_pcm_substream *substream,
				void (*cb)(void *, int))
{
	struct idma_ctrl *prtd = substream->runtime->private_data;

	spin_lock(&prtd->lock);
	prtd->cb = cb;
	spin_unlock(&prtd->lock);
}

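/*
 * Start or stop the internal DMA engine by toggling the DMA enable and
 * level-0 interrupt enable bits in I2SAHB, under the shared idma lock.
 */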
static void idma_control(int op)
{
	u32 val;

	spin_lock(&idma.lock);

	/* Read-modify-write of I2SAHB must happen under the lock */
	val = readl(idma.regs + I2SAHB);

	switch (op) {
	case LPAM_DMA_START:
		val |= (AHB_INTENLVL0 | AHB_DMAEN);
		break;
	case LPAM_DMA_STOP:
		val &= ~(AHB_INTENLVL0 | AHB_DMAEN);
		break;
	default:
		spin_unlock(&idma.lock);
		return;
	}

	writel(val, idma.regs + I2SAHB);
	spin_unlock(&idma.lock);
}

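/*
 * Completion callback installed via idma_setcallbk(): called from the
 * interrupt handler to report an elapsed period to ALSA.
 */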
static void idma_done(void *id, int bytes_xfer)
{
	struct snd_pcm_substream *substream = id;
	struct idma_ctrl *prtd = substream->runtime->private_data;

	if (prtd && (prtd->state & ST_RUNNING))
		snd_pcm_period_elapsed(substream);
}

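/*
 * Route the I2S TX path to the internal DMA, attach the pre-mapped
 * low-power buffer as the runtime buffer and record the period layout.
 */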
static int idma_hw_params(struct snd_soc_component *component,
			  struct snd_pcm_substream *substream,
			  struct snd_pcm_hw_params *params)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct idma_ctrl *prtd = substream->runtime->private_data;
	u32 mod = readl(idma.regs + I2SMOD);
	u32 ahb = readl(idma.regs + I2SAHB);

	ahb |= (AHB_DMARLD | AHB_INTMASK);
	mod |= MOD_TXS_IDMA;
	writel(ahb, idma.regs + I2SAHB);
	writel(mod, idma.regs + I2SMOD);

	snd_pcm_set_runtime_buffer(substream, &substream->dma_buffer);
	runtime->dma_bytes = params_buffer_bytes(params);

	prtd->start = prtd->pos = runtime->dma_addr;
	prtd->period = params_periods(params);
	prtd->periodsz = params_period_bytes(params);
	prtd->end = runtime->dma_addr + runtime->dma_bytes;

	idma_setcallbk(substream, idma_done);

	return 0;
}

static int idma_hw_free(struct snd_soc_component *component,
			struct snd_pcm_substream *substream)
{
	snd_pcm_set_runtime_buffer(substream, NULL);

	return 0;
}

static int idma_prepare(struct snd_soc_component *component,
			struct snd_pcm_substream *substream)
{
	struct idma_ctrl *prtd = substream->runtime->private_data;

	prtd->pos = prtd->start;

	/* flush the DMA channel */
	idma_control(LPAM_DMA_STOP);
	idma_enqueue(substream);

	return 0;
}

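/*
 * ALSA trigger: track the running state in prtd->state and start or
 * stop the internal DMA accordingly.
 */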
static int idma_trigger(struct snd_soc_component *component,
			struct snd_pcm_substream *substream, int cmd)
{
	struct idma_ctrl *prtd = substream->runtime->private_data;
	int ret = 0;

	spin_lock(&prtd->lock);

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_START:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		prtd->state |= ST_RUNNING;
		idma_control(LPAM_DMA_START);
		break;

	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_STOP:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		prtd->state &= ~ST_RUNNING;
		idma_control(LPAM_DMA_STOP);
		break;

	default:
		ret = -EINVAL;
		break;
	}

	spin_unlock(&prtd->lock);

	return ret;
}

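/*
 * Report the current playback position as the offset of the hardware
 * transfer counter from the start of the buffer, converted to frames.
 */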
static snd_pcm_uframes_t
idma_pointer(struct snd_soc_component *component,
	     struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct idma_ctrl *prtd = runtime->private_data;
	dma_addr_t src;
	unsigned long res;

	spin_lock(&prtd->lock);

	idma_getpos(&src);
	res = src - prtd->start;

	spin_unlock(&prtd->lock);

	return bytes_to_frames(substream->runtime, res);
}

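/*
 * The playback buffer is iomem (the region behind lp_tx_addr mapped in
 * preallocate_idma_buffer()), so it is mapped into user space uncached,
 * mirroring snd_pcm_lib_mmap_iomem().
 */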
static int idma_mmap(struct snd_soc_component *component,
		     struct snd_pcm_substream *substream,
		     struct vm_area_struct *vma)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	unsigned long size, offset;
	int ret;

	/* From snd_pcm_lib_mmap_iomem */
	vma->vm_page_prot = pgprot_noncached(vma->vm_page_prot);
	size = vma->vm_end - vma->vm_start;
	offset = vma->vm_pgoff << PAGE_SHIFT;
	ret = io_remap_pfn_range(vma, vma->vm_start,
			(runtime->dma_addr + offset) >> PAGE_SHIFT,
			size, vma->vm_page_prot);

	return ret;
}

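/*
 * Level-0 interrupt handler: acknowledge the interrupt, advance the
 * level-0 match address by one period (wrapping at the end of the
 * buffer) and notify ALSA through the registered callback.
 */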
static irqreturn_t iis_irq(int irqno, void *dev_id)
{
	struct idma_ctrl *prtd = (struct idma_ctrl *)dev_id;
	u32 iisahb, val, addr;

	iisahb = readl(idma.regs + I2SAHB);

	val = (iisahb & AHB_LVL0INT) ? AHB_CLRLVL0INT : 0;

	if (val) {
		iisahb |= val;
		writel(iisahb, idma.regs + I2SAHB);

		addr = readl(idma.regs + I2SLVL0ADDR) - idma.lp_tx_addr;
		addr += prtd->periodsz;
		addr %= (u32)(prtd->end - prtd->start);
		addr += idma.lp_tx_addr;

		writel(addr, idma.regs + I2SLVL0ADDR);

		if (prtd->cb)
			prtd->cb(prtd->token, prtd->period);
	}

	return IRQ_HANDLED;
}

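/*
 * Open: advertise the hardware constraints, allocate the per-substream
 * control structure and claim the internal DMA interrupt.
 */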
static int idma_open(struct snd_soc_component *component,
		     struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct idma_ctrl *prtd;
	int ret;

	snd_soc_set_runtime_hwparams(substream, &idma_hardware);

	prtd = kzalloc(sizeof(struct idma_ctrl), GFP_KERNEL);
	if (prtd == NULL)
		return -ENOMEM;

	ret = request_irq(idma_irq, iis_irq, 0, "i2s", prtd);
	if (ret < 0) {
		pr_err("failed to claim i2s irq, ret = %d\n", ret);
		kfree(prtd);
		return ret;
	}

	spin_lock_init(&prtd->lock);

	runtime->private_data = prtd;

	return 0;
}

static int idma_close(struct snd_soc_component *component,
		      struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct idma_ctrl *prtd = runtime->private_data;

	/* Check for a missing context before using it */
	if (!prtd) {
		pr_err("idma_close called with prtd == NULL\n");
		return 0;
	}

	free_irq(idma_irq, prtd);
	kfree(prtd);

	return 0;
}

static void idma_free(struct snd_soc_component *component,
		      struct snd_pcm *pcm)
{
	struct snd_pcm_substream *substream;
	struct snd_dma_buffer *buf;

	substream = pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream;
	if (!substream)
		return;

	buf = &substream->dma_buffer;
	if (!buf->area)
		return;

	iounmap((void __iomem *)buf->area);

	buf->area = NULL;
	buf->addr = 0;
}

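/*
 * Map the fixed low-power transmit buffer (registered through
 * idma_reg_addr_init()) and use it as the preallocated playback buffer.
 */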
static int preallocate_idma_buffer(struct snd_pcm *pcm, int stream)
{
	struct snd_pcm_substream *substream = pcm->streams[stream].substream;
	struct snd_dma_buffer *buf = &substream->dma_buffer;

	buf->dev.dev = pcm->card->dev;
	buf->private_data = NULL;

	/* Assign PCM buffer pointers */
	buf->dev.type = SNDRV_DMA_TYPE_CONTINUOUS;
	buf->addr = idma.lp_tx_addr;
	buf->bytes = idma_hardware.buffer_bytes_max;
	buf->area = (unsigned char * __force)ioremap(buf->addr, buf->bytes);
	if (!buf->area)
		return -ENOMEM;

	return 0;
}

static int idma_new(struct snd_soc_component *component,
		    struct snd_soc_pcm_runtime *rtd)
{
	struct snd_card *card = rtd->card->snd_card;
	struct snd_pcm *pcm = rtd->pcm;
	int ret;

	ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32));
	if (ret)
		return ret;

	if (pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream) {
		ret = preallocate_idma_buffer(pcm,
				SNDRV_PCM_STREAM_PLAYBACK);
	}

	return ret;
}

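/*
 * Exported for the Samsung I2S CPU DAI driver: records the I2S register
 * base and the low-power buffer address used by the internal DMA.
 */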
void idma_reg_addr_init(void __iomem *regs, dma_addr_t addr)
{
	spin_lock_init(&idma.lock);
	idma.regs = regs;
	idma.lp_tx_addr = addr;
}
EXPORT_SYMBOL_GPL(idma_reg_addr_init);

static const struct snd_soc_component_driver asoc_idma_platform = {
	.open		= idma_open,
	.close		= idma_close,
	.trigger	= idma_trigger,
	.pointer	= idma_pointer,
	.mmap		= idma_mmap,
	.hw_params	= idma_hw_params,
	.hw_free	= idma_hw_free,
	.prepare	= idma_prepare,
	.pcm_construct	= idma_new,
	.pcm_destruct	= idma_free,
};

static int asoc_idma_platform_probe(struct platform_device *pdev)
{
	idma_irq = platform_get_irq(pdev, 0);
	if (idma_irq < 0)
		return idma_irq;

	return devm_snd_soc_register_component(&pdev->dev, &asoc_idma_platform,
					       NULL, 0);
}

static struct platform_driver asoc_idma_driver = {
	.driver = {
		.name = "samsung-idma",
	},

	.probe = asoc_idma_platform_probe,
};

module_platform_driver(asoc_idma_driver);

MODULE_AUTHOR("Jaswinder Singh, <jassisinghbrar@gmail.com>");
MODULE_DESCRIPTION("Samsung ASoC IDMA Driver");
MODULE_LICENSE("GPL");