1 // SPDX-License-Identifier: GPL-2.0
2 //
3 // Renesas RZ/G2L ASoC Serial Sound Interface (SSIF-2) Driver
4 //
5 // Copyright (C) 2021 Renesas Electronics Corp.
6 // Copyright (C) 2019 Chris Brandt.
7 //
8 
9 #include <linux/clk.h>
10 #include <linux/dmaengine.h>
11 #include <linux/io.h>
12 #include <linux/module.h>
13 #include <linux/of_device.h>
14 #include <linux/pm_runtime.h>
15 #include <linux/reset.h>
16 #include <sound/soc.h>
17 
18 /* REGISTER OFFSET */
19 #define SSICR			0x000
20 #define SSISR			0x004
21 #define SSIFCR			0x010
22 #define SSIFSR			0x014
23 #define SSIFTDR			0x018
24 #define SSIFRDR			0x01c
25 #define SSIOFR			0x020
26 #define SSISCR			0x024
27 
28 /* SSI REGISTER BITS */
29 #define SSICR_DWL(x)		(((x) & 0x7) << 19)
30 #define SSICR_SWL(x)		(((x) & 0x7) << 16)
31 
32 #define SSICR_CKS		BIT(30)
33 #define SSICR_TUIEN		BIT(29)
34 #define SSICR_TOIEN		BIT(28)
35 #define SSICR_RUIEN		BIT(27)
36 #define SSICR_ROIEN		BIT(26)
37 #define SSICR_MST		BIT(14)
38 #define SSICR_BCKP		BIT(13)
39 #define SSICR_LRCKP		BIT(12)
40 #define SSICR_CKDV(x)		(((x) & 0xf) << 4)
41 #define SSICR_TEN		BIT(1)
42 #define SSICR_REN		BIT(0)
43 
44 #define SSISR_TUIRQ		BIT(29)
45 #define SSISR_TOIRQ		BIT(28)
46 #define SSISR_RUIRQ		BIT(27)
47 #define SSISR_ROIRQ		BIT(26)
48 #define SSISR_IIRQ		BIT(25)
49 
50 #define SSIFCR_AUCKE		BIT(31)
51 #define SSIFCR_SSIRST		BIT(16)
52 #define SSIFCR_TIE		BIT(3)
53 #define SSIFCR_RIE		BIT(2)
54 #define SSIFCR_TFRST		BIT(1)
55 #define SSIFCR_RFRST		BIT(0)
56 
57 #define SSIFSR_TDC_MASK		0x3f
58 #define SSIFSR_TDC_SHIFT	24
59 #define SSIFSR_RDC_MASK		0x3f
60 #define SSIFSR_RDC_SHIFT	8
61 
62 #define SSIFSR_TDE		BIT(16)
63 #define SSIFSR_RDF		BIT(0)
64 
65 #define SSIOFR_LRCONT		BIT(8)
66 
67 #define SSISCR_TDES(x)		(((x) & 0x1f) << 8)
68 #define SSISCR_RDFS(x)		(((x) & 0x1f) << 0)
69 
70 /* Pre-allocated buffer sizes */
71 #define PREALLOC_BUFFER		(SZ_32K)
72 #define PREALLOC_BUFFER_MAX	(SZ_32K)
73 
74 #define SSI_RATES		SNDRV_PCM_RATE_8000_48000 /* 8 kHz - 48 kHz */
75 #define SSI_FMTS		SNDRV_PCM_FMTBIT_S16_LE
76 #define SSI_CHAN_MIN		2
77 #define SSI_CHAN_MAX		2
78 #define SSI_FIFO_DEPTH		32
79 
80 struct rz_ssi_priv;
81 
82 struct rz_ssi_stream {
83 	struct rz_ssi_priv *priv;
84 	struct snd_pcm_substream *substream;
85 	int fifo_sample_size;	/* sample capacity of SSI FIFO */
86 	int dma_buffer_pos;	/* The address for the next DMA descriptor */
87 	int period_counter;	/* for keeping track of periods transferred */
88 	int sample_width;
89 	int buffer_pos;		/* current frame position in the buffer */
90 	int running;		/* 0=stopped, 1=running */
91 
92 	int uerr_num;
93 	int oerr_num;
94 
95 	struct dma_chan *dma_ch;
96 
97 	int (*transfer)(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm);
98 };
99 
100 struct rz_ssi_priv {
101 	void __iomem *base;
102 	struct platform_device *pdev;
103 	struct reset_control *rstc;
104 	struct device *dev;
105 	struct clk *sfr_clk;
106 	struct clk *clk;
107 
108 	phys_addr_t phys;
109 	int irq_int;
110 	int irq_tx;
111 	int irq_rx;
112 	int irq_rt;
113 
114 	spinlock_t lock;
115 
116 	/*
117 	 * The SSI supports full-duplex transmission and reception.
118 	 * However, if an error occurs, channel reset (both transmission
119 	 * and reception reset) is required.
120 	 * So it is better to use it in half-duplex mode (playback and
121 	 * capture should be done on separate channels).
122 	 */
123 	struct rz_ssi_stream playback;
124 	struct rz_ssi_stream capture;
125 
126 	/* clock */
127 	unsigned long audio_mck;
128 	unsigned long audio_clk_1;
129 	unsigned long audio_clk_2;
130 
131 	bool lrckp_fsync_fall;	/* LR clock polarity (SSICR.LRCKP) */
132 	bool bckp_rise;	/* Bit clock polarity (SSICR.BCKP) */
133 	bool dma_rt;
134 };
135 
136 static void rz_ssi_dma_complete(void *data);
137 
138 static void rz_ssi_reg_writel(struct rz_ssi_priv *priv, uint reg, u32 data)
139 {
140 	writel(data, (priv->base + reg));
141 }
142 
143 static u32 rz_ssi_reg_readl(struct rz_ssi_priv *priv, uint reg)
144 {
145 	return readl(priv->base + reg);
146 }
147 
148 static void rz_ssi_reg_mask_setl(struct rz_ssi_priv *priv, uint reg,
149 				 u32 bclr, u32 bset)
150 {
151 	u32 val;
152 
153 	val = readl(priv->base + reg);
154 	val = (val & ~bclr) | bset;
155 	writel(val, (priv->base + reg));
156 }
157 
158 static inline struct snd_soc_dai *
159 rz_ssi_get_dai(struct snd_pcm_substream *substream)
160 {
161 	struct snd_soc_pcm_runtime *rtd = asoc_substream_to_rtd(substream);
162 
163 	return asoc_rtd_to_cpu(rtd, 0);
164 }
165 
166 static inline bool rz_ssi_stream_is_play(struct rz_ssi_priv *ssi,
167 					 struct snd_pcm_substream *substream)
168 {
169 	return substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
170 }
171 
172 static inline struct rz_ssi_stream *
173 rz_ssi_stream_get(struct rz_ssi_priv *ssi, struct snd_pcm_substream *substream)
174 {
175 	struct rz_ssi_stream *stream = &ssi->playback;
176 
177 	if (substream->stream != SNDRV_PCM_STREAM_PLAYBACK)
178 		stream = &ssi->capture;
179 
180 	return stream;
181 }
182 
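/*
 * DMA is usable when a playback channel was obtained and either the single
 * "rt" channel serves both directions (dma_rt) or a dedicated capture
 * channel exists as well; otherwise the driver falls back to PIO.
 */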
183 static inline bool rz_ssi_is_dma_enabled(struct rz_ssi_priv *ssi)
184 {
185 	return (ssi->playback.dma_ch && (ssi->dma_rt || ssi->capture.dma_ch));
186 }
187 
188 static void rz_ssi_set_substream(struct rz_ssi_stream *strm,
189 				 struct snd_pcm_substream *substream)
190 {
191 	struct rz_ssi_priv *ssi = strm->priv;
192 	unsigned long flags;
193 
194 	spin_lock_irqsave(&ssi->lock, flags);
195 	strm->substream = substream;
196 	spin_unlock_irqrestore(&ssi->lock, flags);
197 }
198 
199 static bool rz_ssi_stream_is_valid(struct rz_ssi_priv *ssi,
200 				   struct rz_ssi_stream *strm)
201 {
202 	unsigned long flags;
203 	bool ret;
204 
205 	spin_lock_irqsave(&ssi->lock, flags);
206 	ret = strm->substream && strm->substream->runtime;
207 	spin_unlock_irqrestore(&ssi->lock, flags);
208 
209 	return ret;
210 }
211 
212 static void rz_ssi_stream_init(struct rz_ssi_stream *strm,
213 			       struct snd_pcm_substream *substream)
214 {
215 	struct snd_pcm_runtime *runtime = substream->runtime;
216 
217 	rz_ssi_set_substream(strm, substream);
218 	strm->sample_width = samples_to_bytes(runtime, 1);
219 	strm->dma_buffer_pos = 0;
220 	strm->period_counter = 0;
221 	strm->buffer_pos = 0;
222 
223 	strm->oerr_num = 0;
224 	strm->uerr_num = 0;
225 	strm->running = 0;
226 
227 	/* fifo init */
228 	strm->fifo_sample_size = SSI_FIFO_DEPTH;
229 }
230 
231 static void rz_ssi_stream_quit(struct rz_ssi_priv *ssi,
232 			       struct rz_ssi_stream *strm)
233 {
234 	struct snd_soc_dai *dai = rz_ssi_get_dai(strm->substream);
235 
236 	rz_ssi_set_substream(strm, NULL);
237 
238 	if (strm->oerr_num > 0)
239 		dev_info(dai->dev, "overrun = %d\n", strm->oerr_num);
240 
241 	if (strm->uerr_num > 0)
242 		dev_info(dai->dev, "underrun = %d\n", strm->uerr_num);
243 }
244 
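/*
 * Program SSICR/SSIFCR for clock-provider operation: the bit clock is
 * rate * channels * 32 (the fixed system word length); audio_clk_1 is used
 * as the master clock when it divides that rate evenly, audio_clk_2
 * otherwise, and the matching CKDV divider is looked up in the table below.
 * Data word length is fixed at 16 bits, system word length at 32 bits.
 */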
245 static int rz_ssi_clk_setup(struct rz_ssi_priv *ssi, unsigned int rate,
246 			    unsigned int channels)
247 {
248 	static s8 ckdv[16] = { 1,  2,  4,  8, 16, 32, 64, 128,
249 			       6, 12, 24, 48, 96, -1, -1, -1 };
250 	unsigned int channel_bits = 32;	/* System Word Length */
251 	unsigned long bclk_rate = rate * channels * channel_bits;
252 	unsigned int div;
253 	unsigned int i;
254 	u32 ssicr = 0;
255 	u32 clk_ckdv;
256 
257 	/* Clear AUCKE so we can set MST */
258 	rz_ssi_reg_writel(ssi, SSIFCR, 0);
259 
260 	/* Continue to output LRCK pin even when idle */
261 	rz_ssi_reg_writel(ssi, SSIOFR, SSIOFR_LRCONT);
262 	if (ssi->audio_clk_1 && ssi->audio_clk_2) {
263 		if (ssi->audio_clk_1 % bclk_rate)
264 			ssi->audio_mck = ssi->audio_clk_2;
265 		else
266 			ssi->audio_mck = ssi->audio_clk_1;
267 	}
268 
269 	/* Clock setting */
270 	ssicr |= SSICR_MST;
271 	if (ssi->audio_mck == ssi->audio_clk_1)
272 		ssicr |= SSICR_CKS;
273 	if (ssi->bckp_rise)
274 		ssicr |= SSICR_BCKP;
275 	if (ssi->lrckp_fsync_fall)
276 		ssicr |= SSICR_LRCKP;
277 
278 	/* Determine the clock divider */
279 	clk_ckdv = 0;
280 	div = ssi->audio_mck / bclk_rate;
281 	/* try to find a matching divider */
282 	for (i = 0; i < ARRAY_SIZE(ckdv); i++) {
283 		if (ckdv[i] == div) {
284 			clk_ckdv = i;
285 			break;
286 		}
287 	}
288 
289 	if (i == ARRAY_SIZE(ckdv)) {
290 		dev_err(ssi->dev, "Rate not divisible by audio clock source\n");
291 		return -EINVAL;
292 	}
293 
294 	/*
295 	 * DWL: Data Word Length = 16 bits
296 	 * SWL: System Word Length = 32 bits
297 	 */
298 	ssicr |= SSICR_CKDV(clk_ckdv);
299 	ssicr |= SSICR_DWL(1) | SSICR_SWL(3);
300 	rz_ssi_reg_writel(ssi, SSICR, ssicr);
301 	rz_ssi_reg_writel(ssi, SSIFCR,
302 			  (SSIFCR_AUCKE | SSIFCR_TFRST | SSIFCR_RFRST));
303 
304 	return 0;
305 }
306 
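/*
 * Enable one direction of the SSI: set the FIFO request thresholds (both
 * zero for DMA; half the FIFO depth for TX / one sample for RX in PIO mode),
 * unmask the underflow/overflow and FIFO interrupts for that direction while
 * keeping the unused FIFO in reset, clear any stale error flags and finally
 * set TEN or REN.
 */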
307 static int rz_ssi_start(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
308 {
309 	bool is_play = rz_ssi_stream_is_play(ssi, strm->substream);
310 	u32 ssicr, ssifcr;
311 
312 	ssicr = rz_ssi_reg_readl(ssi, SSICR);
313 	ssifcr = rz_ssi_reg_readl(ssi, SSIFCR) & ~0xF;
314 
315 	/* FIFO interrupt thresholds */
316 	if (rz_ssi_is_dma_enabled(ssi))
317 		rz_ssi_reg_writel(ssi, SSISCR, 0);
318 	else
319 		rz_ssi_reg_writel(ssi, SSISCR,
320 				  SSISCR_TDES(strm->fifo_sample_size / 2 - 1) |
321 				  SSISCR_RDFS(0));
322 
323 	/* enable IRQ */
324 	if (is_play) {
325 		ssicr |= SSICR_TUIEN | SSICR_TOIEN;
326 		ssifcr |= SSIFCR_TIE | SSIFCR_RFRST;
327 	} else {
328 		ssicr |= SSICR_RUIEN | SSICR_ROIEN;
329 		ssifcr |= SSIFCR_RIE | SSIFCR_TFRST;
330 	}
331 
332 	rz_ssi_reg_writel(ssi, SSICR, ssicr);
333 	rz_ssi_reg_writel(ssi, SSIFCR, ssifcr);
334 
335 	/* Clear all error flags */
336 	rz_ssi_reg_mask_setl(ssi, SSISR,
337 			     (SSISR_TOIRQ | SSISR_TUIRQ | SSISR_ROIRQ |
338 			      SSISR_RUIRQ), 0);
339 
340 	strm->running = 1;
341 	ssicr |= is_play ? SSICR_TEN : SSICR_REN;
342 	rz_ssi_reg_writel(ssi, SSICR, ssicr);
343 
344 	return 0;
345 }
346 
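/*
 * Stop the stream: clear TEN/REN, cancel any in-flight DMA, mask the error
 * and FIFO interrupts, clear the error flags, wait (up to ~100 us) for the
 * idle flag, then hold both FIFOs in reset.
 */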
347 static int rz_ssi_stop(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
348 {
349 	int timeout;
350 
351 	strm->running = 0;
352 
353 	/* Disable TX/RX */
354 	rz_ssi_reg_mask_setl(ssi, SSICR, SSICR_TEN | SSICR_REN, 0);
355 
356 	/* Cancel all remaining DMA transactions */
357 	if (rz_ssi_is_dma_enabled(ssi))
358 		dmaengine_terminate_async(strm->dma_ch);
359 
360 	/* Disable irqs */
361 	rz_ssi_reg_mask_setl(ssi, SSICR, SSICR_TUIEN | SSICR_TOIEN |
362 			     SSICR_RUIEN | SSICR_ROIEN, 0);
363 	rz_ssi_reg_mask_setl(ssi, SSIFCR, SSIFCR_TIE | SSIFCR_RIE, 0);
364 
365 	/* Clear all error flags */
366 	rz_ssi_reg_mask_setl(ssi, SSISR,
367 			     (SSISR_TOIRQ | SSISR_TUIRQ | SSISR_ROIRQ |
368 			      SSISR_RUIRQ), 0);
369 
370 	/* Wait for idle */
371 	timeout = 100;
372 	while (--timeout) {
373 		if (rz_ssi_reg_readl(ssi, SSISR) & SSISR_IIRQ)
374 			break;
375 		udelay(1);
376 	}
377 
378 	if (!timeout)
379 		dev_info(ssi->dev, "timeout waiting for SSI idle\n");
380 
381 	/* Hold FIFOs in reset */
382 	rz_ssi_reg_mask_setl(ssi, SSIFCR, 0,
383 			     SSIFCR_TFRST | SSIFCR_RFRST);
384 
385 	return 0;
386 }
387 
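/*
 * Advance the ALSA ring-buffer position by 'frames', wrapping at
 * buffer_size, and call snd_pcm_period_elapsed() whenever a period boundary
 * has been crossed.
 */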
388 static void rz_ssi_pointer_update(struct rz_ssi_stream *strm, int frames)
389 {
390 	struct snd_pcm_substream *substream = strm->substream;
391 	struct snd_pcm_runtime *runtime;
392 	int current_period;
393 
394 	if (!strm->running || !substream || !substream->runtime)
395 		return;
396 
397 	runtime = substream->runtime;
398 	strm->buffer_pos += frames;
399 	WARN_ON(strm->buffer_pos > runtime->buffer_size);
400 
401 	/* ring buffer */
402 	if (strm->buffer_pos == runtime->buffer_size)
403 		strm->buffer_pos = 0;
404 
405 	current_period = strm->buffer_pos / runtime->period_size;
406 	if (strm->period_counter != current_period) {
407 		snd_pcm_period_elapsed(strm->substream);
408 		strm->period_counter = current_period;
409 	}
410 }
411 
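/*
 * PIO capture: drain whole frames from the RX FIFO into the runtime DMA
 * area. Samples are 16 bits wide and are read from the upper half of
 * SSIFRDR.
 */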
412 static int rz_ssi_pio_recv(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
413 {
414 	struct snd_pcm_substream *substream = strm->substream;
415 	struct snd_pcm_runtime *runtime;
416 	u16 *buf;
417 	int fifo_samples;
418 	int frames_left;
419 	int samples;
420 	int i;
421 
422 	if (!rz_ssi_stream_is_valid(ssi, strm))
423 		return -EINVAL;
424 
425 	runtime = substream->runtime;
426 
427 	do {
428 		/* frames left in this period */
429 		frames_left = runtime->period_size -
430 			      (strm->buffer_pos % runtime->period_size);
431 		if (!frames_left)
432 			frames_left = runtime->period_size;
433 
434 		/* Samples in RX FIFO */
435 		fifo_samples = (rz_ssi_reg_readl(ssi, SSIFSR) >>
436 				SSIFSR_RDC_SHIFT) & SSIFSR_RDC_MASK;
437 
438 		/* Only read full frames at a time */
439 		samples = 0;
440 		while (frames_left && (fifo_samples >= runtime->channels)) {
441 			samples += runtime->channels;
442 			fifo_samples -= runtime->channels;
443 			frames_left--;
444 		}
445 
446 		/* not enough samples yet */
447 		if (!samples)
448 			break;
449 
450 		/* calculate new buffer index */
451 		buf = (u16 *)runtime->dma_area;
452 		buf += strm->buffer_pos * runtime->channels;
453 
454 		/* Note, only supports 16-bit samples */
455 		for (i = 0; i < samples; i++)
456 			*buf++ = (u16)(rz_ssi_reg_readl(ssi, SSIFRDR) >> 16);
457 
458 		rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0);
459 		rz_ssi_pointer_update(strm, samples / runtime->channels);
460 	} while (!frames_left && fifo_samples >= runtime->channels);
461 
462 	return 0;
463 }
464 
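/*
 * PIO playback: copy as many whole frames as fit into the free TX FIFO
 * space (bounded by the current period), shifting each 16-bit sample into
 * the upper half of SSIFTDR.
 */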
465 static int rz_ssi_pio_send(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
466 {
467 	struct snd_pcm_substream *substream = strm->substream;
468 	struct snd_pcm_runtime *runtime = substream->runtime;
469 	int sample_space;
470 	int samples = 0;
471 	int frames_left;
472 	int i;
473 	u32 ssifsr;
474 	u16 *buf;
475 
476 	if (!rz_ssi_stream_is_valid(ssi, strm))
477 		return -EINVAL;
478 
479 	/* frames left in this period */
480 	frames_left = runtime->period_size - (strm->buffer_pos %
481 					      runtime->period_size);
482 	if (frames_left == 0)
483 		frames_left = runtime->period_size;
484 
485 	sample_space = strm->fifo_sample_size;
486 	ssifsr = rz_ssi_reg_readl(ssi, SSIFSR);
487 	sample_space -= (ssifsr >> SSIFSR_TDC_SHIFT) & SSIFSR_TDC_MASK;
488 
489 	/* Only add full frames at a time */
490 	while (frames_left && (sample_space >= runtime->channels)) {
491 		samples += runtime->channels;
492 		sample_space -= runtime->channels;
493 		frames_left--;
494 	}
495 
496 	/* no space to send anything right now */
497 	if (samples == 0)
498 		return 0;
499 
500 	/* calculate new buffer index */
501 	buf = (u16 *)(runtime->dma_area);
502 	buf += strm->buffer_pos * runtime->channels;
503 
504 	/* Note, only supports 16-bit samples */
505 	for (i = 0; i < samples; i++)
506 		rz_ssi_reg_writel(ssi, SSIFTDR, ((u32)(*buf++) << 16));
507 
508 	rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_TDE, 0);
509 	rz_ssi_pointer_update(strm, samples / runtime->channels);
510 
511 	return 0;
512 }
513 
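/*
 * Shared handler for the error/idle (int_req), TX empty (dma_tx), RX full
 * (dma_rx) and combined (dma_rt) interrupt lines. Underflow/overflow errors
 * require a full stop/restart of the channel, which is done here before
 * transfers continue.
 */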
514 static irqreturn_t rz_ssi_interrupt(int irq, void *data)
515 {
516 	struct rz_ssi_stream *strm = NULL;
517 	struct rz_ssi_priv *ssi = data;
518 	u32 ssisr = rz_ssi_reg_readl(ssi, SSISR);
519 
520 	if (ssi->playback.substream)
521 		strm = &ssi->playback;
522 	else if (ssi->capture.substream)
523 		strm = &ssi->capture;
524 	else
525 		return IRQ_HANDLED; /* Leftover TX/RX interrupt */
526 
527 	if (irq == ssi->irq_int) { /* error or idle */
528 		if (ssisr & SSISR_TUIRQ)
529 			strm->uerr_num++;
530 		if (ssisr & SSISR_TOIRQ)
531 			strm->oerr_num++;
532 		if (ssisr & SSISR_RUIRQ)
533 			strm->uerr_num++;
534 		if (ssisr & SSISR_ROIRQ)
535 			strm->oerr_num++;
536 
537 		if (ssisr & (SSISR_TUIRQ | SSISR_TOIRQ | SSISR_RUIRQ |
538 			     SSISR_ROIRQ)) {
539 			/* Error handling */
540 			/* You must reset (stop/restart) after each interrupt */
541 			rz_ssi_stop(ssi, strm);
542 
543 			/* Clear all flags */
544 			rz_ssi_reg_mask_setl(ssi, SSISR, SSISR_TOIRQ |
545 					     SSISR_TUIRQ | SSISR_ROIRQ |
546 					     SSISR_RUIRQ, 0);
547 
548 			/* Add/remove more data */
549 			strm->transfer(ssi, strm);
550 
551 			/* Resume */
552 			rz_ssi_start(ssi, strm);
553 		}
554 	}
555 
556 	if (!strm->running)
557 		return IRQ_HANDLED;
558 
559 	/* tx data empty */
560 	if (irq == ssi->irq_tx)
561 		strm->transfer(ssi, &ssi->playback);
562 
563 	/* rx data full */
564 	if (irq == ssi->irq_rx) {
565 		strm->transfer(ssi, &ssi->capture);
566 		rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0);
567 	}
568 
569 	if (irq == ssi->irq_rt) {
570 		struct snd_pcm_substream *substream = strm->substream;
571 
572 		if (rz_ssi_stream_is_play(ssi, substream)) {
573 			strm->transfer(ssi, &ssi->playback);
574 		} else {
575 			strm->transfer(ssi, &ssi->capture);
576 			rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0);
577 		}
578 	}
579 
580 	return IRQ_HANDLED;
581 }
582 
583 static int rz_ssi_dma_slave_config(struct rz_ssi_priv *ssi,
584 				   struct dma_chan *dma_ch, bool is_play)
585 {
586 	struct dma_slave_config cfg;
587 
588 	memset(&cfg, 0, sizeof(cfg));
589 
590 	cfg.direction = is_play ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM;
591 	cfg.dst_addr = ssi->phys + SSIFTDR;
592 	cfg.src_addr = ssi->phys + SSIFRDR;
593 	cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
594 	cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
595 
596 	return dmaengine_slave_config(dma_ch, &cfg);
597 }
598 
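/*
 * Queue one period of audio as a single DMA descriptor starting at
 * dma_buffer_pos. The completion callback queues the next period, so
 * transfers keep flowing until the stream stops or starts draining.
 */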
599 static int rz_ssi_dma_transfer(struct rz_ssi_priv *ssi,
600 			       struct rz_ssi_stream *strm)
601 {
602 	struct snd_pcm_substream *substream = strm->substream;
603 	struct dma_async_tx_descriptor *desc;
604 	struct snd_pcm_runtime *runtime;
605 	enum dma_transfer_direction dir;
606 	u32 dma_paddr, dma_size;
607 	int amount;
608 
609 	if (!rz_ssi_stream_is_valid(ssi, strm))
610 		return -EINVAL;
611 
612 	runtime = substream->runtime;
613 	if (runtime->state == SNDRV_PCM_STATE_DRAINING)
614 		/*
615 		 * Stream is ending, so do not queue up any more DMA
616 		 * transfers, otherwise we would play partial sound clips
617 		 * because we cannot shut off the DMA quickly enough.
618 		 */
619 		return 0;
620 
621 	dir = rz_ssi_stream_is_play(ssi, substream) ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM;
622 
623 	/* Always transfer 1 period */
624 	amount = runtime->period_size;
625 
626 	/* DMA physical address and size */
627 	dma_paddr = runtime->dma_addr + frames_to_bytes(runtime,
628 							strm->dma_buffer_pos);
629 	dma_size = frames_to_bytes(runtime, amount);
630 	desc = dmaengine_prep_slave_single(strm->dma_ch, dma_paddr, dma_size,
631 					   dir,
632 					   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
633 	if (!desc) {
634 		dev_err(ssi->dev, "dmaengine_prep_slave_single() fail\n");
635 		return -ENOMEM;
636 	}
637 
638 	desc->callback = rz_ssi_dma_complete;
639 	desc->callback_param = strm;
640 
641 	if (dmaengine_submit(desc) < 0) {
642 		dev_err(ssi->dev, "dmaengine_submit() fail\n");
643 		return -EIO;
644 	}
645 
646 	/* Update DMA pointer */
647 	strm->dma_buffer_pos += amount;
648 	if (strm->dma_buffer_pos >= runtime->buffer_size)
649 		strm->dma_buffer_pos = 0;
650 
651 	/* Start DMA */
652 	dma_async_issue_pending(strm->dma_ch);
653 
654 	return 0;
655 }
656 
657 static void rz_ssi_dma_complete(void *data)
658 {
659 	struct rz_ssi_stream *strm = (struct rz_ssi_stream *)data;
660 
661 	if (!strm->running || !strm->substream || !strm->substream->runtime)
662 		return;
663 
664 	/* Note that the next DMA transaction has probably already started */
665 	rz_ssi_pointer_update(strm, strm->substream->runtime->period_size);
666 
667 	/* Queue up another DMA transaction */
668 	rz_ssi_dma_transfer(strm->priv, strm);
669 }
670 
671 static void rz_ssi_release_dma_channels(struct rz_ssi_priv *ssi)
672 {
673 	if (ssi->playback.dma_ch) {
674 		dma_release_channel(ssi->playback.dma_ch);
675 		ssi->playback.dma_ch = NULL;
676 		if (ssi->dma_rt)
677 			ssi->dma_rt = false;
678 	}
679 
680 	if (ssi->capture.dma_ch) {
681 		dma_release_channel(ssi->capture.dma_ch);
682 		ssi->capture.dma_ch = NULL;
683 	}
684 }
685 
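/*
 * Request the "tx" and "rx" dmaengine channels; if neither exists, try the
 * single "rt" channel that serves both directions. On any failure all
 * channels are released and -ENODEV is returned so the caller can fall back
 * to PIO.
 */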
686 static int rz_ssi_dma_request(struct rz_ssi_priv *ssi, struct device *dev)
687 {
688 	ssi->playback.dma_ch = dma_request_chan(dev, "tx");
689 	if (IS_ERR(ssi->playback.dma_ch))
690 		ssi->playback.dma_ch = NULL;
691 
692 	ssi->capture.dma_ch = dma_request_chan(dev, "rx");
693 	if (IS_ERR(ssi->capture.dma_ch))
694 		ssi->capture.dma_ch = NULL;
695 
696 	if (!ssi->playback.dma_ch && !ssi->capture.dma_ch) {
697 		ssi->playback.dma_ch = dma_request_chan(dev, "rt");
698 		if (IS_ERR(ssi->playback.dma_ch)) {
699 			ssi->playback.dma_ch = NULL;
700 			goto no_dma;
701 		}
702 
703 		ssi->dma_rt = true;
704 	}
705 
706 	if (!rz_ssi_is_dma_enabled(ssi))
707 		goto no_dma;
708 
709 	if (ssi->playback.dma_ch &&
710 	    (rz_ssi_dma_slave_config(ssi, ssi->playback.dma_ch, true) < 0))
711 		goto no_dma;
712 
713 	if (ssi->capture.dma_ch &&
714 	    (rz_ssi_dma_slave_config(ssi, ssi->capture.dma_ch, false) < 0))
715 		goto no_dma;
716 
717 	return 0;
718 
719 no_dma:
720 	rz_ssi_release_dma_channels(ssi);
721 
722 	return -ENODEV;
723 }
724 
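/*
 * START: soft-reset the SSI, reinitialise the stream bookkeeping, prime the
 * hardware with one PIO burst or four queued DMA descriptors, then enable
 * the direction. STOP: disable the SSI and report any error counts.
 */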
725 static int rz_ssi_dai_trigger(struct snd_pcm_substream *substream, int cmd,
726 			      struct snd_soc_dai *dai)
727 {
728 	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
729 	struct rz_ssi_stream *strm = rz_ssi_stream_get(ssi, substream);
730 	int ret = 0, i, num_transfer = 1;
731 
732 	switch (cmd) {
733 	case SNDRV_PCM_TRIGGER_START:
734 		/* Soft Reset */
735 		rz_ssi_reg_mask_setl(ssi, SSIFCR, 0, SSIFCR_SSIRST);
736 		rz_ssi_reg_mask_setl(ssi, SSIFCR, SSIFCR_SSIRST, 0);
737 		udelay(5);
738 
739 		rz_ssi_stream_init(strm, substream);
740 
741 		if (ssi->dma_rt) {
742 			bool is_playback;
743 
744 			is_playback = rz_ssi_stream_is_play(ssi, substream);
745 			ret = rz_ssi_dma_slave_config(ssi, ssi->playback.dma_ch,
746 						      is_playback);
747 			/* Fall back to PIO */
748 			if (ret < 0) {
749 				ssi->playback.transfer = rz_ssi_pio_send;
750 				ssi->capture.transfer = rz_ssi_pio_recv;
751 				rz_ssi_release_dma_channels(ssi);
752 			}
753 		}
754 
755 		/* For DMA, queue up multiple DMA descriptors */
756 		if (rz_ssi_is_dma_enabled(ssi))
757 			num_transfer = 4;
758 
759 		for (i = 0; i < num_transfer; i++) {
760 			ret = strm->transfer(ssi, strm);
761 			if (ret)
762 				goto done;
763 		}
764 
765 		ret = rz_ssi_start(ssi, strm);
766 		break;
767 	case SNDRV_PCM_TRIGGER_STOP:
768 		rz_ssi_stop(ssi, strm);
769 		rz_ssi_stream_quit(ssi, strm);
770 		break;
771 	}
772 
773 done:
774 	return ret;
775 }
776 
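/*
 * Only the SSI as bit/frame clock provider (codec as consumer) and plain
 * I2S framing are supported; the NB/IB and NF/IF variants select the
 * BCKP/LRCKP polarity bits programmed later in rz_ssi_clk_setup().
 */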
777 static int rz_ssi_dai_set_fmt(struct snd_soc_dai *dai, unsigned int fmt)
778 {
779 	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
780 
781 	switch (fmt & SND_SOC_DAIFMT_CLOCK_PROVIDER_MASK) {
782 	case SND_SOC_DAIFMT_BP_FP:
783 		break;
784 	default:
785 		dev_err(ssi->dev, "Codec should be clk and frame consumer\n");
786 		return -EINVAL;
787 	}
788 
789 	/*
790 	 * set clock polarity
791 	 *
792 	 * "normal" BCLK = Signal is available at rising edge of BCLK
793 	 * "normal" FSYNC = (I2S) Left ch starts with falling FSYNC edge
794 	 */
795 	switch (fmt & SND_SOC_DAIFMT_INV_MASK) {
796 	case SND_SOC_DAIFMT_NB_NF:
797 		ssi->bckp_rise = false;
798 		ssi->lrckp_fsync_fall = false;
799 		break;
800 	case SND_SOC_DAIFMT_NB_IF:
801 		ssi->bckp_rise = false;
802 		ssi->lrckp_fsync_fall = true;
803 		break;
804 	case SND_SOC_DAIFMT_IB_NF:
805 		ssi->bckp_rise = true;
806 		ssi->lrckp_fsync_fall = false;
807 		break;
808 	case SND_SOC_DAIFMT_IB_IF:
809 		ssi->bckp_rise = true;
810 		ssi->lrckp_fsync_fall = true;
811 		break;
812 	default:
813 		return -EINVAL;
814 	}
815 
816 	/* only I2S is supported */
817 	switch (fmt & SND_SOC_DAIFMT_FORMAT_MASK) {
818 	case SND_SOC_DAIFMT_I2S:
819 		break;
820 	default:
821 		dev_err(ssi->dev, "Only I2S mode is supported.\n");
822 		return -EINVAL;
823 	}
824 
825 	return 0;
826 }
827 
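/*
 * Only 16-bit, 2-channel streams are accepted; anything else is rejected
 * before the clock divider is programmed.
 */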
828 static int rz_ssi_dai_hw_params(struct snd_pcm_substream *substream,
829 				struct snd_pcm_hw_params *params,
830 				struct snd_soc_dai *dai)
831 {
832 	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
833 	unsigned int sample_bits = hw_param_interval(params,
834 					SNDRV_PCM_HW_PARAM_SAMPLE_BITS)->min;
835 	unsigned int channels = params_channels(params);
836 
837 	if (sample_bits != 16) {
838 		dev_err(ssi->dev, "Unsupported sample width: %d\n",
839 			sample_bits);
840 		return -EINVAL;
841 	}
842 
843 	if (channels != 2) {
844 		dev_err(ssi->dev, "Unsupported number of channels: %d\n",
845 			channels);
846 		return -EINVAL;
847 	}
848 
849 	return rz_ssi_clk_setup(ssi, params_rate(params),
850 				params_channels(params));
851 }
852 
853 static const struct snd_soc_dai_ops rz_ssi_dai_ops = {
854 	.trigger	= rz_ssi_dai_trigger,
855 	.set_fmt	= rz_ssi_dai_set_fmt,
856 	.hw_params	= rz_ssi_dai_hw_params,
857 };
858 
859 static const struct snd_pcm_hardware rz_ssi_pcm_hardware = {
860 	.info			= SNDRV_PCM_INFO_INTERLEAVED	|
861 				  SNDRV_PCM_INFO_MMAP		|
862 				  SNDRV_PCM_INFO_MMAP_VALID,
863 	.buffer_bytes_max	= PREALLOC_BUFFER,
864 	.period_bytes_min	= 32,
865 	.period_bytes_max	= 8192,
866 	.channels_min		= SSI_CHAN_MIN,
867 	.channels_max		= SSI_CHAN_MAX,
868 	.periods_min		= 1,
869 	.periods_max		= 32,
870 	.fifo_size		= 32 * 2,
871 };
872 
873 static int rz_ssi_pcm_open(struct snd_soc_component *component,
874 			   struct snd_pcm_substream *substream)
875 {
876 	snd_soc_set_runtime_hwparams(substream, &rz_ssi_pcm_hardware);
877 
878 	return snd_pcm_hw_constraint_integer(substream->runtime,
879 					    SNDRV_PCM_HW_PARAM_PERIODS);
880 }
881 
882 static snd_pcm_uframes_t rz_ssi_pcm_pointer(struct snd_soc_component *component,
883 					    struct snd_pcm_substream *substream)
884 {
885 	struct snd_soc_dai *dai = rz_ssi_get_dai(substream);
886 	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
887 	struct rz_ssi_stream *strm = rz_ssi_stream_get(ssi, substream);
888 
889 	return strm->buffer_pos;
890 }
891 
892 static int rz_ssi_pcm_new(struct snd_soc_component *component,
893 			  struct snd_soc_pcm_runtime *rtd)
894 {
895 	snd_pcm_set_managed_buffer_all(rtd->pcm, SNDRV_DMA_TYPE_DEV,
896 				       rtd->card->snd_card->dev,
897 				       PREALLOC_BUFFER, PREALLOC_BUFFER_MAX);
898 	return 0;
899 }
900 
901 static struct snd_soc_dai_driver rz_ssi_soc_dai[] = {
902 	{
903 		.name			= "rz-ssi-dai",
904 		.playback = {
905 			.rates		= SSI_RATES,
906 			.formats	= SSI_FMTS,
907 			.channels_min	= SSI_CHAN_MIN,
908 			.channels_max	= SSI_CHAN_MAX,
909 		},
910 		.capture = {
911 			.rates		= SSI_RATES,
912 			.formats	= SSI_FMTS,
913 			.channels_min	= SSI_CHAN_MIN,
914 			.channels_max	= SSI_CHAN_MAX,
915 		},
916 		.ops = &rz_ssi_dai_ops,
917 	},
918 };
919 
920 static const struct snd_soc_component_driver rz_ssi_soc_component = {
921 	.name			= "rz-ssi",
922 	.open			= rz_ssi_pcm_open,
923 	.pointer		= rz_ssi_pcm_pointer,
924 	.pcm_construct		= rz_ssi_pcm_new,
925 	.legacy_dai_naming	= 1,
926 };
927 
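/*
 * Probe gathers the resources the code below requests from firmware: the
 * "ssi" and "ssi_sfr" module clocks, at least one of "audio_clk1" /
 * "audio_clk2", the "int_req" error interrupt plus either "dma_tx"/"dma_rx"
 * or "dma_rt" interrupts for PIO operation, optional "tx"/"rx" or "rt"
 * dmaengine channels, and an exclusive reset control.
 */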
928 static int rz_ssi_probe(struct platform_device *pdev)
929 {
930 	struct rz_ssi_priv *ssi;
931 	struct clk *audio_clk;
932 	struct resource *res;
933 	int ret;
934 
935 	ssi = devm_kzalloc(&pdev->dev, sizeof(*ssi), GFP_KERNEL);
936 	if (!ssi)
937 		return -ENOMEM;
938 
939 	ssi->pdev = pdev;
940 	ssi->dev = &pdev->dev;
941 	ssi->base = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
942 	if (IS_ERR(ssi->base))
943 		return PTR_ERR(ssi->base);
944 
945 	ssi->phys = res->start;
946 	ssi->clk = devm_clk_get(&pdev->dev, "ssi");
947 	if (IS_ERR(ssi->clk))
948 		return PTR_ERR(ssi->clk);
949 
950 	ssi->sfr_clk = devm_clk_get(&pdev->dev, "ssi_sfr");
951 	if (IS_ERR(ssi->sfr_clk))
952 		return PTR_ERR(ssi->sfr_clk);
953 
954 	audio_clk = devm_clk_get(&pdev->dev, "audio_clk1");
955 	if (IS_ERR(audio_clk))
956 		return dev_err_probe(&pdev->dev, PTR_ERR(audio_clk),
957 				     "no audio clk1");
958 
959 	ssi->audio_clk_1 = clk_get_rate(audio_clk);
960 	audio_clk = devm_clk_get(&pdev->dev, "audio_clk2");
961 	if (IS_ERR(audio_clk))
962 		return dev_err_probe(&pdev->dev, PTR_ERR(audio_clk),
963 				     "no audio clk2");
964 
965 	ssi->audio_clk_2 = clk_get_rate(audio_clk);
966 	if (!(ssi->audio_clk_1 || ssi->audio_clk_2))
967 		return dev_err_probe(&pdev->dev, -EINVAL,
968 				     "no audio clk1 or audio clk2");
969 
970 	ssi->audio_mck = ssi->audio_clk_1 ? ssi->audio_clk_1 : ssi->audio_clk_2;
971 
972 	/* Detect DMA support */
973 	ret = rz_ssi_dma_request(ssi, &pdev->dev);
974 	if (ret < 0) {
975 		dev_warn(&pdev->dev, "DMA not available, using PIO\n");
976 		ssi->playback.transfer = rz_ssi_pio_send;
977 		ssi->capture.transfer = rz_ssi_pio_recv;
978 	} else {
979 		dev_info(&pdev->dev, "DMA enabled");
980 		ssi->playback.transfer = rz_ssi_dma_transfer;
981 		ssi->capture.transfer = rz_ssi_dma_transfer;
982 	}
983 
984 	ssi->playback.priv = ssi;
985 	ssi->capture.priv = ssi;
986 
987 	spin_lock_init(&ssi->lock);
988 	dev_set_drvdata(&pdev->dev, ssi);
989 
990 	/* Error Interrupt */
991 	ssi->irq_int = platform_get_irq_byname(pdev, "int_req");
992 	if (ssi->irq_int < 0) {
993 		rz_ssi_release_dma_channels(ssi);
994 		return ssi->irq_int;
995 	}
996 
997 	ret = devm_request_irq(&pdev->dev, ssi->irq_int, &rz_ssi_interrupt,
998 			       0, dev_name(&pdev->dev), ssi);
999 	if (ret < 0) {
1000 		rz_ssi_release_dma_channels(ssi);
1001 		return dev_err_probe(&pdev->dev, ret,
1002 				     "irq request error (int_req)\n");
1003 	}
1004 
1005 	if (!rz_ssi_is_dma_enabled(ssi)) {
1006 		/* Tx and Rx interrupts (pio only) */
1007 		ssi->irq_tx = platform_get_irq_byname(pdev, "dma_tx");
1008 		ssi->irq_rx = platform_get_irq_byname(pdev, "dma_rx");
1009 		if (ssi->irq_tx == -ENXIO && ssi->irq_rx == -ENXIO) {
1010 			ssi->irq_rt = platform_get_irq_byname(pdev, "dma_rt");
1011 			if (ssi->irq_rt < 0)
1012 				return ssi->irq_rt;
1013 
1014 			ret = devm_request_irq(&pdev->dev, ssi->irq_rt,
1015 					       &rz_ssi_interrupt, 0,
1016 					       dev_name(&pdev->dev), ssi);
1017 			if (ret < 0)
1018 				return dev_err_probe(&pdev->dev, ret,
1019 						     "irq request error (dma_rt)\n");
1020 		} else {
1021 			if (ssi->irq_tx < 0)
1022 				return ssi->irq_tx;
1023 
1024 			if (ssi->irq_rx < 0)
1025 				return ssi->irq_rx;
1026 
1027 			ret = devm_request_irq(&pdev->dev, ssi->irq_tx,
1028 					       &rz_ssi_interrupt, 0,
1029 					       dev_name(&pdev->dev), ssi);
1030 			if (ret < 0)
1031 				return dev_err_probe(&pdev->dev, ret,
1032 						"irq request error (dma_tx)\n");
1033 
1034 			ret = devm_request_irq(&pdev->dev, ssi->irq_rx,
1035 					       &rz_ssi_interrupt, 0,
1036 					       dev_name(&pdev->dev), ssi);
1037 			if (ret < 0)
1038 				return dev_err_probe(&pdev->dev, ret,
1039 						"irq request error (dma_rx)\n");
1040 		}
1041 	}
1042 
1043 	ssi->rstc = devm_reset_control_get_exclusive(&pdev->dev, NULL);
1044 	if (IS_ERR(ssi->rstc)) {
1045 		ret = PTR_ERR(ssi->rstc);
1046 		goto err_reset;
1047 	}
1048 
1049 	reset_control_deassert(ssi->rstc);
1050 	pm_runtime_enable(&pdev->dev);
1051 	ret = pm_runtime_resume_and_get(&pdev->dev);
1052 	if (ret < 0) {
1053 		dev_err(&pdev->dev, "pm_runtime_resume_and_get failed\n");
1054 		goto err_pm;
1055 	}
1056 
1057 	ret = devm_snd_soc_register_component(&pdev->dev, &rz_ssi_soc_component,
1058 					      rz_ssi_soc_dai,
1059 					      ARRAY_SIZE(rz_ssi_soc_dai));
1060 	if (ret < 0) {
1061 		dev_err(&pdev->dev, "failed to register snd component\n");
1062 		goto err_snd_soc;
1063 	}
1064 
1065 	return 0;
1066 
1067 err_snd_soc:
1068 	pm_runtime_put(ssi->dev);
1069 err_pm:
1070 	pm_runtime_disable(ssi->dev);
1071 	reset_control_assert(ssi->rstc);
1072 err_reset:
1073 	rz_ssi_release_dma_channels(ssi);
1074 
1075 	return ret;
1076 }
1077 
1078 static void rz_ssi_remove(struct platform_device *pdev)
1079 {
1080 	struct rz_ssi_priv *ssi = dev_get_drvdata(&pdev->dev);
1081 
1082 	rz_ssi_release_dma_channels(ssi);
1083 
1084 	pm_runtime_put(ssi->dev);
1085 	pm_runtime_disable(ssi->dev);
1086 	reset_control_assert(ssi->rstc);
1087 }
1088 
1089 static const struct of_device_id rz_ssi_of_match[] = {
1090 	{ .compatible = "renesas,rz-ssi", },
1091 	{/* Sentinel */},
1092 };
1093 MODULE_DEVICE_TABLE(of, rz_ssi_of_match);
1094 
1095 static struct platform_driver rz_ssi_driver = {
1096 	.driver	= {
1097 		.name	= "rz-ssi-pcm-audio",
1098 		.of_match_table = rz_ssi_of_match,
1099 	},
1100 	.probe		= rz_ssi_probe,
1101 	.remove_new	= rz_ssi_remove,
1102 };
1103 
1104 module_platform_driver(rz_ssi_driver);
1105 
1106 MODULE_LICENSE("GPL v2");
1107 MODULE_DESCRIPTION("Renesas RZ/G2L ASoC Serial Sound Interface Driver");
1108 MODULE_AUTHOR("Biju Das <biju.das.jz@bp.renesas.com>");
1109