// SPDX-License-Identifier: GPL-2.0
//
// Renesas RZ/G2L ASoC Serial Sound Interface (SSIF-2) Driver
//
// Copyright (C) 2021 Renesas Electronics Corp.
// Copyright (C) 2019 Chris Brandt.
//

#include <linux/clk.h>
#include <linux/dmaengine.h>
#include <linux/io.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <sound/soc.h>

/* REGISTER OFFSET */
#define SSICR			0x000
#define SSISR			0x004
#define SSIFCR			0x010
#define SSIFSR			0x014
#define SSIFTDR			0x018
#define SSIFRDR			0x01c
#define SSIOFR			0x020
#define SSISCR			0x024

/* SSI REGISTER BITS */
#define SSICR_DWL(x)		(((x) & 0x7) << 19)
#define SSICR_SWL(x)		(((x) & 0x7) << 16)

#define SSICR_CKS		BIT(30)
#define SSICR_TUIEN		BIT(29)
#define SSICR_TOIEN		BIT(28)
#define SSICR_RUIEN		BIT(27)
#define SSICR_ROIEN		BIT(26)
#define SSICR_MST		BIT(14)
#define SSICR_BCKP		BIT(13)
#define SSICR_LRCKP		BIT(12)
#define SSICR_CKDV(x)		(((x) & 0xf) << 4)
#define SSICR_TEN		BIT(1)
#define SSICR_REN		BIT(0)

#define SSISR_TUIRQ		BIT(29)
#define SSISR_TOIRQ		BIT(28)
#define SSISR_RUIRQ		BIT(27)
#define SSISR_ROIRQ		BIT(26)
#define SSISR_IIRQ		BIT(25)

#define SSIFCR_AUCKE		BIT(31)
#define SSIFCR_SSIRST		BIT(16)
#define SSIFCR_TIE		BIT(3)
#define SSIFCR_RIE		BIT(2)
#define SSIFCR_TFRST		BIT(1)
#define SSIFCR_RFRST		BIT(0)

#define SSIFSR_TDC_MASK		0x3f
#define SSIFSR_TDC_SHIFT	24
#define SSIFSR_RDC_MASK		0x3f
#define SSIFSR_RDC_SHIFT	8

#define SSIFSR_TDC(x)		(((x) & 0x1f) << 24)
#define SSIFSR_TDE		BIT(16)
#define SSIFSR_RDC(x)		(((x) & 0x1f) << 8)
#define SSIFSR_RDF		BIT(0)

#define SSIOFR_LRCONT		BIT(8)

#define SSISCR_TDES(x)		(((x) & 0x1f) << 8)
#define SSISCR_RDFS(x)		(((x) & 0x1f) << 0)

/* Pre-allocated buffer sizes */
#define PREALLOC_BUFFER		(SZ_32K)
#define PREALLOC_BUFFER_MAX	(SZ_32K)

#define SSI_RATES		SNDRV_PCM_RATE_8000_48000 /* 8 kHz to 48 kHz */
#define SSI_FMTS		SNDRV_PCM_FMTBIT_S16_LE
#define SSI_CHAN_MIN		2
#define SSI_CHAN_MAX		2
#define SSI_FIFO_DEPTH		32

struct rz_ssi_priv;

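/*
 * Per-direction stream context; struct rz_ssi_priv below embeds one
 * instance for playback and one for capture.
 */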
struct rz_ssi_stream {
	struct rz_ssi_priv *priv;
	struct snd_pcm_substream *substream;
	int fifo_sample_size;	/* sample capacity of SSI FIFO */
	int dma_buffer_pos;	/* frame position of the next DMA descriptor */
	int period_counter;	/* for keeping track of periods transferred */
	int sample_width;
	int buffer_pos;		/* current frame position in the buffer */
	int running;		/* 0=stopped, 1=running */

	int uerr_num;
	int oerr_num;

	struct dma_chan *dma_ch;

	int (*transfer)(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm);
};

struct rz_ssi_priv {
	void __iomem *base;
	struct platform_device *pdev;
	struct reset_control *rstc;
	struct device *dev;
	struct clk *sfr_clk;
	struct clk *clk;

	phys_addr_t phys;
	int irq_int;
	int irq_tx;
	int irq_rx;

	spinlock_t lock;

	/*
	 * The SSI supports full-duplex transmission and reception.
	 * However, if an error occurs, channel reset (both transmission
	 * and reception reset) is required.
	 * So it is better to use it as half-duplex (playback and capture
	 * should be done on separate channels).
	 */
	struct rz_ssi_stream playback;
	struct rz_ssi_stream capture;

	/* clock */
	unsigned long audio_mck;
	unsigned long audio_clk_1;
	unsigned long audio_clk_2;

	bool lrckp_fsync_fall;	/* LR clock polarity (SSICR.LRCKP) */
	bool bckp_rise;		/* Bit clock polarity (SSICR.BCKP) */
	bool dma_rt;
};

static void rz_ssi_dma_complete(void *data);

static void rz_ssi_reg_writel(struct rz_ssi_priv *priv, uint reg, u32 data)
{
	writel(data, (priv->base + reg));
}

static u32 rz_ssi_reg_readl(struct rz_ssi_priv *priv, uint reg)
{
	return readl(priv->base + reg);
}

static void rz_ssi_reg_mask_setl(struct rz_ssi_priv *priv, uint reg,
				 u32 bclr, u32 bset)
{
	u32 val;

	val = readl(priv->base + reg);
	val = (val & ~bclr) | bset;
	writel(val, (priv->base + reg));
}

static inline struct snd_soc_dai *
rz_ssi_get_dai(struct snd_pcm_substream *substream)
{
	struct snd_soc_pcm_runtime *rtd = asoc_substream_to_rtd(substream);

	return asoc_rtd_to_cpu(rtd, 0);
}

static inline bool rz_ssi_stream_is_play(struct rz_ssi_priv *ssi,
					 struct snd_pcm_substream *substream)
{
	return substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
}

static inline struct rz_ssi_stream *
rz_ssi_stream_get(struct rz_ssi_priv *ssi, struct snd_pcm_substream *substream)
{
	struct rz_ssi_stream *stream = &ssi->playback;

	if (substream->stream != SNDRV_PCM_STREAM_PLAYBACK)
		stream = &ssi->capture;

	return stream;
}

static inline bool rz_ssi_is_dma_enabled(struct rz_ssi_priv *ssi)
{
	return (ssi->playback.dma_ch && (ssi->dma_rt || ssi->capture.dma_ch));
}

static int rz_ssi_stream_is_valid(struct rz_ssi_priv *ssi,
				  struct rz_ssi_stream *strm)
{
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&ssi->lock, flags);
	ret = !!(strm->substream && strm->substream->runtime);
	spin_unlock_irqrestore(&ssi->lock, flags);

	return ret;
}

static int rz_ssi_stream_init(struct rz_ssi_priv *ssi,
			      struct rz_ssi_stream *strm,
			      struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;

	strm->substream = substream;
	strm->sample_width = samples_to_bytes(runtime, 1);
	strm->dma_buffer_pos = 0;
	strm->period_counter = 0;
	strm->buffer_pos = 0;

	strm->oerr_num = 0;
	strm->uerr_num = 0;
	strm->running = 0;

	/* fifo init */
	strm->fifo_sample_size = SSI_FIFO_DEPTH;

	return 0;
}

static void rz_ssi_stream_quit(struct rz_ssi_priv *ssi,
			       struct rz_ssi_stream *strm)
{
	struct snd_soc_dai *dai = rz_ssi_get_dai(strm->substream);
	unsigned long flags;

	spin_lock_irqsave(&ssi->lock, flags);
	strm->substream = NULL;
	spin_unlock_irqrestore(&ssi->lock, flags);

	if (strm->oerr_num > 0)
		dev_info(dai->dev, "overrun = %d\n", strm->oerr_num);

	if (strm->uerr_num > 0)
		dev_info(dai->dev, "underrun = %d\n", strm->uerr_num);
}

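/*
 * Configure the SSI as clock provider: BCLK/LRCLK are generated by dividing
 * one of the external audio clocks (AUDIO_CLK1 or AUDIO_CLK2).  The required
 * bit clock is rate * channels * 32 (fixed 32-bit system word length), and
 * CKDV must divide the selected clock down to exactly that rate.
 *
 * Worked example, assuming a 12.288 MHz audio clock: 48 kHz stereo needs
 * 48000 * 2 * 32 = 3.072 MHz, giving a divider of 4, i.e. ckdv[] index 2.
 */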
static int rz_ssi_clk_setup(struct rz_ssi_priv *ssi, unsigned int rate,
			    unsigned int channels)
{
	static s16 ckdv[16] = { 1,  2,  4,  8, 16, 32, 64, 128,
				6, 12, 24, 48, 96, -1, -1, -1 };
	unsigned int channel_bits = 32;	/* System Word Length */
	unsigned long bclk_rate = rate * channels * channel_bits;
	unsigned int div;
	unsigned int i;
	u32 ssicr = 0;
	u32 clk_ckdv;

	/* Clear AUCKE so we can set MST */
	rz_ssi_reg_writel(ssi, SSIFCR, 0);

	/* Continue to output LRCK pin even when idle */
	rz_ssi_reg_writel(ssi, SSIOFR, SSIOFR_LRCONT);
	if (ssi->audio_clk_1 && ssi->audio_clk_2) {
		if (ssi->audio_clk_1 % bclk_rate)
			ssi->audio_mck = ssi->audio_clk_2;
		else
			ssi->audio_mck = ssi->audio_clk_1;
	}

	/* Clock setting */
	ssicr |= SSICR_MST;
	if (ssi->audio_mck == ssi->audio_clk_1)
		ssicr |= SSICR_CKS;
	if (ssi->bckp_rise)
		ssicr |= SSICR_BCKP;
	if (ssi->lrckp_fsync_fall)
		ssicr |= SSICR_LRCKP;

	/* Determine the clock divider */
	clk_ckdv = 0;
	div = ssi->audio_mck / bclk_rate;
	/* try to find a match */
	for (i = 0; i < ARRAY_SIZE(ckdv); i++) {
		if (ckdv[i] == div) {
			clk_ckdv = i;
			break;
		}
	}

	if (i == ARRAY_SIZE(ckdv)) {
		dev_err(ssi->dev, "Rate not divisible by audio clock source\n");
		return -EINVAL;
	}

	/*
	 * DWL: Data Word Length = 16 bits
	 * SWL: System Word Length = 32 bits
	 */
	ssicr |= SSICR_CKDV(clk_ckdv);
	ssicr |= SSICR_DWL(1) | SSICR_SWL(3);
	rz_ssi_reg_writel(ssi, SSICR, ssicr);
	rz_ssi_reg_writel(ssi, SSIFCR,
			  (SSIFCR_AUCKE | SSIFCR_TFRST | SSIFCR_RFRST));

	return 0;
}

static int rz_ssi_start(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
{
	bool is_play = rz_ssi_stream_is_play(ssi, strm->substream);
	u32 ssicr, ssifcr;

	ssicr = rz_ssi_reg_readl(ssi, SSICR);
	ssifcr = rz_ssi_reg_readl(ssi, SSIFCR) & ~0xF;

	/* FIFO interrupt thresholds */
	if (rz_ssi_is_dma_enabled(ssi))
		rz_ssi_reg_writel(ssi, SSISCR, 0);
	else
		rz_ssi_reg_writel(ssi, SSISCR,
				  SSISCR_TDES(strm->fifo_sample_size / 2 - 1) |
				  SSISCR_RDFS(0));

	/* enable IRQ */
	if (is_play) {
		ssicr |= SSICR_TUIEN | SSICR_TOIEN;
		ssifcr |= SSIFCR_TIE | SSIFCR_RFRST;
	} else {
		ssicr |= SSICR_RUIEN | SSICR_ROIEN;
		ssifcr |= SSIFCR_RIE | SSIFCR_TFRST;
	}

	rz_ssi_reg_writel(ssi, SSICR, ssicr);
	rz_ssi_reg_writel(ssi, SSIFCR, ssifcr);

	/* Clear all error flags */
	rz_ssi_reg_mask_setl(ssi, SSISR,
			     (SSISR_TOIRQ | SSISR_TUIRQ | SSISR_ROIRQ |
			      SSISR_RUIRQ), 0);

	strm->running = 1;
	ssicr |= is_play ? SSICR_TEN : SSICR_REN;
	rz_ssi_reg_writel(ssi, SSICR, ssicr);

	return 0;
}

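/*
 * Stop a running stream: disable the transmitter/receiver and their
 * interrupts, cancel any in-flight DMA, wait up to ~100 us for the SSI to
 * signal idle (SSISR.IIRQ), then hold both FIFOs in reset until the next
 * start.
 */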
static int rz_ssi_stop(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
{
	int timeout;

	strm->running = 0;

	/* Disable TX/RX */
	rz_ssi_reg_mask_setl(ssi, SSICR, SSICR_TEN | SSICR_REN, 0);

	/* Cancel all remaining DMA transactions */
	if (rz_ssi_is_dma_enabled(ssi))
		dmaengine_terminate_async(strm->dma_ch);

	/* Disable irqs */
	rz_ssi_reg_mask_setl(ssi, SSICR, SSICR_TUIEN | SSICR_TOIEN |
			     SSICR_RUIEN | SSICR_ROIEN, 0);
	rz_ssi_reg_mask_setl(ssi, SSIFCR, SSIFCR_TIE | SSIFCR_RIE, 0);

	/* Clear all error flags */
	rz_ssi_reg_mask_setl(ssi, SSISR,
			     (SSISR_TOIRQ | SSISR_TUIRQ | SSISR_ROIRQ |
			      SSISR_RUIRQ), 0);

	/* Wait for idle */
	timeout = 100;
	while (--timeout) {
		if (rz_ssi_reg_readl(ssi, SSISR) & SSISR_IIRQ)
			break;
		udelay(1);
	}

	if (!timeout)
		dev_info(ssi->dev, "timeout waiting for SSI idle\n");

	/* Hold FIFOs in reset */
	rz_ssi_reg_mask_setl(ssi, SSIFCR, 0,
			     SSIFCR_TFRST | SSIFCR_RFRST);

	return 0;
}

static void rz_ssi_pointer_update(struct rz_ssi_stream *strm, int frames)
{
	struct snd_pcm_substream *substream = strm->substream;
	struct snd_pcm_runtime *runtime;
	int current_period;

	if (!strm->running || !substream || !substream->runtime)
		return;

	runtime = substream->runtime;
	strm->buffer_pos += frames;
	WARN_ON(strm->buffer_pos > runtime->buffer_size);

	/* ring buffer */
	if (strm->buffer_pos == runtime->buffer_size)
		strm->buffer_pos = 0;

	current_period = strm->buffer_pos / runtime->period_size;
	if (strm->period_counter != current_period) {
		snd_pcm_period_elapsed(strm->substream);
		strm->period_counter = current_period;
	}
}

static int rz_ssi_pio_recv(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
{
	struct snd_pcm_substream *substream = strm->substream;
	struct snd_pcm_runtime *runtime;
	u16 *buf;
	int fifo_samples;
	int frames_left;
	int samples = 0;
	int i;

	if (!rz_ssi_stream_is_valid(ssi, strm))
		return -EINVAL;

	runtime = substream->runtime;
	/* frames left in this period */
	frames_left = runtime->period_size - (strm->buffer_pos %
					      runtime->period_size);
	if (frames_left == 0)
		frames_left = runtime->period_size;

	/* Samples in RX FIFO */
	fifo_samples = (rz_ssi_reg_readl(ssi, SSIFSR) >>
			SSIFSR_RDC_SHIFT) & SSIFSR_RDC_MASK;

	/* Only read full frames at a time */
	while (frames_left && (fifo_samples >= runtime->channels)) {
		samples += runtime->channels;
		fifo_samples -= runtime->channels;
		frames_left--;
	}

	/* not enough samples yet */
	if (samples == 0)
		return 0;

	/* calculate new buffer index */
	buf = (u16 *)(runtime->dma_area);
	buf += strm->buffer_pos * runtime->channels;

	/* Note, only supports 16-bit samples */
	for (i = 0; i < samples; i++)
		*buf++ = (u16)(rz_ssi_reg_readl(ssi, SSIFRDR) >> 16);

	rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0);
	rz_ssi_pointer_update(strm, samples / runtime->channels);

	/*
	 * If we finished this period, but there are more samples in
	 * the RX FIFO, call this function again
	 */
	if (frames_left == 0 && fifo_samples >= runtime->channels)
		rz_ssi_pio_recv(ssi, strm);

	return 0;
}

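/*
 * PIO transmit: copy as many complete frames as currently fit in the TX
 * FIFO.  Samples are 16 bits wide and are written to the upper halfword of
 * SSIFTDR, mirroring the read path in rz_ssi_pio_recv() above.
 */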
static int rz_ssi_pio_send(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
{
	struct snd_pcm_substream *substream = strm->substream;
	struct snd_pcm_runtime *runtime = substream->runtime;
	int sample_space;
	int samples = 0;
	int frames_left;
	int i;
	u32 ssifsr;
	u16 *buf;

	if (!rz_ssi_stream_is_valid(ssi, strm))
		return -EINVAL;

	/* frames left in this period */
	frames_left = runtime->period_size - (strm->buffer_pos %
					      runtime->period_size);
	if (frames_left == 0)
		frames_left = runtime->period_size;

	sample_space = strm->fifo_sample_size;
	ssifsr = rz_ssi_reg_readl(ssi, SSIFSR);
	sample_space -= (ssifsr >> SSIFSR_TDC_SHIFT) & SSIFSR_TDC_MASK;

	/* Only add full frames at a time */
	while (frames_left && (sample_space >= runtime->channels)) {
		samples += runtime->channels;
		sample_space -= runtime->channels;
		frames_left--;
	}

	/* no space to send anything right now */
	if (samples == 0)
		return 0;

	/* calculate new buffer index */
	buf = (u16 *)(runtime->dma_area);
	buf += strm->buffer_pos * runtime->channels;

	/* Note, only supports 16-bit samples */
	for (i = 0; i < samples; i++)
		rz_ssi_reg_writel(ssi, SSIFTDR, ((u32)(*buf++) << 16));

	rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_TDE, 0);
	rz_ssi_pointer_update(strm, samples / runtime->channels);

	return 0;
}

static irqreturn_t rz_ssi_interrupt(int irq, void *data)
{
	struct rz_ssi_stream *strm = NULL;
	struct rz_ssi_priv *ssi = data;
	u32 ssisr = rz_ssi_reg_readl(ssi, SSISR);

	if (ssi->playback.substream)
		strm = &ssi->playback;
	else if (ssi->capture.substream)
		strm = &ssi->capture;
	else
		return IRQ_HANDLED; /* Left over TX/RX interrupt */

	if (irq == ssi->irq_int) { /* error or idle */
		if (ssisr & SSISR_TUIRQ)
			strm->uerr_num++;
		if (ssisr & SSISR_TOIRQ)
			strm->oerr_num++;
		if (ssisr & SSISR_RUIRQ)
			strm->uerr_num++;
		if (ssisr & SSISR_ROIRQ)
			strm->oerr_num++;

		if (ssisr & (SSISR_TUIRQ | SSISR_TOIRQ | SSISR_RUIRQ |
			     SSISR_ROIRQ)) {
			/* Error handling */
			/* You must reset (stop/restart) after each interrupt */
			rz_ssi_stop(ssi, strm);

			/* Clear all flags */
			rz_ssi_reg_mask_setl(ssi, SSISR, SSISR_TOIRQ |
					     SSISR_TUIRQ | SSISR_ROIRQ |
					     SSISR_RUIRQ, 0);

			/* Add/remove more data */
			strm->transfer(ssi, strm);

			/* Resume */
			rz_ssi_start(ssi, strm);
		}
	}

	if (!strm->running)
		return IRQ_HANDLED;

	/* tx data empty */
	if (irq == ssi->irq_tx)
		strm->transfer(ssi, &ssi->playback);

	/* rx data full */
	if (irq == ssi->irq_rx) {
		strm->transfer(ssi, &ssi->capture);
		rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0);
	}

	return IRQ_HANDLED;
}

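/*
 * Both FIFO addresses are programmed; the dmaengine uses only the one that
 * matches cfg.direction.  The 2-byte bus width matches the 16-bit samples
 * this driver handles.
 */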
static int rz_ssi_dma_slave_config(struct rz_ssi_priv *ssi,
				   struct dma_chan *dma_ch, bool is_play)
{
	struct dma_slave_config cfg;

	memset(&cfg, 0, sizeof(cfg));

	cfg.direction = is_play ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM;
	cfg.dst_addr = ssi->phys + SSIFTDR;
	cfg.src_addr = ssi->phys + SSIFRDR;
	cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
	cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;

	return dmaengine_slave_config(dma_ch, &cfg);
}

static int rz_ssi_dma_transfer(struct rz_ssi_priv *ssi,
			       struct rz_ssi_stream *strm)
{
	struct snd_pcm_substream *substream = strm->substream;
	struct dma_async_tx_descriptor *desc;
	struct snd_pcm_runtime *runtime;
	enum dma_transfer_direction dir;
	dma_addr_t dma_paddr;
	u32 dma_size;
	int amount;

	if (!rz_ssi_stream_is_valid(ssi, strm))
		return -EINVAL;

	runtime = substream->runtime;
	if (runtime->status->state == SNDRV_PCM_STATE_DRAINING)
		/*
		 * Stream is ending, so do not queue up any more DMA
		 * transfers otherwise we play partial sound clips
		 * because we can't shut off the DMA quick enough.
		 */
		return 0;

	dir = rz_ssi_stream_is_play(ssi, substream) ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM;

	/* Always transfer 1 period */
	amount = runtime->period_size;

	/* DMA physical address and size */
	dma_paddr = runtime->dma_addr + frames_to_bytes(runtime,
							strm->dma_buffer_pos);
	dma_size = frames_to_bytes(runtime, amount);
	desc = dmaengine_prep_slave_single(strm->dma_ch, dma_paddr, dma_size,
					   dir,
					   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc) {
		dev_err(ssi->dev, "dmaengine_prep_slave_single() fail\n");
		return -ENOMEM;
	}

	desc->callback = rz_ssi_dma_complete;
	desc->callback_param = strm;

	if (dmaengine_submit(desc) < 0) {
		dev_err(ssi->dev, "dmaengine_submit() fail\n");
		return -EIO;
	}

	/* Update DMA pointer */
	strm->dma_buffer_pos += amount;
	if (strm->dma_buffer_pos >= runtime->buffer_size)
		strm->dma_buffer_pos = 0;

	/* Start DMA */
	dma_async_issue_pending(strm->dma_ch);

	return 0;
}

static void rz_ssi_dma_complete(void *data)
{
	struct rz_ssi_stream *strm = (struct rz_ssi_stream *)data;

	if (!strm->running || !strm->substream || !strm->substream->runtime)
		return;

	/* Note that next DMA transaction has probably already started */
	rz_ssi_pointer_update(strm, strm->substream->runtime->period_size);

	/* Queue up another DMA transaction */
	rz_ssi_dma_transfer(strm->priv, strm);
}

static void rz_ssi_release_dma_channels(struct rz_ssi_priv *ssi)
{
	if (ssi->playback.dma_ch) {
		dma_release_channel(ssi->playback.dma_ch);
		ssi->playback.dma_ch = NULL;
		if (ssi->dma_rt)
			ssi->dma_rt = false;
	}

	if (ssi->capture.dma_ch) {
		dma_release_channel(ssi->capture.dma_ch);
		ssi->capture.dma_ch = NULL;
	}
}

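/*
 * Request dedicated "tx" and "rx" DMA channels.  If neither exists, fall
 * back to a single "rt" channel that is reconfigured for the active
 * direction at trigger time (half-duplex only, tracked via ssi->dma_rt).
 */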
static int rz_ssi_dma_request(struct rz_ssi_priv *ssi, struct device *dev)
{
	ssi->playback.dma_ch = dma_request_chan(dev, "tx");
	if (IS_ERR(ssi->playback.dma_ch))
		ssi->playback.dma_ch = NULL;

	ssi->capture.dma_ch = dma_request_chan(dev, "rx");
	if (IS_ERR(ssi->capture.dma_ch))
		ssi->capture.dma_ch = NULL;

	if (!ssi->playback.dma_ch && !ssi->capture.dma_ch) {
		ssi->playback.dma_ch = dma_request_chan(dev, "rt");
		if (IS_ERR(ssi->playback.dma_ch)) {
			ssi->playback.dma_ch = NULL;
			goto no_dma;
		}

		ssi->dma_rt = true;
	}

	if (!rz_ssi_is_dma_enabled(ssi))
		goto no_dma;

	if (ssi->playback.dma_ch &&
	    (rz_ssi_dma_slave_config(ssi, ssi->playback.dma_ch, true) < 0))
		goto no_dma;

	if (ssi->capture.dma_ch &&
	    (rz_ssi_dma_slave_config(ssi, ssi->capture.dma_ch, false) < 0))
		goto no_dma;

	return 0;

no_dma:
	rz_ssi_release_dma_channels(ssi);

	return -ENODEV;
}

static int rz_ssi_dai_trigger(struct snd_pcm_substream *substream, int cmd,
			      struct snd_soc_dai *dai)
{
	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
	struct rz_ssi_stream *strm = rz_ssi_stream_get(ssi, substream);
	int ret = 0, i, num_transfer = 1;

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
		/* Soft Reset */
		rz_ssi_reg_mask_setl(ssi, SSIFCR, 0, SSIFCR_SSIRST);
		rz_ssi_reg_mask_setl(ssi, SSIFCR, SSIFCR_SSIRST, 0);
		udelay(5);

		ret = rz_ssi_stream_init(ssi, strm, substream);
		if (ret)
			goto done;

		if (ssi->dma_rt) {
			bool is_playback;

			is_playback = rz_ssi_stream_is_play(ssi, substream);
			ret = rz_ssi_dma_slave_config(ssi, ssi->playback.dma_ch,
						      is_playback);
			/* Fallback to pio */
			if (ret < 0) {
				ssi->playback.transfer = rz_ssi_pio_send;
				ssi->capture.transfer = rz_ssi_pio_recv;
				rz_ssi_release_dma_channels(ssi);
			}
		}

		/* For DMA, queue up multiple DMA descriptors */
		if (rz_ssi_is_dma_enabled(ssi))
			num_transfer = 4;

		for (i = 0; i < num_transfer; i++) {
			ret = strm->transfer(ssi, strm);
			if (ret)
				goto done;
		}

		ret = rz_ssi_start(ssi, strm);
		break;
	case SNDRV_PCM_TRIGGER_STOP:
		rz_ssi_stop(ssi, strm);
		rz_ssi_stream_quit(ssi, strm);
		break;
	}

done:
	return ret;
}

static int rz_ssi_dai_set_fmt(struct snd_soc_dai *dai, unsigned int fmt)
{
	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);

	switch (fmt & SND_SOC_DAIFMT_CLOCK_PROVIDER_MASK) {
	case SND_SOC_DAIFMT_CBC_CFC:
		break;
	default:
		dev_err(ssi->dev, "Codec should be clk and frame consumer\n");
		return -EINVAL;
	}

	/*
	 * set clock polarity
	 *
	 * "normal" BCLK = Signal is available at rising edge of BCLK
	 * "normal" FSYNC = (I2S) Left ch starts with falling FSYNC edge
	 */
	switch (fmt & SND_SOC_DAIFMT_INV_MASK) {
	case SND_SOC_DAIFMT_NB_NF:
		ssi->bckp_rise = false;
		ssi->lrckp_fsync_fall = false;
		break;
	case SND_SOC_DAIFMT_NB_IF:
		ssi->bckp_rise = false;
		ssi->lrckp_fsync_fall = true;
		break;
	case SND_SOC_DAIFMT_IB_NF:
		ssi->bckp_rise = true;
		ssi->lrckp_fsync_fall = false;
		break;
	case SND_SOC_DAIFMT_IB_IF:
		ssi->bckp_rise = true;
		ssi->lrckp_fsync_fall = true;
		break;
	default:
		return -EINVAL;
	}

	/* only i2s support */
	switch (fmt & SND_SOC_DAIFMT_FORMAT_MASK) {
	case SND_SOC_DAIFMT_I2S:
		break;
	default:
		dev_err(ssi->dev, "Only I2S mode is supported.\n");
		return -EINVAL;
	}

	return 0;
}

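/*
 * Only 16-bit stereo is accepted here.  Combined with the fixed 32-bit
 * system word length programmed in rz_ssi_clk_setup(), the bit clock is
 * always 64 * fs.
 */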
static int rz_ssi_dai_hw_params(struct snd_pcm_substream *substream,
				struct snd_pcm_hw_params *params,
				struct snd_soc_dai *dai)
{
	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
	unsigned int sample_bits = hw_param_interval(params,
					SNDRV_PCM_HW_PARAM_SAMPLE_BITS)->min;
	unsigned int channels = params_channels(params);

	if (sample_bits != 16) {
		dev_err(ssi->dev, "Unsupported sample width: %d\n",
			sample_bits);
		return -EINVAL;
	}

	if (channels != 2) {
		dev_err(ssi->dev, "Number of channels not matched: %d\n",
			channels);
		return -EINVAL;
	}

	return rz_ssi_clk_setup(ssi, params_rate(params),
				params_channels(params));
}

static const struct snd_soc_dai_ops rz_ssi_dai_ops = {
	.trigger	= rz_ssi_dai_trigger,
	.set_fmt	= rz_ssi_dai_set_fmt,
	.hw_params	= rz_ssi_dai_hw_params,
};

static const struct snd_pcm_hardware rz_ssi_pcm_hardware = {
	.info			= SNDRV_PCM_INFO_INTERLEAVED	|
				  SNDRV_PCM_INFO_MMAP		|
				  SNDRV_PCM_INFO_MMAP_VALID,
	.buffer_bytes_max	= PREALLOC_BUFFER,
	.period_bytes_min	= 32,
	.period_bytes_max	= 8192,
	.channels_min		= SSI_CHAN_MIN,
	.channels_max		= SSI_CHAN_MAX,
	.periods_min		= 1,
	.periods_max		= 32,
	.fifo_size		= 32 * 2,
};

static int rz_ssi_pcm_open(struct snd_soc_component *component,
			   struct snd_pcm_substream *substream)
{
	snd_soc_set_runtime_hwparams(substream, &rz_ssi_pcm_hardware);

	return snd_pcm_hw_constraint_integer(substream->runtime,
					     SNDRV_PCM_HW_PARAM_PERIODS);
}

static snd_pcm_uframes_t rz_ssi_pcm_pointer(struct snd_soc_component *component,
					    struct snd_pcm_substream *substream)
{
	struct snd_soc_dai *dai = rz_ssi_get_dai(substream);
	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
	struct rz_ssi_stream *strm = rz_ssi_stream_get(ssi, substream);

	return strm->buffer_pos;
}

static int rz_ssi_pcm_new(struct snd_soc_component *component,
			  struct snd_soc_pcm_runtime *rtd)
{
	snd_pcm_set_managed_buffer_all(rtd->pcm, SNDRV_DMA_TYPE_DEV,
				       rtd->card->snd_card->dev,
				       PREALLOC_BUFFER, PREALLOC_BUFFER_MAX);
	return 0;
}

static struct snd_soc_dai_driver rz_ssi_soc_dai[] = {
	{
		.name			= "rz-ssi-dai",
		.playback = {
			.rates		= SSI_RATES,
			.formats	= SSI_FMTS,
			.channels_min	= SSI_CHAN_MIN,
			.channels_max	= SSI_CHAN_MAX,
		},
		.capture = {
			.rates		= SSI_RATES,
			.formats	= SSI_FMTS,
			.channels_min	= SSI_CHAN_MIN,
			.channels_max	= SSI_CHAN_MAX,
		},
		.ops = &rz_ssi_dai_ops,
	},
};

static const struct snd_soc_component_driver rz_ssi_soc_component = {
	.name		= "rz-ssi",
	.open		= rz_ssi_pcm_open,
	.pointer	= rz_ssi_pcm_pointer,
	.pcm_construct	= rz_ssi_pcm_new,
};

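/*
 * Probe: map the registers, look up the clocks, try to get DMA channels
 * (falling back to PIO with per-direction interrupts), request the
 * error/idle interrupt, deassert the reset, enable runtime PM and register
 * the ASoC component.
 */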
static int rz_ssi_probe(struct platform_device *pdev)
{
	struct rz_ssi_priv *ssi;
	struct clk *audio_clk;
	struct resource *res;
	int ret;

	ssi = devm_kzalloc(&pdev->dev, sizeof(*ssi), GFP_KERNEL);
	if (!ssi)
		return -ENOMEM;

	ssi->pdev = pdev;
	ssi->dev = &pdev->dev;
	ssi->base = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
	if (IS_ERR(ssi->base))
		return PTR_ERR(ssi->base);

	ssi->phys = res->start;
	ssi->clk = devm_clk_get(&pdev->dev, "ssi");
	if (IS_ERR(ssi->clk))
		return PTR_ERR(ssi->clk);

	ssi->sfr_clk = devm_clk_get(&pdev->dev, "ssi_sfr");
	if (IS_ERR(ssi->sfr_clk))
		return PTR_ERR(ssi->sfr_clk);

	audio_clk = devm_clk_get(&pdev->dev, "audio_clk1");
	if (IS_ERR(audio_clk))
		return dev_err_probe(&pdev->dev, PTR_ERR(audio_clk),
				     "no audio clk1");

	ssi->audio_clk_1 = clk_get_rate(audio_clk);
	audio_clk = devm_clk_get(&pdev->dev, "audio_clk2");
	if (IS_ERR(audio_clk))
		return dev_err_probe(&pdev->dev, PTR_ERR(audio_clk),
				     "no audio clk2");

	ssi->audio_clk_2 = clk_get_rate(audio_clk);
	if (!(ssi->audio_clk_1 || ssi->audio_clk_2))
		return dev_err_probe(&pdev->dev, -EINVAL,
				     "no audio clk1 or audio clk2");

	ssi->audio_mck = ssi->audio_clk_1 ? ssi->audio_clk_1 : ssi->audio_clk_2;

	/* Detect DMA support */
	ret = rz_ssi_dma_request(ssi, &pdev->dev);
	if (ret < 0) {
		dev_warn(&pdev->dev, "DMA not available, using PIO\n");
		ssi->playback.transfer = rz_ssi_pio_send;
		ssi->capture.transfer = rz_ssi_pio_recv;
	} else {
		dev_info(&pdev->dev, "DMA enabled\n");
		ssi->playback.transfer = rz_ssi_dma_transfer;
		ssi->capture.transfer = rz_ssi_dma_transfer;
	}

	ssi->playback.priv = ssi;
	ssi->capture.priv = ssi;

	/* Error Interrupt */
	ssi->irq_int = platform_get_irq_byname(pdev, "int_req");
	if (ssi->irq_int < 0)
		return dev_err_probe(&pdev->dev, -ENODEV,
				     "Unable to get SSI int_req IRQ\n");

	ret = devm_request_irq(&pdev->dev, ssi->irq_int, &rz_ssi_interrupt,
			       0, dev_name(&pdev->dev), ssi);
	if (ret < 0)
		return dev_err_probe(&pdev->dev, ret,
				     "irq request error (int_req)\n");

	if (!rz_ssi_is_dma_enabled(ssi)) {
		/* Tx and Rx interrupts (pio only) */
		ssi->irq_tx = platform_get_irq_byname(pdev, "dma_tx");
		if (ssi->irq_tx < 0)
			return dev_err_probe(&pdev->dev, -ENODEV,
					     "Unable to get SSI dma_tx IRQ\n");

		ret = devm_request_irq(&pdev->dev, ssi->irq_tx,
				       &rz_ssi_interrupt, 0,
				       dev_name(&pdev->dev), ssi);
		if (ret < 0)
			return dev_err_probe(&pdev->dev, ret,
					     "irq request error (dma_tx)\n");

		ssi->irq_rx = platform_get_irq_byname(pdev, "dma_rx");
		if (ssi->irq_rx < 0)
			return dev_err_probe(&pdev->dev, -ENODEV,
					     "Unable to get SSI dma_rx IRQ\n");

		ret = devm_request_irq(&pdev->dev, ssi->irq_rx,
				       &rz_ssi_interrupt, 0,
				       dev_name(&pdev->dev), ssi);
		if (ret < 0)
			return dev_err_probe(&pdev->dev, ret,
					     "irq request error (dma_rx)\n");
	}

	ssi->rstc = devm_reset_control_get_exclusive(&pdev->dev, NULL);
	if (IS_ERR(ssi->rstc))
		return PTR_ERR(ssi->rstc);

	reset_control_deassert(ssi->rstc);
	pm_runtime_enable(&pdev->dev);
	ret = pm_runtime_resume_and_get(&pdev->dev);
	if (ret < 0) {
		pm_runtime_disable(ssi->dev);
		reset_control_assert(ssi->rstc);
		return dev_err_probe(ssi->dev, ret, "pm_runtime_resume_and_get failed\n");
	}

	spin_lock_init(&ssi->lock);
	dev_set_drvdata(&pdev->dev, ssi);
	ret = devm_snd_soc_register_component(&pdev->dev, &rz_ssi_soc_component,
					      rz_ssi_soc_dai,
					      ARRAY_SIZE(rz_ssi_soc_dai));
	if (ret < 0) {
		rz_ssi_release_dma_channels(ssi);

		pm_runtime_put(ssi->dev);
		pm_runtime_disable(ssi->dev);
		reset_control_assert(ssi->rstc);
		dev_err(&pdev->dev, "failed to register snd component\n");
	}

	return ret;
}

static int rz_ssi_remove(struct platform_device *pdev)
{
	struct rz_ssi_priv *ssi = dev_get_drvdata(&pdev->dev);

	rz_ssi_release_dma_channels(ssi);

	pm_runtime_put(ssi->dev);
	pm_runtime_disable(ssi->dev);
	reset_control_assert(ssi->rstc);

	return 0;
}

static const struct of_device_id rz_ssi_of_match[] = {
	{ .compatible = "renesas,rz-ssi", },
	{/* Sentinel */},
};
MODULE_DEVICE_TABLE(of, rz_ssi_of_match);

static struct platform_driver rz_ssi_driver = {
	.driver	= {
		.name		= "rz-ssi-pcm-audio",
		.of_match_table	= rz_ssi_of_match,
	},
	.probe		= rz_ssi_probe,
	.remove		= rz_ssi_remove,
};

module_platform_driver(rz_ssi_driver);

MODULE_LICENSE("GPL v2");
MODULE_DESCRIPTION("Renesas RZ/G2L ASoC Serial Sound Interface Driver");
MODULE_AUTHOR("Biju Das <biju.das.jz@bp.renesas.com>");