// SPDX-License-Identifier: (GPL-2.0 OR BSD-3-Clause)
//
// This file is provided under a dual BSD/GPLv2 license. When using or
// redistributing this file, you may do so under either license.
//
// Copyright(c) 2018 Intel Corporation. All rights reserved.
//
// Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
//	    Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
//	    Rander Wang <rander.wang@intel.com>
//	    Keyon Jie <yang.jie@linux.intel.com>
//

/*
 * Hardware interface for generic Intel audio DSP HDA IP
 */

#include <linux/pm_runtime.h>
#include <sound/hdaudio_ext.h>
#include <sound/hda_register.h>
#include <sound/sof.h>
#include "../ops.h"
#include "../sof-audio.h"
#include "hda.h"

/*
 * set up one of the BDL entries for a stream
 */
static int hda_setup_bdle(struct snd_sof_dev *sdev,
			  struct snd_dma_buffer *dmab,
			  struct hdac_stream *stream,
			  struct sof_intel_dsp_bdl **bdlp,
			  int offset, int size, int ioc)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_dsp_bdl *bdl = *bdlp;

	while (size > 0) {
		dma_addr_t addr;
		int chunk;

		if (stream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
			dev_err(sdev->dev, "error: stream frags exceeded\n");
			return -EINVAL;
		}

		addr = snd_sgbuf_get_addr(dmab, offset);
		/* program BDL addr */
		bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
		bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
		/* program BDL size */
		chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
		/* one BDLE should not cross 4K boundary */
		if (bus->align_bdle_4k) {
			u32 remain = 0x1000 - (offset & 0xfff);

			if (chunk > remain)
				chunk = remain;
		}
		bdl->size = cpu_to_le32(chunk);
		/* only program IOC when the whole segment is processed */
		size -= chunk;
		bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
		bdl++;
		stream->frags++;
		offset += chunk;

		dev_vdbg(sdev->dev, "bdl, frags:%d, chunk size:0x%x;\n",
			 stream->frags, chunk);
	}

	*bdlp = bdl;
	return offset;
}

/*
 * set up Buffer Descriptor List (BDL) for host memory transfer
 * BDL describes the location of the individual buffers and is little endian.
 */
int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
			     struct snd_dma_buffer *dmab,
			     struct hdac_stream *stream)
{
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct sof_intel_dsp_bdl *bdl;
	int i, offset, period_bytes, periods;
	int remain, ioc;

	period_bytes = stream->period_bytes;
	dev_dbg(sdev->dev, "period_bytes:0x%x\n", period_bytes);
	if (!period_bytes)
		period_bytes = stream->bufsize;

	periods = stream->bufsize / period_bytes;

	dev_dbg(sdev->dev, "periods:%d\n", periods);

	remain = stream->bufsize % period_bytes;
	if (remain)
		periods++;

	/* program the initial BDL entries */
	bdl = (struct sof_intel_dsp_bdl *)stream->bdl.area;
	offset = 0;
	stream->frags = 0;

	/*
	 * Set IOC only when position IPC is not used and
	 * a period wakeup is needed.
	 */
	ioc = hda->no_ipc_position ?
		!stream->no_period_wakeup : 0;

	for (i = 0; i < periods; i++) {
		if (i == (periods - 1) && remain)
			/* set the last small entry */
			offset = hda_setup_bdle(sdev, dmab,
						stream, &bdl, offset,
						remain, 0);
		else
			offset = hda_setup_bdle(sdev, dmab,
						stream, &bdl, offset,
						period_bytes, ioc);
	}

	return offset;
}

int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
			       struct hdac_ext_stream *stream,
			       int enable, u32 size)
{
	struct hdac_stream *hstream = &stream->hstream;
	u32 mask;

	if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
		dev_err(sdev->dev, "error: address of spib capability is NULL\n");
		return -EINVAL;
	}

	mask = (1 << hstream->index);

	/* enable/disable SPIB for the stream */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
				SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
				enable << hstream->index);

	/* set the SPIB value */
	sof_io_write(sdev, stream->spib_addr, size);

	return 0;
}

/* get next unused stream */
struct hdac_ext_stream *
hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *stream = NULL;
	struct hdac_stream *s;

	spin_lock_irq(&bus->reg_lock);

	/* get an unused stream */
	list_for_each_entry(s, &bus->stream_list, list) {
		if (s->direction == direction && !s->opened) {
			stream = stream_to_hdac_ext_stream(s);
			hda_stream = container_of(stream,
						  struct sof_intel_hda_stream,
						  hda_stream);
			/* check if the host DMA channel is reserved */
			if (hda_stream->host_reserved)
				continue;

			s->opened = true;
			break;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* stream found ? */
	if (!stream)
		dev_err(sdev->dev, "error: no free %s streams\n",
			direction == SNDRV_PCM_STREAM_PLAYBACK ?
			"playback" : "capture");

	/*
	 * Disable DMI Link L1 entry when capture stream is opened.
	 * Workaround to address a known issue with host DMA that results
	 * in xruns during pause/release in capture scenarios.
	 */
	if (!IS_ENABLED(CONFIG_SND_SOC_SOF_HDA_ALWAYS_ENABLE_DMI_L1))
		if (stream && direction == SNDRV_PCM_STREAM_CAPTURE)
			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
						HDA_VS_INTEL_EM2,
						HDA_VS_INTEL_EM2_L1SEN, 0);

	return stream;
}

/* free a stream */
int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *s;
	bool active_capture_stream = false;
	bool found = false;

	spin_lock_irq(&bus->reg_lock);

	/*
	 * close stream matching the stream tag
	 * and check if there are any open capture streams.
	 */
	list_for_each_entry(s, &bus->stream_list, list) {
		if (!s->opened)
			continue;

		if (s->direction == direction && s->stream_tag == stream_tag) {
			s->opened = false;
			found = true;
		} else if (s->direction == SNDRV_PCM_STREAM_CAPTURE) {
			active_capture_stream = true;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* Enable DMI L1 entry if there are no capture streams open */
	if (!IS_ENABLED(CONFIG_SND_SOC_SOF_HDA_ALWAYS_ENABLE_DMI_L1))
		if (!active_capture_stream)
			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
						HDA_VS_INTEL_EM2,
						HDA_VS_INTEL_EM2_L1SEN,
						HDA_VS_INTEL_EM2_L1SEN);

	if (!found) {
		dev_dbg(sdev->dev, "stream_tag %d not opened!\n", stream_tag);
		return -ENODEV;
	}

	return 0;
}

int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
			   struct hdac_ext_stream *stream, int cmd)
{
	struct hdac_stream *hstream = &stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	int ret;
	u32 run;

	/* cmd must be for audio stream */
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
	case SNDRV_PCM_TRIGGER_START:
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
					1 << hstream->index,
					1 << hstream->index);

		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK);

		ret = snd_sof_dsp_read_poll_timeout(sdev,
						    HDA_DSP_HDA_BAR,
						    sd_offset, run,
						    ((run & dma_start) == dma_start),
						    HDA_DSP_REG_POLL_INTERVAL_US,
						    HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret < 0) {
			dev_err(sdev->dev,
				"error: %s: cmd %d: timeout on STREAM_SD_OFFSET read\n",
				__func__, cmd);
			return ret;
		}

		hstream->running = true;
		break;
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
	case SNDRV_PCM_TRIGGER_STOP:
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);

		ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
						    sd_offset, run,
						    !(run & dma_start),
						    HDA_DSP_REG_POLL_INTERVAL_US,
						    HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret < 0) {
			dev_err(sdev->dev,
				"error: %s: cmd %d: timeout on STREAM_SD_OFFSET read\n",
				__func__, cmd);
			return ret;
		}

		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, sd_offset +
				  SOF_HDA_ADSP_REG_CL_SD_STS,
				  SOF_HDA_CL_DMA_SD_INT_MASK);

		hstream->running = false;
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
					1 << hstream->index, 0x0);
		break;
	default:
		dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
		return -EINVAL;
	}

	return 0;
}

/*
 * prepare for common hdac registers settings, for both code loader
 * and normal stream.
 */
int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
			     struct hdac_ext_stream *stream,
			     struct snd_dma_buffer *dmab,
			     struct snd_pcm_hw_params *params)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	int ret, timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	struct hdac_stream *hstream;
	int sd_offset;
	u32 val, mask;
	u32 run;

	/* sanity-check the arguments before touching any stream registers */
	if (!stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	if (!dmab) {
		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
		return -ENODEV;
	}

	hstream = &stream->hstream;
	sd_offset = SOF_STREAM_SD_OFFSET(hstream);

	/* decouple host and link DMA */
	mask = 0x1 << hstream->index;
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		dev_err(sdev->dev,
			"error: %s: timeout on STREAM_SD_OFFSET read1\n",
			__func__);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* stream reset */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 0x1,
				0x1);
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       sd_offset);
		if (val & 0x1)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "error: stream reset failed\n");
		return -ETIMEDOUT;
	}

	timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 0x1,
				0x0);

	/* wait for hardware to report that stream is out of reset */
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       sd_offset);
		if ((val & 0x1) == 0)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "error: timeout waiting for stream reset\n");
		return -ETIMEDOUT;
	}

	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  0x0);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		dev_err(sdev->dev,
			"error: %s: timeout on STREAM_SD_OFFSET read2\n",
			__func__);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program stream tag to set up stream descriptor for DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
				hstream->stream_tag <<
				SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_CBL,
			  hstream->bufsize);

	/*
	 * Recommended hardware programming sequence for HDAudio DMA format
	 *
	 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit
	 *    for corresponding stream index before the time of writing
	 *    format to SDxFMT register.
	 * 2. Write SDxFMT
	 * 3. Set PPCTL.PROCEN bit for corresponding stream index to
	 *    enable decoupled mode
	 */

	/* couple host and link DMA, disable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, 0);

	/* program stream format */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset +
				SOF_HDA_ADSP_REG_CL_SD_FORMAT,
				0xffff, hstream->format_val);

	/* decouple host and link DMA, enable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* enable position buffer */
	if (!(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
				& SOF_HDA_ADSP_DPLBASE_ENABLE)) {
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
				  upper_32_bits(bus->posbuf.addr));
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
				  (u32)bus->posbuf.addr |
				  SOF_HDA_ADSP_DPLBASE_ENABLE);
	}

	/* set interrupt enable bits */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* read FIFO size */
	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
		hstream->fifo_size =
			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 sd_offset +
					 SOF_HDA_ADSP_REG_CL_SD_FIFOSIZE);
		hstream->fifo_size &= 0xffff;
		hstream->fifo_size += 1;
	} else {
		hstream->fifo_size = 0;
	}

	return ret;
}

int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
			   struct snd_pcm_substream *substream)
{
	struct hdac_stream *stream = substream->runtime->private_data;
	struct hdac_ext_stream *link_dev = container_of(stream,
							struct hdac_ext_stream,
							hstream);
	struct hdac_bus *bus = sof_to_bus(sdev);
	u32 mask = 0x1 << stream->index;

	spin_lock_irq(&bus->reg_lock);
	/* couple host and link DMA if link DMA channel is idle */
	if (!link_dev->link_locked)
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
					SOF_HDA_REG_PP_PPCTL, mask, 0);
	spin_unlock_irq(&bus->reg_lock);

	stream->substream = NULL;

	return 0;
}

bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	bool ret = false;
	u32 status;

	/* This function can be called from the IRQ thread, so use spin_lock_irq */
	spin_lock_irq(&bus->reg_lock);

	status = snd_hdac_chip_readl(bus, INTSTS);
	dev_vdbg(bus->dev, "stream irq, INTSTS status: 0x%x\n", status);

	/* if the registers are inaccessible, ignore it */
	if (status != 0xffffffff)
		ret = true;

	spin_unlock_irq(&bus->reg_lock);

	return ret;
}

static void
hda_dsp_set_bytes_transferred(struct hdac_stream *hstream, u64 buffer_size)
{
	u64 prev_pos, pos, num_bytes;

	div64_u64_rem(hstream->curr_pos, buffer_size, &prev_pos);
	pos = snd_hdac_stream_get_pos_posbuf(hstream);

	if (pos < prev_pos)
		num_bytes = (buffer_size - prev_pos) + pos;
	else
		num_bytes = pos - prev_pos;

	hstream->curr_pos += num_bytes;
}

static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
{
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	struct hdac_stream *s;
	bool active = false;
	u32 sd_status;

	list_for_each_entry(s, &bus->stream_list, list) {
		if (status & BIT(s->index) && s->opened) {
			sd_status = snd_hdac_stream_readb(s, SD_STS);

			dev_vdbg(bus->dev, "stream %d status 0x%x\n",
				 s->index, sd_status);

			snd_hdac_stream_writeb(s, SD_STS, sd_status);

			active = true;
			if ((!s->substream && !s->cstream) ||
			    !s->running ||
			    (sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
				continue;

			/* Inform ALSA only if the position is not reported via IPC */
			if (s->substream && sof_hda->no_ipc_position) {
				snd_sof_pcm_period_elapsed(s->substream);
			} else if (s->cstream) {
				hda_dsp_set_bytes_transferred(s,
					s->cstream->runtime->buffer_size);
				snd_compr_fragment_elapsed(s->cstream);
			}
		}
	}

	return active;
}

irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
{
	struct snd_sof_dev *sdev = context;
	struct hdac_bus *bus = sof_to_bus(sdev);
#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	u32 rirb_status;
#endif
	bool active;
	u32 status;
	int i;

	/*
	 * Loop 10 times to handle missed interrupts caused by
	 * unsolicited responses from the codec
	 */
	for (i = 0, active = true; i < 10 && active; i++) {
		spin_lock_irq(&bus->reg_lock);

		status = snd_hdac_chip_readl(bus, INTSTS);

		/* check streams */
		active = hda_dsp_stream_check(bus, status);

		/* check and clear RIRB interrupt */
#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
		if (status & AZX_INT_CTRL_EN) {
			rirb_status = snd_hdac_chip_readb(bus, RIRBSTS);
			if (rirb_status & RIRB_INT_MASK) {
				active = true;
				if (rirb_status & RIRB_INT_RESPONSE)
					snd_hdac_bus_update_rirb(bus);
				snd_hdac_chip_writeb(bus, RIRBSTS,
						     RIRB_INT_MASK);
			}
		}
#endif
		spin_unlock_irq(&bus->reg_lock);
	}

	return IRQ_HANDLED;
}

int hda_dsp_stream_init(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_ext_stream *stream;
	struct hdac_stream *hstream;
	struct pci_dev *pci = to_pci_dev(sdev->dev);
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	int sd_offset;
	int i, num_playback, num_capture, num_total, ret;
	u32 gcap;

	gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
	dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);

	/* get stream count from GCAP */
	num_capture = (gcap >> 8) & 0x0f;
	num_playback = (gcap >> 12) & 0x0f;
	num_total = num_playback + num_capture;

	dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
		num_playback, num_capture);

	if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
		dev_err(sdev->dev, "error: too many playback streams %d\n",
			num_playback);
		return -EINVAL;
	}

	if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
		dev_err(sdev->dev, "error: too many capture streams %d\n",
			num_capture);
		return -EINVAL;
	}

	/*
	 * mem alloc for the position buffer
	 * TODO: check position buffer update
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  SOF_HDA_DPIB_ENTRY_SIZE * num_total,
				  &bus->posbuf);
	if (ret < 0) {
		dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
		return -ENOMEM;
	}

#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	/* mem alloc for the CORB/RIRB ringbuffers */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  PAGE_SIZE, &bus->rb);
	if (ret < 0) {
		dev_err(sdev->dev, "error: RB alloc failed\n");
		return -ENOMEM;
	}
#endif

	/* create capture streams */
	for (i = 0; i < num_capture; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		stream = &hda_stream->hda_stream;

		stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

		stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
			SOF_HDA_PPLC_INTERVAL * i;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream = &stream->hstream;
		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->stream_tag = i + 1;
		hstream->opened = false;
		hstream->running = false;
		hstream->direction = SNDRV_PCM_STREAM_CAPTURE;

		/* memory alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}
		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* create playback streams */
	for (i = num_capture; i < num_total; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		stream = &hda_stream->hda_stream;

		/* we always have DSP support */
		stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

		stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
			SOF_HDA_PPLC_INTERVAL * i;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream = &stream->hstream;
		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->stream_tag = i - num_capture + 1;
		hstream->opened = false;
		hstream->running = false;
		hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;

		/* mem alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}

		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* store total stream count (playback + capture) from GCAP */
	sof_hda->stream_max = num_total;

	return 0;
}

void hda_dsp_stream_free(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *s, *_s;
	struct hdac_ext_stream *stream;
	struct sof_intel_hda_stream *hda_stream;

	/* free position buffer */
	if (bus->posbuf.area)
		snd_dma_free_pages(&bus->posbuf);

#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	/* free the CORB/RIRB ringbuffer */
	if (bus->rb.area)
		snd_dma_free_pages(&bus->rb);
#endif

	list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
		/* TODO: decouple */

		/* free bdl buffer */
		if (s->bdl.area)
			snd_dma_free_pages(&s->bdl);
		list_del(&s->list);
		stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(stream, struct sof_intel_hda_stream,
					  hda_stream);
		devm_kfree(sdev->dev, hda_stream);
	}
}