// SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
//
// This file is provided under a dual BSD/GPLv2 license. When using or
// redistributing this file, you may do so under either license.
//
// Copyright(c) 2018 Intel Corporation. All rights reserved.
//
// Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
//	    Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
//	    Rander Wang <rander.wang@intel.com>
//	    Keyon Jie <yang.jie@linux.intel.com>
//

/*
 * Hardware interface for generic Intel audio DSP HDA IP
 */

#include <linux/pm_runtime.h>
#include <sound/hdaudio_ext.h>
#include <sound/hda_register.h>
#include <sound/sof.h>
#include "../ops.h"
#include "../sof-audio.h"
#include "hda.h"

#define HDA_LTRP_GB_VALUE_US	95

/*
 * set up one of BDL entries for a stream
 */
static int hda_setup_bdle(struct snd_sof_dev *sdev,
			  struct snd_dma_buffer *dmab,
			  struct hdac_stream *stream,
			  struct sof_intel_dsp_bdl **bdlp,
			  int offset, int size, int ioc)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_dsp_bdl *bdl = *bdlp;

	while (size > 0) {
		dma_addr_t addr;
		int chunk;

		if (stream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
			dev_err(sdev->dev, "error: stream frags exceeded\n");
			return -EINVAL;
		}

		addr = snd_sgbuf_get_addr(dmab, offset);
		/* program BDL addr */
		bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
		bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
		/* program BDL size */
		chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
		/* one BDLE should not cross 4K boundary */
		if (bus->align_bdle_4k) {
			u32 remain = 0x1000 - (offset & 0xfff);

			if (chunk > remain)
				chunk = remain;
		}
		bdl->size = cpu_to_le32(chunk);
		/* only program IOC when the whole segment is processed */
		size -= chunk;
		bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
		bdl++;
		stream->frags++;
		offset += chunk;

		dev_vdbg(sdev->dev, "bdl, frags:%d, chunk size:0x%x;\n",
			 stream->frags, chunk);
	}

	*bdlp = bdl;
	return offset;
}

/*
 * set up Buffer Descriptor List (BDL) for host memory transfer
 * BDL describes the location of the individual buffers and is little endian.
 */
int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
			     struct snd_dma_buffer *dmab,
			     struct hdac_stream *stream)
{
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct sof_intel_dsp_bdl *bdl;
	int i, offset, period_bytes, periods;
	int remain, ioc;

	period_bytes = stream->period_bytes;
	dev_dbg(sdev->dev, "period_bytes:0x%x\n", period_bytes);
	if (!period_bytes)
		period_bytes = stream->bufsize;

	periods = stream->bufsize / period_bytes;

	dev_dbg(sdev->dev, "periods:%d\n", periods);

	remain = stream->bufsize % period_bytes;
	if (remain)
		periods++;

	/* program the initial BDL entries */
	bdl = (struct sof_intel_dsp_bdl *)stream->bdl.area;
	offset = 0;
	stream->frags = 0;

	/*
	 * Set IOC only when the position is not reported via IPC
	 * and period wakeups are needed.
	 */
	ioc = hda->no_ipc_position ?
	      !stream->no_period_wakeup : 0;
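
	/*
	 * Program BDL entries period by period. When the buffer size is not
	 * an exact multiple of the period size, the trailing remainder is
	 * described by a final, shorter set of entries with IOC disabled.
	 */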
	for (i = 0; i < periods; i++) {
		if (i == (periods - 1) && remain)
			/* set the last small entry */
			offset = hda_setup_bdle(sdev, dmab,
						stream, &bdl, offset,
						remain, 0);
		else
			offset = hda_setup_bdle(sdev, dmab,
						stream, &bdl, offset,
						period_bytes, ioc);
	}

	return offset;
}

int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
			       struct hdac_ext_stream *stream,
			       int enable, u32 size)
{
	struct hdac_stream *hstream = &stream->hstream;
	u32 mask;

	if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
		dev_err(sdev->dev, "error: address of spib capability is NULL\n");
		return -EINVAL;
	}

	mask = (1 << hstream->index);

	/* enable/disable SPIB for the stream */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
				SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
				enable << hstream->index);

	/* set the SPIB value */
	sof_io_write(sdev, stream->spib_addr, size);

	return 0;
}

/* get next unused stream */
struct hdac_ext_stream *
hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction, u32 flags)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *stream = NULL;
	struct hdac_stream *s;

	spin_lock_irq(&bus->reg_lock);

	/* get an unused stream */
	list_for_each_entry(s, &bus->stream_list, list) {
		if (s->direction == direction && !s->opened) {
			stream = stream_to_hdac_ext_stream(s);
			hda_stream = container_of(stream,
						  struct sof_intel_hda_stream,
						  hda_stream);
			/* skip the stream if its host DMA channel is reserved */
			if (hda_stream->host_reserved) {
				stream = NULL;
				continue;
			}

			s->opened = true;
			break;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* stream found? */
	if (!stream) {
		dev_err(sdev->dev, "error: no free %s streams\n",
			direction == SNDRV_PCM_STREAM_PLAYBACK ?
			"playback" : "capture");
		return stream;
	}

	hda_stream->flags = flags;

	/*
	 * Prevent DMI Link L1 entry for streams that don't support it.
	 * Workaround to address a known issue with host DMA that results
	 * in xruns during pause/release in capture scenarios.
	 */
	if (!(flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE))
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, 0);

	return stream;
}

/* free a stream */
int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *stream;
	struct hdac_stream *s;
	bool dmi_l1_enable = true;
	bool found = false;

	spin_lock_irq(&bus->reg_lock);

	/*
	 * close stream matching the stream tag and check if there are any
	 * open streams that are DMI L1 incompatible.
	 */
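	/*
	 * Both the tag lookup and the L1 compatibility scan run under
	 * reg_lock so the DMI L1 decision stays consistent with concurrent
	 * hda_dsp_stream_get()/hda_dsp_stream_put() callers.
	 */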
	list_for_each_entry(s, &bus->stream_list, list) {
		stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(stream, struct sof_intel_hda_stream, hda_stream);

		if (!s->opened)
			continue;

		if (s->direction == direction && s->stream_tag == stream_tag) {
			s->opened = false;
			found = true;
		} else if (!(hda_stream->flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
			dmi_l1_enable = false;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* Enable DMI L1 if permitted */
	if (dmi_l1_enable)
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, HDA_VS_INTEL_EM2_L1SEN);

	if (!found) {
		dev_dbg(sdev->dev, "stream_tag %d not opened!\n", stream_tag);
		return -ENODEV;
	}

	return 0;
}

int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
			   struct hdac_ext_stream *stream, int cmd)
{
	struct hdac_stream *hstream = &stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	int ret;
	u32 run;

	/* cmd must be for audio stream */
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
	case SNDRV_PCM_TRIGGER_START:
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
					1 << hstream->index,
					1 << hstream->index);

		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK);

		ret = snd_sof_dsp_read_poll_timeout(sdev,
						    HDA_DSP_HDA_BAR,
						    sd_offset, run,
						    ((run & dma_start) == dma_start),
						    HDA_DSP_REG_POLL_INTERVAL_US,
						    HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret < 0) {
			dev_err(sdev->dev,
				"error: %s: cmd %d: timeout on STREAM_SD_OFFSET read\n",
				__func__, cmd);
			return ret;
		}

		hstream->running = true;
		break;
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
	case SNDRV_PCM_TRIGGER_STOP:
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);

		ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
						    sd_offset, run,
						    !(run & dma_start),
						    HDA_DSP_REG_POLL_INTERVAL_US,
						    HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret < 0) {
			dev_err(sdev->dev,
				"error: %s: cmd %d: timeout on STREAM_SD_OFFSET read\n",
				__func__, cmd);
			return ret;
		}

		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, sd_offset +
				  SOF_HDA_ADSP_REG_CL_SD_STS,
				  SOF_HDA_CL_DMA_SD_INT_MASK);

		hstream->running = false;
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
					1 << hstream->index, 0x0);
		break;
	default:
		dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
		return -EINVAL;
	}

	return 0;
}

/* minimal recommended programming for ICCMAX stream */
int hda_dsp_iccmax_stream_hw_params(struct snd_sof_dev *sdev, struct hdac_ext_stream *stream,
				    struct snd_dma_buffer *dmab,
				    struct snd_pcm_hw_params *params)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *hstream;
	int sd_offset;
	int ret;
	u32 mask;

	if (!stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	hstream = &stream->hstream;
	sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	mask = 0x1 << hstream->index;

	if (hstream->posbuf)
		*hstream->posbuf = 0;

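	/*
	 * The programming below mirrors hda_dsp_stream_hw_params() but skips
	 * the stream reset, stream tag and format setup, which is the minimal
	 * sequence needed for the ICCMAX stream used during FW boot.
	 */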
	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  0x0);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_CBL,
			  hstream->bufsize);

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* decouple host and link DMA, enable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* Follow HW recommendation to set the guardband value to 95us during FW boot */
	snd_hdac_chip_updateb(bus, VS_LTRP, HDA_VS_INTEL_LTRP_GB_MASK, HDA_LTRP_GB_VALUE_US);

	/* start DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_SD_CTL_DMA_START, SOF_HDA_SD_CTL_DMA_START);

	return 0;
}

/*
 * prepare for common hdac registers settings, for both code loader
 * and normal stream.
 */
int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
			     struct hdac_ext_stream *stream,
			     struct snd_dma_buffer *dmab,
			     struct snd_pcm_hw_params *params)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *hstream;
	int sd_offset;
	int ret, timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	u32 val, mask;
	u32 run;

	if (!stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	if (!dmab) {
		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
		return -ENODEV;
	}

	hstream = &stream->hstream;
	sd_offset = SOF_STREAM_SD_OFFSET(hstream);

	/* decouple host and link DMA */
	mask = 0x1 << hstream->index;
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		dev_err(sdev->dev,
			"error: %s: timeout on STREAM_SD_OFFSET read1\n",
			__func__);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* stream reset */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 0x1,
				0x1);
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       sd_offset);
		if (val & 0x1)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "error: stream reset failed\n");
		return -ETIMEDOUT;
	}

	timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 0x1,
				0x0);

	/* wait for hardware to report that stream is out of reset */
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       sd_offset);
		if ((val & 0x1) == 0)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "error: timeout waiting for stream reset\n");
		return -ETIMEDOUT;
	}

	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  0x0);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		dev_err(sdev->dev,
			"error: %s: timeout on STREAM_SD_OFFSET read2\n",
			__func__);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program stream tag to set up stream descriptor for DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
				hstream->stream_tag <<
				SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_CBL,
			  hstream->bufsize);

	/*
	 * Recommended hardware programming sequence for HDAudio DMA format
	 *
	 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit
	 *    for corresponding stream index before the time of writing
	 *    format to SDxFMT register.
	 * 2. Write SDxFMT
	 * 3. Set PPCTL.PROCEN bit for corresponding stream index to
	 *    enable decoupled mode
	 */

	/* couple host and link DMA, disable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, 0);

	/* program stream format */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset +
				SOF_HDA_ADSP_REG_CL_SD_FORMAT,
				0xffff, hstream->format_val);

	/* decouple host and link DMA, enable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* enable position buffer */
	if (!(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
	      & SOF_HDA_ADSP_DPLBASE_ENABLE)) {
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
				  upper_32_bits(bus->posbuf.addr));
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
				  (u32)bus->posbuf.addr |
				  SOF_HDA_ADSP_DPLBASE_ENABLE);
	}

	/* set interrupt enable bits */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* read FIFO size */
	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
		hstream->fifo_size =
			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 sd_offset +
					 SOF_HDA_ADSP_REG_CL_SD_FIFOSIZE);
		hstream->fifo_size &= 0xffff;
		hstream->fifo_size += 1;
	} else {
		hstream->fifo_size = 0;
	}

	return ret;
}

int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
			   struct snd_pcm_substream *substream)
{
	struct hdac_stream *stream = substream->runtime->private_data;
	struct hdac_ext_stream *link_dev = container_of(stream,
							struct hdac_ext_stream,
							hstream);
	struct hdac_bus *bus = sof_to_bus(sdev);
	u32 mask = 0x1 << stream->index;

	spin_lock_irq(&bus->reg_lock);
	/* couple host and link DMA if link DMA channel is idle */
	if (!link_dev->link_locked)
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
					SOF_HDA_REG_PP_PPCTL, mask, 0);
	spin_unlock_irq(&bus->reg_lock);

	stream->substream = NULL;

	return 0;
}

bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	bool ret = false;
	u32 status;

	/* The function can be called from an irq thread, so use spin_lock_irq */
	spin_lock_irq(&bus->reg_lock);

	status = snd_hdac_chip_readl(bus, INTSTS);
	dev_vdbg(bus->dev, "stream irq, INTSTS status: 0x%x\n", status);

	/* if the register is inaccessible, ignore it */
	if (status != 0xffffffff)
		ret = true;

	spin_unlock_irq(&bus->reg_lock);

	return ret;
}

static void
hda_dsp_set_bytes_transferred(struct hdac_stream *hstream, u64 buffer_size)
{
	u64 prev_pos, pos, num_bytes;

	div64_u64_rem(hstream->curr_pos, buffer_size, &prev_pos);
	pos = snd_hdac_stream_get_pos_posbuf(hstream);

	if (pos < prev_pos)
		num_bytes = (buffer_size - prev_pos) + pos;
	else
		num_bytes = pos - prev_pos;

	hstream->curr_pos += num_bytes;
}
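
/*
 * Scan all streams flagged in INTSTS: acknowledge each stream's SD_STS bits
 * and, when a buffer completion is pending, notify ALSA (period elapsed for
 * PCM streams when positions are not delivered over IPC, fragment elapsed
 * for compress streams).
 */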
static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
{
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	struct hdac_stream *s;
	bool active = false;
	u32 sd_status;

	list_for_each_entry(s, &bus->stream_list, list) {
		if (status & BIT(s->index) && s->opened) {
			sd_status = snd_hdac_stream_readb(s, SD_STS);

			dev_vdbg(bus->dev, "stream %d status 0x%x\n",
				 s->index, sd_status);

			snd_hdac_stream_writeb(s, SD_STS, sd_status);

			active = true;
			if ((!s->substream && !s->cstream) ||
			    !s->running ||
			    (sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
				continue;

			/* Inform ALSA only if the position is not reported via IPC */
			if (s->substream && sof_hda->no_ipc_position) {
				snd_sof_pcm_period_elapsed(s->substream);
			} else if (s->cstream) {
				hda_dsp_set_bytes_transferred(s,
					s->cstream->runtime->buffer_size);
				snd_compr_fragment_elapsed(s->cstream);
			}
		}
	}

	return active;
}

irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
{
	struct snd_sof_dev *sdev = context;
	struct hdac_bus *bus = sof_to_bus(sdev);
#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	u32 rirb_status;
#endif
	bool active;
	u32 status;
	int i;

	/*
	 * Loop 10 times to handle missed interrupts caused by
	 * unsolicited responses from the codec
	 */
	for (i = 0, active = true; i < 10 && active; i++) {
		spin_lock_irq(&bus->reg_lock);

		status = snd_hdac_chip_readl(bus, INTSTS);

		/* check streams */
		active = hda_dsp_stream_check(bus, status);

		/* check and clear RIRB interrupt */
#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
		if (status & AZX_INT_CTRL_EN) {
			rirb_status = snd_hdac_chip_readb(bus, RIRBSTS);
			if (rirb_status & RIRB_INT_MASK) {
				/*
				 * Clearing the interrupt status here ensures
				 * that no interrupt gets masked after the RIRB
				 * wp is read in snd_hdac_bus_update_rirb.
				 */
				snd_hdac_chip_writeb(bus, RIRBSTS,
						     RIRB_INT_MASK);
				active = true;
				if (rirb_status & RIRB_INT_RESPONSE)
					snd_hdac_bus_update_rirb(bus);
			}
		}
#endif
		spin_unlock_irq(&bus->reg_lock);
	}

	return IRQ_HANDLED;
}

int hda_dsp_stream_init(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_ext_stream *stream;
	struct hdac_stream *hstream;
	struct pci_dev *pci = to_pci_dev(sdev->dev);
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	int sd_offset;
	int i, num_playback, num_capture, num_total, ret;
	u32 gcap;

	gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
	dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);

	/* get stream count from GCAP */
	num_capture = (gcap >> 8) & 0x0f;
	num_playback = (gcap >> 12) & 0x0f;
	num_total = num_playback + num_capture;

	dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
		num_playback, num_capture);

	if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
		dev_err(sdev->dev, "error: too many playback streams %d\n",
			num_playback);
		return -EINVAL;
	}

	if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
		dev_err(sdev->dev, "error: too many capture streams %d\n",
			num_capture);
		return -EINVAL;
	}

	/*
	 * mem alloc for the position buffer
	 * TODO: check position buffer update
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  SOF_HDA_DPIB_ENTRY_SIZE * num_total,
				  &bus->posbuf);
	if (ret < 0) {
		dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
		return -ENOMEM;
	}

#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	/* mem alloc for the CORB/RIRB ringbuffers */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  PAGE_SIZE, &bus->rb);
	if (ret < 0) {
		dev_err(sdev->dev, "error: RB alloc failed\n");
		return -ENOMEM;
	}
#endif

	/* create capture streams */
	for (i = 0; i < num_capture; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		stream = &hda_stream->hda_stream;

		stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

		stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
			SOF_HDA_PPLC_INTERVAL * i;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream = &stream->hstream;
		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->stream_tag = i + 1;
		hstream->opened = false;
		hstream->running = false;
		hstream->direction = SNDRV_PCM_STREAM_CAPTURE;

		/* memory alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}
		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);
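		/*
		 * hstream->posbuf now points at this stream's own entry in
		 * the shared position buffer allocated above, 8 bytes per
		 * stream index.
		 */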

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* create playback streams */
	for (i = num_capture; i < num_total; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		stream = &hda_stream->hda_stream;

		/* we always have DSP support */
		stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

		stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
			SOF_HDA_PPLC_INTERVAL * i;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream = &stream->hstream;
		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->stream_tag = i - num_capture + 1;
		hstream->opened = false;
		hstream->running = false;
		hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;

		/* mem alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}

		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* store total stream count (playback + capture) from GCAP */
	sof_hda->stream_max = num_total;

	return 0;
}

void hda_dsp_stream_free(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *s, *_s;
	struct hdac_ext_stream *stream;
	struct sof_intel_hda_stream *hda_stream;

	/* free position buffer */
	if (bus->posbuf.area)
		snd_dma_free_pages(&bus->posbuf);

#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	/* free CORB/RIRB ringbuffers */
	if (bus->rb.area)
		snd_dma_free_pages(&bus->rb);
#endif

	list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
		/* TODO: decouple */

		/* free bdl buffer */
		if (s->bdl.area)
			snd_dma_free_pages(&s->bdl);
		list_del(&s->list);
		stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(stream, struct sof_intel_hda_stream,
					  hda_stream);
		devm_kfree(sdev->dev, hda_stream);
	}
}