// SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
//
// This file is provided under a dual BSD/GPLv2 license. When using or
// redistributing this file, you may do so under either license.
//
// Copyright(c) 2018 Intel Corporation. All rights reserved.
//
// Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
//	    Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
//	    Rander Wang <rander.wang@intel.com>
//	    Keyon Jie <yang.jie@linux.intel.com>
//

/*
 * Hardware interface for generic Intel audio DSP HDA IP
 */

#include <linux/pm_runtime.h>
#include <sound/hdaudio_ext.h>
#include <sound/hda_register.h>
#include <sound/sof.h>
#include <trace/events/sof_intel.h>
#include "../ops.h"
#include "../sof-audio.h"
#include "hda.h"

#define HDA_LTRP_GB_VALUE_US	95

static inline const char *hda_hstream_direction_str(struct hdac_stream *hstream)
{
	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK)
		return "Playback";
	else
		return "Capture";
}

static char *hda_hstream_dbg_get_stream_info_str(struct hdac_stream *hstream)
{
	struct snd_soc_pcm_runtime *rtd;

	if (hstream->substream)
		rtd = asoc_substream_to_rtd(hstream->substream);
	else if (hstream->cstream)
		rtd = hstream->cstream->private_data;
	else
		/* Non audio DMA user, like dma-trace */
		return kasprintf(GFP_KERNEL, "-- (%s, stream_tag: %u)",
				 hda_hstream_direction_str(hstream),
				 hstream->stream_tag);

	return kasprintf(GFP_KERNEL, "dai_link \"%s\" (%s, stream_tag: %u)",
			 rtd->dai_link->name, hda_hstream_direction_str(hstream),
			 hstream->stream_tag);
}

/*
 * set up one of BDL entries for a stream
 */
static int hda_setup_bdle(struct snd_sof_dev *sdev,
			  struct snd_dma_buffer *dmab,
			  struct hdac_stream *hstream,
			  struct sof_intel_dsp_bdl **bdlp,
			  int offset, int size, int ioc)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_dsp_bdl *bdl = *bdlp;

	while (size > 0) {
		dma_addr_t addr;
		int chunk;

		if (hstream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
			dev_err(sdev->dev, "error: stream frags exceeded\n");
			return -EINVAL;
		}

		addr = snd_sgbuf_get_addr(dmab, offset);
		/* program BDL addr */
		bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
		bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
		/* program BDL size */
		chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
		/* one BDLE should not cross 4K boundary */
		if (bus->align_bdle_4k) {
			u32 remain = 0x1000 - (offset & 0xfff);

			if (chunk > remain)
				chunk = remain;
		}
		bdl->size = cpu_to_le32(chunk);
		/* only program IOC when the whole segment is processed */
		size -= chunk;
		bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
		bdl++;
		hstream->frags++;
		offset += chunk;
	}

	*bdlp = bdl;
	return offset;
}

/*
 * set up Buffer Descriptor List (BDL) for host memory transfer
 * BDL describes the location of the individual buffers and is little endian.
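 *
 * hda_dsp_stream_setup_bdl() programs one or more entries per period via
 * hda_setup_bdle(); an entry is not allowed to cross a 4K boundary when the
 * controller requires alignment, and the IOC bit is raised only on the last
 * entry of a period when position IPC is not used and period wakeups are
 * needed.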
 */
int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
			     struct snd_dma_buffer *dmab,
			     struct hdac_stream *hstream)
{
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct sof_intel_dsp_bdl *bdl;
	int i, offset, period_bytes, periods;
	int remain, ioc;

	period_bytes = hstream->period_bytes;
	dev_dbg(sdev->dev, "period_bytes:0x%x\n", period_bytes);
	if (!period_bytes)
		period_bytes = hstream->bufsize;

	periods = hstream->bufsize / period_bytes;

	dev_dbg(sdev->dev, "periods:%d\n", periods);

	remain = hstream->bufsize % period_bytes;
	if (remain)
		periods++;

	/* program the initial BDL entries */
	bdl = (struct sof_intel_dsp_bdl *)hstream->bdl.area;
	offset = 0;
	hstream->frags = 0;

	/*
	 * set IOC if position IPC is not used and period wakeup is needed
	 */
	ioc = hda->no_ipc_position ?
	      !hstream->no_period_wakeup : 0;

	for (i = 0; i < periods; i++) {
		if (i == (periods - 1) && remain)
			/* set the last small entry */
			offset = hda_setup_bdle(sdev, dmab,
						hstream, &bdl, offset,
						remain, 0);
		else
			offset = hda_setup_bdle(sdev, dmab,
						hstream, &bdl, offset,
						period_bytes, ioc);
	}

	return offset;
}

int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
			       struct hdac_ext_stream *hext_stream,
			       int enable, u32 size)
{
	struct hdac_stream *hstream = &hext_stream->hstream;
	u32 mask;

	if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
		dev_err(sdev->dev, "error: address of spib capability is NULL\n");
		return -EINVAL;
	}

	mask = (1 << hstream->index);

	/* enable/disable SPIB for the stream */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
				SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
				enable << hstream->index);

	/* set the SPIB value */
	sof_io_write(sdev, hstream->spib_addr, size);

	return 0;
}

/* get next unused stream */
struct hdac_ext_stream *
hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction, u32 flags)
{
	const struct sof_intel_dsp_desc *chip_info = get_chip_info(sdev->pdata);
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *hext_stream = NULL;
	struct hdac_stream *s;

	spin_lock_irq(&bus->reg_lock);

	/* get an unused stream */
	list_for_each_entry(s, &bus->stream_list, list) {
		if (s->direction == direction && !s->opened) {
			hext_stream = stream_to_hdac_ext_stream(s);
			hda_stream = container_of(hext_stream,
						  struct sof_intel_hda_stream,
						  hext_stream);
			/* check if the host DMA channel is reserved */
			if (hda_stream->host_reserved)
				continue;

			s->opened = true;
			break;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* stream found? */
	if (!hext_stream) {
		dev_err(sdev->dev, "error: no free %s streams\n",
			direction == SNDRV_PCM_STREAM_PLAYBACK ?
			"playback" : "capture");
		return hext_stream;
	}

	hda_stream->flags = flags;

	/*
	 * Prevent DMI Link L1 entry for streams that don't support it.
	 * Workaround to address a known issue with host DMA that results
	 * in xruns during pause/release in capture scenarios. This is not
	 * needed for the ACE IP.
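	 * The L1SEN bit is set again in hda_dsp_stream_put() once no opened
	 * DMI L1 incompatible stream remains.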
	 */
	if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 &&
	    !(flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, 0);
		hda->l1_disabled = true;
	}

	return hext_stream;
}

/* free a stream */
int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
{
	const struct sof_intel_dsp_desc *chip_info = get_chip_info(sdev->pdata);
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *hext_stream;
	struct hdac_stream *s;
	bool dmi_l1_enable = true;
	bool found = false;

	spin_lock_irq(&bus->reg_lock);

	/*
	 * close stream matching the stream tag and check if there are any open streams
	 * that are DMI L1 incompatible.
	 */
	list_for_each_entry(s, &bus->stream_list, list) {
		hext_stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream, hext_stream);

		if (!s->opened)
			continue;

		if (s->direction == direction && s->stream_tag == stream_tag) {
			s->opened = false;
			found = true;
		} else if (!(hda_stream->flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
			dmi_l1_enable = false;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* Enable DMI L1 if permitted */
	if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 && dmi_l1_enable) {
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, HDA_VS_INTEL_EM2_L1SEN);
		hda->l1_disabled = false;
	}

	if (!found) {
		dev_err(sdev->dev, "%s: stream_tag %d not opened!\n",
			__func__, stream_tag);
		return -ENODEV;
	}

	return 0;
}

static int hda_dsp_stream_reset(struct snd_sof_dev *sdev, struct hdac_stream *hstream)
{
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	int timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	u32 val;

	/* enter stream reset */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST,
				SOF_STREAM_SD_OFFSET_CRST);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
		if (val & SOF_STREAM_SD_OFFSET_CRST)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "timeout waiting for stream reset\n");
		return -ETIMEDOUT;
	}

	timeout = HDA_DSP_STREAM_RESET_TIMEOUT;

	/* exit stream reset and wait to read a zero before reading any other register */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST, 0x0);

	/* wait for hardware to report that stream is out of reset */
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
		if ((val & SOF_STREAM_SD_OFFSET_CRST) == 0)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "timeout waiting for stream to exit reset\n");
		return -ETIMEDOUT;
	}

	return 0;
}

int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
			   struct hdac_ext_stream *hext_stream, int cmd)
{
	struct hdac_stream *hstream = &hext_stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	int ret = 0;
	u32 run;

	/* cmd must be for audio stream */
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
	case SNDRV_PCM_TRIGGER_START:
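		/*
		 * Enable the stream interrupt, set the DMA start and interrupt
		 * enable bits, then poll SD_CTL until the controller reports
		 * that the DMA engine is running.
		 */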
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
					1 << hstream->index,
					1 << hstream->index);

		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK);

		ret = snd_sof_dsp_read_poll_timeout(sdev,
						    HDA_DSP_HDA_BAR,
						    sd_offset, run,
						    ((run & dma_start) == dma_start),
						    HDA_DSP_REG_POLL_INTERVAL_US,
						    HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret >= 0)
			hstream->running = true;

		break;
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
	case SNDRV_PCM_TRIGGER_STOP:
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);

		ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
						    sd_offset, run,
						    !(run & dma_start),
						    HDA_DSP_REG_POLL_INTERVAL_US,
						    HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret >= 0) {
			snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
					  sd_offset + SOF_HDA_ADSP_REG_SD_STS,
					  SOF_HDA_CL_DMA_SD_INT_MASK);

			hstream->running = false;
			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
						SOF_HDA_INTCTL,
						1 << hstream->index, 0x0);
		}
		break;
	default:
		dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
		return -EINVAL;
	}

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: cmd %d on %s: timeout on STREAM_SD_OFFSET read\n",
			__func__, cmd, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
	}

	return ret;
}

/* minimal recommended programming for ICCMAX stream */
int hda_dsp_iccmax_stream_hw_params(struct snd_sof_dev *sdev, struct hdac_ext_stream *hext_stream,
				    struct snd_dma_buffer *dmab,
				    struct snd_pcm_hw_params *params)
{
	struct hdac_stream *hstream = &hext_stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	int ret;
	u32 mask = 0x1 << hstream->index;

	if (!hext_stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	if (!dmab) {
		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
		return -ENODEV;
	}

	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
			  0x0);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_CBL,
			  hstream->bufsize);

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* decouple host and link DMA, enable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);
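
	/*
	 * Note: compared to hda_dsp_stream_hw_params(), the stream reset,
	 * stream tag and format programming are skipped on purpose; only the
	 * minimum needed to start the DMA for firmware boot is set up here.
	 */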
	/* Follow HW recommendation to set the guardband value to 95us during FW boot */
	snd_sof_dsp_update8(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_LTRP,
			    HDA_VS_INTEL_LTRP_GB_MASK, HDA_LTRP_GB_VALUE_US);

	/* start DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_SD_CTL_DMA_START, SOF_HDA_SD_CTL_DMA_START);

	return 0;
}

/*
 * prepare the common hdac register settings, for both the code loader
 * and normal streams.
 */
int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
			     struct hdac_ext_stream *hext_stream,
			     struct snd_dma_buffer *dmab,
			     struct snd_pcm_hw_params *params)
{
	const struct sof_intel_dsp_desc *chip = get_chip_info(sdev->pdata);
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *hstream;
	int sd_offset, ret;
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	u32 mask;
	u32 run;

	if (!hext_stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	if (!dmab) {
		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
		return -ENODEV;
	}

	hstream = &hext_stream->hstream;
	sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	mask = BIT(hstream->index);

	/* decouple host and link DMA if the DSP is used */
	if (!sdev->dspless_mode_selected)
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
					mask, mask);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: on %s: timeout on STREAM_SD_OFFSET read1\n",
			__func__, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* stream reset */
	ret = hda_dsp_stream_reset(sdev, hstream);
	if (ret < 0)
		return ret;

	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
			  0x0);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: on %s: timeout on STREAM_SD_OFFSET read2\n",
			__func__, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program stream tag to set up stream descriptor for DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
				hstream->stream_tag <<
				SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_CBL,
			  hstream->bufsize);

	/*
	 * Recommended hardware programming sequence for HDAudio DMA format
	 * on earlier platforms - this is not needed on newer platforms
	 *
	 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit
	 *    for corresponding stream index before the time of writing
	 *    format to SDxFMT register.
	 * 2. Write SDxFMT
	 * 3. Set PPCTL.PROCEN bit for corresponding stream index to
	 *    enable decoupled mode
	 */

	if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK))
		/* couple host and link DMA, disable DSP features */
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
					mask, 0);

	/* program stream format */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset +
				SOF_HDA_ADSP_REG_SD_FORMAT,
				0xffff, hstream->format_val);

	if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK))
		/* decouple host and link DMA, enable DSP features */
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
					mask, mask);

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* enable position buffer, if needed */
	if (bus->use_posbuf && bus->posbuf.addr &&
	    !(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
	      & SOF_HDA_ADSP_DPLBASE_ENABLE)) {
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
				  upper_32_bits(bus->posbuf.addr));
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
				  (u32)bus->posbuf.addr |
				  SOF_HDA_ADSP_DPLBASE_ENABLE);
	}

	/* set interrupt enable bits */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* read FIFO size */
	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
		hstream->fifo_size =
			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 sd_offset +
					 SOF_HDA_ADSP_REG_SD_FIFOSIZE);
		hstream->fifo_size &= 0xffff;
		hstream->fifo_size += 1;
	} else {
		hstream->fifo_size = 0;
	}

	return ret;
}
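
/*
 * Reset the stream, re-couple host and link DMA (when the DSP is in use and
 * the link DMA channel is idle) and disable SPIB, so that the host stream
 * can be reused.
 */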
int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
			   struct snd_pcm_substream *substream)
{
	struct hdac_stream *hstream = substream->runtime->private_data;
	struct hdac_ext_stream *hext_stream = container_of(hstream,
							    struct hdac_ext_stream,
							    hstream);
	int ret;

	ret = hda_dsp_stream_reset(sdev, hstream);
	if (ret < 0)
		return ret;

	if (!sdev->dspless_mode_selected) {
		struct hdac_bus *bus = sof_to_bus(sdev);
		u32 mask = BIT(hstream->index);

		spin_lock_irq(&bus->reg_lock);
		/* couple host and link DMA if link DMA channel is idle */
		if (!hext_stream->link_locked)
			snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
						SOF_HDA_REG_PP_PPCTL, mask, 0);
		spin_unlock_irq(&bus->reg_lock);
	}

	hda_dsp_stream_spib_config(sdev, hext_stream, HDA_DSP_SPIB_DISABLE, 0);

	hstream->substream = NULL;

	return 0;
}

bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	bool ret = false;
	u32 status;

	/* The function can be called at irq thread, so use spin_lock_irq */
	spin_lock_irq(&bus->reg_lock);

	status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS);

	trace_sof_intel_hda_dsp_check_stream_irq(sdev, status);

	/* if the register is inaccessible, ignore it */
	if (status != 0xffffffff)
		ret = true;

	spin_unlock_irq(&bus->reg_lock);

	return ret;
}

static void
hda_dsp_compr_bytes_transferred(struct hdac_stream *hstream, int direction)
{
	u64 buffer_size = hstream->bufsize;
	u64 prev_pos, pos, num_bytes;

	div64_u64_rem(hstream->curr_pos, buffer_size, &prev_pos);
	pos = hda_dsp_stream_get_position(hstream, direction, false);

	if (pos < prev_pos)
		num_bytes = (buffer_size - prev_pos) + pos;
	else
		num_bytes = pos - prev_pos;

	hstream->curr_pos += num_bytes;
}

static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
{
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	struct hdac_stream *s;
	bool active = false;
	u32 sd_status;

	list_for_each_entry(s, &bus->stream_list, list) {
		if (status & BIT(s->index) && s->opened) {
			sd_status = readb(s->sd_addr + SOF_HDA_ADSP_REG_SD_STS);

			trace_sof_intel_hda_dsp_stream_status(bus->dev, s, sd_status);

			writeb(sd_status, s->sd_addr + SOF_HDA_ADSP_REG_SD_STS);

			active = true;
			if ((!s->substream && !s->cstream) ||
			    !s->running ||
			    (sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
				continue;

			/* Inform ALSA only if the position is not reported via IPC */
			if (s->substream && sof_hda->no_ipc_position) {
				snd_sof_pcm_period_elapsed(s->substream);
			} else if (s->cstream) {
				hda_dsp_compr_bytes_transferred(s, s->cstream->direction);
				snd_compr_fragment_elapsed(s->cstream);
			}
		}
	}

	return active;
}

irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
{
	struct snd_sof_dev *sdev = context;
	struct hdac_bus *bus = sof_to_bus(sdev);
	bool active;
	u32 status;
	int i;

	/*
	 * Loop 10 times to handle missed interrupts caused by
	 * unsolicited responses from the codec
	 */
	for (i = 0, active = true; i < 10 && active; i++) {
		spin_lock_irq(&bus->reg_lock);

		status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS);

		/* check streams */
		active = hda_dsp_stream_check(bus, status);

		/* check and clear RIRB interrupt */
		if (status & AZX_INT_CTRL_EN) {
			active |= hda_codec_check_rirb_status(sdev);
		}
		spin_unlock_irq(&bus->reg_lock);
	}

	return IRQ_HANDLED;
}
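
/*
 * Probe-time stream setup: read GCAP to discover how many playback and
 * capture stream descriptors the controller exposes, allocate the DMA
 * position buffer and the CORB/RIRB ring buffer, then create one
 * hdac_ext_stream (with its own BDL buffer) per stream descriptor.
 */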
int hda_dsp_stream_init(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_ext_stream *hext_stream;
	struct hdac_stream *hstream;
	struct pci_dev *pci = to_pci_dev(sdev->dev);
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	int sd_offset;
	int i, num_playback, num_capture, num_total, ret;
	u32 gcap;

	gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
	dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);

	/* get stream count from GCAP */
	num_capture = (gcap >> 8) & 0x0f;
	num_playback = (gcap >> 12) & 0x0f;
	num_total = num_playback + num_capture;

	dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
		num_playback, num_capture);

	if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
		dev_err(sdev->dev, "error: too many playback streams %d\n",
			num_playback);
		return -EINVAL;
	}

	if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
		dev_err(sdev->dev, "error: too many capture streams %d\n",
			num_capture);
		return -EINVAL;
	}

	/*
	 * mem alloc for the position buffer
	 * TODO: check position buffer update
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  SOF_HDA_DPIB_ENTRY_SIZE * num_total,
				  &bus->posbuf);
	if (ret < 0) {
		dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
		return -ENOMEM;
	}

	/*
	 * mem alloc for the CORB/RIRB ringbuffers - this will be used only for
	 * HDAudio codecs
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  PAGE_SIZE, &bus->rb);
	if (ret < 0) {
		dev_err(sdev->dev, "error: RB alloc failed\n");
		return -ENOMEM;
	}

	/* create capture streams */
	for (i = 0; i < num_capture; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		hext_stream = &hda_stream->hext_stream;

		if (sdev->bar[HDA_DSP_PP_BAR]) {
			hext_stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
				SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

			hext_stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
				SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
				SOF_HDA_PPLC_INTERVAL * i;
		}

		hstream = &hext_stream->hstream;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			hstream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			hstream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->stream_tag = i + 1;
		hstream->opened = false;
		hstream->running = false;
		hstream->direction = SNDRV_PCM_STREAM_CAPTURE;

		/* memory alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}
		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* create playback streams */
	for (i = num_capture; i < num_total; i++) {
		struct sof_intel_hda_stream *hda_stream;
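
		/*
		 * Same setup as for the capture streams above, except for the
		 * direction and the stream tags, which restart at 1.
		 */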
		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		hext_stream = &hda_stream->hext_stream;

		if (sdev->bar[HDA_DSP_PP_BAR]) {
			hext_stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
				SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

			hext_stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
				SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
				SOF_HDA_PPLC_INTERVAL * i;
		}

		hstream = &hext_stream->hstream;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			hstream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			hstream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->stream_tag = i - num_capture + 1;
		hstream->opened = false;
		hstream->running = false;
		hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;

		/* mem alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}

		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* store total stream count (playback + capture) from GCAP */
	sof_hda->stream_max = num_total;

	return 0;
}

void hda_dsp_stream_free(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *s, *_s;
	struct hdac_ext_stream *hext_stream;
	struct sof_intel_hda_stream *hda_stream;

	/* free position buffer */
	if (bus->posbuf.area)
		snd_dma_free_pages(&bus->posbuf);

	/* free CORB/RIRB buffer - only used for HDaudio codecs */
	if (bus->rb.area)
		snd_dma_free_pages(&bus->rb);

	list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
		/* TODO: decouple */

		/* free bdl buffer */
		if (s->bdl.area)
			snd_dma_free_pages(&s->bdl);
		list_del(&s->list);
		hext_stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream,
					  hext_stream);
		devm_kfree(sdev->dev, hda_stream);
	}
}

snd_pcm_uframes_t hda_dsp_stream_get_position(struct hdac_stream *hstream,
					      int direction, bool can_sleep)
{
	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
	struct sof_intel_hda_stream *hda_stream = hstream_to_sof_hda_stream(hext_stream);
	struct snd_sof_dev *sdev = hda_stream->sdev;
	snd_pcm_uframes_t pos;

	switch (sof_hda_position_quirk) {
	case SOF_HDA_POSITION_QUIRK_USE_SKYLAKE_LEGACY:
		/*
		 * This legacy code, inherited from the Skylake driver,
		 * mixes DPIB registers and DPIB DDR updates and
		 * does not seem to follow any known hardware recommendations.
		 * It's not clear e.g. why there is a different flow for
		 * capture and playback; the only information that matters is
		 * which traffic class is used, and on all SOF-enabled
		 * platforms only VC0 is supported, so the workaround was
		 * likely not necessary and quite possibly wrong.
		 */

		/* DPIB/posbuf position mode:
		 * For Playback, use the DPIB register from HDA space, which
		 * reflects the actual data transferred.
		 * For Capture, use the position buffer for the pointer, as
		 * DPIB is not accurate enough: its update may complete
		 * before the data is written to DDR.
		 */
		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
			pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					       AZX_REG_VS_SDXDPIB_XBASE +
					       (AZX_REG_VS_SDXDPIB_XINTERVAL *
						hstream->index));
		} else {
			/*
			 * For capture streams, additional workarounds are
			 * needed to fix incorrect position reporting:
			 *
			 * 1. Wait at least 20us before reading the position
			 *    buffer after the interrupt (IOC) is generated,
			 *    to make sure the position update happens on a
			 *    frame boundary, i.e. 20.833us for 48kHz.
			 * 2. Perform a dummy read of the DPIB register to
			 *    flush the DMA position value.
			 * 3. Read the DMA position from the position buffer.
			 *    The readback value should now be >= the period
			 *    boundary.
			 */
			if (can_sleep)
				usleep_range(20, 21);

			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 AZX_REG_VS_SDXDPIB_XBASE +
					 (AZX_REG_VS_SDXDPIB_XINTERVAL *
					  hstream->index));
			pos = snd_hdac_stream_get_pos_posbuf(hstream);
		}
		break;
	case SOF_HDA_POSITION_QUIRK_USE_DPIB_REGISTERS:
		/*
		 * In case VC1 traffic is disabled this is the recommended option
		 */
		pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       AZX_REG_VS_SDXDPIB_XBASE +
				       (AZX_REG_VS_SDXDPIB_XINTERVAL *
					hstream->index));
		break;
	case SOF_HDA_POSITION_QUIRK_USE_DPIB_DDR_UPDATE:
		/*
		 * This is the recommended option when VC1 is enabled.
		 * While this isn't needed for SOF platforms it's added for
		 * consistency and debug.
		 */
		pos = snd_hdac_stream_get_pos_posbuf(hstream);
		break;
	default:
		dev_err_once(sdev->dev, "hda_position_quirk value %d not supported\n",
			     sof_hda_position_quirk);
		pos = 0;
		break;
	}

	if (pos >= hstream->bufsize)
		pos = 0;

	return pos;
}