xref: /openbmc/linux/sound/soc/sof/intel/hda-stream.c (revision 86edee97)
1 // SPDX-License-Identifier: (GPL-2.0 OR BSD-3-Clause)
2 //
3 // This file is provided under a dual BSD/GPLv2 license.  When using or
4 // redistributing this file, you may do so under either license.
5 //
6 // Copyright(c) 2018 Intel Corporation. All rights reserved.
7 //
8 // Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
9 //	    Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
10 //	    Rander Wang <rander.wang@intel.com>
11 //          Keyon Jie <yang.jie@linux.intel.com>
12 //
13 
14 /*
15  * Hardware interface for generic Intel audio DSP HDA IP
16  */
17 
18 #include <linux/pm_runtime.h>
19 #include <sound/hdaudio_ext.h>
20 #include <sound/hda_register.h>
21 #include <sound/sof.h>
22 #include "../ops.h"
23 #include "../sof-audio.h"
24 #include "hda.h"
25 
26 /*
27  * set up one of the BDL entries for a stream
28  */
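/*
 * Rough sketch of the descriptor programmed below; the authoritative
 * definition of struct sof_intel_dsp_bdl lives in hda.h. The fields follow
 * the standard HDA buffer descriptor layout and are little endian:
 *
 *	addr_l - lower 32 bits of the segment address
 *	addr_h - upper 32 bits of the segment address
 *	size   - length of the segment in bytes
 *	ioc    - bit 0 set to raise an interrupt when the segment completes
 *
 * A stream may use at most HDA_DSP_MAX_BDL_ENTRIES entries.
 */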
29 static int hda_setup_bdle(struct snd_sof_dev *sdev,
30 			  struct snd_dma_buffer *dmab,
31 			  struct hdac_stream *stream,
32 			  struct sof_intel_dsp_bdl **bdlp,
33 			  int offset, int size, int ioc)
34 {
35 	struct hdac_bus *bus = sof_to_bus(sdev);
36 	struct sof_intel_dsp_bdl *bdl = *bdlp;
37 
38 	while (size > 0) {
39 		dma_addr_t addr;
40 		int chunk;
41 
42 		if (stream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
43 			dev_err(sdev->dev, "error: stream frags exceeded\n");
44 			return -EINVAL;
45 		}
46 
47 		addr = snd_sgbuf_get_addr(dmab, offset);
48 		/* program BDL addr */
49 		bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
50 		bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
51 		/* program BDL size */
52 		chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
53 		/* one BDLE should not cross a 4K boundary */
54 		if (bus->align_bdle_4k) {
55 			u32 remain = 0x1000 - (offset & 0xfff);
56 
57 			if (chunk > remain)
58 				chunk = remain;
59 		}
60 		bdl->size = cpu_to_le32(chunk);
61 		/* only program IOC when the whole segment is processed */
62 		size -= chunk;
63 		bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
64 		bdl++;
65 		stream->frags++;
66 		offset += chunk;
67 
68 		dev_vdbg(sdev->dev, "bdl, frags:%d, chunk size:0x%x;\n",
69 			 stream->frags, chunk);
70 	}
71 
72 	*bdlp = bdl;
73 	return offset;
74 }
75 
76 /*
77  * set up Buffer Descriptor List (BDL) for host memory transfer
78  * BDL describes the location of the individual buffers and is little endian.
79  */
80 int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
81 			     struct snd_dma_buffer *dmab,
82 			     struct hdac_stream *stream)
83 {
84 	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
85 	struct sof_intel_dsp_bdl *bdl;
86 	int i, offset, period_bytes, periods;
87 	int remain, ioc;
88 
89 	period_bytes = stream->period_bytes;
90 	dev_dbg(sdev->dev, "period_bytes:0x%x\n", period_bytes);
91 	if (!period_bytes)
92 		period_bytes = stream->bufsize;
93 
94 	periods = stream->bufsize / period_bytes;
95 
96 	dev_dbg(sdev->dev, "periods:%d\n", periods);
97 
98 	remain = stream->bufsize % period_bytes;
99 	if (remain)
100 		periods++;
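	/*
	 * Illustrative numbers only: a 64 kB buffer with 16 kB periods
	 * gives periods = 4 and remain = 0; with 24 kB periods it gives
	 * periods = 2 and remain = 16 kB, so one extra, shorter BDL pass
	 * is added for the tail of the buffer.
	 */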
101 
102 	/* program the initial BDL entries */
103 	bdl = (struct sof_intel_dsp_bdl *)stream->bdl.area;
104 	offset = 0;
105 	stream->frags = 0;
106 
107 	/*
108 	 * set IOC only if the position is not reported via IPC
109 	 * and a period wakeup is needed.
110 	 */
111 	ioc = hda->no_ipc_position ?
112 	      !stream->no_period_wakeup : 0;
113 
114 	for (i = 0; i < periods; i++) {
115 		if (i == (periods - 1) && remain)
116 			/* set the last small entry */
117 			offset = hda_setup_bdle(sdev, dmab,
118 						stream, &bdl, offset,
119 						remain, 0);
120 		else
121 			offset = hda_setup_bdle(sdev, dmab,
122 						stream, &bdl, offset,
123 						period_bytes, ioc);
124 	}
125 
126 	return offset;
127 }
128 
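/*
 * Note on SPIB (Software Position In Buffer), kept deliberately general:
 * when the controller exposes this capability the driver can program a
 * per-stream byte position which the DMA engine is not expected to run
 * past, e.g. to fence off the valid part of a buffer during code loading.
 * The exact behaviour is defined by the HDAudio SPIB capability itself,
 * not by this driver.
 */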
129 int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
130 			       struct hdac_ext_stream *stream,
131 			       int enable, u32 size)
132 {
133 	struct hdac_stream *hstream = &stream->hstream;
134 	u32 mask;
135 
136 	if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
137 		dev_err(sdev->dev, "error: address of spib capability is NULL\n");
138 		return -EINVAL;
139 	}
140 
141 	mask = (1 << hstream->index);
142 
143 	/* enable/disable SPIB for the stream */
144 	snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
145 				SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
146 				enable << hstream->index);
147 
148 	/* set the SPIB value */
149 	sof_io_write(sdev, stream->spib_addr, size);
150 
151 	return 0;
152 }
153 
154 /* get next unused stream */
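/*
 * The stream returned here is marked opened under bus->reg_lock and is
 * expected to be released again with hda_dsp_stream_put().
 */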
155 struct hdac_ext_stream *
156 hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction)
157 {
158 	struct hdac_bus *bus = sof_to_bus(sdev);
159 	struct sof_intel_hda_stream *hda_stream;
160 	struct hdac_ext_stream *stream = NULL;
161 	struct hdac_stream *s;
162 
163 	spin_lock_irq(&bus->reg_lock);
164 
165 	/* get an unused stream */
166 	list_for_each_entry(s, &bus->stream_list, list) {
167 		if (s->direction == direction && !s->opened) {
168 			stream = stream_to_hdac_ext_stream(s);
169 			hda_stream = container_of(stream,
170 						  struct sof_intel_hda_stream,
171 						  hda_stream);
172 			/* check if the host DMA channel is reserved */
173 			if (hda_stream->host_reserved)
174 				continue;
175 
176 			s->opened = true;
177 			break;
178 		}
179 	}
180 
181 	spin_unlock_irq(&bus->reg_lock);
182 
183 	/* stream found ? */
184 	if (!stream)
185 		dev_err(sdev->dev, "error: no free %s streams\n",
186 			direction == SNDRV_PCM_STREAM_PLAYBACK ?
187 			"playback" : "capture");
188 
189 	/*
190 	 * Disable DMI Link L1 entry when capture stream is opened.
191 	 * Workaround to address a known issue with host DMA that results
192 	 * in xruns during pause/release in capture scenarios.
193 	 */
194 	if (!IS_ENABLED(CONFIG_SND_SOC_SOF_HDA_ALWAYS_ENABLE_DMI_L1))
195 		if (stream && direction == SNDRV_PCM_STREAM_CAPTURE)
196 			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
197 						HDA_VS_INTEL_EM2,
198 						HDA_VS_INTEL_EM2_L1SEN, 0);
199 
200 	return stream;
201 }
202 
203 /* free a stream */
204 int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
205 {
206 	struct hdac_bus *bus = sof_to_bus(sdev);
207 	struct hdac_stream *s;
208 	bool active_capture_stream = false;
209 	bool found = false;
210 
211 	spin_lock_irq(&bus->reg_lock);
212 
213 	/*
214 	 * close the stream matching the stream tag and direction,
215 	 * and check if any other capture streams are still open.
216 	 */
217 	list_for_each_entry(s, &bus->stream_list, list) {
218 		if (!s->opened)
219 			continue;
220 
221 		if (s->direction == direction && s->stream_tag == stream_tag) {
222 			s->opened = false;
223 			found = true;
224 		} else if (s->direction == SNDRV_PCM_STREAM_CAPTURE) {
225 			active_capture_stream = true;
226 		}
227 	}
228 
229 	spin_unlock_irq(&bus->reg_lock);
230 
231 	/* Enable DMI L1 entry if there are no capture streams open */
232 	if (!IS_ENABLED(CONFIG_SND_SOC_SOF_HDA_ALWAYS_ENABLE_DMI_L1))
233 		if (!active_capture_stream)
234 			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
235 						HDA_VS_INTEL_EM2,
236 						HDA_VS_INTEL_EM2_L1SEN,
237 						HDA_VS_INTEL_EM2_L1SEN);
238 
239 	if (!found) {
240 		dev_dbg(sdev->dev, "stream_tag %d not opened!\n", stream_tag);
241 		return -ENODEV;
242 	}
243 
244 	return 0;
245 }
246 
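/*
 * Start or stop the host DMA for a stream: on start, the stream interrupt
 * is enabled in INTCTL and the RUN/interrupt bits are set in the stream
 * descriptor, then the register is polled until the controller reports the
 * DMA as running; the stop path mirrors this and clears the status bits.
 */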
247 int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
248 			   struct hdac_ext_stream *stream, int cmd)
249 {
250 	struct hdac_stream *hstream = &stream->hstream;
251 	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
252 	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
253 	int ret;
254 	u32 run;
255 
256 	/* cmd must be for audio stream */
257 	switch (cmd) {
258 	case SNDRV_PCM_TRIGGER_RESUME:
259 	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
260 	case SNDRV_PCM_TRIGGER_START:
261 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
262 					1 << hstream->index,
263 					1 << hstream->index);
264 
265 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
266 					sd_offset,
267 					SOF_HDA_SD_CTL_DMA_START |
268 					SOF_HDA_CL_DMA_SD_INT_MASK,
269 					SOF_HDA_SD_CTL_DMA_START |
270 					SOF_HDA_CL_DMA_SD_INT_MASK);
271 
272 		ret = snd_sof_dsp_read_poll_timeout(sdev,
273 					HDA_DSP_HDA_BAR,
274 					sd_offset, run,
275 					((run &	dma_start) == dma_start),
276 					HDA_DSP_REG_POLL_INTERVAL_US,
277 					HDA_DSP_STREAM_RUN_TIMEOUT);
278 
279 		if (ret < 0) {
280 			dev_err(sdev->dev,
281 				"error: %s: cmd %d: timeout on STREAM_SD_OFFSET read\n",
282 				__func__, cmd);
283 			return ret;
284 		}
285 
286 		hstream->running = true;
287 		break;
288 	case SNDRV_PCM_TRIGGER_SUSPEND:
289 	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
290 	case SNDRV_PCM_TRIGGER_STOP:
291 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
292 					sd_offset,
293 					SOF_HDA_SD_CTL_DMA_START |
294 					SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);
295 
296 		ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
297 						sd_offset, run,
298 						!(run &	dma_start),
299 						HDA_DSP_REG_POLL_INTERVAL_US,
300 						HDA_DSP_STREAM_RUN_TIMEOUT);
301 
302 		if (ret < 0) {
303 			dev_err(sdev->dev,
304 				"error: %s: cmd %d: timeout on STREAM_SD_OFFSET read\n",
305 				__func__, cmd);
306 			return ret;
307 		}
308 
309 		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, sd_offset +
310 				  SOF_HDA_ADSP_REG_CL_SD_STS,
311 				  SOF_HDA_CL_DMA_SD_INT_MASK);
312 
313 		hstream->running = false;
314 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
315 					1 << hstream->index, 0x0);
316 		break;
317 	default:
318 		dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
319 		return -EINVAL;
320 	}
321 
322 	return 0;
323 }
324 
325 /*
326  * set up the common hdac stream registers, used by both the code loader
327  * and normal PCM streams.
328  */
329 int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
330 			     struct hdac_ext_stream *stream,
331 			     struct snd_dma_buffer *dmab,
332 			     struct snd_pcm_hw_params *params)
333 {
334 	struct hdac_bus *bus = sof_to_bus(sdev);
335 	struct hdac_stream *hstream;
336 	int sd_offset, ret, timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
337 	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
338 	u32 val, mask, run;
339 
340 	if (!stream) {
341 		dev_err(sdev->dev, "error: no stream available\n");
342 		return -ENODEV;
343 	}
344 
345 	if (!dmab) {
346 		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
347 		return -ENODEV;
348 	}
349 
350 	hstream = &stream->hstream;
351 	sd_offset = SOF_STREAM_SD_OFFSET(hstream);
352 
353 	/* decouple host and link DMA */
354 	mask = 0x1 << hstream->index;
355 	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
356 				mask, mask);
357 	/* clear stream status */
358 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
359 				SOF_HDA_CL_DMA_SD_INT_MASK |
360 				SOF_HDA_SD_CTL_DMA_START, 0);
361 
362 	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
363 					    sd_offset, run,
364 					    !(run & dma_start),
365 					    HDA_DSP_REG_POLL_INTERVAL_US,
366 					    HDA_DSP_STREAM_RUN_TIMEOUT);
367 
368 	if (ret < 0) {
369 		dev_err(sdev->dev,
370 			"error: %s: timeout on STREAM_SD_OFFSET read1\n",
371 			__func__);
372 		return ret;
373 	}
374 
375 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
376 				sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
377 				SOF_HDA_CL_DMA_SD_INT_MASK,
378 				SOF_HDA_CL_DMA_SD_INT_MASK);
379 
380 	/* stream reset */
381 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 0x1,
382 				0x1);
383 	udelay(3);
384 	do {
385 		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
386 				       sd_offset);
387 		if (val & 0x1)
388 			break;
389 	} while (--timeout);
390 	if (timeout == 0) {
391 		dev_err(sdev->dev, "error: stream reset failed\n");
392 		return -ETIMEDOUT;
393 	}
394 
395 	timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
396 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 0x1,
397 				0x0);
398 
399 	/* wait for hardware to report that stream is out of reset */
400 	udelay(3);
401 	do {
402 		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
403 				       sd_offset);
404 		if ((val & 0x1) == 0)
405 			break;
406 	} while (--timeout);
407 	if (timeout == 0) {
408 		dev_err(sdev->dev, "error: timeout waiting for stream reset\n");
409 		return -ETIMEDOUT;
410 	}
411 
412 	if (hstream->posbuf)
413 		*hstream->posbuf = 0;
414 
415 	/* reset BDL address */
416 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
417 			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
418 			  0x0);
419 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
420 			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
421 			  0x0);
422 
423 	/* clear stream status */
424 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
425 				SOF_HDA_CL_DMA_SD_INT_MASK |
426 				SOF_HDA_SD_CTL_DMA_START, 0);
427 
428 	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
429 					    sd_offset, run,
430 					    !(run & dma_start),
431 					    HDA_DSP_REG_POLL_INTERVAL_US,
432 					    HDA_DSP_STREAM_RUN_TIMEOUT);
433 
434 	if (ret < 0) {
435 		dev_err(sdev->dev,
436 			"error: %s: timeout on STREAM_SD_OFFSET read2\n",
437 			__func__);
438 		return ret;
439 	}
440 
441 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
442 				sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
443 				SOF_HDA_CL_DMA_SD_INT_MASK,
444 				SOF_HDA_CL_DMA_SD_INT_MASK);
445 
446 	hstream->frags = 0;
447 
448 	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
449 	if (ret < 0) {
450 		dev_err(sdev->dev, "error: set up of BDL failed\n");
451 		return ret;
452 	}
453 
454 	/* program stream tag to set up stream descriptor for DMA */
455 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
456 				SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
457 				hstream->stream_tag <<
458 				SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);
459 
460 	/* program cyclic buffer length */
461 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
462 			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_CBL,
463 			  hstream->bufsize);
464 
465 	/*
466 	 * Recommended hardware programming sequence for HDAudio DMA format
467 	 *
468 	 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit
469 	 *    for corresponding stream index before the time of writing
470 	 *    format to SDxFMT register.
471 	 * 2. Write SDxFMT
472 	 * 3. Set PPCTL.PROCEN bit for corresponding stream index to
473 	 *    enable decoupled mode
474 	 */
475 
476 	/* couple host and link DMA, disable DSP features */
477 	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
478 				mask, 0);
479 
480 	/* program stream format */
481 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
482 				sd_offset +
483 				SOF_HDA_ADSP_REG_CL_SD_FORMAT,
484 				0xffff, hstream->format_val);
485 
486 	/* decouple host and link DMA, enable DSP features */
487 	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
488 				mask, mask);
489 
490 	/* program last valid index */
491 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
492 				sd_offset + SOF_HDA_ADSP_REG_CL_SD_LVI,
493 				0xffff, (hstream->frags - 1));
494 
495 	/* program BDL address */
496 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
497 			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
498 			  (u32)hstream->bdl.addr);
499 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
500 			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
501 			  upper_32_bits(hstream->bdl.addr));
502 
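	/*
	 * The controller reports each stream's DMA position in the position
	 * buffer; every stream owns an 8-byte slot at posbuf + 8 * index
	 * (see hda_dsp_stream_init()). DPLBASE/DPUBASE only need to be
	 * programmed once, hence the enable check below.
	 */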
503 	/* enable position buffer */
504 	if (!(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
505 				& SOF_HDA_ADSP_DPLBASE_ENABLE)) {
506 		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
507 				  upper_32_bits(bus->posbuf.addr));
508 		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
509 				  (u32)bus->posbuf.addr |
510 				  SOF_HDA_ADSP_DPLBASE_ENABLE);
511 	}
512 
513 	/* set interrupt enable bits */
514 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
515 				SOF_HDA_CL_DMA_SD_INT_MASK,
516 				SOF_HDA_CL_DMA_SD_INT_MASK);
517 
518 	/* read FIFO size */
519 	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
520 		hstream->fifo_size =
521 			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
522 					 sd_offset +
523 					 SOF_HDA_ADSP_REG_CL_SD_FIFOSIZE);
524 		hstream->fifo_size &= 0xffff;
525 		hstream->fifo_size += 1;
526 	} else {
527 		hstream->fifo_size = 0;
528 	}
529 
530 	return ret;
531 }
532 
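/*
 * Re-couple host and link DMA for this stream when the PCM is freed,
 * unless the link DMA channel is still in use (link_locked).
 */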
533 int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
534 			   struct snd_pcm_substream *substream)
535 {
536 	struct hdac_stream *stream = substream->runtime->private_data;
537 	struct hdac_ext_stream *link_dev = container_of(stream,
538 							struct hdac_ext_stream,
539 							hstream);
540 	struct hdac_bus *bus = sof_to_bus(sdev);
541 	u32 mask = 0x1 << stream->index;
542 
543 	spin_lock_irq(&bus->reg_lock);
544 	/* couple host and link DMA if link DMA channel is idle */
545 	if (!link_dev->link_locked)
546 		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
547 					SOF_HDA_REG_PP_PPCTL, mask, 0);
548 	spin_unlock_irq(&bus->reg_lock);
549 
550 	return 0;
551 }
552 
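/*
 * Quick check used to decide whether the stream interrupt should be
 * handled: a read of 0xffffffff typically means the controller registers
 * are not accessible (e.g. the link is in a low-power state), in which
 * case the interrupt is ignored.
 */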
553 bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev)
554 {
555 	struct hdac_bus *bus = sof_to_bus(sdev);
556 	bool ret = false;
557 	u32 status;
558 
559 	/* The function can be called at irq thread, so use spin_lock_irq */
560 	/* This function can be called from an IRQ thread, so use spin_lock_irq */
561 
562 	status = snd_hdac_chip_readl(bus, INTSTS);
563 	dev_vdbg(bus->dev, "stream irq, INTSTS status: 0x%x\n", status);
564 
565 	/* if Register inaccessible, ignore it.*/
566 	/* if the register is inaccessible, ignore it */
567 		ret = true;
568 
569 	spin_unlock_irq(&bus->reg_lock);
570 
571 	return ret;
572 }
573 
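/*
 * Walk the streams flagged in INTSTS: acknowledge their SD_STS bits and,
 * when the firmware does not report positions over IPC, let ALSA know a
 * period has elapsed. Returns true if any stream interrupt was handled.
 */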
574 static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
575 {
576 	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
577 	struct hdac_stream *s;
578 	bool active = false;
579 	u32 sd_status;
580 
581 	list_for_each_entry(s, &bus->stream_list, list) {
582 		if (status & BIT(s->index) && s->opened) {
583 			sd_status = snd_hdac_stream_readb(s, SD_STS);
584 
585 			dev_vdbg(bus->dev, "stream %d status 0x%x\n",
586 				 s->index, sd_status);
587 
588 			snd_hdac_stream_writeb(s, SD_STS, sd_status);
589 
590 			active = true;
591 			if (!s->substream ||
592 			    !s->running ||
593 			    (sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
594 				continue;
595 
596 			/* Inform ALSA only in case not do that with IPC */
597 			/* Inform ALSA only if the position is not reported via IPC */
598 				snd_sof_pcm_period_elapsed(s->substream);
599 		}
600 	}
601 
602 	return active;
603 }
604 
605 irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
606 {
607 	struct snd_sof_dev *sdev = context;
608 	struct hdac_bus *bus = sof_to_bus(sdev);
609 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
610 	u32 rirb_status;
611 #endif
612 	bool active;
613 	u32 status;
614 	int i;
615 
616 	/*
617 	 * Loop 10 times to handle missed interrupts caused by
618 	 * unsolicited responses from the codec
619 	 */
620 	for (i = 0, active = true; i < 10 && active; i++) {
621 		spin_lock_irq(&bus->reg_lock);
622 
623 		status = snd_hdac_chip_readl(bus, INTSTS);
624 
625 		/* check streams */
626 		active = hda_dsp_stream_check(bus, status);
627 
628 		/* check and clear RIRB interrupt */
629 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
630 		if (status & AZX_INT_CTRL_EN) {
631 			rirb_status = snd_hdac_chip_readb(bus, RIRBSTS);
632 			if (rirb_status & RIRB_INT_MASK) {
633 				active = true;
634 				if (rirb_status & RIRB_INT_RESPONSE)
635 					snd_hdac_bus_update_rirb(bus);
636 				snd_hdac_chip_writeb(bus, RIRBSTS,
637 						     RIRB_INT_MASK);
638 			}
639 		}
640 #endif
641 		spin_unlock_irq(&bus->reg_lock);
642 	}
643 
644 	return IRQ_HANDLED;
645 }
646 
647 int hda_dsp_stream_init(struct snd_sof_dev *sdev)
648 {
649 	struct hdac_bus *bus = sof_to_bus(sdev);
650 	struct hdac_ext_stream *stream;
651 	struct hdac_stream *hstream;
652 	struct pci_dev *pci = to_pci_dev(sdev->dev);
653 	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
654 	int sd_offset;
655 	int i, num_playback, num_capture, num_total, ret;
656 	u32 gcap;
657 
658 	gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
659 	dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);
660 
661 	/* get stream count from GCAP */
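	/*
	 * GCAP bits 11:8 give the number of capture (input) streams,
	 * bits 15:12 the number of playback (output) streams.
	 */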
662 	num_capture = (gcap >> 8) & 0x0f;
663 	num_playback = (gcap >> 12) & 0x0f;
664 	num_total = num_playback + num_capture;
665 
666 	dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
667 		num_playback, num_capture);
668 
669 	if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
670 		dev_err(sdev->dev, "error: too many playback streams %d\n",
671 			num_playback);
672 		return -EINVAL;
673 	}
674 
675 	if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
676 		dev_err(sdev->dev, "error: too many capture streams %d\n",
677 			num_capture);
678 		return -EINVAL;
679 	}
680 
681 	/*
682 	 * mem alloc for the position buffer
683 	 * TODO: check position buffer update
684 	 */
685 	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
686 				  SOF_HDA_DPIB_ENTRY_SIZE * num_total,
687 				  &bus->posbuf);
688 	if (ret < 0) {
689 		dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
690 		return -ENOMEM;
691 	}
692 
693 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
694 	/* mem alloc for the CORB/RIRB ringbuffers */
695 	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
696 				  PAGE_SIZE, &bus->rb);
697 	if (ret < 0) {
698 		dev_err(sdev->dev, "error: RB alloc failed\n");
699 		return -ENOMEM;
700 	}
701 #endif
702 
703 	/* create capture streams */
704 	for (i = 0; i < num_capture; i++) {
705 		struct sof_intel_hda_stream *hda_stream;
706 
707 		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
708 					  GFP_KERNEL);
709 		if (!hda_stream)
710 			return -ENOMEM;
711 
712 		hda_stream->sdev = sdev;
713 
714 		stream = &hda_stream->hda_stream;
715 
716 		stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
717 			SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;
718 
719 		stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
720 			SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
721 			SOF_HDA_PPLC_INTERVAL * i;
722 
723 		/* do we support SPIB */
724 		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
725 			stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
726 				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
727 				SOF_HDA_SPIB_SPIB;
728 
729 			stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
730 				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
731 				SOF_HDA_SPIB_MAXFIFO;
732 		}
733 
734 		hstream = &stream->hstream;
735 		hstream->bus = bus;
736 		hstream->sd_int_sta_mask = 1 << i;
737 		hstream->index = i;
738 		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
739 		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
740 		hstream->stream_tag = i + 1;
741 		hstream->opened = false;
742 		hstream->running = false;
743 		hstream->direction = SNDRV_PCM_STREAM_CAPTURE;
744 
745 		/* memory alloc for stream BDL */
746 		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
747 					  HDA_DSP_BDL_SIZE, &hstream->bdl);
748 		if (ret < 0) {
749 			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
750 			return -ENOMEM;
751 		}
752 		hstream->posbuf = (__le32 *)(bus->posbuf.area +
753 			(hstream->index) * 8);
754 
755 		list_add_tail(&hstream->list, &bus->stream_list);
756 	}
757 
758 	/* create playback streams */
759 	for (i = num_capture; i < num_total; i++) {
760 		struct sof_intel_hda_stream *hda_stream;
761 
762 		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
763 					  GFP_KERNEL);
764 		if (!hda_stream)
765 			return -ENOMEM;
766 
767 		hda_stream->sdev = sdev;
768 
769 		stream = &hda_stream->hda_stream;
770 
771 		/* we always have DSP support */
772 		stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
773 			SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;
774 
775 		stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
776 			SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
777 			SOF_HDA_PPLC_INTERVAL * i;
778 
779 		/* do we support SPIB */
780 		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
781 			stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
782 				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
783 				SOF_HDA_SPIB_SPIB;
784 
785 			stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
786 				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
787 				SOF_HDA_SPIB_MAXFIFO;
788 		}
789 
790 		hstream = &stream->hstream;
791 		hstream->bus = bus;
792 		hstream->sd_int_sta_mask = 1 << i;
793 		hstream->index = i;
794 		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
795 		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
796 		hstream->stream_tag = i - num_capture + 1;
797 		hstream->opened = false;
798 		hstream->running = false;
799 		hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;
800 
801 		/* mem alloc for stream BDL */
802 		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
803 					  HDA_DSP_BDL_SIZE, &hstream->bdl);
804 		if (ret < 0) {
805 			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
806 			return -ENOMEM;
807 		}
808 
809 		hstream->posbuf = (__le32 *)(bus->posbuf.area +
810 			(hstream->index) * 8);
811 
812 		list_add_tail(&hstream->list, &bus->stream_list);
813 	}
814 
815 	/* store total stream count (playback + capture) from GCAP */
816 	sof_hda->stream_max = num_total;
817 
818 	return 0;
819 }
820 
821 void hda_dsp_stream_free(struct snd_sof_dev *sdev)
822 {
823 	struct hdac_bus *bus = sof_to_bus(sdev);
824 	struct hdac_stream *s, *_s;
825 	struct hdac_ext_stream *stream;
826 	struct sof_intel_hda_stream *hda_stream;
827 
828 	/* free position buffer */
829 	if (bus->posbuf.area)
830 		snd_dma_free_pages(&bus->posbuf);
831 
832 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
833 	/* free CORB/RIRB ringbuffer */
834 	if (bus->rb.area)
835 		snd_dma_free_pages(&bus->rb);
836 #endif
837 
838 	list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
839 		/* TODO: decouple */
840 
841 		/* free bdl buffer */
842 		if (s->bdl.area)
843 			snd_dma_free_pages(&s->bdl);
844 		list_del(&s->list);
845 		stream = stream_to_hdac_ext_stream(s);
846 		hda_stream = container_of(stream, struct sof_intel_hda_stream,
847 					  hda_stream);
848 		devm_kfree(sdev->dev, hda_stream);
849 	}
850 }
851