// SPDX-License-Identifier: (GPL-2.0 OR BSD-3-Clause)
//
// This file is provided under a dual BSD/GPLv2 license.  When using or
// redistributing this file, you may do so under either license.
//
// Copyright(c) 2018 Intel Corporation. All rights reserved.
//
// Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
//	    Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
//	    Rander Wang <rander.wang@intel.com>
//	    Keyon Jie <yang.jie@linux.intel.com>
//

/*
 * Hardware interface for generic Intel audio DSP HDA IP
 */

#include <linux/pm_runtime.h>
#include <sound/hdaudio_ext.h>
#include <sound/hda_register.h>
#include <sound/sof.h>
#include "../ops.h"
#include "hda.h"

/*
 * set up one of the BDL entries for a stream
 */
static int hda_setup_bdle(struct snd_sof_dev *sdev,
			  struct snd_dma_buffer *dmab,
			  struct hdac_stream *stream,
			  struct sof_intel_dsp_bdl **bdlp,
			  int offset, int size, int ioc)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_dsp_bdl *bdl = *bdlp;

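	/*
	 * Walk the DMA buffer and emit one BDL entry per physically
	 * contiguous chunk. Entries are little endian; a chunk is split
	 * where needed so that no entry crosses a 4K boundary, and IOC is
	 * set only on the entry that completes the segment.
	 */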
	while (size > 0) {
		dma_addr_t addr;
		int chunk;

		if (stream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
			dev_err(sdev->dev, "error: stream frags exceeded\n");
			return -EINVAL;
		}

		addr = snd_sgbuf_get_addr(dmab, offset);
		/* program BDL addr */
		bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
		bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
		/* program BDL size */
		chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
		/* one BDLE should not cross 4K boundary */
		if (bus->align_bdle_4k) {
			u32 remain = 0x1000 - (offset & 0xfff);

			if (chunk > remain)
				chunk = remain;
		}
		bdl->size = cpu_to_le32(chunk);
		/* only program IOC when the whole segment is processed */
		size -= chunk;
		bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
		bdl++;
		stream->frags++;
		offset += chunk;

		dev_vdbg(sdev->dev, "bdl, frags:%d, chunk size:0x%x;\n",
			 stream->frags, chunk);
	}

	*bdlp = bdl;
	return offset;
}

/*
 * set up Buffer Descriptor List (BDL) for host memory transfer
 * BDL describes the location of the individual buffers and is little endian.
 */
int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
			     struct snd_dma_buffer *dmab,
			     struct hdac_stream *stream)
{
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct sof_intel_dsp_bdl *bdl;
	int i, offset, period_bytes, periods;
	int remain, ioc;

	period_bytes = stream->period_bytes;
	dev_dbg(sdev->dev, "period_bytes:0x%x\n", period_bytes);
	if (!period_bytes)
		period_bytes = stream->bufsize;

	periods = stream->bufsize / period_bytes;

	dev_dbg(sdev->dev, "periods:%d\n", periods);

	remain = stream->bufsize % period_bytes;
	if (remain)
		periods++;

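	/*
	 * Example with hypothetical values: bufsize 0x18400 and
	 * period_bytes 0x8000 give periods = 3 and remain = 0x400, so a
	 * fourth, shorter BDL segment is added to cover the tail.
	 */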
	/* program the initial BDL entries */
	bdl = (struct sof_intel_dsp_bdl *)stream->bdl.area;
	offset = 0;
	stream->frags = 0;

	/*
	 * set IOC only if position IPC is not used and
	 * period wakeups are needed.
	 */
	ioc = hda->no_ipc_position ?
	      !stream->no_period_wakeup : 0;

	for (i = 0; i < periods; i++) {
		if (i == (periods - 1) && remain)
			/* set the last small entry */
			offset = hda_setup_bdle(sdev, dmab,
						stream, &bdl, offset,
						remain, 0);
		else
			offset = hda_setup_bdle(sdev, dmab,
						stream, &bdl, offset,
						period_bytes, ioc);
	}

	return offset;
}

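/*
 * Program the Software Position In Buffer (SPIB) register for a stream.
 * When the per-stream enable bit in SPBFCCTL is set, the SPIB value tells
 * the controller how much valid data is in the buffer, so the DMA engine
 * should not run past that position.
 */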
int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
			       struct hdac_ext_stream *stream,
			       int enable, u32 size)
{
	struct hdac_stream *hstream = &stream->hstream;
	u32 mask;

	if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
		dev_err(sdev->dev, "error: address of spib capability is NULL\n");
		return -EINVAL;
	}

	mask = (1 << hstream->index);

	/* enable/disable SPIB for the stream */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
				SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
				enable << hstream->index);

	/* set the SPIB value */
	sof_io_write(sdev, stream->spib_addr, size);

	return 0;
}

/* get next unused stream */
struct hdac_ext_stream *
hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *stream = NULL;
	struct hdac_stream *s;

	spin_lock_irq(&bus->reg_lock);

	/* get an unused stream */
	list_for_each_entry(s, &bus->stream_list, list) {
		if (s->direction == direction && !s->opened) {
			stream = stream_to_hdac_ext_stream(s);
			hda_stream = container_of(stream,
						  struct sof_intel_hda_stream,
						  hda_stream);
			/* check if the host DMA channel is reserved */
			if (hda_stream->host_reserved)
				continue;

			s->opened = true;
			break;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* stream found? */
	if (!stream)
		dev_err(sdev->dev, "error: no free %s streams\n",
			direction == SNDRV_PCM_STREAM_PLAYBACK ?
			"playback" : "capture");

	/*
	 * Disable DMI Link L1 entry when capture stream is opened.
	 * Workaround to address a known issue with host DMA that results
	 * in xruns during pause/release in capture scenarios.
	 */
	if (!IS_ENABLED(CONFIG_SND_SOC_SOF_HDA_ALWAYS_ENABLE_DMI_L1))
		if (stream && direction == SNDRV_PCM_STREAM_CAPTURE)
			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
						HDA_VS_INTEL_EM2,
						HDA_VS_INTEL_EM2_L1SEN, 0);

	return stream;
}

/* free a stream */
int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *s;
	bool active_capture_stream = false;
	bool found = false;

	spin_lock_irq(&bus->reg_lock);

	/*
	 * close stream matching the stream tag
	 * and check if there are any open capture streams.
	 */
	list_for_each_entry(s, &bus->stream_list, list) {
		if (!s->opened)
			continue;

		if (s->direction == direction && s->stream_tag == stream_tag) {
			s->opened = false;
			found = true;
		} else if (s->direction == SNDRV_PCM_STREAM_CAPTURE) {
			active_capture_stream = true;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* Enable DMI L1 entry if there are no capture streams open */
	if (!IS_ENABLED(CONFIG_SND_SOC_SOF_HDA_ALWAYS_ENABLE_DMI_L1))
		if (!active_capture_stream)
			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
						HDA_VS_INTEL_EM2,
						HDA_VS_INTEL_EM2_L1SEN,
						HDA_VS_INTEL_EM2_L1SEN);

	if (!found) {
		dev_dbg(sdev->dev, "stream_tag %d not opened!\n", stream_tag);
		return -ENODEV;
	}

	return 0;
}

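/*
 * Start or stop the stream DMA. On start, enable the stream interrupt bit
 * in INTCTL, set RUN and the interrupt-enable bits in SD_CTL and poll
 * until RUN reads back as set; on stop, clear those bits, poll until RUN
 * clears, then acknowledge any pending status bits in SD_STS.
 */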
int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
			   struct hdac_ext_stream *stream, int cmd)
{
	struct hdac_stream *hstream = &stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	int ret;
	u32 run;

	/* cmd must be for audio stream */
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
	case SNDRV_PCM_TRIGGER_START:
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
					1 << hstream->index,
					1 << hstream->index);

		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK);

		ret = snd_sof_dsp_read_poll_timeout(sdev,
					HDA_DSP_HDA_BAR,
					sd_offset, run,
					((run & dma_start) == dma_start),
					HDA_DSP_REG_POLL_INTERVAL_US,
					HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret < 0) {
			dev_err(sdev->dev,
				"error: %s: cmd %d: timeout on STREAM_SD_OFFSET read\n",
				__func__, cmd);
			return ret;
		}

		hstream->running = true;
		break;
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
	case SNDRV_PCM_TRIGGER_STOP:
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);

		ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
						sd_offset, run,
						!(run & dma_start),
						HDA_DSP_REG_POLL_INTERVAL_US,
						HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret < 0) {
			dev_err(sdev->dev,
				"error: %s: cmd %d: timeout on STREAM_SD_OFFSET read\n",
				__func__, cmd);
			return ret;
		}

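		/* clear any pending interrupt status bits in SD_STS (write-1-to-clear) */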
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, sd_offset +
				  SOF_HDA_ADSP_REG_CL_SD_STS,
				  SOF_HDA_CL_DMA_SD_INT_MASK);

		hstream->running = false;
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
					1 << hstream->index, 0x0);
		break;
	default:
		dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
		return -EINVAL;
	}

	return 0;
}

/*
 * prepare common hdac register settings, for both the code loader
 * and normal streams.
 */
int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
			     struct hdac_ext_stream *stream,
			     struct snd_dma_buffer *dmab,
			     struct snd_pcm_hw_params *params)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *hstream;
	int sd_offset;
	int ret, timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	u32 val, mask;
	u32 run;

	if (!stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	if (!dmab) {
		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
		return -ENODEV;
	}

	hstream = &stream->hstream;
	sd_offset = SOF_STREAM_SD_OFFSET(hstream);

	/* decouple host and link DMA */
	mask = 0x1 << hstream->index;
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		dev_err(sdev->dev,
			"error: %s: timeout on STREAM_SD_OFFSET read1\n",
			__func__);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

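	/*
	 * Stream reset handshake: assert SD_CTL.SRST, wait for the bit to
	 * read back as set, then clear it and wait for it to read back as
	 * zero before reprogramming the descriptor.
	 */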
	/* stream reset */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 0x1,
				0x1);
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       sd_offset);
		if (val & 0x1)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "error: stream reset failed\n");
		return -ETIMEDOUT;
	}

	timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 0x1,
				0x0);

	/* wait for hardware to report that stream is out of reset */
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       sd_offset);
		if ((val & 0x1) == 0)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "error: timeout waiting for stream reset\n");
		return -ETIMEDOUT;
	}

	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  0x0);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		dev_err(sdev->dev,
			"error: %s: timeout on STREAM_SD_OFFSET read2\n",
			__func__);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program stream tag to set up stream descriptor for DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
				hstream->stream_tag <<
				SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_CBL,
			  hstream->bufsize);

	/*
	 * Recommended hardware programming sequence for HDAudio DMA format
	 *
	 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit
	 *    for the corresponding stream index before writing the
	 *    format to the SDxFMT register.
	 * 2. Write SDxFMT.
	 * 3. Set PPCTL.PROCEN bit for the corresponding stream index to
	 *    re-enable decoupled mode.
	 */

	/* couple host and link DMA, disable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, 0);

	/* program stream format */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset +
				SOF_HDA_ADSP_REG_CL_SD_FORMAT,
				0xffff, hstream->format_val);

	/* decouple host and link DMA, enable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* program last valid index, i.e. the number of BDL entries - 1 */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* enable position buffer */
	if (!(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
				& SOF_HDA_ADSP_DPLBASE_ENABLE)) {
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
				  upper_32_bits(bus->posbuf.addr));
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
				  (u32)bus->posbuf.addr |
				  SOF_HDA_ADSP_DPLBASE_ENABLE);
	}

	/* set interrupt enable bits */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* read FIFO size */
	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
		hstream->fifo_size =
			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 sd_offset +
					 SOF_HDA_ADSP_REG_CL_SD_FIFOSIZE);
		hstream->fifo_size &= 0xffff;
		hstream->fifo_size += 1;
	} else {
		hstream->fifo_size = 0;
	}

	return ret;
}

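/*
 * Re-couple the host and link DMA for a stream once the host side is
 * released, provided the link DMA channel is not still in use.
 */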
int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
			   struct snd_pcm_substream *substream)
{
	struct hdac_stream *stream = substream->runtime->private_data;
	struct hdac_ext_stream *link_dev = container_of(stream,
							struct hdac_ext_stream,
							hstream);
	struct hdac_bus *bus = sof_to_bus(sdev);
	u32 mask = 0x1 << stream->index;

	spin_lock_irq(&bus->reg_lock);
	/* couple host and link DMA if link DMA channel is idle */
	if (!link_dev->link_locked)
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
					SOF_HDA_REG_PP_PPCTL, mask, 0);
	spin_unlock_irq(&bus->reg_lock);

	return 0;
}

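/*
 * Top half of the stream interrupt handler: sample INTSTS under the bus
 * register lock and defer the real work to the threaded handler. A value
 * of all ones means the registers are not accessible, so the interrupt is
 * not ours.
 */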
irqreturn_t hda_dsp_stream_interrupt(int irq, void *context)
{
	struct hdac_bus *bus = context;
	int ret = IRQ_WAKE_THREAD;
	u32 status;

	spin_lock(&bus->reg_lock);

	status = snd_hdac_chip_readl(bus, INTSTS);
	dev_vdbg(bus->dev, "stream irq, INTSTS status: 0x%x\n", status);

	/* Register inaccessible, ignore it. */
	if (status == 0xffffffff)
		ret = IRQ_NONE;

	spin_unlock(&bus->reg_lock);

	return ret;
}

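/*
 * Check every opened stream whose bit is set in INTSTS, acknowledge its
 * SD_STS bits, and report a period elapsed to ALSA when the
 * buffer-completion interrupt fired and the host (rather than IPC) owns
 * position updates. Returns true if any stream had work to do.
 */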
static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
{
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	struct hdac_stream *s;
	bool active = false;
	u32 sd_status;

	list_for_each_entry(s, &bus->stream_list, list) {
		if (status & BIT(s->index) && s->opened) {
			sd_status = snd_hdac_stream_readb(s, SD_STS);

			dev_vdbg(bus->dev, "stream %d status 0x%x\n",
				 s->index, sd_status);

			snd_hdac_stream_writeb(s, SD_STS, sd_status);

			active = true;
			if (!s->substream ||
			    !s->running ||
			    (sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
				continue;

			/* inform ALSA only if the position is not reported via IPC */
			if (sof_hda->no_ipc_position)
				snd_sof_pcm_period_elapsed(s->substream);
		}
	}

	return active;
}

irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
{
	struct hdac_bus *bus = context;
#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	u32 rirb_status;
#endif
	bool active;
	u32 status;
	int i;

	/*
	 * Loop 10 times to handle missed interrupts caused by
	 * unsolicited responses from the codec
	 */
	for (i = 0, active = true; i < 10 && active; i++) {
		spin_lock_irq(&bus->reg_lock);

		status = snd_hdac_chip_readl(bus, INTSTS);

		/* check streams */
		active = hda_dsp_stream_check(bus, status);

		/* check and clear RIRB interrupt */
#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
		if (status & AZX_INT_CTRL_EN) {
			rirb_status = snd_hdac_chip_readb(bus, RIRBSTS);
			if (rirb_status & RIRB_INT_MASK) {
				active = true;
				if (rirb_status & RIRB_INT_RESPONSE)
					snd_hdac_bus_update_rirb(bus);
				snd_hdac_chip_writeb(bus, RIRBSTS,
						     RIRB_INT_MASK);
			}
		}
#endif
		spin_unlock_irq(&bus->reg_lock);
	}

	return IRQ_HANDLED;
}

int hda_dsp_stream_init(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_ext_stream *stream;
	struct hdac_stream *hstream;
	struct pci_dev *pci = to_pci_dev(sdev->dev);
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	int sd_offset;
	int i, num_playback, num_capture, num_total, ret;
	u32 gcap;

	gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
	dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);

	/*
	 * get stream counts from GCAP: bits 11:8 hold the number of input
	 * (capture) streams and bits 15:12 the number of output (playback)
	 * streams.
	 */
	num_capture = (gcap >> 8) & 0x0f;
	num_playback = (gcap >> 12) & 0x0f;
	num_total = num_playback + num_capture;
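	/*
	 * Example with a hypothetical value: GCAP = 0x4401 decodes to 4
	 * capture and 4 playback streams, i.e. 8 streams in total.
	 */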

	dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
		num_playback, num_capture);

	if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
		dev_err(sdev->dev, "error: too many playback streams %d\n",
			num_playback);
		return -EINVAL;
	}

	if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
		dev_err(sdev->dev, "error: too many capture streams %d\n",
			num_capture);
		return -EINVAL;
	}

	/*
	 * mem alloc for the position buffer
	 * TODO: check position buffer update
	 */
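	/*
	 * One SOF_HDA_DPIB_ENTRY_SIZE slot is allocated per stream; each
	 * hstream->posbuf pointer below refers to its own 8-byte entry in
	 * this buffer.
	 */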
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  SOF_HDA_DPIB_ENTRY_SIZE * num_total,
				  &bus->posbuf);
	if (ret < 0) {
		dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
		return -ENOMEM;
	}

#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	/* mem alloc for the CORB/RIRB ringbuffers */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  PAGE_SIZE, &bus->rb);
	if (ret < 0) {
		dev_err(sdev->dev, "error: RB alloc failed\n");
		return -ENOMEM;
	}
#endif

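	/*
	 * Streams are created capture-first and then playback, matching the
	 * layout of the stream descriptor registers (input streams first).
	 * Stream tags are 1-based and numbered independently per direction,
	 * and every stream gets its own BDL buffer plus a slot in the shared
	 * position buffer.
	 */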
	/* create capture streams */
	for (i = 0; i < num_capture; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		stream = &hda_stream->hda_stream;

		stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

		stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
			SOF_HDA_PPLC_INTERVAL * i;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream = &stream->hstream;
		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->stream_tag = i + 1;
		hstream->opened = false;
		hstream->running = false;
		hstream->direction = SNDRV_PCM_STREAM_CAPTURE;

		/* memory alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}
		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* create playback streams */
	for (i = num_capture; i < num_total; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		stream = &hda_stream->hda_stream;

		/* we always have DSP support */
		stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

		stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
			SOF_HDA_PPLC_INTERVAL * i;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream = &stream->hstream;
		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->stream_tag = i - num_capture + 1;
		hstream->opened = false;
		hstream->running = false;
		hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;

		/* mem alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}

		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* store total stream count (playback + capture) from GCAP */
	sof_hda->stream_max = num_total;

	return 0;
}

void hda_dsp_stream_free(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *s, *_s;
	struct hdac_ext_stream *stream;
	struct sof_intel_hda_stream *hda_stream;

	/* free position buffer */
	if (bus->posbuf.area)
		snd_dma_free_pages(&bus->posbuf);

#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	/* free CORB/RIRB ringbuffer */
	if (bus->rb.area)
		snd_dma_free_pages(&bus->rb);
#endif

	list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
		/* TODO: decouple */

		/* free bdl buffer */
		if (s->bdl.area)
			snd_dma_free_pages(&s->bdl);
		list_del(&s->list);
		stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(stream, struct sof_intel_hda_stream,
					  hda_stream);
		devm_kfree(sdev->dev, hda_stream);
	}
}