// SPDX-License-Identifier: (GPL-2.0 OR BSD-3-Clause)
//
// This file is provided under a dual BSD/GPLv2 license.  When using or
// redistributing this file, you may do so under either license.
//
// Copyright(c) 2018 Intel Corporation. All rights reserved.
//
// Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
//	    Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
//	    Rander Wang <rander.wang@intel.com>
//	    Keyon Jie <yang.jie@linux.intel.com>
//

/*
 * Hardware interface for generic Intel audio DSP HDA IP
 */

#include <linux/pm_runtime.h>
#include <sound/hdaudio_ext.h>
#include <sound/hda_register.h>
#include <sound/sof.h>
#include "../ops.h"
#include "hda.h"

/*
 * set up the BDL entries covering one segment of a stream buffer;
 * returns the offset right after the segment, or a negative error code
 */
static int hda_setup_bdle(struct snd_sof_dev *sdev,
			  struct snd_dma_buffer *dmab,
			  struct hdac_stream *stream,
			  struct sof_intel_dsp_bdl **bdlp,
			  int offset, int size, int ioc)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_dsp_bdl *bdl = *bdlp;

	while (size > 0) {
		dma_addr_t addr;
		int chunk;

		if (stream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
			dev_err(sdev->dev, "error: stream frags exceeded\n");
			return -EINVAL;
		}

		addr = snd_sgbuf_get_addr(dmab, offset);
		/* program BDL addr */
		bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
		bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
		/* program BDL size */
		chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
		/* one BDLE must not cross a 4K boundary */
		if (bus->align_bdle_4k) {
			u32 remain = 0x1000 - (offset & 0xfff);

			if (chunk > remain)
				chunk = remain;
		}
		bdl->size = cpu_to_le32(chunk);
		/* only program IOC when the whole segment is processed */
		size -= chunk;
		bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
		bdl++;
		stream->frags++;
		offset += chunk;

		dev_vdbg(sdev->dev, "bdl, frags:%d, chunk size:0x%x;\n",
			 stream->frags, chunk);
	}

	*bdlp = bdl;
	return offset;
}

/*
 * set up the Buffer Descriptor List (BDL) for host memory transfer
 * The BDL describes the location of the individual buffers and is little endian.
 */
int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
			     struct snd_dma_buffer *dmab,
			     struct hdac_stream *stream)
{
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct sof_intel_dsp_bdl *bdl;
	int i, offset, period_bytes, periods;
	int remain, ioc;

	period_bytes = stream->period_bytes;
	dev_dbg(sdev->dev, "period_bytes:0x%x\n", period_bytes);
	if (!period_bytes)
		period_bytes = stream->bufsize;

	periods = stream->bufsize / period_bytes;

	dev_dbg(sdev->dev, "periods:%d\n", periods);

	remain = stream->bufsize % period_bytes;
	if (remain)
		periods++;

	/* program the initial BDL entries */
	bdl = (struct sof_intel_dsp_bdl *)stream->bdl.area;
	offset = 0;
	stream->frags = 0;

	/*
	 * enable IOC only if the position is not reported via IPC
	 * and a period wakeup is needed
	 */
	ioc = hda->no_ipc_position ?
	      !stream->no_period_wakeup : 0;

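	/*
	 * program one set of BDL entries per period; the trailing partial
	 * period, if any, gets its own entries with IOC left disabled
	 */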
	for (i = 0; i < periods; i++) {
		if (i == (periods - 1) && remain)
			/* set the last small entry */
			offset = hda_setup_bdle(sdev, dmab,
						stream, &bdl, offset,
						remain, 0);
		else
			offset = hda_setup_bdle(sdev, dmab,
						stream, &bdl, offset,
						period_bytes, ioc);
	}

	return offset;
}

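/*
 * SPIB (Software Position In Buffer) lets the driver report the application
 * position in the stream buffer to the hardware: enable or disable the
 * capability for the stream and program the current position value.
 */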
int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
			       struct hdac_ext_stream *stream,
			       int enable, u32 size)
{
	struct hdac_stream *hstream = &stream->hstream;
	u32 mask;

	if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
		dev_err(sdev->dev, "error: address of spib capability is NULL\n");
		return -EINVAL;
	}

	mask = (1 << hstream->index);

	/* enable/disable SPIB for the stream */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
				SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
				enable << hstream->index);

	/* set the SPIB value */
	sof_io_write(sdev, stream->spib_addr, size);

	return 0;
}

/* get next unused stream */
struct hdac_ext_stream *
hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *stream = NULL;
	struct hdac_stream *s;

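	/*
	 * hold the bus reg_lock while walking the stream list so that the
	 * opened flag cannot change underneath us
	 */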
	spin_lock_irq(&bus->reg_lock);

	/* get an unused stream */
	list_for_each_entry(s, &bus->stream_list, list) {
		if (s->direction == direction && !s->opened) {
			stream = stream_to_hdac_ext_stream(s);
			hda_stream = container_of(stream,
						  struct sof_intel_hda_stream,
						  hda_stream);
			/* check if the host DMA channel is reserved */
			if (hda_stream->host_reserved)
				continue;

			s->opened = true;
			break;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* stream found? */
	if (!stream)
		dev_err(sdev->dev, "error: no free %s streams\n",
			direction == SNDRV_PCM_STREAM_PLAYBACK ?
			"playback" : "capture");

	return stream;
}

/* free a stream */
int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *s;

	spin_lock_irq(&bus->reg_lock);

	/* find used stream */
	list_for_each_entry(s, &bus->stream_list, list) {
		if (s->direction == direction &&
		    s->opened && s->stream_tag == stream_tag) {
			s->opened = false;
			spin_unlock_irq(&bus->reg_lock);
			return 0;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	dev_dbg(sdev->dev, "stream_tag %d not opened!\n", stream_tag);
	return -ENODEV;
}

int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
			   struct hdac_ext_stream *stream, int cmd)
{
	struct hdac_stream *hstream = &stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	int ret;
	u32 run;

	/* cmd must be for audio stream */
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
	case SNDRV_PCM_TRIGGER_START:
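		/*
		 * enable the stream interrupt in INTCTL, set the RUN and
		 * interrupt-enable bits in the stream descriptor and wait
		 * until the hardware reports the DMA engine as running
		 */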
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
					1 << hstream->index,
					1 << hstream->index);

		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK);

		ret = snd_sof_dsp_read_poll_timeout(sdev,
					HDA_DSP_HDA_BAR,
					sd_offset, run,
					((run & dma_start) == dma_start),
					HDA_DSP_REG_POLL_INTERVAL_US,
					HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret)
			return ret;

		hstream->running = true;
		break;
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
	case SNDRV_PCM_TRIGGER_STOP:
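		/*
		 * clear the RUN and interrupt-enable bits, wait for the DMA
		 * engine to stop, then clear the stream status and disable
		 * the stream interrupt in INTCTL
		 */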
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);

		ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
						sd_offset, run,
						!(run & dma_start),
						HDA_DSP_REG_POLL_INTERVAL_US,
						HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret)
			return ret;

		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, sd_offset +
				  SOF_HDA_ADSP_REG_CL_SD_STS,
				  SOF_HDA_CL_DMA_SD_INT_MASK);

		hstream->running = false;
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
					1 << hstream->index, 0x0);
		break;
	default:
		dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
		return -EINVAL;
	}

	return 0;
}

/*
 * prepare the common HDA controller register settings, used by both the
 * code loader and normal streams
 */
int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
			     struct hdac_ext_stream *stream,
			     struct snd_dma_buffer *dmab,
			     struct snd_pcm_hw_params *params)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *hstream;
	int ret, timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	u32 val, mask;
	int sd_offset;
	u32 run;

	if (!stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	hstream = &stream->hstream;
	sd_offset = SOF_STREAM_SD_OFFSET(hstream);

	/* decouple host and link DMA */
	mask = 0x1 << hstream->index;
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	if (!dmab) {
		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
		return -ENODEV;
	}

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret)
		return ret;

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* enter stream reset: set SRST and wait for the hardware to acknowledge */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 0x1,
				0x1);
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       sd_offset);
		if (val & 0x1)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "error: stream reset failed\n");
		return -ETIMEDOUT;
	}

	timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 0x1,
				0x0);

	/* wait for hardware to report that stream is out of reset */
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       sd_offset);
		if ((val & 0x1) == 0)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "error: timeout waiting for stream reset\n");
		return -ETIMEDOUT;
	}

	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  0x0);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret)
		return ret;

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	hstream->frags = 0;

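	/*
	 * program the BDL entries for the whole cyclic buffer;
	 * hda_dsp_stream_setup_bdl() returns the number of bytes covered
	 * or a negative error code
	 */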
	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program stream tag to set up stream descriptor for DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
				hstream->stream_tag <<
				SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_CBL,
			  hstream->bufsize);

	/*
	 * Recommended hardware programming sequence for HDAudio DMA format:
	 *
	 * 1. Put the DMA into coupled mode by clearing the PPCTL.PROCEN bit
	 *    for the corresponding stream index before writing the format
	 *    to the SDxFMT register.
	 * 2. Write SDxFMT.
	 * 3. Set the PPCTL.PROCEN bit for the corresponding stream index to
	 *    enable decoupled mode.
	 */

	/* couple host and link DMA, disable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, 0);

	/* program stream format */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset +
				SOF_HDA_ADSP_REG_CL_SD_FORMAT,
				0xffff, hstream->format_val);

	/* decouple host and link DMA, enable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* enable position buffer if it has not been enabled yet */
	if (!(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
				& SOF_HDA_ADSP_DPLBASE_ENABLE)) {
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
				  upper_32_bits(bus->posbuf.addr));
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
				  (u32)bus->posbuf.addr |
				  SOF_HDA_ADSP_DPLBASE_ENABLE);
	}

	/* set interrupt enable bits */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* read FIFO size */
	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
		hstream->fifo_size =
			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 sd_offset +
					 SOF_HDA_ADSP_REG_CL_SD_FIFOSIZE);
		hstream->fifo_size &= 0xffff;
		hstream->fifo_size += 1;
	} else {
		hstream->fifo_size = 0;
	}

	return ret;
}

int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
			   struct snd_pcm_substream *substream)
{
	struct hdac_stream *stream = substream->runtime->private_data;
	struct hdac_ext_stream *link_dev = container_of(stream,
							struct hdac_ext_stream,
							hstream);
	struct hdac_bus *bus = sof_to_bus(sdev);
	u32 mask = 0x1 << stream->index;

	spin_lock_irq(&bus->reg_lock);
	/* couple host and link DMA if link DMA channel is idle */
	if (!link_dev->link_locked)
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
					SOF_HDA_REG_PP_PPCTL, mask, 0);
	spin_unlock_irq(&bus->reg_lock);

	return 0;
}

irqreturn_t hda_dsp_stream_interrupt(int irq, void *context)
{
	struct hdac_bus *bus = context;
	int ret = IRQ_WAKE_THREAD;
	u32 status;

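	/*
	 * the real stream handling is done in the threaded handler; here we
	 * only check that the controller registers are still accessible
	 * before waking it up
	 */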
	spin_lock(&bus->reg_lock);

	status = snd_hdac_chip_readl(bus, INTSTS);
	dev_vdbg(bus->dev, "stream irq, INTSTS status: 0x%x\n", status);

	/* register inaccessible, ignore it */
	if (status == 0xffffffff)
		ret = IRQ_NONE;

	spin_unlock(&bus->reg_lock);

	return ret;
}

static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
{
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	struct hdac_stream *s;
	bool active = false;
	u32 sd_status;

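	/*
	 * acknowledge the per-stream status bits and return whether any
	 * opened stream had an interrupt pending
	 */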
	list_for_each_entry(s, &bus->stream_list, list) {
		if (status & BIT(s->index) && s->opened) {
			sd_status = snd_hdac_stream_readb(s, SD_STS);

			dev_vdbg(bus->dev, "stream %d status 0x%x\n",
				 s->index, sd_status);

			snd_hdac_stream_writeb(s, SD_STS, sd_status);

			active = true;
			if (!s->substream ||
			    !s->running ||
			    (sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
				continue;

			/* inform ALSA only if the position is not reported via IPC */
			if (sof_hda->no_ipc_position)
				snd_sof_pcm_period_elapsed(s->substream);
		}
	}

	return active;
}

irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
{
	struct hdac_bus *bus = context;
#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	u32 rirb_status;
#endif
	bool active;
	u32 status;
	int i;

	/*
	 * Loop 10 times to handle missed interrupts caused by
	 * unsolicited responses from the codec
	 */
	for (i = 0, active = true; i < 10 && active; i++) {
		spin_lock_irq(&bus->reg_lock);

		status = snd_hdac_chip_readl(bus, INTSTS);

		/* check streams */
		active = hda_dsp_stream_check(bus, status);

		/* check and clear RIRB interrupt */
#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
		if (status & AZX_INT_CTRL_EN) {
			rirb_status = snd_hdac_chip_readb(bus, RIRBSTS);
			if (rirb_status & RIRB_INT_MASK) {
				active = true;
				if (rirb_status & RIRB_INT_RESPONSE)
					snd_hdac_bus_update_rirb(bus);
				snd_hdac_chip_writeb(bus, RIRBSTS,
						     RIRB_INT_MASK);
			}
		}
#endif
		spin_unlock_irq(&bus->reg_lock);
	}

	return IRQ_HANDLED;
}

int hda_dsp_stream_init(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_ext_stream *stream;
	struct hdac_stream *hstream;
	struct pci_dev *pci = to_pci_dev(sdev->dev);
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	int sd_offset;
	int i, num_playback, num_capture, num_total, ret;
	u32 gcap;

	gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
	dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);

	/* get stream counts from GCAP: bits 8..11 input (capture), bits 12..15 output (playback) */
	num_capture = (gcap >> 8) & 0x0f;
	num_playback = (gcap >> 12) & 0x0f;
	num_total = num_playback + num_capture;

	dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
		num_playback, num_capture);

	if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
		dev_err(sdev->dev, "error: too many playback streams %d\n",
			num_playback);
		return -EINVAL;
	}

	if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
		dev_err(sdev->dev, "error: too many capture streams %d\n",
			num_capture);
		return -EINVAL;
	}

	/*
	 * mem alloc for the position buffer
	 * TODO: check position buffer update
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  SOF_HDA_DPIB_ENTRY_SIZE * num_total,
				  &bus->posbuf);
	if (ret < 0) {
		dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
		return -ENOMEM;
	}

#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	/* mem alloc for the CORB/RIRB ringbuffers */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  PAGE_SIZE, &bus->rb);
	if (ret < 0) {
		dev_err(sdev->dev, "error: RB alloc failed\n");
		return -ENOMEM;
	}
#endif

	/* create capture streams */
	for (i = 0; i < num_capture; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		stream = &hda_stream->hda_stream;

		stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

		stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
			SOF_HDA_PPLC_INTERVAL * i;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream = &stream->hstream;
		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->stream_tag = i + 1;
		hstream->opened = false;
		hstream->running = false;
		hstream->direction = SNDRV_PCM_STREAM_CAPTURE;

		/* memory alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}
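		/* each stream gets an 8-byte DMA position (DPIB) entry in the shared position buffer */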
		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* create playback streams */
	for (i = num_capture; i < num_total; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		stream = &hda_stream->hda_stream;

		/* we always have DSP support */
		stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

		stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
			SOF_HDA_PPLC_INTERVAL * i;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream = &stream->hstream;
		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->stream_tag = i - num_capture + 1;
		hstream->opened = false;
		hstream->running = false;
		hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;

		/* mem alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}

		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* store total stream count (playback + capture) from GCAP */
	sof_hda->stream_max = num_total;

	return 0;
}

void hda_dsp_stream_free(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *s, *_s;
	struct hdac_ext_stream *stream;
	struct sof_intel_hda_stream *hda_stream;

	/* free position buffer */
	if (bus->posbuf.area)
		snd_dma_free_pages(&bus->posbuf);

#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	/* free CORB/RIRB ringbuffer */
	if (bus->rb.area)
		snd_dma_free_pages(&bus->rb);
#endif

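	/* release each stream: free its BDL buffer and the containing sof_intel_hda_stream */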
	list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
		/* TODO: decouple */

		/* free bdl buffer */
		if (s->bdl.area)
			snd_dma_free_pages(&s->bdl);
		list_del(&s->list);
		stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(stream, struct sof_intel_hda_stream,
					  hda_stream);
		devm_kfree(sdev->dev, hda_stream);
	}
}
801