xref: /openbmc/linux/sound/soc/sof/intel/hda-stream.c (revision 14474950)
1 // SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
2 //
3 // This file is provided under a dual BSD/GPLv2 license.  When using or
4 // redistributing this file, you may do so under either license.
5 //
6 // Copyright(c) 2018 Intel Corporation. All rights reserved.
7 //
8 // Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
9 //	    Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
10 //	    Rander Wang <rander.wang@intel.com>
11 //	    Keyon Jie <yang.jie@linux.intel.com>
12 //
13 
14 /*
15  * Hardware interface for generic Intel audio DSP HDA IP
16  */
17 
18 #include <linux/pm_runtime.h>
19 #include <sound/hdaudio_ext.h>
20 #include <sound/hda_register.h>
21 #include <sound/sof.h>
22 #include "../ops.h"
23 #include "../sof-audio.h"
24 #include "hda.h"
25 
26 /*
27  * set up one of the BDL entries for a stream
28  */
29 static int hda_setup_bdle(struct snd_sof_dev *sdev,
30 			  struct snd_dma_buffer *dmab,
31 			  struct hdac_stream *stream,
32 			  struct sof_intel_dsp_bdl **bdlp,
33 			  int offset, int size, int ioc)
34 {
35 	struct hdac_bus *bus = sof_to_bus(sdev);
36 	struct sof_intel_dsp_bdl *bdl = *bdlp;
37 
38 	while (size > 0) {
39 		dma_addr_t addr;
40 		int chunk;
41 
42 		if (stream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
43 			dev_err(sdev->dev, "error: stream frags exceeded\n");
44 			return -EINVAL;
45 		}
46 
47 		addr = snd_sgbuf_get_addr(dmab, offset);
48 		/* program BDL addr */
49 		bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
50 		bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
51 		/* program BDL size */
52 		chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
53 		/* one BDLE should not cross 4K boundary */
54 		if (bus->align_bdle_4k) {
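			/* bytes left in the current 4 KiB page */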
55 			u32 remain = 0x1000 - (offset & 0xfff);
56 
57 			if (chunk > remain)
58 				chunk = remain;
59 		}
60 		bdl->size = cpu_to_le32(chunk);
61 		/* only program IOC when the whole segment is processed */
62 		size -= chunk;
63 		bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
64 		bdl++;
65 		stream->frags++;
66 		offset += chunk;
67 
68 		dev_vdbg(sdev->dev, "bdl, frags:%d, chunk size:0x%x;\n",
69 			 stream->frags, chunk);
70 	}
71 
72 	*bdlp = bdl;
73 	return offset;
74 }
75 
76 /*
77  * set up Buffer Descriptor List (BDL) for host memory transfer
78  * BDL describes the location of the individual buffers and is little endian.
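 *
 * Each struct sof_intel_dsp_bdl entry programmed below holds a 64-bit
 * chunk address (addr_l/addr_h), the chunk size in bytes and an
 * interrupt-on-completion flag set only on the last chunk of a period.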
79  */
80 int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
81 			     struct snd_dma_buffer *dmab,
82 			     struct hdac_stream *stream)
83 {
84 	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
85 	struct sof_intel_dsp_bdl *bdl;
86 	int i, offset, period_bytes, periods;
87 	int remain, ioc;
88 
89 	period_bytes = stream->period_bytes;
90 	dev_dbg(sdev->dev, "period_bytes:0x%x\n", period_bytes);
91 	if (!period_bytes)
92 		period_bytes = stream->bufsize;
93 
94 	periods = stream->bufsize / period_bytes;
95 
96 	dev_dbg(sdev->dev, "periods:%d\n", periods);
97 
98 	remain = stream->bufsize % period_bytes;
99 	if (remain)
100 		periods++;
101 
102 	/* program the initial BDL entries */
103 	bdl = (struct sof_intel_dsp_bdl *)stream->bdl.area;
104 	offset = 0;
105 	stream->frags = 0;
106 
107 	/*
108 	 * set IOC if we don't use position IPC
109 	 * and a period wakeup is needed.
110 	 */
111 	ioc = hda->no_ipc_position ?
112 	      !stream->no_period_wakeup : 0;
113 
114 	for (i = 0; i < periods; i++) {
115 		if (i == (periods - 1) && remain)
116 			/* set the last small entry */
117 			offset = hda_setup_bdle(sdev, dmab,
118 						stream, &bdl, offset,
119 						remain, 0);
120 		else
121 			offset = hda_setup_bdle(sdev, dmab,
122 						stream, &bdl, offset,
123 						period_bytes, ioc);
124 	}
125 
126 	return offset;
127 }
128 
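/*
 * SPIB (Software Position In Buffer) support: enable or disable the
 * capability for this stream via SPBFCCTL and write the given byte
 * count to the stream's SPIB register.
 */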
129 int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
130 			       struct hdac_ext_stream *stream,
131 			       int enable, u32 size)
132 {
133 	struct hdac_stream *hstream = &stream->hstream;
134 	u32 mask;
135 
136 	if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
137 		dev_err(sdev->dev, "error: address of spib capability is NULL\n");
138 		return -EINVAL;
139 	}
140 
141 	mask = (1 << hstream->index);
142 
143 	/* enable/disable SPIB for the stream */
144 	snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
145 				SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
146 				enable << hstream->index);
147 
148 	/* set the SPIB value */
149 	sof_io_write(sdev, stream->spib_addr, size);
150 
151 	return 0;
152 }
153 
154 /* get next unused stream */
155 struct hdac_ext_stream *
156 hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction)
157 {
158 	struct hdac_bus *bus = sof_to_bus(sdev);
159 	struct sof_intel_hda_stream *hda_stream;
160 	struct hdac_ext_stream *stream = NULL;
161 	struct hdac_stream *s;
162 
163 	spin_lock_irq(&bus->reg_lock);
164 
165 	/* get an unused stream */
166 	list_for_each_entry(s, &bus->stream_list, list) {
167 		if (s->direction == direction && !s->opened) {
168 			stream = stream_to_hdac_ext_stream(s);
169 			hda_stream = container_of(stream,
170 						  struct sof_intel_hda_stream,
171 						  hda_stream);
172 			/* check if the host DMA channel is reserved */
173 			if (hda_stream->host_reserved)
174 				continue;
175 
176 			s->opened = true;
177 			break;
178 		}
179 	}
180 
181 	spin_unlock_irq(&bus->reg_lock);
182 
183 	/* stream found? */
184 	if (!stream)
185 		dev_err(sdev->dev, "error: no free %s streams\n",
186 			direction == SNDRV_PCM_STREAM_PLAYBACK ?
187 			"playback" : "capture");
188 
189 	/*
190 	 * Disable DMI Link L1 entry when a capture stream is opened.
191 	 * Workaround to address a known issue with host DMA that results
192 	 * in xruns during pause/release in capture scenarios.
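	 *
	 * L1SEN (L1 state enable) in the vendor-specific EM2 register gates
	 * DMI L1 entry: it is cleared here and set again in
	 * hda_dsp_stream_put() once no capture streams remain open.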
193 	 */
194 	if (!IS_ENABLED(CONFIG_SND_SOC_SOF_HDA_ALWAYS_ENABLE_DMI_L1))
195 		if (stream && direction == SNDRV_PCM_STREAM_CAPTURE)
196 			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
197 						HDA_VS_INTEL_EM2,
198 						HDA_VS_INTEL_EM2_L1SEN, 0);
199 
200 	return stream;
201 }
202 
203 /* free a stream */
204 int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
205 {
206 	struct hdac_bus *bus = sof_to_bus(sdev);
207 	struct hdac_stream *s;
208 	bool active_capture_stream = false;
209 	bool found = false;
210 
211 	spin_lock_irq(&bus->reg_lock);
212 
213 	/*
214 	 * close stream matching the stream tag
215 	 * and check if there are any open capture streams.
216 	 */
217 	list_for_each_entry(s, &bus->stream_list, list) {
218 		if (!s->opened)
219 			continue;
220 
221 		if (s->direction == direction && s->stream_tag == stream_tag) {
222 			s->opened = false;
223 			found = true;
224 		} else if (s->direction == SNDRV_PCM_STREAM_CAPTURE) {
225 			active_capture_stream = true;
226 		}
227 	}
228 
229 	spin_unlock_irq(&bus->reg_lock);
230 
231 	/* Enable DMI L1 entry if there are no capture streams open */
232 	if (!IS_ENABLED(CONFIG_SND_SOC_SOF_HDA_ALWAYS_ENABLE_DMI_L1))
233 		if (!active_capture_stream)
234 			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
235 						HDA_VS_INTEL_EM2,
236 						HDA_VS_INTEL_EM2_L1SEN,
237 						HDA_VS_INTEL_EM2_L1SEN);
238 
239 	if (!found) {
240 		dev_dbg(sdev->dev, "stream_tag %d not opened!\n", stream_tag);
241 		return -ENODEV;
242 	}
243 
244 	return 0;
245 }
246 
247 int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
248 			   struct hdac_ext_stream *stream, int cmd)
249 {
250 	struct hdac_stream *hstream = &stream->hstream;
251 	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
252 	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
253 	int ret;
254 	u32 run;
255 
256 	/* cmd must be for audio stream */
257 	switch (cmd) {
258 	case SNDRV_PCM_TRIGGER_RESUME:
259 	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
260 	case SNDRV_PCM_TRIGGER_START:
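		/* enable stream interrupt, set RUN and wait for DMA start */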
261 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
262 					1 << hstream->index,
263 					1 << hstream->index);
264 
265 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
266 					sd_offset,
267 					SOF_HDA_SD_CTL_DMA_START |
268 					SOF_HDA_CL_DMA_SD_INT_MASK,
269 					SOF_HDA_SD_CTL_DMA_START |
270 					SOF_HDA_CL_DMA_SD_INT_MASK);
271 
272 		ret = snd_sof_dsp_read_poll_timeout(sdev,
273 					HDA_DSP_HDA_BAR,
274 					sd_offset, run,
275 					((run & dma_start) == dma_start),
276 					HDA_DSP_REG_POLL_INTERVAL_US,
277 					HDA_DSP_STREAM_RUN_TIMEOUT);
278 
279 		if (ret < 0) {
280 			dev_err(sdev->dev,
281 				"error: %s: cmd %d: timeout on STREAM_SD_OFFSET read\n",
282 				__func__, cmd);
283 			return ret;
284 		}
285 
286 		hstream->running = true;
287 		break;
288 	case SNDRV_PCM_TRIGGER_SUSPEND:
289 	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
290 	case SNDRV_PCM_TRIGGER_STOP:
291 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
292 					sd_offset,
293 					SOF_HDA_SD_CTL_DMA_START |
294 					SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);
295 
296 		ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
297 						sd_offset, run,
298 						!(run & dma_start),
299 						HDA_DSP_REG_POLL_INTERVAL_US,
300 						HDA_DSP_STREAM_RUN_TIMEOUT);
301 
302 		if (ret < 0) {
303 			dev_err(sdev->dev,
304 				"error: %s: cmd %d: timeout on STREAM_SD_OFFSET read\n",
305 				__func__, cmd);
306 			return ret;
307 		}
308 
309 		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, sd_offset +
310 				  SOF_HDA_ADSP_REG_CL_SD_STS,
311 				  SOF_HDA_CL_DMA_SD_INT_MASK);
312 
313 		hstream->running = false;
314 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
315 					1 << hstream->index, 0x0);
316 		break;
317 	default:
318 		dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
319 		return -EINVAL;
320 	}
321 
322 	return 0;
323 }
324 
325 /*
326  * prepare the common hdac register settings, for both the code loader
327  * and normal streams.
328  */
329 int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
330 			     struct hdac_ext_stream *stream,
331 			     struct snd_dma_buffer *dmab,
332 			     struct snd_pcm_hw_params *params)
333 {
334 	struct hdac_bus *bus = sof_to_bus(sdev);
335 	struct hdac_stream *hstream = &stream->hstream;
336 	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
337 	int ret, timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
338 	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
339 	u32 val, mask;
340 	u32 run;
341 
342 	if (!stream) {
343 		dev_err(sdev->dev, "error: no stream available\n");
344 		return -ENODEV;
345 	}
346 
347 	/* decouple host and link DMA */
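	/* setting PPCTL.PROCEN for this stream selects decoupled mode */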
348 	mask = 0x1 << hstream->index;
349 	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
350 				mask, mask);
351 
352 	if (!dmab) {
353 		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
354 		return -ENODEV;
355 	}
356 
357 	/* clear stream status */
358 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
359 				SOF_HDA_CL_DMA_SD_INT_MASK |
360 				SOF_HDA_SD_CTL_DMA_START, 0);
361 
362 	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
363 					    sd_offset, run,
364 					    !(run & dma_start),
365 					    HDA_DSP_REG_POLL_INTERVAL_US,
366 					    HDA_DSP_STREAM_RUN_TIMEOUT);
367 
368 	if (ret < 0) {
369 		dev_err(sdev->dev,
370 			"error: %s: timeout on STREAM_SD_OFFSET read1\n",
371 			__func__);
372 		return ret;
373 	}
374 
375 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
376 				sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
377 				SOF_HDA_CL_DMA_SD_INT_MASK,
378 				SOF_HDA_CL_DMA_SD_INT_MASK);
379 
380 	/* stream reset */
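	/* bit 0 of SD_CTL is the stream reset (SRST) bit */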
381 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 0x1,
382 				0x1);
383 	udelay(3);
384 	do {
385 		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
386 				       sd_offset);
387 		if (val & 0x1)
388 			break;
389 	} while (--timeout);
390 	if (timeout == 0) {
391 		dev_err(sdev->dev, "error: stream reset failed\n");
392 		return -ETIMEDOUT;
393 	}
394 
395 	timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
396 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 0x1,
397 				0x0);
398 
399 	/* wait for hardware to report that stream is out of reset */
400 	udelay(3);
401 	do {
402 		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
403 				       sd_offset);
404 		if ((val & 0x1) == 0)
405 			break;
406 	} while (--timeout);
407 	if (timeout == 0) {
408 		dev_err(sdev->dev, "error: timeout waiting for stream reset\n");
409 		return -ETIMEDOUT;
410 	}
411 
412 	if (hstream->posbuf)
413 		*hstream->posbuf = 0;
414 
415 	/* reset BDL address */
416 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
417 			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
418 			  0x0);
419 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
420 			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
421 			  0x0);
422 
423 	/* clear stream status */
424 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
425 				SOF_HDA_CL_DMA_SD_INT_MASK |
426 				SOF_HDA_SD_CTL_DMA_START, 0);
427 
428 	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
429 					    sd_offset, run,
430 					    !(run & dma_start),
431 					    HDA_DSP_REG_POLL_INTERVAL_US,
432 					    HDA_DSP_STREAM_RUN_TIMEOUT);
433 
434 	if (ret < 0) {
435 		dev_err(sdev->dev,
436 			"error: %s: timeout on STREAM_SD_OFFSET read2\n",
437 			__func__);
438 		return ret;
439 	}
440 
441 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
442 				sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
443 				SOF_HDA_CL_DMA_SD_INT_MASK,
444 				SOF_HDA_CL_DMA_SD_INT_MASK);
445 
446 	hstream->frags = 0;
447 
448 	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
449 	if (ret < 0) {
450 		dev_err(sdev->dev, "error: set up of BDL failed\n");
451 		return ret;
452 	}
453 
454 	/* program stream tag to set up stream descriptor for DMA */
455 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
456 				SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
457 				hstream->stream_tag <<
458 				SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);
459 
460 	/* program cyclic buffer length */
461 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
462 			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_CBL,
463 			  hstream->bufsize);
464 
465 	/*
466 	 * Recommended hardware programming sequence for HDAudio DMA format
467 	 *
468 	 * 1. Put the DMA into coupled mode by clearing the PPCTL.PROCEN bit
469 	 *    for the corresponding stream index before writing the format
470 	 *    to the SDxFMT register.
471 	 * 2. Write SDxFMT.
472 	 * 3. Set the PPCTL.PROCEN bit for the corresponding stream index to
473 	 *    enable decoupled mode.
474 	 */
475 
476 	/* couple host and link DMA, disable DSP features */
477 	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
478 				mask, 0);
479 
480 	/* program stream format */
481 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
482 				sd_offset +
483 				SOF_HDA_ADSP_REG_CL_SD_FORMAT,
484 				0xffff, hstream->format_val);
485 
486 	/* decouple host and link DMA, enable DSP features */
487 	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
488 				mask, mask);
489 
490 	/* program last valid index */
491 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
492 				sd_offset + SOF_HDA_ADSP_REG_CL_SD_LVI,
493 				0xffff, (hstream->frags - 1));
494 
495 	/* program BDL address */
496 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
497 			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
498 			  (u32)hstream->bdl.addr);
499 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
500 			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
501 			  upper_32_bits(hstream->bdl.addr));
502 
503 	/* enable position buffer */
504 	if (!(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
505 				& SOF_HDA_ADSP_DPLBASE_ENABLE)) {
506 		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
507 				  upper_32_bits(bus->posbuf.addr));
508 		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
509 				  (u32)bus->posbuf.addr |
510 				  SOF_HDA_ADSP_DPLBASE_ENABLE);
511 	}
512 
513 	/* set interrupt enable bits */
514 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
515 				SOF_HDA_CL_DMA_SD_INT_MASK,
516 				SOF_HDA_CL_DMA_SD_INT_MASK);
517 
518 	/* read FIFO size */
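	/* SDxFIFOS seems to hold the FIFO size minus one, hence the +1 */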
519 	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
520 		hstream->fifo_size =
521 			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
522 					 sd_offset +
523 					 SOF_HDA_ADSP_REG_CL_SD_FIFOSIZE);
524 		hstream->fifo_size &= 0xffff;
525 		hstream->fifo_size += 1;
526 	} else {
527 		hstream->fifo_size = 0;
528 	}
529 
530 	return ret;
531 }
532 
533 int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
534 			   struct snd_pcm_substream *substream)
535 {
536 	struct hdac_stream *stream = substream->runtime->private_data;
537 	struct hdac_ext_stream *link_dev = container_of(stream,
538 							struct hdac_ext_stream,
539 							hstream);
540 	struct hdac_bus *bus = sof_to_bus(sdev);
541 	u32 mask = 0x1 << stream->index;
542 
543 	spin_lock_irq(&bus->reg_lock);
544 	/* couple host and link DMA if link DMA channel is idle */
545 	if (!link_dev->link_locked)
546 		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
547 					SOF_HDA_REG_PP_PPCTL, mask, 0);
548 	spin_unlock_irq(&bus->reg_lock);
549 
550 	stream->substream = NULL;
551 
552 	return 0;
553 }
554 
555 bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev)
556 {
557 	struct hdac_bus *bus = sof_to_bus(sdev);
558 	bool ret = false;
559 	u32 status;
560 
561 	/* The function can be called from the irq thread, so use spin_lock_irq */
562 	spin_lock_irq(&bus->reg_lock);
563 
564 	status = snd_hdac_chip_readl(bus, INTSTS);
565 	dev_vdbg(bus->dev, "stream irq, INTSTS status: 0x%x\n", status);
566 
567 	/* if the register is inaccessible, ignore it */
568 	if (status != 0xffffffff)
569 		ret = true;
570 
571 	spin_unlock_irq(&bus->reg_lock);
572 
573 	return ret;
574 }
575 
576 static void
577 hda_dsp_set_bytes_transferred(struct hdac_stream *hstream, u64 buffer_size)
578 {
579 	u64 prev_pos, pos, num_bytes;
580 
581 	div64_u64_rem(hstream->curr_pos, buffer_size, &prev_pos);
582 	pos = snd_hdac_stream_get_pos_posbuf(hstream);
583 
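	/*
	 * the DMA position wraps around at buffer_size, so account for the
	 * wrap when the new position is behind the previous one
	 */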
584 	if (pos < prev_pos)
585 		num_bytes = (buffer_size - prev_pos) + pos;
586 	else
587 		num_bytes = pos - prev_pos;
588 
589 	hstream->curr_pos += num_bytes;
590 }
591 
592 static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
593 {
594 	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
595 	struct hdac_stream *s;
596 	bool active = false;
597 	u32 sd_status;
598 
599 	list_for_each_entry(s, &bus->stream_list, list) {
600 		if (status & BIT(s->index) && s->opened) {
601 			sd_status = snd_hdac_stream_readb(s, SD_STS);
602 
603 			dev_vdbg(bus->dev, "stream %d status 0x%x\n",
604 				 s->index, sd_status);
605 
606 			snd_hdac_stream_writeb(s, SD_STS, sd_status);
607 
608 			active = true;
609 			if ((!s->substream && !s->cstream) ||
610 			    !s->running ||
611 			    (sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
612 				continue;
613 
614 			/* Inform ALSA only if the position is not reported via IPC */
615 			if (s->substream && sof_hda->no_ipc_position) {
616 				snd_sof_pcm_period_elapsed(s->substream);
617 			} else if (s->cstream) {
618 				hda_dsp_set_bytes_transferred(s,
619 					s->cstream->runtime->buffer_size);
620 				snd_compr_fragment_elapsed(s->cstream);
621 			}
622 		}
623 	}
624 
625 	return active;
626 }
627 
628 irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
629 {
630 	struct snd_sof_dev *sdev = context;
631 	struct hdac_bus *bus = sof_to_bus(sdev);
632 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
633 	u32 rirb_status;
634 #endif
635 	bool active;
636 	u32 status;
637 	int i;
638 
639 	/*
640 	 * Loop 10 times to handle missed interrupts caused by
641 	 * unsolicited responses from the codec
642 	 */
643 	for (i = 0, active = true; i < 10 && active; i++) {
644 		spin_lock_irq(&bus->reg_lock);
645 
646 		status = snd_hdac_chip_readl(bus, INTSTS);
647 
648 		/* check streams */
649 		active = hda_dsp_stream_check(bus, status);
650 
651 		/* check and clear RIRB interrupt */
652 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
653 		if (status & AZX_INT_CTRL_EN) {
654 			rirb_status = snd_hdac_chip_readb(bus, RIRBSTS);
655 			if (rirb_status & RIRB_INT_MASK) {
656 				/*
657 				 * Clearing the interrupt status here ensures
658 				 * that no interrupt gets masked after the RIRB
659 				 * wp is read in snd_hdac_bus_update_rirb.
660 				 */
661 				snd_hdac_chip_writeb(bus, RIRBSTS,
662 						     RIRB_INT_MASK);
663 				active = true;
664 				if (rirb_status & RIRB_INT_RESPONSE)
665 					snd_hdac_bus_update_rirb(bus);
666 			}
667 		}
668 #endif
669 		spin_unlock_irq(&bus->reg_lock);
670 	}
671 
672 	return IRQ_HANDLED;
673 }
674 
675 int hda_dsp_stream_init(struct snd_sof_dev *sdev)
676 {
677 	struct hdac_bus *bus = sof_to_bus(sdev);
678 	struct hdac_ext_stream *stream;
679 	struct hdac_stream *hstream;
680 	struct pci_dev *pci = to_pci_dev(sdev->dev);
681 	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
682 	int sd_offset;
683 	int i, num_playback, num_capture, num_total, ret;
684 	u32 gcap;
685 
686 	gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
687 	dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);
688 
689 	/* get stream count from GCAP */
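	/* bits 11:8 = capture (ISS), bits 15:12 = playback (OSS) counts */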
690 	num_capture = (gcap >> 8) & 0x0f;
691 	num_playback = (gcap >> 12) & 0x0f;
692 	num_total = num_playback + num_capture;
693 
694 	dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
695 		num_playback, num_capture);
696 
697 	if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
698 		dev_err(sdev->dev, "error: too many playback streams %d\n",
699 			num_playback);
700 		return -EINVAL;
701 	}
702 
703 	if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
704 		dev_err(sdev->dev, "error: too many capture streams %d\n",
705 			num_capture);
706 		return -EINVAL;
707 	}
708 
709 	/*
710 	 * mem alloc for the position buffer
711 	 * TODO: check position buffer update
712 	 */
713 	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
714 				  SOF_HDA_DPIB_ENTRY_SIZE * num_total,
715 				  &bus->posbuf);
716 	if (ret < 0) {
717 		dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
718 		return -ENOMEM;
719 	}
720 
721 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
722 	/* mem alloc for the CORB/RIRB ringbuffers */
723 	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
724 				  PAGE_SIZE, &bus->rb);
725 	if (ret < 0) {
726 		dev_err(sdev->dev, "error: RB alloc failed\n");
727 		return -ENOMEM;
728 	}
729 #endif
730 
731 	/* create capture streams */
732 	for (i = 0; i < num_capture; i++) {
733 		struct sof_intel_hda_stream *hda_stream;
734 
735 		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
736 					  GFP_KERNEL);
737 		if (!hda_stream)
738 			return -ENOMEM;
739 
740 		hda_stream->sdev = sdev;
741 
742 		stream = &hda_stream->hda_stream;
743 
744 		stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
745 			SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;
746 
747 		stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
748 			SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
749 			SOF_HDA_PPLC_INTERVAL * i;
750 
751 		/* do we support SPIB */
752 		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
753 			stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
754 				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
755 				SOF_HDA_SPIB_SPIB;
756 
757 			stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
758 				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
759 				SOF_HDA_SPIB_MAXFIFO;
760 		}
761 
762 		hstream = &stream->hstream;
763 		hstream->bus = bus;
764 		hstream->sd_int_sta_mask = 1 << i;
765 		hstream->index = i;
766 		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
767 		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
768 		hstream->stream_tag = i + 1;
769 		hstream->opened = false;
770 		hstream->running = false;
771 		hstream->direction = SNDRV_PCM_STREAM_CAPTURE;
772 
773 		/* memory alloc for stream BDL */
774 		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
775 					  HDA_DSP_BDL_SIZE, &hstream->bdl);
776 		if (ret < 0) {
777 			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
778 			return -ENOMEM;
779 		}
780 		hstream->posbuf = (__le32 *)(bus->posbuf.area +
781 			(hstream->index) * 8);
782 
783 		list_add_tail(&hstream->list, &bus->stream_list);
784 	}
785 
786 	/* create playback streams */
787 	for (i = num_capture; i < num_total; i++) {
788 		struct sof_intel_hda_stream *hda_stream;
789 
790 		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
791 					  GFP_KERNEL);
792 		if (!hda_stream)
793 			return -ENOMEM;
794 
795 		hda_stream->sdev = sdev;
796 
797 		stream = &hda_stream->hda_stream;
798 
799 		/* we always have DSP support */
800 		stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
801 			SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;
802 
803 		stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
804 			SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
805 			SOF_HDA_PPLC_INTERVAL * i;
806 
807 		/* do we support SPIB */
808 		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
809 			stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
810 				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
811 				SOF_HDA_SPIB_SPIB;
812 
813 			stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
814 				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
815 				SOF_HDA_SPIB_MAXFIFO;
816 		}
817 
818 		hstream = &stream->hstream;
819 		hstream->bus = bus;
820 		hstream->sd_int_sta_mask = 1 << i;
821 		hstream->index = i;
822 		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
823 		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
824 		hstream->stream_tag = i - num_capture + 1;
825 		hstream->opened = false;
826 		hstream->running = false;
827 		hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;
828 
829 		/* mem alloc for stream BDL */
830 		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
831 					  HDA_DSP_BDL_SIZE, &hstream->bdl);
832 		if (ret < 0) {
833 			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
834 			return -ENOMEM;
835 		}
836 
837 		hstream->posbuf = (__le32 *)(bus->posbuf.area +
838 			(hstream->index) * 8);
839 
840 		list_add_tail(&hstream->list, &bus->stream_list);
841 	}
842 
843 	/* store total stream count (playback + capture) from GCAP */
844 	sof_hda->stream_max = num_total;
845 
846 	return 0;
847 }
848 
849 void hda_dsp_stream_free(struct snd_sof_dev *sdev)
850 {
851 	struct hdac_bus *bus = sof_to_bus(sdev);
852 	struct hdac_stream *s, *_s;
853 	struct hdac_ext_stream *stream;
854 	struct sof_intel_hda_stream *hda_stream;
855 
856 	/* free position buffer */
857 	if (bus->posbuf.area)
858 		snd_dma_free_pages(&bus->posbuf);
859 
860 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
861 	/* free CORB/RIRB ringbuffer */
862 	if (bus->rb.area)
863 		snd_dma_free_pages(&bus->rb);
864 #endif
865 
866 	list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
867 		/* TODO: decouple */
868 
869 		/* free bdl buffer */
870 		if (s->bdl.area)
871 			snd_dma_free_pages(&s->bdl);
872 		list_del(&s->list);
873 		stream = stream_to_hdac_ext_stream(s);
874 		hda_stream = container_of(stream, struct sof_intel_hda_stream,
875 					  hda_stream);
876 		devm_kfree(sdev->dev, hda_stream);
877 	}
878 }
879