xref: /openbmc/linux/sound/soc/amd/acp-pcm-dma.c (revision cfbb9be8)
1 /*
2  * AMD ALSA SoC PCM Driver for ACP 2.x
3  *
4  * Copyright 2014-2015 Advanced Micro Devices, Inc.
5  *
6  * This program is free software; you can redistribute it and/or modify it
7  * under the terms and conditions of the GNU General Public License,
8  * version 2, as published by the Free Software Foundation.
9  *
10  * This program is distributed in the hope it will be useful, but WITHOUT
11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
13  * more details.
14  */
15 
16 #include <linux/module.h>
17 #include <linux/delay.h>
18 #include <linux/io.h>
19 #include <linux/sizes.h>
20 #include <linux/pm_runtime.h>
21 
22 #include <sound/soc.h>
23 #include <drm/amd_asic_type.h>
24 #include "acp.h"
25 
26 #define PLAYBACK_MIN_NUM_PERIODS    2
27 #define PLAYBACK_MAX_NUM_PERIODS    2
28 #define PLAYBACK_MAX_PERIOD_SIZE    16384
29 #define PLAYBACK_MIN_PERIOD_SIZE    1024
30 #define CAPTURE_MIN_NUM_PERIODS     2
31 #define CAPTURE_MAX_NUM_PERIODS     2
32 #define CAPTURE_MAX_PERIOD_SIZE     16384
33 #define CAPTURE_MIN_PERIOD_SIZE     1024
34 
35 #define MAX_BUFFER (PLAYBACK_MAX_PERIOD_SIZE * PLAYBACK_MAX_NUM_PERIODS)
36 #define MIN_BUFFER MAX_BUFFER
37 
38 #define ST_PLAYBACK_MAX_PERIOD_SIZE 8192
39 #define ST_CAPTURE_MAX_PERIOD_SIZE  ST_PLAYBACK_MAX_PERIOD_SIZE
40 #define ST_MAX_BUFFER (ST_PLAYBACK_MAX_PERIOD_SIZE * PLAYBACK_MAX_NUM_PERIODS)
41 #define ST_MIN_BUFFER ST_MAX_BUFFER
42 
43 #define DRV_NAME "acp_audio_dma"
44 
45 static const struct snd_pcm_hardware acp_pcm_hardware_playback = {
46 	.info = SNDRV_PCM_INFO_INTERLEAVED |
47 		SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
48 		SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
49 		SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
50 	.formats = SNDRV_PCM_FMTBIT_S16_LE |
51 		SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
52 	.channels_min = 1,
53 	.channels_max = 8,
54 	.rates = SNDRV_PCM_RATE_8000_96000,
55 	.rate_min = 8000,
56 	.rate_max = 96000,
57 	.buffer_bytes_max = PLAYBACK_MAX_NUM_PERIODS * PLAYBACK_MAX_PERIOD_SIZE,
58 	.period_bytes_min = PLAYBACK_MIN_PERIOD_SIZE,
59 	.period_bytes_max = PLAYBACK_MAX_PERIOD_SIZE,
60 	.periods_min = PLAYBACK_MIN_NUM_PERIODS,
61 	.periods_max = PLAYBACK_MAX_NUM_PERIODS,
62 };
63 
64 static const struct snd_pcm_hardware acp_pcm_hardware_capture = {
65 	.info = SNDRV_PCM_INFO_INTERLEAVED |
66 		SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
67 		SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
68 		SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
69 	.formats = SNDRV_PCM_FMTBIT_S16_LE |
70 		SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
71 	.channels_min = 1,
72 	.channels_max = 2,
73 	.rates = SNDRV_PCM_RATE_8000_48000,
74 	.rate_min = 8000,
75 	.rate_max = 48000,
76 	.buffer_bytes_max = CAPTURE_MAX_NUM_PERIODS * CAPTURE_MAX_PERIOD_SIZE,
77 	.period_bytes_min = CAPTURE_MIN_PERIOD_SIZE,
78 	.period_bytes_max = CAPTURE_MAX_PERIOD_SIZE,
79 	.periods_min = CAPTURE_MIN_NUM_PERIODS,
80 	.periods_max = CAPTURE_MAX_NUM_PERIODS,
81 };
82 
83 static const struct snd_pcm_hardware acp_st_pcm_hardware_playback = {
84 	.info = SNDRV_PCM_INFO_INTERLEAVED |
85 		SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
86 		SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
87 		SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
88 	.formats = SNDRV_PCM_FMTBIT_S16_LE |
89 		SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
90 	.channels_min = 1,
91 	.channels_max = 8,
92 	.rates = SNDRV_PCM_RATE_8000_96000,
93 	.rate_min = 8000,
94 	.rate_max = 96000,
95 	.buffer_bytes_max = ST_MAX_BUFFER,
96 	.period_bytes_min = PLAYBACK_MIN_PERIOD_SIZE,
97 	.period_bytes_max = ST_PLAYBACK_MAX_PERIOD_SIZE,
98 	.periods_min = PLAYBACK_MIN_NUM_PERIODS,
99 	.periods_max = PLAYBACK_MAX_NUM_PERIODS,
100 };
101 
102 static const struct snd_pcm_hardware acp_st_pcm_hardware_capture = {
103 	.info = SNDRV_PCM_INFO_INTERLEAVED |
104 		SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
105 		SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
106 		SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
107 	.formats = SNDRV_PCM_FMTBIT_S16_LE |
108 		SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
109 	.channels_min = 1,
110 	.channels_max = 2,
111 	.rates = SNDRV_PCM_RATE_8000_48000,
112 	.rate_min = 8000,
113 	.rate_max = 48000,
114 	.buffer_bytes_max = ST_MAX_BUFFER,
115 	.period_bytes_min = CAPTURE_MIN_PERIOD_SIZE,
116 	.period_bytes_max = ST_CAPTURE_MAX_PERIOD_SIZE,
117 	.periods_min = CAPTURE_MIN_NUM_PERIODS,
118 	.periods_max = CAPTURE_MAX_NUM_PERIODS,
119 };
120 
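/* ACP registers are addressed by a 32-bit register index rather than a byte
 * offset; the helpers below convert the index to an MMIO byte offset by
 * multiplying it by 4.
 */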
121 static u32 acp_reg_read(void __iomem *acp_mmio, u32 reg)
122 {
123 	return readl(acp_mmio + (reg * 4));
124 }
125 
126 static void acp_reg_write(u32 val, void __iomem *acp_mmio, u32 reg)
127 {
128 	writel(val, acp_mmio + (reg * 4));
129 }
130 
131 /* Configure the parameters of a given DMA channel: enable/disable,
132  * number of descriptors, priority.
133  */
134 static void config_acp_dma_channel(void __iomem *acp_mmio, u8 ch_num,
135 				   u16 dscr_strt_idx, u16 num_dscrs,
136 				   enum acp_dma_priority_level priority_level)
137 {
138 	u32 dma_ctrl;
139 
140 	/* disable the channel run field */
141 	dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
142 	dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRun_MASK;
143 	acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
144 
145 	/* program a DMA channel with the first descriptor to be processed. */
146 	acp_reg_write((ACP_DMA_DSCR_STRT_IDX_0__DMAChDscrStrtIdx_MASK
147 			& dscr_strt_idx),
148 			acp_mmio, mmACP_DMA_DSCR_STRT_IDX_0 + ch_num);
149 
150 	/* program the DMA channel with the number of descriptors to be
151 	 * processed in the transfer
152 	 */
153 	acp_reg_write(ACP_DMA_DSCR_CNT_0__DMAChDscrCnt_MASK & num_dscrs,
154 		acp_mmio, mmACP_DMA_DSCR_CNT_0 + ch_num);
155 
156 	/* set DMA channel priority */
157 	acp_reg_write(priority_level, acp_mmio, mmACP_DMA_PRIO_0 + ch_num);
158 }
159 
160 /* Initialize a DMA descriptor in SRAM based on the descriptor information passed */
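/* Each descriptor occupies one acp_dma_dscr_transfer_t sized slot in ACP
 * SRAM. The slot's source, destination and transfer-value words are written
 * at byte offsets 0, 4 and 8 within the slot, using the indirect SRBM
 * target index address/data register pair.
 */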
161 static void config_dma_descriptor_in_sram(void __iomem *acp_mmio,
162 					  u16 descr_idx,
163 					  acp_dma_dscr_transfer_t *descr_info)
164 {
165 	u32 sram_offset;
166 
167 	sram_offset = (descr_idx * sizeof(acp_dma_dscr_transfer_t));
168 
169 	/* program the source base address. */
170 	acp_reg_write(sram_offset, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
171 	acp_reg_write(descr_info->src,	acp_mmio, mmACP_SRBM_Targ_Idx_Data);
172 	/* program the destination base address. */
173 	acp_reg_write(sram_offset + 4,	acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
174 	acp_reg_write(descr_info->dest, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
175 
176 	/* program the number of bytes to be transferred for this descriptor. */
177 	acp_reg_write(sram_offset + 8,	acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
178 	acp_reg_write(descr_info->xfer_val, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
179 }
180 
181 /* Initialize the DMA descriptor information for transfer between
182  * system memory <-> ACP SRAM
183  */
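/* The audio buffer is split into two halves, with one descriptor per half
 * (NUM_DSCRS_PER_CHANNEL). For playback, data is fetched from system memory
 * through the ACP internal aperture window at the stream's pte_offset into
 * SRAM bank 1; for capture, data is drained from SRAM bank 3 (Stoney) or
 * bank 5 (other ASICs) back to system memory through the same aperture.
 */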
184 static void set_acp_sysmem_dma_descriptors(void __iomem *acp_mmio,
185 					u32 size, int direction,
186 					u32 pte_offset, u32 asic_type)
187 {
188 	u16 i;
189 	u16 dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH12;
190 	acp_dma_dscr_transfer_t dmadscr[NUM_DSCRS_PER_CHANNEL];
191 
192 	for (i = 0; i < NUM_DSCRS_PER_CHANNEL; i++) {
193 		dmadscr[i].xfer_val = 0;
194 		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
195 			dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH12 + i;
196 			dmadscr[i].dest = ACP_SHARED_RAM_BANK_1_ADDRESS
197 					+ (i * (size/2));
198 			dmadscr[i].src = ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS
199 				+ (pte_offset * SZ_4K) + (i * (size/2));
200 			switch (asic_type) {
201 			case CHIP_STONEY:
202 				dmadscr[i].xfer_val |=
203 				(ACP_DMA_ATTRIBUTES_DAGB_GARLIC_TO_SHAREDMEM  << 16) |
204 				(size / 2);
205 				break;
206 			default:
207 				dmadscr[i].xfer_val |=
208 				(ACP_DMA_ATTRIBUTES_DAGB_ONION_TO_SHAREDMEM  << 16) |
209 				(size / 2);
210 			}
211 		} else {
212 			dma_dscr_idx = CAPTURE_START_DMA_DESCR_CH14 + i;
213 			switch (asic_type) {
214 			case CHIP_STONEY:
215 				dmadscr[i].src = ACP_SHARED_RAM_BANK_3_ADDRESS +
216 				(i * (size/2));
217 				dmadscr[i].dest =
218 				ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS +
219 				(pte_offset * SZ_4K) + (i * (size/2));
220 				dmadscr[i].xfer_val |=
221 				BIT(22) |
222 				(ACP_DMA_ATTRIBUTES_SHARED_MEM_TO_DAGB_GARLIC << 16) |
223 				(size / 2);
224 				break;
225 			default:
226 				dmadscr[i].src = ACP_SHARED_RAM_BANK_5_ADDRESS +
227 				(i * (size/2));
228 				dmadscr[i].dest =
229 				ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS +
230 				(pte_offset * SZ_4K) + (i * (size/2));
231 				dmadscr[i].xfer_val |=
232 				BIT(22) |
233 				(ACP_DMA_ATTRIBUTES_SHAREDMEM_TO_DAGB_ONION << 16) |
234 				(size / 2);
235 			}
236 		}
237 		config_dma_descriptor_in_sram(acp_mmio, dma_dscr_idx,
238 						&dmadscr[i]);
239 	}
240 	if (direction == SNDRV_PCM_STREAM_PLAYBACK)
241 		config_acp_dma_channel(acp_mmio, SYSRAM_TO_ACP_CH_NUM,
242 					PLAYBACK_START_DMA_DESCR_CH12,
243 					NUM_DSCRS_PER_CHANNEL,
244 					ACP_DMA_PRIORITY_LEVEL_NORMAL);
245 	else
246 		config_acp_dma_channel(acp_mmio, ACP_TO_SYSRAM_CH_NUM,
247 					CAPTURE_START_DMA_DESCR_CH14,
248 					NUM_DSCRS_PER_CHANNEL,
249 					ACP_DMA_PRIORITY_LEVEL_NORMAL);
250 }
251 
252 /* Initialize the DMA descriptor information for transfer between
253  * ACP SRAM <-> I2S
254  */
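/* Only the SRAM side of each descriptor carries a real address here; the
 * I2S end of the transfer is selected by the TO_ACP_I2S_1 / FROM_ACP_I2S_1
 * attribute written into the upper half of xfer_val.
 */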
255 static void set_acp_to_i2s_dma_descriptors(void __iomem *acp_mmio,
256 					u32 size, int direction,
257 					u32 asic_type)
258 {
259 
260 	u16 i;
261 	u16 dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH13;
262 	acp_dma_dscr_transfer_t dmadscr[NUM_DSCRS_PER_CHANNEL];
263 
264 	for (i = 0; i < NUM_DSCRS_PER_CHANNEL; i++) {
265 		dmadscr[i].xfer_val = 0;
266 		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
267 			dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH13 + i;
268 			dmadscr[i].src = ACP_SHARED_RAM_BANK_1_ADDRESS +
269 					 (i * (size/2));
270 			/* dmadscr[i].dest is unused by hardware. */
271 			dmadscr[i].dest = 0;
272 			dmadscr[i].xfer_val |= BIT(22) | (TO_ACP_I2S_1 << 16) |
273 						(size / 2);
274 		} else {
275 			dma_dscr_idx = CAPTURE_START_DMA_DESCR_CH15 + i;
276 			/* dmadscr[i].src is unused by hardware. */
277 			dmadscr[i].src = 0;
278 			switch (asic_type) {
279 			case CHIP_STONEY:
280 				dmadscr[i].dest =
281 					 ACP_SHARED_RAM_BANK_3_ADDRESS +
282 					(i * (size / 2));
283 				break;
284 			default:
285 				dmadscr[i].dest =
286 					 ACP_SHARED_RAM_BANK_5_ADDRESS +
287 					(i * (size / 2));
288 			}
289 			dmadscr[i].xfer_val |= BIT(22) |
290 					(FROM_ACP_I2S_1 << 16) | (size / 2);
291 		}
292 		config_dma_descriptor_in_sram(acp_mmio, dma_dscr_idx,
293 						&dmadscr[i]);
294 	}
295 	/* Configure the DMA channel with the above descriptors */
296 	if (direction == SNDRV_PCM_STREAM_PLAYBACK)
297 		config_acp_dma_channel(acp_mmio, ACP_TO_I2S_DMA_CH_NUM,
298 					PLAYBACK_START_DMA_DESCR_CH13,
299 					NUM_DSCRS_PER_CHANNEL,
300 					ACP_DMA_PRIORITY_LEVEL_NORMAL);
301 	else
302 		config_acp_dma_channel(acp_mmio, I2S_TO_ACP_DMA_CH_NUM,
303 					CAPTURE_START_DMA_DESCR_CH15,
304 					NUM_DSCRS_PER_CHANNEL,
305 					ACP_DMA_PRIORITY_LEVEL_NORMAL);
306 }
307 
308 /* Create page table entries in ACP SRAM for the allocated memory */
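/* Each page table entry is 8 bytes wide: the low 32 bits of the page's
 * physical address, followed by the high bits with bit 31 set as the
 * page-enable flag. Entries are written through the SRBM indirect
 * address/data registers, starting at the group's pte_offset.
 */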
309 static void acp_pte_config(void __iomem *acp_mmio, struct page *pg,
310 			   u16 num_of_pages, u32 pte_offset)
311 {
312 	u16 page_idx;
313 	u64 addr;
314 	u32 low;
315 	u32 high;
316 	u32 offset;
317 
318 	offset	= ACP_DAGB_GRP_SRBM_SRAM_BASE_OFFSET + (pte_offset * 8);
319 	for (page_idx = 0; page_idx < (num_of_pages); page_idx++) {
320 		/* Load the low address of the page into ACP SRAM through SRBM */
321 		acp_reg_write((offset + (page_idx * 8)),
322 			acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
323 		addr = page_to_phys(pg);
324 
325 		low = lower_32_bits(addr);
326 		high = upper_32_bits(addr);
327 
328 		acp_reg_write(low, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
329 
330 		/* Load the high address of the page into ACP SRAM through SRBM */
331 		acp_reg_write((offset + (page_idx * 8) + 4),
332 			acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
333 
334 		/* set the page-enable bit in the ACP page table entry */
335 		high |= BIT(31);
336 		acp_reg_write(high, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
337 
338 		/* Move to the next physically contiguous page */
339 		pg++;
340 	}
341 }
342 
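/* Program the ACP page table entries for the audio buffer and set up both
 * DMA descriptor chains (system memory <-> ACP SRAM and ACP SRAM <-> I2S)
 * for the given stream.
 */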
343 static void config_acp_dma(void __iomem *acp_mmio,
344 			struct audio_substream_data *audio_config,
345 			u32 asic_type)
346 {
347 	u32 pte_offset;
348 
349 	if (audio_config->direction == SNDRV_PCM_STREAM_PLAYBACK)
350 		pte_offset = ACP_PLAYBACK_PTE_OFFSET;
351 	else
352 		pte_offset = ACP_CAPTURE_PTE_OFFSET;
353 
354 	acp_pte_config(acp_mmio, audio_config->pg, audio_config->num_of_pages,
355 			pte_offset);
356 
357 	/* Configure System memory <-> ACP SRAM DMA descriptors */
358 	set_acp_sysmem_dma_descriptors(acp_mmio, audio_config->size,
359 				audio_config->direction, pte_offset, asic_type);
360 
361 	/* Configure ACP SRAM <-> I2S DMA descriptors */
362 	set_acp_to_i2s_dma_descriptors(acp_mmio, audio_config->size,
363 				audio_config->direction, asic_type);
364 }
365 
366 /* Start a given DMA channel transfer */
367 static void acp_dma_start(void __iomem *acp_mmio,
368 			 u16 ch_num, bool is_circular)
369 {
370 	u32 dma_ctrl;
371 
372 	/* read the current DMA control register value */
373 	dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
374 
375 	/* Invalidate the DAGB cache */
376 	acp_reg_write(1, acp_mmio, mmACP_DAGB_ATU_CTRL);
377 
378 	/* Configure the DMA channel and start the transfer: set the
379 	 * DMAChRun bit, and enable the interrupt on completion for the
380 	 * channels serviced by the interrupt handler.
381 	 */
382 	dma_ctrl |= ACP_DMA_CNTL_0__DMAChRun_MASK;
383 
384 	switch (ch_num) {
385 	case ACP_TO_I2S_DMA_CH_NUM:
386 	case ACP_TO_SYSRAM_CH_NUM:
387 	case I2S_TO_ACP_DMA_CH_NUM:
388 		dma_ctrl |= ACP_DMA_CNTL_0__DMAChIOCEn_MASK;
389 		break;
390 	default:
391 		dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChIOCEn_MASK;
392 		break;
393 	}
394 
395 	/* enable circular DMA for the ACP SRAM to/from I2S DMA channels */
396 	if (is_circular)
397 		dma_ctrl |= ACP_DMA_CNTL_0__Circular_DMA_En_MASK;
398 	else
399 		dma_ctrl &= ~ACP_DMA_CNTL_0__Circular_DMA_En_MASK;
400 
401 	acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
402 }
403 
404 /* Stop a given DMA channel transfer */
405 static int acp_dma_stop(void __iomem *acp_mmio, u8 ch_num)
406 {
407 	u32 dma_ctrl;
408 	u32 dma_ch_sts;
409 	u32 count = ACP_DMA_RESET_TIME;
410 
411 	dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
412 
413 	/* clear the dma control register fields before writing zero
414 	 * to the reset bit
415 	 */
416 	dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRun_MASK;
417 	dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChIOCEn_MASK;
418 
419 	acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
420 	dma_ch_sts = acp_reg_read(acp_mmio, mmACP_DMA_CH_STS);
421 
422 	if (dma_ch_sts & BIT(ch_num)) {
423 		/* set the reset bit for this channel to stop the dma
424 		 * transfer
425 		 */
426 		dma_ctrl |= ACP_DMA_CNTL_0__DMAChRst_MASK;
427 		acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
428 	}
429 
430 	/* check the channel status bit for some time and return the status */
431 	while (true) {
432 		dma_ch_sts = acp_reg_read(acp_mmio, mmACP_DMA_CH_STS);
433 		if (!(dma_ch_sts & BIT(ch_num))) {
434 			/* clear the reset flag after successfully stopping
435 			 * the dma transfer and break from the loop
436 			 */
437 			dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRst_MASK;
438 
439 			acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0
440 								+ ch_num);
441 			break;
442 		}
443 		if (--count == 0) {
444 			pr_err("Failed to stop ACP DMA channel : %d\n", ch_num);
445 			return -ETIMEDOUT;
446 		}
447 		udelay(100);
448 	}
449 	return 0;
450 }
451 
452 static void acp_set_sram_bank_state(void __iomem *acp_mmio, u16 bank,
453 					bool power_on)
454 {
455 	u32 val, req_reg, sts_reg, sts_reg_mask;
456 	u32 loops = 1000;
457 
458 	if (bank < 32) {
459 		req_reg = mmACP_MEM_SHUT_DOWN_REQ_LO;
460 		sts_reg = mmACP_MEM_SHUT_DOWN_STS_LO;
461 		sts_reg_mask = 0xFFFFFFFF;
462 
463 	} else {
464 		bank -= 32;
465 		req_reg = mmACP_MEM_SHUT_DOWN_REQ_HI;
466 		sts_reg = mmACP_MEM_SHUT_DOWN_STS_HI;
467 		sts_reg_mask = 0x0000FFFF;
468 	}
469 
470 	val = acp_reg_read(acp_mmio, req_reg);
471 	if (val & (1 << bank)) {
472 		/* bank is in the off state */
473 		if (power_on)
474 			/* request to turn it on */
475 			val &= ~(1 << bank);
476 		else
477 			/* already off, nothing to do */
478 			return;
479 	} else {
480 		/* bank is in the on state */
481 		if (!power_on)
482 			/* request to turn it off */
483 			val |= 1 << bank;
484 		else
485 			/* already on, nothing to do */
486 			return;
487 	}
488 	acp_reg_write(val, acp_mmio, req_reg);
489 
490 	while (acp_reg_read(acp_mmio, sts_reg) != sts_reg_mask) {
491 		if (!loops--) {
492 			pr_err("ACP SRAM bank %d state change failed\n", bank);
493 			break;
494 		}
495 		cpu_relax();
496 	}
497 }
498 
499 /* Initialize and bring ACP hardware to default state. */
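/* The bring-up sequence is: assert the audio soft reset and wait for it to
 * complete, enable the ACP clock, deassert soft reset, program the
 * Onion/Garlic DAGB defaults and the DAGB group 1 page table base, set the
 * DMA descriptor base and count, unmask the DMA IOC interrupt, and finally
 * power down the unused SRAM banks (except on Stoney).
 */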
500 static int acp_init(void __iomem *acp_mmio, u32 asic_type)
501 {
502 	u16 bank;
503 	u32 val, count, sram_pte_offset;
504 
505 	/* Assert Soft reset of ACP */
506 	val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
507 
508 	val |= ACP_SOFT_RESET__SoftResetAud_MASK;
509 	acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);
510 
511 	count = ACP_SOFT_RESET_DONE_TIME_OUT_VALUE;
512 	while (true) {
513 		val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
514 		if (ACP_SOFT_RESET__SoftResetAudDone_MASK ==
515 		    (val & ACP_SOFT_RESET__SoftResetAudDone_MASK))
516 			break;
517 		if (--count == 0) {
518 			pr_err("Failed to reset ACP\n");
519 			return -ETIMEDOUT;
520 		}
521 		udelay(100);
522 	}
523 
524 	/* Enable clock to ACP and wait until the clock is enabled */
525 	val = acp_reg_read(acp_mmio, mmACP_CONTROL);
526 	val = val | ACP_CONTROL__ClkEn_MASK;
527 	acp_reg_write(val, acp_mmio, mmACP_CONTROL);
528 
529 	count = ACP_CLOCK_EN_TIME_OUT_VALUE;
530 
531 	while (true) {
532 		val = acp_reg_read(acp_mmio, mmACP_STATUS);
533 		if (val & (u32) 0x1)
534 			break;
535 		if (--count == 0) {
536 			pr_err("Failed to enable ACP clock\n");
537 			return -ETIMEDOUT;
538 		}
539 		udelay(100);
540 	}
541 
542 	/* Deassert the SOFT RESET flags */
543 	val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
544 	val &= ~ACP_SOFT_RESET__SoftResetAud_MASK;
545 	acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);
546 
547 	/* initialize the Onion control DAGB register */
548 	acp_reg_write(ACP_ONION_CNTL_DEFAULT, acp_mmio,
549 			mmACP_AXI2DAGB_ONION_CNTL);
550 
551 	/* initialize the Garlic control DAGB register */
552 	acp_reg_write(ACP_GARLIC_CNTL_DEFAULT, acp_mmio,
553 			mmACP_AXI2DAGB_GARLIC_CNTL);
554 
555 	sram_pte_offset = ACP_DAGB_GRP_SRAM_BASE_ADDRESS |
556 			ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBSnoopSel_MASK |
557 			ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBTargetMemSel_MASK |
558 			ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBGrpEnable_MASK;
559 	acp_reg_write(sram_pte_offset,  acp_mmio, mmACP_DAGB_BASE_ADDR_GRP_1);
560 	acp_reg_write(ACP_PAGE_SIZE_4K_ENABLE, acp_mmio,
561 			mmACP_DAGB_PAGE_SIZE_GRP_1);
562 
563 	acp_reg_write(ACP_SRAM_BASE_ADDRESS, acp_mmio,
564 			mmACP_DMA_DESC_BASE_ADDR);
565 
566 	/* Number of descriptors in SRAM: 0x4 means 256 (64 * 4) descriptors */
567 	acp_reg_write(0x4, acp_mmio, mmACP_DMA_DESC_MAX_NUM_DSCR);
568 	acp_reg_write(ACP_EXTERNAL_INTR_CNTL__DMAIOCMask_MASK,
569 		acp_mmio, mmACP_EXTERNAL_INTR_CNTL);
570 
571 	/* When ACP_TILE_P1 is turned on, all SRAM banks get turned on.
572 	 * Now, turn all of them off. This can't be done in the 'poweron'
573 	 * callback of the ACP PM domain, as it requires the ACP to be
574 	 * initialized. For Stoney, memory gating is disabled, i.e. SRAM
575 	 * banks won't be turned off. The default state for SRAM banks is
576 	 * ON, so the SRAM bank state setting is skipped for Stoney.
577 	 */
578 	if (asic_type != CHIP_STONEY) {
579 		for (bank = 1; bank < 48; bank++)
580 			acp_set_sram_bank_state(acp_mmio, bank, false);
581 	}
582 	return 0;
583 }
584 
585 /* Deinitialize ACP */
586 static int acp_deinit(void __iomem *acp_mmio)
587 {
588 	u32 val;
589 	u32 count;
590 
591 	/* Assert Soft reset of ACP */
592 	val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
593 
594 	val |= ACP_SOFT_RESET__SoftResetAud_MASK;
595 	acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);
596 
597 	count = ACP_SOFT_RESET_DONE_TIME_OUT_VALUE;
598 	while (true) {
599 		val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
600 		if (ACP_SOFT_RESET__SoftResetAudDone_MASK ==
601 		    (val & ACP_SOFT_RESET__SoftResetAudDone_MASK))
602 			break;
603 		if (--count == 0) {
604 			pr_err("Failed to reset ACP\n");
605 			return -ETIMEDOUT;
606 		}
607 		udelay(100);
608 	}
609 	/* Disable ACP clock */
610 	val = acp_reg_read(acp_mmio, mmACP_CONTROL);
611 	val &= ~ACP_CONTROL__ClkEn_MASK;
612 	acp_reg_write(val, acp_mmio, mmACP_CONTROL);
613 
614 	count = ACP_CLOCK_EN_TIME_OUT_VALUE;
615 
616 	while (true) {
617 		val = acp_reg_read(acp_mmio, mmACP_STATUS);
618 		if (!(val & (u32) 0x1))
619 			break;
620 		if (--count == 0) {
621 			pr_err("Failed to disable ACP clock\n");
622 			return -ETIMEDOUT;
623 		}
624 		udelay(100);
625 	}
626 	return 0;
627 }
628 
629 /* ACP DMA IRQ handler routine for playback and capture use cases */
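/* Ping-pong refill scheme: when the SRAM<->I2S channel raises its IOC
 * interrupt for one half of the buffer, the handler re-queues the single
 * system memory<->SRAM descriptor covering the other half and restarts that
 * (non-circular) channel. Period-elapsed is reported on the ACP-to-I2S
 * interrupt for playback and on the ACP-to-system-memory completion for
 * capture.
 */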
630 static irqreturn_t dma_irq_handler(int irq, void *arg)
631 {
632 	u16 dscr_idx;
633 	u32 intr_flag, ext_intr_status;
634 	struct audio_drv_data *irq_data;
635 	void __iomem *acp_mmio;
636 	struct device *dev = arg;
637 	bool valid_irq = false;
638 
639 	irq_data = dev_get_drvdata(dev);
640 	acp_mmio = irq_data->acp_mmio;
641 
642 	ext_intr_status = acp_reg_read(acp_mmio, mmACP_EXTERNAL_INTR_STAT);
643 	intr_flag = (((ext_intr_status &
644 		      ACP_EXTERNAL_INTR_STAT__DMAIOCStat_MASK) >>
645 		     ACP_EXTERNAL_INTR_STAT__DMAIOCStat__SHIFT));
646 
647 	if ((intr_flag & BIT(ACP_TO_I2S_DMA_CH_NUM)) != 0) {
648 		valid_irq = true;
649 		if (acp_reg_read(acp_mmio, mmACP_DMA_CUR_DSCR_13) ==
650 				PLAYBACK_START_DMA_DESCR_CH13)
651 			dscr_idx = PLAYBACK_END_DMA_DESCR_CH12;
652 		else
653 			dscr_idx = PLAYBACK_START_DMA_DESCR_CH12;
654 		config_acp_dma_channel(acp_mmio, SYSRAM_TO_ACP_CH_NUM, dscr_idx,
655 				       1, 0);
656 		acp_dma_start(acp_mmio, SYSRAM_TO_ACP_CH_NUM, false);
657 
658 		snd_pcm_period_elapsed(irq_data->play_stream);
659 
660 		acp_reg_write((intr_flag & BIT(ACP_TO_I2S_DMA_CH_NUM)) << 16,
661 				acp_mmio, mmACP_EXTERNAL_INTR_STAT);
662 	}
663 
664 	if ((intr_flag & BIT(I2S_TO_ACP_DMA_CH_NUM)) != 0) {
665 		valid_irq = true;
666 		if (acp_reg_read(acp_mmio, mmACP_DMA_CUR_DSCR_15) ==
667 				CAPTURE_START_DMA_DESCR_CH15)
668 			dscr_idx = CAPTURE_END_DMA_DESCR_CH14;
669 		else
670 			dscr_idx = CAPTURE_START_DMA_DESCR_CH14;
671 		config_acp_dma_channel(acp_mmio, ACP_TO_SYSRAM_CH_NUM, dscr_idx,
672 				       1, 0);
673 		acp_dma_start(acp_mmio, ACP_TO_SYSRAM_CH_NUM, false);
674 
675 		acp_reg_write((intr_flag & BIT(I2S_TO_ACP_DMA_CH_NUM)) << 16,
676 				acp_mmio, mmACP_EXTERNAL_INTR_STAT);
677 	}
678 
679 	if ((intr_flag & BIT(ACP_TO_SYSRAM_CH_NUM)) != 0) {
680 		valid_irq = true;
681 		snd_pcm_period_elapsed(irq_data->capture_stream);
682 		acp_reg_write((intr_flag & BIT(ACP_TO_SYSRAM_CH_NUM)) << 16,
683 				acp_mmio, mmACP_EXTERNAL_INTR_STAT);
684 	}
685 
686 	if (valid_irq)
687 		return IRQ_HANDLED;
688 	else
689 		return IRQ_NONE;
690 }
691 
692 static int acp_dma_open(struct snd_pcm_substream *substream)
693 {
694 	u16 bank;
695 	int ret = 0;
696 	struct snd_pcm_runtime *runtime = substream->runtime;
697 	struct snd_soc_pcm_runtime *prtd = substream->private_data;
698 	struct audio_drv_data *intr_data = dev_get_drvdata(prtd->platform->dev);
699 
700 	struct audio_substream_data *adata =
701 		kzalloc(sizeof(struct audio_substream_data), GFP_KERNEL);
702 	if (adata == NULL)
703 		return -ENOMEM;
704 
705 	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
706 		switch (intr_data->asic_type) {
707 		case CHIP_STONEY:
708 			runtime->hw = acp_st_pcm_hardware_playback;
709 			break;
710 		default:
711 			runtime->hw = acp_pcm_hardware_playback;
712 		}
713 	} else {
714 		switch (intr_data->asic_type) {
715 		case CHIP_STONEY:
716 			runtime->hw = acp_st_pcm_hardware_capture;
717 			break;
718 		default:
719 			runtime->hw = acp_pcm_hardware_capture;
720 		}
721 	}
722 
723 	ret = snd_pcm_hw_constraint_integer(runtime,
724 					    SNDRV_PCM_HW_PARAM_PERIODS);
725 	if (ret < 0) {
726 		dev_err(prtd->platform->dev, "set integer constraint failed\n");
727 		kfree(adata);
728 		return ret;
729 	}
730 
731 	adata->acp_mmio = intr_data->acp_mmio;
732 	runtime->private_data = adata;
733 
734 	/* Enable the ACP interrupt when neither a playback nor a capture
735 	 * stream is active at the time a new stream is opened. This is
736 	 * not required again for a second stream while the current
737 	 * stream remains open.
738 	 */
739 	if (!intr_data->play_stream && !intr_data->capture_stream)
740 		acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
741 
742 	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
743 		intr_data->play_stream = substream;
744 		/* For Stoney, memory gating is disabled, i.e. SRAM banks
745 		 * won't be turned off. The default state for SRAM banks is
746 		 * ON, so the SRAM bank state setting is skipped for Stoney.
747 		 */
748 		if (intr_data->asic_type != CHIP_STONEY) {
749 			for (bank = 1; bank <= 4; bank++)
750 				acp_set_sram_bank_state(intr_data->acp_mmio,
751 							bank, true);
752 		}
753 	} else {
754 		intr_data->capture_stream = substream;
755 		if (intr_data->asic_type != CHIP_STONEY) {
756 			for (bank = 5; bank <= 8; bank++)
757 				acp_set_sram_bank_state(intr_data->acp_mmio,
758 							bank, true);
759 		}
760 	}
761 
762 	return 0;
763 }
764 
765 static int acp_dma_hw_params(struct snd_pcm_substream *substream,
766 			     struct snd_pcm_hw_params *params)
767 {
768 	int status;
769 	uint64_t size;
770 	u32 val = 0;
771 	struct page *pg;
772 	struct snd_pcm_runtime *runtime;
773 	struct audio_substream_data *rtd;
774 	struct snd_soc_pcm_runtime *prtd = substream->private_data;
775 	struct audio_drv_data *adata = dev_get_drvdata(prtd->platform->dev);
776 
777 	runtime = substream->runtime;
778 	rtd = runtime->private_data;
779 
780 	if (WARN_ON(!rtd))
781 		return -EINVAL;
782 
783 	if (adata->asic_type == CHIP_STONEY) {
784 		val = acp_reg_read(adata->acp_mmio, mmACP_I2S_16BIT_RESOLUTION_EN);
785 		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
786 			val |= ACP_I2S_SP_16BIT_RESOLUTION_EN;
787 		else
788 			val |= ACP_I2S_MIC_16BIT_RESOLUTION_EN;
789 		acp_reg_write(val, adata->acp_mmio, mmACP_I2S_16BIT_RESOLUTION_EN);
790 	}
791 	size = params_buffer_bytes(params);
792 	status = snd_pcm_lib_malloc_pages(substream, size);
793 	if (status < 0)
794 		return status;
795 
796 	memset(substream->runtime->dma_area, 0, params_buffer_bytes(params));
797 	pg = virt_to_page(substream->dma_buffer.area);
798 
799 	if (pg != NULL) {
800 		acp_set_sram_bank_state(rtd->acp_mmio, 0, true);
801 		/* Save the buffer info in the runtime private data */
802 		rtd->pg = pg;
803 		rtd->order = get_order(size);
804 		rtd->size = size;
805 		rtd->num_of_pages = PAGE_ALIGN(size) >> PAGE_SHIFT;
806 		rtd->direction = substream->stream;
807 
808 		/* Fill the page table entries in ACP SRAM and set up the
809 		 * DMA descriptors */
810 
811 		config_acp_dma(rtd->acp_mmio, rtd, adata->asic_type);
812 		status = 0;
813 	} else {
814 		status = -ENOMEM;
815 	}
816 	return status;
817 }
818 
819 static int acp_dma_hw_free(struct snd_pcm_substream *substream)
820 {
821 	return snd_pcm_lib_free_pages(substream);
822 }
823 
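/* Read the 64-bit I2S transmitted/received byte counter for the given
 * stream direction as a high/low register pair.
 */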
824 static u64 acp_get_byte_count(void __iomem *acp_mmio, int stream)
825 {
826 	union acp_dma_count playback_dma_count;
827 	union acp_dma_count capture_dma_count;
828 	u64 bytescount = 0;
829 
830 	if (stream == SNDRV_PCM_STREAM_PLAYBACK) {
831 		playback_dma_count.bcount.high = acp_reg_read(acp_mmio,
832 					mmACP_I2S_TRANSMIT_BYTE_CNT_HIGH);
833 		playback_dma_count.bcount.low  = acp_reg_read(acp_mmio,
834 					mmACP_I2S_TRANSMIT_BYTE_CNT_LOW);
835 		bytescount = playback_dma_count.bytescount;
836 	} else {
837 		capture_dma_count.bcount.high = acp_reg_read(acp_mmio,
838 					mmACP_I2S_RECEIVED_BYTE_CNT_HIGH);
839 		capture_dma_count.bcount.low  = acp_reg_read(acp_mmio,
840 					mmACP_I2S_RECEIVED_BYTE_CNT_LOW);
841 		bytescount = capture_dma_count.bytescount;
842 	}
843 	return bytescount;
844 }
845 
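/* The current position is derived from the number of bytes the I2S block
 * has transferred since the stream was triggered (the baseline is latched
 * in the trigger callback), taken modulo the ring buffer size.
 */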
846 static snd_pcm_uframes_t acp_dma_pointer(struct snd_pcm_substream *substream)
847 {
848 	u32 buffersize;
849 	u32 pos = 0;
850 	u64 bytescount = 0;
851 
852 	struct snd_pcm_runtime *runtime = substream->runtime;
853 	struct audio_substream_data *rtd = runtime->private_data;
854 
855 	if (!rtd)
856 		return -EINVAL;
857 
858 	buffersize = frames_to_bytes(runtime, runtime->buffer_size);
859 	bytescount = acp_get_byte_count(rtd->acp_mmio, substream->stream);
860 
861 	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
862 		if (bytescount > rtd->renderbytescount)
863 			bytescount = bytescount - rtd->renderbytescount;
864 	} else {
865 		if (bytescount > rtd->capturebytescount)
866 			bytescount = bytescount - rtd->capturebytescount;
867 	}
868 	pos = do_div(bytescount, buffersize);
869 	return bytes_to_frames(runtime, pos);
870 }
871 
872 static int acp_dma_mmap(struct snd_pcm_substream *substream,
873 			struct vm_area_struct *vma)
874 {
875 	return snd_pcm_lib_default_mmap(substream, vma);
876 }
877 
878 static int acp_dma_prepare(struct snd_pcm_substream *substream)
879 {
880 	struct snd_pcm_runtime *runtime = substream->runtime;
881 	struct audio_substream_data *rtd = runtime->private_data;
882 
883 	if (!rtd)
884 		return -EINVAL;
885 	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
886 		config_acp_dma_channel(rtd->acp_mmio, SYSRAM_TO_ACP_CH_NUM,
887 					PLAYBACK_START_DMA_DESCR_CH12,
888 					NUM_DSCRS_PER_CHANNEL, 0);
889 		config_acp_dma_channel(rtd->acp_mmio, ACP_TO_I2S_DMA_CH_NUM,
890 					PLAYBACK_START_DMA_DESCR_CH13,
891 					NUM_DSCRS_PER_CHANNEL, 0);
892 	} else {
893 		config_acp_dma_channel(rtd->acp_mmio, ACP_TO_SYSRAM_CH_NUM,
894 					CAPTURE_START_DMA_DESCR_CH14,
895 					NUM_DSCRS_PER_CHANNEL, 0);
896 		config_acp_dma_channel(rtd->acp_mmio, I2S_TO_ACP_DMA_CH_NUM,
897 					CAPTURE_START_DMA_DESCR_CH15,
898 					NUM_DSCRS_PER_CHANNEL, 0);
899 	}
900 	return 0;
901 }
902 
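/* On START/RESUME, latch the current I2S byte count as the position
 * baseline and kick the DMA: for playback the system memory->SRAM channel
 * is pre-loaded first (waiting for it to complete) before the circular
 * SRAM->I2S channel is started; capture only starts the circular I2S->SRAM
 * channel. On STOP/SUSPEND only the circular channel is stopped.
 */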
903 static int acp_dma_trigger(struct snd_pcm_substream *substream, int cmd)
904 {
905 	int ret;
906 	u32 loops = 4000;
907 	u64 bytescount = 0;
908 
909 	struct snd_pcm_runtime *runtime = substream->runtime;
910 	struct snd_soc_pcm_runtime *prtd = substream->private_data;
911 	struct audio_substream_data *rtd = runtime->private_data;
912 
913 	if (!rtd)
914 		return -EINVAL;
915 	switch (cmd) {
916 	case SNDRV_PCM_TRIGGER_START:
917 	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
918 	case SNDRV_PCM_TRIGGER_RESUME:
919 		bytescount = acp_get_byte_count(rtd->acp_mmio,
920 						substream->stream);
921 		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
922 			if (rtd->renderbytescount == 0)
923 				rtd->renderbytescount = bytescount;
924 			acp_dma_start(rtd->acp_mmio,
925 						SYSRAM_TO_ACP_CH_NUM, false);
926 			while (acp_reg_read(rtd->acp_mmio, mmACP_DMA_CH_STS) &
927 						BIT(SYSRAM_TO_ACP_CH_NUM)) {
928 				if (!loops--) {
929 					dev_err(prtd->platform->dev,
930 						"acp dma start timeout\n");
931 					return -ETIMEDOUT;
932 				}
933 				cpu_relax();
934 			}
935 
936 			acp_dma_start(rtd->acp_mmio,
937 					ACP_TO_I2S_DMA_CH_NUM, true);
938 
939 		} else {
940 			if (rtd->capturebytescount == 0)
941 				rtd->capturebytescount = bytescount;
942 			acp_dma_start(rtd->acp_mmio,
943 					    I2S_TO_ACP_DMA_CH_NUM, true);
944 		}
945 		ret = 0;
946 		break;
947 	case SNDRV_PCM_TRIGGER_STOP:
948 	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
949 	case SNDRV_PCM_TRIGGER_SUSPEND:
950 		/* Only the circular DMA channels need to be stopped:
951 		 * ACP_TO_I2S_DMA_CH_NUM / I2S_TO_ACP_DMA_CH_NUM. Non-circular
952 		 * channels (SYSRAM_TO_ACP_CH_NUM / ACP_TO_SYSRAM_CH_NUM) stop
953 		 * automatically once their transfers complete.
954 		 */
955 		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
956 			ret = acp_dma_stop(rtd->acp_mmio,
957 					ACP_TO_I2S_DMA_CH_NUM);
958 			rtd->renderbytescount = 0;
959 		} else {
960 			ret = acp_dma_stop(rtd->acp_mmio,
961 					I2S_TO_ACP_DMA_CH_NUM);
962 			rtd->capturebytescount = 0;
963 		}
964 		break;
965 	default:
966 		ret = -EINVAL;
967 
968 	}
969 	return ret;
970 }
971 
972 static int acp_dma_new(struct snd_soc_pcm_runtime *rtd)
973 {
974 	int ret;
975 	struct audio_drv_data *adata = dev_get_drvdata(rtd->platform->dev);
976 
977 	switch (adata->asic_type) {
978 	case CHIP_STONEY:
979 		ret = snd_pcm_lib_preallocate_pages_for_all(rtd->pcm,
980 							SNDRV_DMA_TYPE_DEV,
981 							NULL, ST_MIN_BUFFER,
982 							ST_MAX_BUFFER);
983 		break;
984 	default:
985 		ret = snd_pcm_lib_preallocate_pages_for_all(rtd->pcm,
986 							SNDRV_DMA_TYPE_DEV,
987 							NULL, MIN_BUFFER,
988 							MAX_BUFFER);
989 		break;
990 	}
991 	if (ret < 0)
992 		dev_err(rtd->platform->dev,
993 				"buffer preallocation failure, error: %d\n", ret);
994 	return ret;
995 }
996 
997 static int acp_dma_close(struct snd_pcm_substream *substream)
998 {
999 	u16 bank;
1000 	struct snd_pcm_runtime *runtime = substream->runtime;
1001 	struct audio_substream_data *rtd = runtime->private_data;
1002 	struct snd_soc_pcm_runtime *prtd = substream->private_data;
1003 	struct audio_drv_data *adata = dev_get_drvdata(prtd->platform->dev);
1004 
1005 	kfree(rtd);
1006 
1007 	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
1008 		adata->play_stream = NULL;
1009 		/* For Stoney, memory gating is disabled, i.e. SRAM banks
1010 		 * won't be turned off. The default state for SRAM banks is
1011 		 * ON, so the SRAM bank state setting is skipped for Stoney;
1012 		 * the condition below restricts it to the Carrizo platform.
1013 		 */
1014 		if (adata->asic_type != CHIP_STONEY) {
1015 			for (bank = 1; bank <= 4; bank++)
1016 				acp_set_sram_bank_state(adata->acp_mmio, bank,
1017 				false);
1018 		}
1019 	} else  {
1020 		adata->capture_stream = NULL;
1021 		if (adata->asic_type != CHIP_STONEY) {
1022 			for (bank = 5; bank <= 8; bank++)
1023 				acp_set_sram_bank_state(adata->acp_mmio, bank,
1024 						     false);
1025 		}
1026 	}
1027 
1028 	/* Disable the ACP interrupt when the current stream is being
1029 	 * closed and no other stream is active.
1030 	 */
1031 	if (!adata->play_stream && !adata->capture_stream)
1032 		acp_reg_write(0, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
1033 
1034 	return 0;
1035 }
1036 
1037 static const struct snd_pcm_ops acp_dma_ops = {
1038 	.open = acp_dma_open,
1039 	.close = acp_dma_close,
1040 	.ioctl = snd_pcm_lib_ioctl,
1041 	.hw_params = acp_dma_hw_params,
1042 	.hw_free = acp_dma_hw_free,
1043 	.trigger = acp_dma_trigger,
1044 	.pointer = acp_dma_pointer,
1045 	.mmap = acp_dma_mmap,
1046 	.prepare = acp_dma_prepare,
1047 };
1048 
1049 static struct snd_soc_platform_driver acp_asoc_platform = {
1050 	.ops = &acp_dma_ops,
1051 	.pcm_new = acp_dma_new,
1052 };
1053 
1054 static int acp_audio_probe(struct platform_device *pdev)
1055 {
1056 	int status;
1057 	struct audio_drv_data *audio_drv_data;
1058 	struct resource *res;
1059 	const u32 *pdata = pdev->dev.platform_data;
1060 
1061 	if (!pdata) {
1062 		dev_err(&pdev->dev, "Missing platform data\n");
1063 		return -ENODEV;
1064 	}
1065 
1066 	audio_drv_data = devm_kzalloc(&pdev->dev, sizeof(struct audio_drv_data),
1067 					GFP_KERNEL);
1068 	if (audio_drv_data == NULL)
1069 		return -ENOMEM;
1070 
1071 	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1072 	audio_drv_data->acp_mmio = devm_ioremap_resource(&pdev->dev, res);
1073 	if (IS_ERR(audio_drv_data->acp_mmio))
1074 		return PTR_ERR(audio_drv_data->acp_mmio);
1075 
1076 	/* The following members get populated in the device 'open'
1077 	 * function. Until then, interrupts are disabled in 'acp_init'
1078 	 * and the device doesn't generate any interrupts.
1079 	 */
1080 
1081 	audio_drv_data->play_stream = NULL;
1082 	audio_drv_data->capture_stream = NULL;
1083 	audio_drv_data->asic_type = *pdata;
1084 
1085 	res = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
1086 	if (!res) {
1087 		dev_err(&pdev->dev, "IORESOURCE_IRQ FAILED\n");
1088 		return -ENODEV;
1089 	}
1090 
1091 	status = devm_request_irq(&pdev->dev, res->start, dma_irq_handler,
1092 					0, "ACP_IRQ", &pdev->dev);
1093 	if (status) {
1094 		dev_err(&pdev->dev, "ACP IRQ request failed\n");
1095 		return status;
1096 	}
1097 
1098 	dev_set_drvdata(&pdev->dev, audio_drv_data);
1099 
1100 	/* Initialize the ACP */
1101 	status = acp_init(audio_drv_data->acp_mmio, audio_drv_data->asic_type);
1102 	if (status) {
1103 		dev_err(&pdev->dev, "ACP Init failed status:%d\n", status);
1104 		return status;
1105 	}
1106 
1107 	status = snd_soc_register_platform(&pdev->dev, &acp_asoc_platform);
1108 	if (status != 0) {
1109 		dev_err(&pdev->dev, "Failed to register the ALSA platform device\n");
1110 		return status;
1111 	}
1112 
1113 	pm_runtime_set_autosuspend_delay(&pdev->dev, 10000);
1114 	pm_runtime_use_autosuspend(&pdev->dev);
1115 	pm_runtime_enable(&pdev->dev);
1116 
1117 	return status;
1118 }
1119 
1120 static int acp_audio_remove(struct platform_device *pdev)
1121 {
1122 	int status;
1123 	struct audio_drv_data *adata = dev_get_drvdata(&pdev->dev);
1124 
1125 	status = acp_deinit(adata->acp_mmio);
1126 	if (status)
1127 		dev_err(&pdev->dev, "ACP Deinit failed status:%d\n", status);
1128 	snd_soc_unregister_platform(&pdev->dev);
1129 	pm_runtime_disable(&pdev->dev);
1130 
1131 	return 0;
1132 }
1133 
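/* System resume: reinitialize the ACP, power the SRAM banks back up for any
 * stream that was open across suspend, reprogram its page table entries and
 * DMA descriptors, and re-enable the ACP interrupt.
 */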
1134 static int acp_pcm_resume(struct device *dev)
1135 {
1136 	u16 bank;
1137 	int status;
1138 	struct audio_drv_data *adata = dev_get_drvdata(dev);
1139 
1140 	status = acp_init(adata->acp_mmio, adata->asic_type);
1141 	if (status) {
1142 		dev_err(dev, "ACP Init failed status:%d\n", status);
1143 		return status;
1144 	}
1145 
1146 	if (adata->play_stream && adata->play_stream->runtime) {
1147 		/* For Stoney, memory gating is disabled, i.e. SRAM banks
1148 		 * won't be turned off. The default state for SRAM banks is
1149 		 * ON, so the SRAM bank state setting is skipped for Stoney.
1150 		 */
1151 		if (adata->asic_type != CHIP_STONEY) {
1152 			for (bank = 1; bank <= 4; bank++)
1153 				acp_set_sram_bank_state(adata->acp_mmio, bank,
1154 						true);
1155 		}
1156 		config_acp_dma(adata->acp_mmio,
1157 			adata->play_stream->runtime->private_data,
1158 			adata->asic_type);
1159 	}
1160 	if (adata->capture_stream && adata->capture_stream->runtime) {
1161 		if (adata->asic_type != CHIP_STONEY) {
1162 			for (bank = 5; bank <= 8; bank++)
1163 				acp_set_sram_bank_state(adata->acp_mmio, bank,
1164 						true);
1165 		}
1166 		config_acp_dma(adata->acp_mmio,
1167 			adata->capture_stream->runtime->private_data,
1168 			adata->asic_type);
1169 	}
1170 	acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
1171 	return 0;
1172 }
1173 
1174 static int acp_pcm_runtime_suspend(struct device *dev)
1175 {
1176 	int status;
1177 	struct audio_drv_data *adata = dev_get_drvdata(dev);
1178 
1179 	status = acp_deinit(adata->acp_mmio);
1180 	if (status)
1181 		dev_err(dev, "ACP Deinit failed status:%d\n", status);
1182 	acp_reg_write(0, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
1183 	return 0;
1184 }
1185 
1186 static int acp_pcm_runtime_resume(struct device *dev)
1187 {
1188 	int status;
1189 	struct audio_drv_data *adata = dev_get_drvdata(dev);
1190 
1191 	status = acp_init(adata->acp_mmio, adata->asic_type);
1192 	if (status) {
1193 		dev_err(dev, "ACP Init failed status:%d\n", status);
1194 		return status;
1195 	}
1196 	acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
1197 	return 0;
1198 }
1199 
1200 static const struct dev_pm_ops acp_pm_ops = {
1201 	.resume = acp_pcm_resume,
1202 	.runtime_suspend = acp_pcm_runtime_suspend,
1203 	.runtime_resume = acp_pcm_runtime_resume,
1204 };
1205 
1206 static struct platform_driver acp_dma_driver = {
1207 	.probe = acp_audio_probe,
1208 	.remove = acp_audio_remove,
1209 	.driver = {
1210 		.name = DRV_NAME,
1211 		.pm = &acp_pm_ops,
1212 	},
1213 };
1214 
1215 module_platform_driver(acp_dma_driver);
1216 
1217 MODULE_AUTHOR("Vijendar.Mukunda@amd.com");
1218 MODULE_AUTHOR("Maruthi.Bayyavarapu@amd.com");
1219 MODULE_DESCRIPTION("AMD ACP PCM Driver");
1220 MODULE_LICENSE("GPL v2");
1221 MODULE_ALIAS("platform:"DRV_NAME);
1222