/*
 * AMD ALSA SoC PCM Driver for ACP 2.x
 *
 * Copyright 2014-2015 Advanced Micro Devices, Inc.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 */

#include <linux/module.h>
#include <linux/delay.h>
#include <linux/io.h>
#include <linux/sizes.h>
#include <linux/pm_runtime.h>

#include <sound/soc.h>
#include <drm/amd_asic_type.h>
#include "acp.h"

#define PLAYBACK_MIN_NUM_PERIODS 2
#define PLAYBACK_MAX_NUM_PERIODS 2
#define PLAYBACK_MAX_PERIOD_SIZE 16384
#define PLAYBACK_MIN_PERIOD_SIZE 1024
#define CAPTURE_MIN_NUM_PERIODS 2
#define CAPTURE_MAX_NUM_PERIODS 2
#define CAPTURE_MAX_PERIOD_SIZE 16384
#define CAPTURE_MIN_PERIOD_SIZE 1024

#define MAX_BUFFER (PLAYBACK_MAX_PERIOD_SIZE * PLAYBACK_MAX_NUM_PERIODS)
#define MIN_BUFFER MAX_BUFFER

#define ST_PLAYBACK_MAX_PERIOD_SIZE 8192
#define ST_CAPTURE_MAX_PERIOD_SIZE ST_PLAYBACK_MAX_PERIOD_SIZE
#define ST_MAX_BUFFER (ST_PLAYBACK_MAX_PERIOD_SIZE * PLAYBACK_MAX_NUM_PERIODS)
#define ST_MIN_BUFFER ST_MAX_BUFFER

#define DRV_NAME "acp_audio_dma"

static const struct snd_pcm_hardware acp_pcm_hardware_playback = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
		SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
	.formats = SNDRV_PCM_FMTBIT_S16_LE |
		SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
	.channels_min = 1,
	.channels_max = 8,
	.rates = SNDRV_PCM_RATE_8000_96000,
	.rate_min = 8000,
	.rate_max = 96000,
	.buffer_bytes_max = PLAYBACK_MAX_NUM_PERIODS * PLAYBACK_MAX_PERIOD_SIZE,
	.period_bytes_min = PLAYBACK_MIN_PERIOD_SIZE,
	.period_bytes_max = PLAYBACK_MAX_PERIOD_SIZE,
	.periods_min = PLAYBACK_MIN_NUM_PERIODS,
	.periods_max = PLAYBACK_MAX_NUM_PERIODS,
};

static const struct snd_pcm_hardware acp_pcm_hardware_capture = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
		SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
	.formats = SNDRV_PCM_FMTBIT_S16_LE |
		SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
	.channels_min = 1,
	.channels_max = 2,
	.rates = SNDRV_PCM_RATE_8000_48000,
	.rate_min = 8000,
	.rate_max = 48000,
	.buffer_bytes_max = CAPTURE_MAX_NUM_PERIODS * CAPTURE_MAX_PERIOD_SIZE,
	.period_bytes_min = CAPTURE_MIN_PERIOD_SIZE,
	.period_bytes_max = CAPTURE_MAX_PERIOD_SIZE,
	.periods_min = CAPTURE_MIN_NUM_PERIODS,
	.periods_max = CAPTURE_MAX_NUM_PERIODS,
};

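/*
 * Stoney-specific constraint tables: identical to the generic tables
 * above except that the maximum period size is halved to
 * ST_PLAYBACK_MAX_PERIOD_SIZE (8 KiB), matching the smaller
 * ST_MAX_BUFFER preallocation used for CHIP_STONEY in acp_dma_new().
 * acp_dma_open() selects the table based on the ASIC type.
 */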
static const struct snd_pcm_hardware acp_st_pcm_hardware_playback = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
		SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
	.formats = SNDRV_PCM_FMTBIT_S16_LE |
		SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
	.channels_min = 1,
	.channels_max = 8,
	.rates = SNDRV_PCM_RATE_8000_96000,
	.rate_min = 8000,
	.rate_max = 96000,
	.buffer_bytes_max = ST_MAX_BUFFER,
	.period_bytes_min = PLAYBACK_MIN_PERIOD_SIZE,
	.period_bytes_max = ST_PLAYBACK_MAX_PERIOD_SIZE,
	.periods_min = PLAYBACK_MIN_NUM_PERIODS,
	.periods_max = PLAYBACK_MAX_NUM_PERIODS,
};

static const struct snd_pcm_hardware acp_st_pcm_hardware_capture = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
		SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
	.formats = SNDRV_PCM_FMTBIT_S16_LE |
		SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
	.channels_min = 1,
	.channels_max = 2,
	.rates = SNDRV_PCM_RATE_8000_48000,
	.rate_min = 8000,
	.rate_max = 48000,
	.buffer_bytes_max = ST_MAX_BUFFER,
	.period_bytes_min = CAPTURE_MIN_PERIOD_SIZE,
	.period_bytes_max = ST_CAPTURE_MAX_PERIOD_SIZE,
	.periods_min = CAPTURE_MIN_NUM_PERIODS,
	.periods_max = CAPTURE_MAX_NUM_PERIODS,
};

static u32 acp_reg_read(void __iomem *acp_mmio, u32 reg)
{
	return readl(acp_mmio + (reg * 4));
}

static void acp_reg_write(u32 val, void __iomem *acp_mmio, u32 reg)
{
	writel(val, acp_mmio + (reg * 4));
}

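/*
 * The mmACP_* register offsets from acp.h are treated here as 32-bit
 * register indices, so the helpers above scale by 4 to form the byte
 * offset into the MMIO aperture. Channel-indexed registers are
 * addressed the same way, e.g. (illustration only):
 *
 *	dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
 */
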
/* Configure the parameters of a given DMA channel: enable/disable,
 * number of descriptors, priority.
 */
static void config_acp_dma_channel(void __iomem *acp_mmio, u8 ch_num,
				   u16 dscr_strt_idx, u16 num_dscrs,
				   enum acp_dma_priority_level priority_level)
{
	u32 dma_ctrl;

	/* disable the channel run field */
	dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
	dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRun_MASK;
	acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);

	/* program a DMA channel with the first descriptor to be processed. */
	acp_reg_write((ACP_DMA_DSCR_STRT_IDX_0__DMAChDscrStrtIdx_MASK
			& dscr_strt_idx),
			acp_mmio, mmACP_DMA_DSCR_STRT_IDX_0 + ch_num);

	/* program a DMA channel with the number of descriptors to be
	 * processed in the transfer
	 */
	acp_reg_write(ACP_DMA_DSCR_CNT_0__DMAChDscrCnt_MASK & num_dscrs,
		      acp_mmio, mmACP_DMA_DSCR_CNT_0 + ch_num);

	/* set DMA channel priority */
	acp_reg_write(priority_level, acp_mmio, mmACP_DMA_PRIO_0 + ch_num);
}

/* Initialize a DMA descriptor in SRAM from the descriptor information passed */
static void config_dma_descriptor_in_sram(void __iomem *acp_mmio,
					  u16 descr_idx,
					  acp_dma_dscr_transfer_t *descr_info)
{
	u32 sram_offset;

	sram_offset = (descr_idx * sizeof(acp_dma_dscr_transfer_t));

	/* program the source base address. */
	acp_reg_write(sram_offset, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
	acp_reg_write(descr_info->src, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
	/* program the destination base address. */
	acp_reg_write(sram_offset + 4, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
	acp_reg_write(descr_info->dest, acp_mmio, mmACP_SRBM_Targ_Idx_Data);

	/* program the number of bytes to be transferred for this descriptor. */
	acp_reg_write(sram_offset + 8, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
	acp_reg_write(descr_info->xfer_val, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
}

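/*
 * ACP SRAM does not appear to be directly addressable from the host;
 * both the descriptor writes above and the page-table writes in
 * acp_pte_config() below go through the SRBM indirect pair: write the
 * SRAM byte offset to mmACP_SRBM_Targ_Idx_Addr, then the 32-bit payload
 * to mmACP_SRBM_Targ_Idx_Data, e.g.:
 *
 *	acp_reg_write(sram_offset, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
 *	acp_reg_write(value, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
 */
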
/* Initialize the DMA descriptor information for transfer between
 * system memory <-> ACP SRAM
 */
static void set_acp_sysmem_dma_descriptors(void __iomem *acp_mmio,
					   u32 size, int direction,
					   u32 pte_offset, u32 asic_type)
{
	u16 i;
	u16 dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH12;
	acp_dma_dscr_transfer_t dmadscr[NUM_DSCRS_PER_CHANNEL];

	for (i = 0; i < NUM_DSCRS_PER_CHANNEL; i++) {
		dmadscr[i].xfer_val = 0;
		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
			dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH12 + i;
			dmadscr[i].dest = ACP_SHARED_RAM_BANK_1_ADDRESS
					+ (i * (size / 2));
			dmadscr[i].src = ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS
					+ (pte_offset * SZ_4K) + (i * (size / 2));
			switch (asic_type) {
			case CHIP_STONEY:
				dmadscr[i].xfer_val |=
				(ACP_DMA_ATTRIBUTES_DAGB_GARLIC_TO_SHAREDMEM << 16) |
				(size / 2);
				break;
			default:
				dmadscr[i].xfer_val |=
				(ACP_DMA_ATTRIBUTES_DAGB_ONION_TO_SHAREDMEM << 16) |
				(size / 2);
			}
		} else {
			dma_dscr_idx = CAPTURE_START_DMA_DESCR_CH14 + i;
			switch (asic_type) {
			case CHIP_STONEY:
				dmadscr[i].src = ACP_SHARED_RAM_BANK_3_ADDRESS +
						(i * (size / 2));
				dmadscr[i].dest =
					ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS +
					(pte_offset * SZ_4K) + (i * (size / 2));
				dmadscr[i].xfer_val |=
				BIT(22) |
				(ACP_DMA_ATTRIBUTES_SHARED_MEM_TO_DAGB_GARLIC << 16) |
				(size / 2);
				break;
			default:
				dmadscr[i].src = ACP_SHARED_RAM_BANK_5_ADDRESS +
						(i * (size / 2));
				dmadscr[i].dest =
					ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS +
					(pte_offset * SZ_4K) + (i * (size / 2));
				dmadscr[i].xfer_val |=
				BIT(22) |
				(ACP_DMA_ATTRIBUTES_SHAREDMEM_TO_DAGB_ONION << 16) |
				(size / 2);
			}
		}
		config_dma_descriptor_in_sram(acp_mmio, dma_dscr_idx,
					      &dmadscr[i]);
	}
	if (direction == SNDRV_PCM_STREAM_PLAYBACK)
		config_acp_dma_channel(acp_mmio, SYSRAM_TO_ACP_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH12,
				       NUM_DSCRS_PER_CHANNEL,
				       ACP_DMA_PRIORITY_LEVEL_NORMAL);
	else
		config_acp_dma_channel(acp_mmio, ACP_TO_SYSRAM_CH_NUM,
				       CAPTURE_START_DMA_DESCR_CH14,
				       NUM_DSCRS_PER_CHANNEL,
				       ACP_DMA_PRIORITY_LEVEL_NORMAL);
}

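/*
 * Each stream is backed by NUM_DSCRS_PER_CHANNEL (two) descriptors per
 * DMA channel, one for each half of the ALSA ring buffer (size / 2).
 * The I2S-side channels cycle over both halves in circular mode, while
 * the system-memory staging channels are re-armed one half at a time
 * from dma_irq_handler() as the I2S side finishes a half.
 */
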
/* Initialize the DMA descriptor information for transfer between
 * ACP SRAM <-> I2S
 */
static void set_acp_to_i2s_dma_descriptors(void __iomem *acp_mmio,
					   u32 size, int direction,
					   u32 asic_type)
{
	u16 i;
	u16 dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH13;
	acp_dma_dscr_transfer_t dmadscr[NUM_DSCRS_PER_CHANNEL];

	for (i = 0; i < NUM_DSCRS_PER_CHANNEL; i++) {
		dmadscr[i].xfer_val = 0;
		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
			dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH13 + i;
			dmadscr[i].src = ACP_SHARED_RAM_BANK_1_ADDRESS +
					 (i * (size / 2));
			/* dmadscr[i].dest is unused by hardware. */
			dmadscr[i].dest = 0;
			dmadscr[i].xfer_val |= BIT(22) | (TO_ACP_I2S_1 << 16) |
					       (size / 2);
		} else {
			dma_dscr_idx = CAPTURE_START_DMA_DESCR_CH15 + i;
			/* dmadscr[i].src is unused by hardware. */
			dmadscr[i].src = 0;
			switch (asic_type) {
			case CHIP_STONEY:
				dmadscr[i].dest =
					ACP_SHARED_RAM_BANK_3_ADDRESS +
					(i * (size / 2));
				break;
			default:
				dmadscr[i].dest =
					ACP_SHARED_RAM_BANK_5_ADDRESS +
					(i * (size / 2));
			}
			dmadscr[i].xfer_val |= BIT(22) |
					       (FROM_ACP_I2S_1 << 16) | (size / 2);
		}
		config_dma_descriptor_in_sram(acp_mmio, dma_dscr_idx,
					      &dmadscr[i]);
	}
	/* Configure the DMA channel with the above descriptors */
	if (direction == SNDRV_PCM_STREAM_PLAYBACK)
		config_acp_dma_channel(acp_mmio, ACP_TO_I2S_DMA_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH13,
				       NUM_DSCRS_PER_CHANNEL,
				       ACP_DMA_PRIORITY_LEVEL_NORMAL);
	else
		config_acp_dma_channel(acp_mmio, I2S_TO_ACP_DMA_CH_NUM,
				       CAPTURE_START_DMA_DESCR_CH15,
				       NUM_DSCRS_PER_CHANNEL,
				       ACP_DMA_PRIORITY_LEVEL_NORMAL);
}

/* Create page table entries in ACP SRAM for the allocated memory */
static void acp_pte_config(void __iomem *acp_mmio, struct page *pg,
			   u16 num_of_pages, u32 pte_offset)
{
	u16 page_idx;
	u64 addr;
	u32 low;
	u32 high;
	u32 offset;

	offset = ACP_DAGB_GRP_SRBM_SRAM_BASE_OFFSET + (pte_offset * 8);
	for (page_idx = 0; page_idx < (num_of_pages); page_idx++) {
		/* Load the low address of the page into ACP SRAM through SRBM */
		acp_reg_write((offset + (page_idx * 8)),
			      acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
		addr = page_to_phys(pg);

		low = lower_32_bits(addr);
		high = upper_32_bits(addr);

		acp_reg_write(low, acp_mmio, mmACP_SRBM_Targ_Idx_Data);

		/* Load the high address of the page into ACP SRAM through SRBM */
		acp_reg_write((offset + (page_idx * 8) + 4),
			      acp_mmio, mmACP_SRBM_Targ_Idx_Addr);

		/* page enable in ACP */
		high |= BIT(31);
		acp_reg_write(high, acp_mmio, mmACP_SRBM_Targ_Idx_Data);

		/* Move to the next physically contiguous page */
		pg++;
	}
}

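/*
 * Each page-table entry written above occupies 8 bytes of ACP SRAM: the
 * low 32 bits of the page's physical address, then the high bits with
 * bit 31 doubling as the "page enable" flag.
 *
 * config_acp_dma() below wires up the whole pipeline for one substream:
 *
 *   playback: system memory --SYSRAM_TO_ACP_CH_NUM--> ACP SRAM
 *                           --ACP_TO_I2S_DMA_CH_NUM--> I2S
 *   capture:  I2S --I2S_TO_ACP_DMA_CH_NUM--> ACP SRAM
 *                 --ACP_TO_SYSRAM_CH_NUM--> system memory
 */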
static void config_acp_dma(void __iomem *acp_mmio,
			   struct audio_substream_data *audio_config,
			   u32 asic_type)
{
	u32 pte_offset;

	if (audio_config->direction == SNDRV_PCM_STREAM_PLAYBACK)
		pte_offset = ACP_PLAYBACK_PTE_OFFSET;
	else
		pte_offset = ACP_CAPTURE_PTE_OFFSET;

	acp_pte_config(acp_mmio, audio_config->pg, audio_config->num_of_pages,
		       pte_offset);

	/* Configure System memory <-> ACP SRAM DMA descriptors */
	set_acp_sysmem_dma_descriptors(acp_mmio, audio_config->size,
				       audio_config->direction, pte_offset,
				       asic_type);

	/* Configure ACP SRAM <-> I2S DMA descriptors */
	set_acp_to_i2s_dma_descriptors(acp_mmio, audio_config->size,
				       audio_config->direction, asic_type);
}

/* Start a given DMA channel transfer */
static void acp_dma_start(void __iomem *acp_mmio,
			  u16 ch_num, bool is_circular)
{
	u32 dma_ctrl;

	/* read the dma control register and disable the channel run field */
	dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);

	/* Invalidate the DAGB cache */
	acp_reg_write(1, acp_mmio, mmACP_DAGB_ATU_CTRL);

	/* configure the DMA channel and start the DMA transfer:
	 * set the DMAChRun bit to start the transfer and enable the
	 * interrupt on completion of the dma transfer
	 */
	dma_ctrl |= ACP_DMA_CNTL_0__DMAChRun_MASK;

	switch (ch_num) {
	case ACP_TO_I2S_DMA_CH_NUM:
	case ACP_TO_SYSRAM_CH_NUM:
	case I2S_TO_ACP_DMA_CH_NUM:
		dma_ctrl |= ACP_DMA_CNTL_0__DMAChIOCEn_MASK;
		break;
	default:
		dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChIOCEn_MASK;
		break;
	}

	/* enable circular mode for the ACP SRAM to/from I2S DMA channels */
	if (is_circular)
		dma_ctrl |= ACP_DMA_CNTL_0__Circular_DMA_En_MASK;
	else
		dma_ctrl &= ~ACP_DMA_CNTL_0__Circular_DMA_En_MASK;

	acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
}

/* Stop a given DMA channel transfer */
static int acp_dma_stop(void __iomem *acp_mmio, u8 ch_num)
{
	u32 dma_ctrl;
	u32 dma_ch_sts;
	u32 count = ACP_DMA_RESET_TIME;

	dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);

	/* clear the dma control register fields before writing zero
	 * in reset bit
	 */
	dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRun_MASK;
	dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChIOCEn_MASK;

	acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
	dma_ch_sts = acp_reg_read(acp_mmio, mmACP_DMA_CH_STS);

	if (dma_ch_sts & BIT(ch_num)) {
		/* set the reset bit for this channel to stop the dma
		 * transfer
		 */
		dma_ctrl |= ACP_DMA_CNTL_0__DMAChRst_MASK;
		acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
	}

	/* check the channel status bit for some time and return the status */
	while (true) {
		dma_ch_sts = acp_reg_read(acp_mmio, mmACP_DMA_CH_STS);
		if (!(dma_ch_sts & BIT(ch_num))) {
			/* clear the reset flag after successfully stopping
			 * the dma transfer and break from the loop
			 */
			dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRst_MASK;

			acp_reg_write(dma_ctrl, acp_mmio,
				      mmACP_DMA_CNTL_0 + ch_num);
			break;
		}
		if (--count == 0) {
			pr_err("Failed to stop ACP DMA channel : %d\n", ch_num);
			return -ETIMEDOUT;
		}
		udelay(100);
	}
	return 0;
}

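/*
 * SRAM bank power management: acp_set_sram_bank_state() below requests
 * a single bank on or off via the MEM_SHUT_DOWN_REQ_LO/HI registers and
 * waits for the matching status register to settle. In this driver,
 * bank 0 is powered up in acp_dma_hw_params() before the page table is
 * written, banks 1-4 back playback data and banks 5-8 back capture data
 * (powered up in acp_dma_open() and down again in acp_dma_close()). On
 * Stoney, memory gating is disabled, so these calls are skipped.
 */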
static void acp_set_sram_bank_state(void __iomem *acp_mmio, u16 bank,
				    bool power_on)
{
	u32 val, req_reg, sts_reg, sts_reg_mask;
	u32 loops = 1000;

	if (bank < 32) {
		req_reg = mmACP_MEM_SHUT_DOWN_REQ_LO;
		sts_reg = mmACP_MEM_SHUT_DOWN_STS_LO;
		sts_reg_mask = 0xFFFFFFFF;
	} else {
		bank -= 32;
		req_reg = mmACP_MEM_SHUT_DOWN_REQ_HI;
		sts_reg = mmACP_MEM_SHUT_DOWN_STS_HI;
		sts_reg_mask = 0x0000FFFF;
	}

	val = acp_reg_read(acp_mmio, req_reg);
	if (val & (1 << bank)) {
		/* bank is in off state */
		if (power_on)
			/* request to on */
			val &= ~(1 << bank);
		else
			/* request to off */
			return;
	} else {
		/* bank is in on state */
		if (!power_on)
			/* request to off */
			val |= 1 << bank;
		else
			/* request to on */
			return;
	}
	acp_reg_write(val, acp_mmio, req_reg);

	while (acp_reg_read(acp_mmio, sts_reg) != sts_reg_mask) {
		if (!loops--) {
			pr_err("ACP SRAM bank %d state change failed\n", bank);
			break;
		}
		cpu_relax();
	}
}

/* Initialize and bring ACP hardware to default state. */
static int acp_init(void __iomem *acp_mmio, u32 asic_type)
{
	u16 bank;
	u32 val, count, sram_pte_offset;

	/* Assert soft reset of ACP */
	val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);

	val |= ACP_SOFT_RESET__SoftResetAud_MASK;
	acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);

	count = ACP_SOFT_RESET_DONE_TIME_OUT_VALUE;
	while (true) {
		val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
		if (ACP_SOFT_RESET__SoftResetAudDone_MASK ==
		    (val & ACP_SOFT_RESET__SoftResetAudDone_MASK))
			break;
		if (--count == 0) {
			pr_err("Failed to reset ACP\n");
			return -ETIMEDOUT;
		}
		udelay(100);
	}

	/* Enable clock to ACP and wait until the clock is enabled */
	val = acp_reg_read(acp_mmio, mmACP_CONTROL);
	val = val | ACP_CONTROL__ClkEn_MASK;
	acp_reg_write(val, acp_mmio, mmACP_CONTROL);

	count = ACP_CLOCK_EN_TIME_OUT_VALUE;

	while (true) {
		val = acp_reg_read(acp_mmio, mmACP_STATUS);
		if (val & (u32)0x1)
			break;
		if (--count == 0) {
			pr_err("Failed to enable clock to ACP\n");
			return -ETIMEDOUT;
		}
		udelay(100);
	}

	/* Deassert the SOFT RESET flags */
	val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
	val &= ~ACP_SOFT_RESET__SoftResetAud_MASK;
	acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);

	/* initialize the Onion control DAGB register */
	acp_reg_write(ACP_ONION_CNTL_DEFAULT, acp_mmio,
		      mmACP_AXI2DAGB_ONION_CNTL);

	/* initialize the Garlic control DAGB registers */
	acp_reg_write(ACP_GARLIC_CNTL_DEFAULT, acp_mmio,
		      mmACP_AXI2DAGB_GARLIC_CNTL);

	sram_pte_offset = ACP_DAGB_GRP_SRAM_BASE_ADDRESS |
			  ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBSnoopSel_MASK |
			  ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBTargetMemSel_MASK |
			  ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBGrpEnable_MASK;
	acp_reg_write(sram_pte_offset, acp_mmio, mmACP_DAGB_BASE_ADDR_GRP_1);
	acp_reg_write(ACP_PAGE_SIZE_4K_ENABLE, acp_mmio,
		      mmACP_DAGB_PAGE_SIZE_GRP_1);

	acp_reg_write(ACP_SRAM_BASE_ADDRESS, acp_mmio,
		      mmACP_DMA_DESC_BASE_ADDR);

	/* Number of descriptors in SRAM: 0x4 means 256 descriptors (64 * 4) */
	acp_reg_write(0x4, acp_mmio, mmACP_DMA_DESC_MAX_NUM_DSCR);
	acp_reg_write(ACP_EXTERNAL_INTR_CNTL__DMAIOCMask_MASK,
		      acp_mmio, mmACP_EXTERNAL_INTR_CNTL);

	/* When ACP_TILE_P1 is turned on, all SRAM banks get turned on.
	 * Now, turn off all of them. This can't be done in 'poweron' of
	 * the ACP pm domain, as it requires the ACP to be initialized.
	 * For Stoney, memory gating is disabled, i.e. SRAM banks won't be
	 * turned off; the default state for SRAM banks is ON, so setting
	 * the SRAM bank state is skipped on that platform.
	 */
	if (asic_type != CHIP_STONEY) {
		for (bank = 1; bank < 48; bank++)
			acp_set_sram_bank_state(acp_mmio, bank, false);
	}

	/* Stoney supports 16-bit resolution */
	if (asic_type == CHIP_STONEY) {
		val = acp_reg_read(acp_mmio, mmACP_I2S_16BIT_RESOLUTION_EN);
		val |= 0x03;
		acp_reg_write(val, acp_mmio, mmACP_I2S_16BIT_RESOLUTION_EN);
	}
	return 0;
}

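/*
 * acp_init() is called from acp_audio_probe() and from the PM resume
 * handlers; acp_deinit() below undoes it (soft reset, then clock gate)
 * and is called from acp_audio_remove() and runtime suspend.
 */
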
/* Deinitialize ACP */
static int acp_deinit(void __iomem *acp_mmio)
{
	u32 val;
	u32 count;

	/* Assert soft reset of ACP */
	val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);

	val |= ACP_SOFT_RESET__SoftResetAud_MASK;
	acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);

	count = ACP_SOFT_RESET_DONE_TIME_OUT_VALUE;
	while (true) {
		val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
		if (ACP_SOFT_RESET__SoftResetAudDone_MASK ==
		    (val & ACP_SOFT_RESET__SoftResetAudDone_MASK))
			break;
		if (--count == 0) {
			pr_err("Failed to reset ACP\n");
			return -ETIMEDOUT;
		}
		udelay(100);
	}
	/* Disable ACP clock */
	val = acp_reg_read(acp_mmio, mmACP_CONTROL);
	val &= ~ACP_CONTROL__ClkEn_MASK;
	acp_reg_write(val, acp_mmio, mmACP_CONTROL);

	count = ACP_CLOCK_EN_TIME_OUT_VALUE;

	while (true) {
		val = acp_reg_read(acp_mmio, mmACP_STATUS);
		if (!(val & (u32)0x1))
			break;
		if (--count == 0) {
			pr_err("Failed to disable clock to ACP\n");
			return -ETIMEDOUT;
		}
		udelay(100);
	}
	return 0;
}

/* ACP DMA irq handler routine for playback and capture usecases */
static irqreturn_t dma_irq_handler(int irq, void *arg)
{
	u16 dscr_idx;
	u32 intr_flag, ext_intr_status;
	struct audio_drv_data *irq_data;
	void __iomem *acp_mmio;
	struct device *dev = arg;
	bool valid_irq = false;

	irq_data = dev_get_drvdata(dev);
	acp_mmio = irq_data->acp_mmio;

	ext_intr_status = acp_reg_read(acp_mmio, mmACP_EXTERNAL_INTR_STAT);
	intr_flag = (((ext_intr_status &
		       ACP_EXTERNAL_INTR_STAT__DMAIOCStat_MASK) >>
		      ACP_EXTERNAL_INTR_STAT__DMAIOCStat__SHIFT));

	if ((intr_flag & BIT(ACP_TO_I2S_DMA_CH_NUM)) != 0) {
		valid_irq = true;
		if (acp_reg_read(acp_mmio, mmACP_DMA_CUR_DSCR_13) ==
		    PLAYBACK_START_DMA_DESCR_CH13)
			dscr_idx = PLAYBACK_END_DMA_DESCR_CH12;
		else
			dscr_idx = PLAYBACK_START_DMA_DESCR_CH12;
		config_acp_dma_channel(acp_mmio, SYSRAM_TO_ACP_CH_NUM, dscr_idx,
				       1, 0);
		acp_dma_start(acp_mmio, SYSRAM_TO_ACP_CH_NUM, false);

		snd_pcm_period_elapsed(irq_data->play_stream);

		acp_reg_write((intr_flag & BIT(ACP_TO_I2S_DMA_CH_NUM)) << 16,
			      acp_mmio, mmACP_EXTERNAL_INTR_STAT);
	}

	if ((intr_flag & BIT(I2S_TO_ACP_DMA_CH_NUM)) != 0) {
		valid_irq = true;
		if (acp_reg_read(acp_mmio, mmACP_DMA_CUR_DSCR_15) ==
		    CAPTURE_START_DMA_DESCR_CH15)
			dscr_idx = CAPTURE_END_DMA_DESCR_CH14;
		else
			dscr_idx = CAPTURE_START_DMA_DESCR_CH14;
		config_acp_dma_channel(acp_mmio, ACP_TO_SYSRAM_CH_NUM, dscr_idx,
				       1, 0);
		acp_dma_start(acp_mmio, ACP_TO_SYSRAM_CH_NUM, false);

		acp_reg_write((intr_flag & BIT(I2S_TO_ACP_DMA_CH_NUM)) << 16,
			      acp_mmio, mmACP_EXTERNAL_INTR_STAT);
	}

	if ((intr_flag & BIT(ACP_TO_SYSRAM_CH_NUM)) != 0) {
		valid_irq = true;
		snd_pcm_period_elapsed(irq_data->capture_stream);
		acp_reg_write((intr_flag & BIT(ACP_TO_SYSRAM_CH_NUM)) << 16,
			      acp_mmio, mmACP_EXTERNAL_INTR_STAT);
	}

	if (valid_irq)
		return IRQ_HANDLED;
	else
		return IRQ_NONE;
}

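/*
 * Per-period flow: when the circular I2S-side channel (13 for playback,
 * 15 for capture) raises its completion interrupt, the handler above
 * re-arms the matching one-shot system-memory staging channel (12 or 14)
 * with the descriptor for the half of the buffer the I2S channel is not
 * currently transferring. For playback, snd_pcm_period_elapsed() is
 * signalled from the ACP-to-I2S interrupt; for capture it is signalled
 * from the ACP-to-SYSRAM interrupt, i.e. once the samples have actually
 * reached system memory.
 */
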
static int acp_dma_open(struct snd_pcm_substream *substream)
{
	u16 bank;
	int ret = 0;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct snd_soc_pcm_runtime *prtd = substream->private_data;
	struct audio_drv_data *intr_data = dev_get_drvdata(prtd->platform->dev);
	struct audio_substream_data *adata =
		kzalloc(sizeof(struct audio_substream_data), GFP_KERNEL);

	if (!adata)
		return -ENOMEM;

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		switch (intr_data->asic_type) {
		case CHIP_STONEY:
			runtime->hw = acp_st_pcm_hardware_playback;
			break;
		default:
			runtime->hw = acp_pcm_hardware_playback;
		}
	} else {
		switch (intr_data->asic_type) {
		case CHIP_STONEY:
			runtime->hw = acp_st_pcm_hardware_capture;
			break;
		default:
			runtime->hw = acp_pcm_hardware_capture;
		}
	}

	ret = snd_pcm_hw_constraint_integer(runtime,
					    SNDRV_PCM_HW_PARAM_PERIODS);
	if (ret < 0) {
		dev_err(prtd->platform->dev, "set integer constraint failed\n");
		kfree(adata);
		return ret;
	}

	adata->acp_mmio = intr_data->acp_mmio;
	runtime->private_data = adata;

	/* Enable the ACP irq only if neither playback nor capture streams
	 * are active when this new stream is opened. If another stream is
	 * still open, the interrupt is already enabled.
	 */
	if (!intr_data->play_stream && !intr_data->capture_stream)
		acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		intr_data->play_stream = substream;
		/* For Stoney, memory gating is disabled, i.e. SRAM banks
		 * won't be turned off. The default state for SRAM banks is ON,
		 * so setting the SRAM bank state is skipped on that platform.
		 */
		if (intr_data->asic_type != CHIP_STONEY) {
			for (bank = 1; bank <= 4; bank++)
				acp_set_sram_bank_state(intr_data->acp_mmio,
							bank, true);
		}
	} else {
		intr_data->capture_stream = substream;
		if (intr_data->asic_type != CHIP_STONEY) {
			for (bank = 5; bank <= 8; bank++)
				acp_set_sram_bank_state(intr_data->acp_mmio,
							bank, true);
		}
	}

	return 0;
}

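/*
 * The audio_substream_data allocated in acp_dma_open() above travels in
 * runtime->private_data for the lifetime of the substream: hw_params,
 * prepare, trigger and pointer all retrieve it from there, and it is
 * freed again in acp_dma_close().
 */
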
static int acp_dma_hw_params(struct snd_pcm_substream *substream,
			     struct snd_pcm_hw_params *params)
{
	int status;
	uint64_t size;
	struct page *pg;
	struct snd_pcm_runtime *runtime;
	struct audio_substream_data *rtd;
	struct snd_soc_pcm_runtime *prtd = substream->private_data;
	struct audio_drv_data *adata = dev_get_drvdata(prtd->platform->dev);

	runtime = substream->runtime;
	rtd = runtime->private_data;

	if (WARN_ON(!rtd))
		return -EINVAL;

	size = params_buffer_bytes(params);
	status = snd_pcm_lib_malloc_pages(substream, size);
	if (status < 0)
		return status;

	memset(substream->runtime->dma_area, 0, params_buffer_bytes(params));
	pg = virt_to_page(substream->dma_buffer.area);

	if (pg) {
		acp_set_sram_bank_state(rtd->acp_mmio, 0, true);
		/* Save the allocation in the runtime private data and fill
		 * the page table entries in ACP SRAM.
		 */
		rtd->pg = pg;
		rtd->order = get_order(size);
		rtd->size = size;
		rtd->num_of_pages = PAGE_ALIGN(size) >> PAGE_SHIFT;
		rtd->direction = substream->stream;

		config_acp_dma(rtd->acp_mmio, rtd, adata->asic_type);
		status = 0;
	} else {
		status = -ENOMEM;
	}
	return status;
}

static int acp_dma_hw_free(struct snd_pcm_substream *substream)
{
	return snd_pcm_lib_free_pages(substream);
}

static u64 acp_get_byte_count(void __iomem *acp_mmio, int stream)
{
	union acp_dma_count playback_dma_count;
	union acp_dma_count capture_dma_count;
	u64 bytescount = 0;

	if (stream == SNDRV_PCM_STREAM_PLAYBACK) {
		playback_dma_count.bcount.high = acp_reg_read(acp_mmio,
					mmACP_I2S_TRANSMIT_BYTE_CNT_HIGH);
		playback_dma_count.bcount.low = acp_reg_read(acp_mmio,
					mmACP_I2S_TRANSMIT_BYTE_CNT_LOW);
		bytescount = playback_dma_count.bytescount;
	} else {
		capture_dma_count.bcount.high = acp_reg_read(acp_mmio,
					mmACP_I2S_RECEIVED_BYTE_CNT_HIGH);
		capture_dma_count.bcount.low = acp_reg_read(acp_mmio,
					mmACP_I2S_RECEIVED_BYTE_CNT_LOW);
		bytescount = capture_dma_count.bytescount;
	}
	return bytescount;
}

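/*
 * Stream position is derived from the free-running I2S byte counters
 * read above rather than from the DMA descriptor position:
 * acp_dma_pointer() below subtracts the counter snapshot taken in the
 * trigger handler (renderbytescount / capturebytescount) and reduces
 * the result modulo the ring-buffer size.
 */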
static snd_pcm_uframes_t acp_dma_pointer(struct snd_pcm_substream *substream)
{
	u32 buffersize;
	u32 pos = 0;
	u64 bytescount = 0;

	struct snd_pcm_runtime *runtime = substream->runtime;
	struct audio_substream_data *rtd = runtime->private_data;

	buffersize = frames_to_bytes(runtime, runtime->buffer_size);
	bytescount = acp_get_byte_count(rtd->acp_mmio, substream->stream);

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		if (bytescount > rtd->renderbytescount)
			bytescount = bytescount - rtd->renderbytescount;
	} else {
		if (bytescount > rtd->capturebytescount)
			bytescount = bytescount - rtd->capturebytescount;
	}
	pos = do_div(bytescount, buffersize);
	return bytes_to_frames(runtime, pos);
}

static int acp_dma_mmap(struct snd_pcm_substream *substream,
			struct vm_area_struct *vma)
{
	return snd_pcm_lib_default_mmap(substream, vma);
}

static int acp_dma_prepare(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct audio_substream_data *rtd = runtime->private_data;

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		config_acp_dma_channel(rtd->acp_mmio, SYSRAM_TO_ACP_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH12,
				       NUM_DSCRS_PER_CHANNEL, 0);
		config_acp_dma_channel(rtd->acp_mmio, ACP_TO_I2S_DMA_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH13,
				       NUM_DSCRS_PER_CHANNEL, 0);
	} else {
		config_acp_dma_channel(rtd->acp_mmio, ACP_TO_SYSRAM_CH_NUM,
				       CAPTURE_START_DMA_DESCR_CH14,
				       NUM_DSCRS_PER_CHANNEL, 0);
		config_acp_dma_channel(rtd->acp_mmio, I2S_TO_ACP_DMA_CH_NUM,
				       CAPTURE_START_DMA_DESCR_CH15,
				       NUM_DSCRS_PER_CHANNEL, 0);
	}
	return 0;
}

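/*
 * Trigger handling below: on START/RESUME the current I2S byte count is
 * snapshotted for acp_dma_pointer(). For playback the one-shot
 * SYSRAM-to-ACP prefetch is started first and polled until it completes,
 * and only then is the circular ACP-to-I2S channel started; for capture
 * only the circular I2S-to-ACP channel needs to be kicked. On
 * STOP/SUSPEND only the circular channels are stopped explicitly, since
 * the one-shot staging channels stop by themselves.
 */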
static int acp_dma_trigger(struct snd_pcm_substream *substream, int cmd)
{
	int ret;
	u32 loops = 4000;
	u64 bytescount = 0;

	struct snd_pcm_runtime *runtime = substream->runtime;
	struct snd_soc_pcm_runtime *prtd = substream->private_data;
	struct audio_substream_data *rtd = runtime->private_data;

	if (!rtd)
		return -EINVAL;
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
	case SNDRV_PCM_TRIGGER_RESUME:
		bytescount = acp_get_byte_count(rtd->acp_mmio,
						substream->stream);
		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
			if (rtd->renderbytescount == 0)
				rtd->renderbytescount = bytescount;
			acp_dma_start(rtd->acp_mmio,
				      SYSRAM_TO_ACP_CH_NUM, false);
			while (acp_reg_read(rtd->acp_mmio, mmACP_DMA_CH_STS) &
			       BIT(SYSRAM_TO_ACP_CH_NUM)) {
				if (!loops--) {
					dev_err(prtd->platform->dev,
						"acp dma start timeout\n");
					return -ETIMEDOUT;
				}
				cpu_relax();
			}

			acp_dma_start(rtd->acp_mmio,
				      ACP_TO_I2S_DMA_CH_NUM, true);

		} else {
			if (rtd->capturebytescount == 0)
				rtd->capturebytescount = bytescount;
			acp_dma_start(rtd->acp_mmio,
				      I2S_TO_ACP_DMA_CH_NUM, true);
		}
		ret = 0;
		break;
	case SNDRV_PCM_TRIGGER_STOP:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
	case SNDRV_PCM_TRIGGER_SUSPEND:
		/* Only the circular DMA channels need to be stopped here:
		 * ACP_TO_I2S_DMA_CH_NUM / I2S_TO_ACP_DMA_CH_NUM. The
		 * non-circular channels (SYSRAM_TO_ACP_CH_NUM /
		 * ACP_TO_SYSRAM_CH_NUM) stop automatically once their
		 * transfers complete.
		 */
		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
			ret = acp_dma_stop(rtd->acp_mmio,
					   ACP_TO_I2S_DMA_CH_NUM);
			rtd->renderbytescount = 0;
		} else {
			ret = acp_dma_stop(rtd->acp_mmio,
					   I2S_TO_ACP_DMA_CH_NUM);
			rtd->capturebytescount = 0;
		}
		break;
	default:
		ret = -EINVAL;
	}
	return ret;
}

static int acp_dma_new(struct snd_soc_pcm_runtime *rtd)
{
	int ret;
	struct audio_drv_data *adata = dev_get_drvdata(rtd->platform->dev);

	switch (adata->asic_type) {
	case CHIP_STONEY:
		ret = snd_pcm_lib_preallocate_pages_for_all(rtd->pcm,
							    SNDRV_DMA_TYPE_DEV,
							    NULL, ST_MIN_BUFFER,
							    ST_MAX_BUFFER);
		break;
	default:
		ret = snd_pcm_lib_preallocate_pages_for_all(rtd->pcm,
							    SNDRV_DMA_TYPE_DEV,
							    NULL, MIN_BUFFER,
							    MAX_BUFFER);
		break;
	}
	if (ret < 0)
		dev_err(rtd->platform->dev,
			"buffer preallocation failed, error:%d\n", ret);
	return ret;
}

static int acp_dma_close(struct snd_pcm_substream *substream)
{
	u16 bank;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct audio_substream_data *rtd = runtime->private_data;
	struct snd_soc_pcm_runtime *prtd = substream->private_data;
	struct audio_drv_data *adata = dev_get_drvdata(prtd->platform->dev);

	kfree(rtd);

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		adata->play_stream = NULL;
		/* For Stoney, memory gating is disabled, i.e. SRAM banks
		 * won't be turned off. The default state for SRAM banks is ON,
		 * so the bank power-down below only applies to non-Stoney
		 * (e.g. Carrizo) platforms.
		 */
		if (adata->asic_type != CHIP_STONEY) {
			for (bank = 1; bank <= 4; bank++)
				acp_set_sram_bank_state(adata->acp_mmio, bank,
							false);
		}
	} else {
		adata->capture_stream = NULL;
		if (adata->asic_type != CHIP_STONEY) {
			for (bank = 5; bank <= 8; bank++)
				acp_set_sram_bank_state(adata->acp_mmio, bank,
							false);
		}
	}

	/* Disable the ACP irq when the current stream is being closed and
	 * no other stream is active.
	 */
	if (!adata->play_stream && !adata->capture_stream)
		acp_reg_write(0, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);

	return 0;
}

static const struct snd_pcm_ops acp_dma_ops = {
	.open = acp_dma_open,
	.close = acp_dma_close,
	.ioctl = snd_pcm_lib_ioctl,
	.hw_params = acp_dma_hw_params,
	.hw_free = acp_dma_hw_free,
	.trigger = acp_dma_trigger,
	.pointer = acp_dma_pointer,
	.mmap = acp_dma_mmap,
	.prepare = acp_dma_prepare,
};

static struct snd_soc_platform_driver acp_asoc_platform = {
	.ops = &acp_dma_ops,
	.pcm_new = acp_dma_new,
};

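/*
 * acp_audio_probe() expects the creator of the "acp_audio_dma" platform
 * device to pass a u32 ASIC type (values from <drm/amd_asic_type.h>,
 * e.g. CHIP_STONEY) as platform_data; note that it is dereferenced
 * below without a NULL check.
 */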
static int acp_audio_probe(struct platform_device *pdev)
{
	int status;
	struct audio_drv_data *audio_drv_data;
	struct resource *res;
	const u32 *pdata = pdev->dev.platform_data;

	audio_drv_data = devm_kzalloc(&pdev->dev, sizeof(struct audio_drv_data),
				      GFP_KERNEL);
	if (!audio_drv_data)
		return -ENOMEM;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	audio_drv_data->acp_mmio = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(audio_drv_data->acp_mmio))
		return PTR_ERR(audio_drv_data->acp_mmio);

	/* The following members get populated in the device 'open'
	 * function. Until then, interrupts are kept disabled by 'acp_init'
	 * and the device does not generate any interrupts.
	 */
	audio_drv_data->play_stream = NULL;
	audio_drv_data->capture_stream = NULL;
	audio_drv_data->asic_type = *pdata;

	res = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res) {
		dev_err(&pdev->dev, "IORESOURCE_IRQ FAILED\n");
		return -ENODEV;
	}

	status = devm_request_irq(&pdev->dev, res->start, dma_irq_handler,
				  0, "ACP_IRQ", &pdev->dev);
	if (status) {
		dev_err(&pdev->dev, "ACP IRQ request failed\n");
		return status;
	}

	dev_set_drvdata(&pdev->dev, audio_drv_data);

	/* Initialize the ACP */
	acp_init(audio_drv_data->acp_mmio, audio_drv_data->asic_type);

	status = snd_soc_register_platform(&pdev->dev, &acp_asoc_platform);
	if (status != 0) {
		dev_err(&pdev->dev, "Failed to register ALSA platform device\n");
		return status;
	}

	pm_runtime_set_autosuspend_delay(&pdev->dev, 10000);
	pm_runtime_use_autosuspend(&pdev->dev);
	pm_runtime_enable(&pdev->dev);

	return status;
}

static int acp_audio_remove(struct platform_device *pdev)
{
	struct audio_drv_data *adata = dev_get_drvdata(&pdev->dev);

	acp_deinit(adata->acp_mmio);
	snd_soc_unregister_platform(&pdev->dev);
	pm_runtime_disable(&pdev->dev);

	return 0;
}

static int acp_pcm_resume(struct device *dev)
{
	u16 bank;
	struct audio_drv_data *adata = dev_get_drvdata(dev);

	acp_init(adata->acp_mmio, adata->asic_type);

	if (adata->play_stream && adata->play_stream->runtime) {
		/* For Stoney, memory gating is disabled, i.e. SRAM banks
		 * won't be turned off. The default state for SRAM banks is ON,
		 * so setting the SRAM bank state is skipped on that platform.
		 */
		if (adata->asic_type != CHIP_STONEY) {
			for (bank = 1; bank <= 4; bank++)
				acp_set_sram_bank_state(adata->acp_mmio, bank,
							true);
		}
		config_acp_dma(adata->acp_mmio,
			       adata->play_stream->runtime->private_data,
			       adata->asic_type);
	}
	if (adata->capture_stream && adata->capture_stream->runtime) {
		if (adata->asic_type != CHIP_STONEY) {
			for (bank = 5; bank <= 8; bank++)
				acp_set_sram_bank_state(adata->acp_mmio, bank,
							true);
		}
		config_acp_dma(adata->acp_mmio,
			       adata->capture_stream->runtime->private_data,
			       adata->asic_type);
	}
	acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
	return 0;
}

static int acp_pcm_runtime_suspend(struct device *dev)
{
	struct audio_drv_data *adata = dev_get_drvdata(dev);

	acp_deinit(adata->acp_mmio);
	acp_reg_write(0, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
	return 0;
}

static int acp_pcm_runtime_resume(struct device *dev)
{
	struct audio_drv_data *adata = dev_get_drvdata(dev);

	acp_init(adata->acp_mmio, adata->asic_type);
	acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
	return 0;
}

static const struct dev_pm_ops acp_pm_ops = {
	.resume = acp_pcm_resume,
	.runtime_suspend = acp_pcm_runtime_suspend,
	.runtime_resume = acp_pcm_runtime_resume,
};

static struct platform_driver acp_dma_driver = {
	.probe = acp_audio_probe,
	.remove = acp_audio_remove,
	.driver = {
		.name = DRV_NAME,
		.pm = &acp_pm_ops,
	},
};

module_platform_driver(acp_dma_driver);

MODULE_AUTHOR("Vijendar.Mukunda@amd.com");
MODULE_AUTHOR("Maruthi.Bayyavarapu@amd.com");
MODULE_DESCRIPTION("AMD ACP PCM Driver");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:"DRV_NAME);