/*
 *  skl-message.c - HDA DSP interface for FW registration, Pipe and Module
 *  configurations
 *
 *  Copyright (C) 2015 Intel Corp
 *  Author: Rafal Redzimski <rafal.f.redzimski@intel.com>
 *	    Jeeja KP <jeeja.kp@intel.com>
 *  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 *
 *  This program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as version 2, as
 *  published by the Free Software Foundation.
 *
 *  This program is distributed in the hope that it will be useful, but
 *  WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  General Public License for more details.
 */

#include <linux/slab.h>
#include <linux/pci.h>
#include <sound/core.h>
#include <sound/pcm.h>
#include "skl-sst-dsp.h"
#include "skl-sst-ipc.h"
#include "skl.h"
#include "../common/sst-dsp.h"
#include "../common/sst-dsp-priv.h"
#include "skl-topology.h"
#include "skl-tplg-interface.h"

static int skl_alloc_dma_buf(struct device *dev,
		struct snd_dma_buffer *dmab, size_t size)
{
	struct hdac_ext_bus *ebus = dev_get_drvdata(dev);
	struct hdac_bus *bus = ebus_to_hbus(ebus);

	if (!bus)
		return -ENODEV;

	return bus->io_ops->dma_alloc_pages(bus, SNDRV_DMA_TYPE_DEV, size, dmab);
}

static int skl_free_dma_buf(struct device *dev, struct snd_dma_buffer *dmab)
{
	struct hdac_ext_bus *ebus = dev_get_drvdata(dev);
	struct hdac_bus *bus = ebus_to_hbus(ebus);

	if (!bus)
		return -ENODEV;

	bus->io_ops->dma_free_pages(bus, dmab);

	return 0;
}

#define NOTIFICATION_PARAM_ID 3
#define NOTIFICATION_MASK 0xf

/* disable notification for underruns/overruns from firmware module */
void skl_dsp_enable_notification(struct skl_sst *ctx, bool enable)
{
	struct notification_mask mask;
	struct skl_ipc_large_config_msg msg = {0};

	mask.notify = NOTIFICATION_MASK;
	mask.enable = enable;

	msg.large_param_id = NOTIFICATION_PARAM_ID;
	msg.param_data_size = sizeof(mask);

	skl_ipc_set_large_config(&ctx->ipc, &msg, (u32 *)&mask);
}

static int skl_dsp_setup_spib(struct device *dev, unsigned int size,
				int stream_tag, int enable)
{
	struct hdac_ext_bus *ebus = dev_get_drvdata(dev);
	struct hdac_bus *bus = ebus_to_hbus(ebus);
	struct hdac_stream *stream = snd_hdac_get_stream(bus,
			SNDRV_PCM_STREAM_PLAYBACK, stream_tag);
	struct hdac_ext_stream *estream;

	if (!stream)
		return -EINVAL;

	estream = stream_to_hdac_ext_stream(stream);
	/* enable/disable SPIB for this hdac stream */
	snd_hdac_ext_stream_spbcap_enable(ebus, enable, stream->index);

	/* set the spib value */
	snd_hdac_ext_stream_set_spib(ebus, estream, size);

	return 0;
}

static int skl_dsp_prepare(struct device *dev, unsigned int format,
			unsigned int size, struct snd_dma_buffer *dmab)
{
	struct hdac_ext_bus *ebus = dev_get_drvdata(dev);
	struct hdac_bus *bus = ebus_to_hbus(ebus);
	struct hdac_ext_stream *estream;
	struct hdac_stream *stream;
	struct snd_pcm_substream substream;
	int ret;

	if (!bus)
		return -ENODEV;

	memset(&substream, 0, sizeof(substream));
	substream.stream = SNDRV_PCM_STREAM_PLAYBACK;

	estream = snd_hdac_ext_stream_assign(ebus, &substream,
					HDAC_EXT_STREAM_TYPE_HOST);
	if (!estream)
		return -ENODEV;

	stream = hdac_stream(estream);
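
	/*
	 * Program the assigned host stream with the requested format and
	 * buffer size, then set the SPIB (software position in buffer)
	 * value to that size. The stream tag is returned so the caller
	 * can trigger and clean up this stream later.
	 */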

	/* assign decoupled host DMA channel */
	ret = snd_hdac_dsp_prepare(stream, format, size, dmab);
	if (ret < 0)
		return ret;

	skl_dsp_setup_spib(dev, size, stream->stream_tag, true);

	return stream->stream_tag;
}

static int skl_dsp_trigger(struct device *dev, bool start, int stream_tag)
{
	struct hdac_ext_bus *ebus = dev_get_drvdata(dev);
	struct hdac_stream *stream;
	struct hdac_bus *bus = ebus_to_hbus(ebus);

	if (!bus)
		return -ENODEV;

	stream = snd_hdac_get_stream(bus,
		SNDRV_PCM_STREAM_PLAYBACK, stream_tag);
	if (!stream)
		return -EINVAL;

	snd_hdac_dsp_trigger(stream, start);

	return 0;
}

static int skl_dsp_cleanup(struct device *dev,
		struct snd_dma_buffer *dmab, int stream_tag)
{
	struct hdac_ext_bus *ebus = dev_get_drvdata(dev);
	struct hdac_stream *stream;
	struct hdac_ext_stream *estream;
	struct hdac_bus *bus = ebus_to_hbus(ebus);

	if (!bus)
		return -ENODEV;

	stream = snd_hdac_get_stream(bus,
		SNDRV_PCM_STREAM_PLAYBACK, stream_tag);
	if (!stream)
		return -EINVAL;

	estream = stream_to_hdac_ext_stream(stream);
	skl_dsp_setup_spib(dev, 0, stream_tag, false);
	snd_hdac_ext_stream_release(estream, HDAC_EXT_STREAM_TYPE_HOST);

	snd_hdac_dsp_cleanup(stream, dmab);

	return 0;
}

static struct skl_dsp_loader_ops skl_get_loader_ops(void)
{
	struct skl_dsp_loader_ops loader_ops;

	memset(&loader_ops, 0, sizeof(struct skl_dsp_loader_ops));

	loader_ops.alloc_dma_buf = skl_alloc_dma_buf;
	loader_ops.free_dma_buf = skl_free_dma_buf;

	return loader_ops;
}

static struct skl_dsp_loader_ops bxt_get_loader_ops(void)
{
	struct skl_dsp_loader_ops loader_ops;

	memset(&loader_ops, 0, sizeof(loader_ops));

	loader_ops.alloc_dma_buf = skl_alloc_dma_buf;
	loader_ops.free_dma_buf = skl_free_dma_buf;
	loader_ops.prepare = skl_dsp_prepare;
	loader_ops.trigger = skl_dsp_trigger;
	loader_ops.cleanup = skl_dsp_cleanup;

	return loader_ops;
}

static const struct skl_dsp_ops dsp_ops[] = {
	{
		.id = 0x9d70,
		.loader_ops = skl_get_loader_ops,
		.init = skl_sst_dsp_init,
		.init_fw = skl_sst_init_fw,
		.cleanup = skl_sst_dsp_cleanup
	},
	{
		.id = 0x9d71,
		.loader_ops = skl_get_loader_ops,
		.init = kbl_sst_dsp_init,
		.init_fw = skl_sst_init_fw,
		.cleanup = skl_sst_dsp_cleanup
	},
	{
		.id = 0x5a98,
		.loader_ops = bxt_get_loader_ops,
		.init = bxt_sst_dsp_init,
		.init_fw = bxt_sst_init_fw,
		.cleanup = bxt_sst_dsp_cleanup
	},
	{
		.id = 0x3198,
		.loader_ops = bxt_get_loader_ops,
		.init = bxt_sst_dsp_init,
		.init_fw = bxt_sst_init_fw,
		.cleanup = bxt_sst_dsp_cleanup
	},
};

const struct skl_dsp_ops *skl_get_dsp_ops(int pci_id)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(dsp_ops); i++) {
		if (dsp_ops[i].id == pci_id)
			return &dsp_ops[i];
	}

	return NULL;
}
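
/*
 * Enable the processing pipe capability (PPCAP) and its interrupt, map the
 * ADSP MMIO BAR and run the platform specific DSP init selected by PCI
 * device ID (see dsp_ops[] above).
 */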
int skl_init_dsp(struct skl *skl)
{
	void __iomem *mmio_base;
	struct hdac_ext_bus *ebus = &skl->ebus;
	struct hdac_bus *bus = ebus_to_hbus(ebus);
	struct skl_dsp_loader_ops loader_ops;
	int irq = bus->irq;
	const struct skl_dsp_ops *ops;
	int ret;

	/* enable ppcap interrupt */
	snd_hdac_ext_bus_ppcap_enable(&skl->ebus, true);
	snd_hdac_ext_bus_ppcap_int_enable(&skl->ebus, true);

	/* read the BAR of the ADSP MMIO */
	mmio_base = pci_ioremap_bar(skl->pci, 4);
	if (mmio_base == NULL) {
		dev_err(bus->dev, "ioremap error\n");
		return -ENXIO;
	}

	ops = skl_get_dsp_ops(skl->pci->device);
	if (!ops)
		return -EIO;

	loader_ops = ops->loader_ops();
	ret = ops->init(bus->dev, mmio_base, irq,
				skl->fw_name, loader_ops,
				&skl->skl_sst);

	if (ret < 0)
		return ret;

	skl->skl_sst->dsp_ops = ops;
	dev_dbg(bus->dev, "dsp registration status=%d\n", ret);

	return ret;
}

int skl_free_dsp(struct skl *skl)
{
	struct hdac_ext_bus *ebus = &skl->ebus;
	struct hdac_bus *bus = ebus_to_hbus(ebus);
	struct skl_sst *ctx = skl->skl_sst;

	/* disable ppcap interrupt */
	snd_hdac_ext_bus_ppcap_int_enable(&skl->ebus, false);

	ctx->dsp_ops->cleanup(bus->dev, ctx);

	if (ctx->dsp->addr.lpe)
		iounmap(ctx->dsp->addr.lpe);

	return 0;
}

/*
 * In the case of "suspend_active", i.e. the Audio IP being active
 * during system suspend, immediately execute any pending D0i3 work
 * before suspending. This is needed for the IP to work in low power
 * mode during system suspend. In the case of normal suspend, cancel
 * any pending D0i3 work.
 */
int skl_suspend_late_dsp(struct skl *skl)
{
	struct skl_sst *ctx = skl->skl_sst;
	struct delayed_work *dwork;

	if (!ctx)
		return 0;

	dwork = &ctx->d0i3.work;

	if (dwork->work.func) {
		if (skl->supend_active)
			flush_delayed_work(dwork);
		else
			cancel_delayed_work_sync(dwork);
	}

	return 0;
}

int skl_suspend_dsp(struct skl *skl)
{
	struct skl_sst *ctx = skl->skl_sst;
	int ret;

	/* if ppcap is not supported return 0 */
	if (!skl->ebus.bus.ppcap)
		return 0;

	ret = skl_dsp_sleep(ctx->dsp);
	if (ret < 0)
		return ret;

	/* disable ppcap interrupt */
	snd_hdac_ext_bus_ppcap_int_enable(&skl->ebus, false);
	snd_hdac_ext_bus_ppcap_enable(&skl->ebus, false);

	return 0;
}

int skl_resume_dsp(struct skl *skl)
{
	struct skl_sst *ctx = skl->skl_sst;
	int ret;

	/* if ppcap is not supported return 0 */
	if (!skl->ebus.bus.ppcap)
		return 0;

	/* enable ppcap interrupt */
	snd_hdac_ext_bus_ppcap_enable(&skl->ebus, true);
	snd_hdac_ext_bus_ppcap_int_enable(&skl->ebus, true);

	/* check if DSP 1st boot is done */
	if (skl->skl_sst->is_first_boot == true)
		return 0;

	ret = skl_dsp_wake(ctx->dsp);
	if (ret < 0)
		return ret;

	skl_dsp_enable_notification(skl->skl_sst, false);
	return ret;
}

enum skl_bitdepth skl_get_bit_depth(int params)
{
	switch (params) {
	case 8:
		return SKL_DEPTH_8BIT;

	case 16:
		return SKL_DEPTH_16BIT;

	case 24:
		return SKL_DEPTH_24BIT;

	case 32:
		return SKL_DEPTH_32BIT;

	default:
		return SKL_DEPTH_INVALID;

	}
}

/*
 * Each module in the DSP expects a base module configuration, which consists
 * of PCM format information calculated in the driver and resource values
 * read from the widget information passed through the topology binary.
 * This is sent when we create a module with the INIT_INSTANCE IPC msg.
 */
static void skl_set_base_module_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_base_cfg *base_cfg)
{
	struct skl_module_fmt *format = &mconfig->in_fmt[0];
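
	/*
	 * The base config carries the PCM attributes of the module's first
	 * input pin format, plus the resource values filled in below.
	 */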
	base_cfg->audio_fmt.number_of_channels = (u8)format->channels;

	base_cfg->audio_fmt.s_freq = format->s_freq;
	base_cfg->audio_fmt.bit_depth = format->bit_depth;
	base_cfg->audio_fmt.valid_bit_depth = format->valid_bit_depth;
	base_cfg->audio_fmt.ch_cfg = format->ch_cfg;

	dev_dbg(ctx->dev, "bit_depth=%x valid_bd=%x ch_config=%x\n",
			format->bit_depth, format->valid_bit_depth,
			format->ch_cfg);

	base_cfg->audio_fmt.channel_map = format->ch_map;

	base_cfg->audio_fmt.interleaving = format->interleaving_style;

	base_cfg->cps = mconfig->mcps;
	base_cfg->ibs = mconfig->ibs;
	base_cfg->obs = mconfig->obs;
	base_cfg->is_pages = mconfig->mem_pages;
}

/*
 * Copies copier capabilities into copier module and updates copier module
 * config size.
 */
static void skl_copy_copier_caps(struct skl_module_cfg *mconfig,
				struct skl_cpr_cfg *cpr_mconfig)
{
	if (mconfig->formats_config.caps_size == 0)
		return;

	memcpy(cpr_mconfig->gtw_cfg.config_data,
			mconfig->formats_config.caps,
			mconfig->formats_config.caps_size);

	cpr_mconfig->gtw_cfg.config_length =
			(mconfig->formats_config.caps_size) / 4;
}
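
/*
 * The connector node id programmed into the copier gateway config encodes
 * a DMA type (the gateway class) and a virtual index identifying the
 * instance within that class.
 */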

#define SKL_NON_GATEWAY_CPR_NODE_ID 0xFFFFFFFF
/*
 * Calculate the gateway settings required for the copier module: the type
 * of gateway and the index of the gateway to use.
 */
static u32 skl_get_node_id(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig)
{
	union skl_connector_node_id node_id = {0};
	union skl_ssp_dma_node ssp_node = {0};
	struct skl_pipe_params *params = mconfig->pipe->p_params;

	switch (mconfig->dev_type) {
	case SKL_DEVICE_BT:
		node_id.node.dma_type =
			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
			SKL_DMA_I2S_LINK_OUTPUT_CLASS :
			SKL_DMA_I2S_LINK_INPUT_CLASS;
		node_id.node.vindex = params->host_dma_id +
					(mconfig->vbus_id << 3);
		break;

	case SKL_DEVICE_I2S:
		node_id.node.dma_type =
			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
			SKL_DMA_I2S_LINK_OUTPUT_CLASS :
			SKL_DMA_I2S_LINK_INPUT_CLASS;
		ssp_node.dma_node.time_slot_index = mconfig->time_slot;
		ssp_node.dma_node.i2s_instance = mconfig->vbus_id;
		node_id.node.vindex = ssp_node.val;
		break;

	case SKL_DEVICE_DMIC:
		node_id.node.dma_type = SKL_DMA_DMIC_LINK_INPUT_CLASS;
		node_id.node.vindex = mconfig->vbus_id +
					(mconfig->time_slot);
		break;

	case SKL_DEVICE_HDALINK:
		node_id.node.dma_type =
			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
			SKL_DMA_HDA_LINK_OUTPUT_CLASS :
			SKL_DMA_HDA_LINK_INPUT_CLASS;
		node_id.node.vindex = params->link_dma_id;
		break;

	case SKL_DEVICE_HDAHOST:
		node_id.node.dma_type =
			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
			SKL_DMA_HDA_HOST_OUTPUT_CLASS :
			SKL_DMA_HDA_HOST_INPUT_CLASS;
		node_id.node.vindex = params->host_dma_id;
		break;

	default:
		node_id.val = 0xFFFFFFFF;
		break;
	}

	return node_id.val;
}

static void skl_setup_cpr_gateway_cfg(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_cpr_cfg *cpr_mconfig)
{
	u32 dma_io_buf;

	cpr_mconfig->gtw_cfg.node_id = skl_get_node_id(ctx, mconfig);

	if (cpr_mconfig->gtw_cfg.node_id == SKL_NON_GATEWAY_CPR_NODE_ID) {
		cpr_mconfig->cpr_feature_mask = 0;
		return;
	}

	switch (mconfig->hw_conn_type) {
	case SKL_CONN_SOURCE:
		if (mconfig->dev_type == SKL_DEVICE_HDAHOST)
			dma_io_buf = mconfig->ibs;
		else
			dma_io_buf = mconfig->obs;
		break;

	case SKL_CONN_SINK:
		if (mconfig->dev_type == SKL_DEVICE_HDAHOST)
			dma_io_buf = mconfig->obs;
		else
			dma_io_buf = mconfig->ibs;
		break;

	default:
		dev_warn(ctx->dev, "wrong connection type: %d\n",
				mconfig->hw_conn_type);
		return;
	}

	cpr_mconfig->gtw_cfg.dma_buffer_size =
				mconfig->dma_buffer_size * dma_io_buf;

	cpr_mconfig->cpr_feature_mask = 0;
	cpr_mconfig->gtw_cfg.config_length = 0;

	skl_copy_copier_caps(mconfig, cpr_mconfig);
}

#define DMA_CONTROL_ID 5

int skl_dsp_set_dma_control(struct skl_sst *ctx, struct skl_module_cfg *mconfig)
{
	struct skl_dma_control *dma_ctrl;
	struct skl_ipc_large_config_msg msg = {0};
	int err = 0;

	/* if the caps blob size is zero, there is nothing to send */
	if (mconfig->formats_config.caps_size == 0)
		return 0;

	msg.large_param_id = DMA_CONTROL_ID;
	msg.param_data_size = sizeof(struct skl_dma_control) +
				mconfig->formats_config.caps_size;

	dma_ctrl = kzalloc(msg.param_data_size, GFP_KERNEL);
	if (dma_ctrl == NULL)
		return -ENOMEM;

	dma_ctrl->node_id = skl_get_node_id(ctx, mconfig);

	/* size in dwords */
	dma_ctrl->config_length = mconfig->formats_config.caps_size / 4;

	memcpy(dma_ctrl->config_data, mconfig->formats_config.caps,
				mconfig->formats_config.caps_size);

	err = skl_ipc_set_large_config(&ctx->ipc, &msg, (u32 *)dma_ctrl);

	kfree(dma_ctrl);
	return err;
}

static void skl_setup_out_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_audio_data_format *out_fmt)
{
	struct skl_module_fmt *format = &mconfig->out_fmt[0];

	out_fmt->number_of_channels = (u8)format->channels;
	out_fmt->s_freq = format->s_freq;
	out_fmt->bit_depth = format->bit_depth;
	out_fmt->valid_bit_depth = format->valid_bit_depth;
	out_fmt->ch_cfg = format->ch_cfg;

	out_fmt->channel_map = format->ch_map;
	out_fmt->interleaving = format->interleaving_style;
	out_fmt->sample_type = format->sample_type;

	dev_dbg(ctx->dev, "copier out format chan=%d freq=%d bitdepth=%d\n",
		out_fmt->number_of_channels, format->s_freq, format->bit_depth);
}

/*
 * DSP needs the SRC module for frequency conversion. SRC takes the base
 * module configuration and the target frequency as an extra parameter
 * passed as src config.
 */
static void skl_set_src_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_src_module_cfg *src_mconfig)
{
	struct skl_module_fmt *fmt = &mconfig->out_fmt[0];

	skl_set_base_module_format(ctx, mconfig,
		(struct skl_base_cfg *)src_mconfig);
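
	/* src_cfg carries the target (output) sample rate for the SRC */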
	src_mconfig->src_cfg = fmt->s_freq;
}

/*
 * DSP needs the updown module to do channel conversion. The updown module
 * takes the base module configuration and channel configuration.
 * It also takes coefficients; for now the FW defaults are applied here.
 */
static void skl_set_updown_mixer_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_up_down_mixer_cfg *mixer_mconfig)
{
	struct skl_module_fmt *fmt = &mconfig->out_fmt[0];
	int i = 0;

	skl_set_base_module_format(ctx, mconfig,
		(struct skl_base_cfg *)mixer_mconfig);
	mixer_mconfig->out_ch_cfg = fmt->ch_cfg;

	/* Select F/W default coefficient */
	mixer_mconfig->coeff_sel = 0x0;

	/* User coeff, don't care since we are selecting F/W defaults */
	for (i = 0; i < UP_DOWN_MIXER_MAX_COEFF; i++)
		mixer_mconfig->coeff[i] = 0xDEADBEEF;
}

/*
 * 'copier' is a DSP internal module which copies data from host DMA (HDA
 * host DMA) or link (HDA link, SSP, PDM) interfaces.
 * Here we calculate the copier module parameters, like PCM format, output
 * format and gateway settings.
 * copier_module_config is sent as the input buffer with the INIT_INSTANCE
 * IPC msg.
 */
static void skl_set_copier_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_cpr_cfg *cpr_mconfig)
{
	struct skl_audio_data_format *out_fmt = &cpr_mconfig->out_fmt;
	struct skl_base_cfg *base_cfg = (struct skl_base_cfg *)cpr_mconfig;

	skl_set_base_module_format(ctx, mconfig, base_cfg);

	skl_setup_out_format(ctx, mconfig, out_fmt);
	skl_setup_cpr_gateway_cfg(ctx, mconfig, cpr_mconfig);
}

/*
 * Algo modules are DSP pre-processing modules. An algo module takes the
 * base module configuration and params.
 */

static void skl_set_algo_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_algo_cfg *algo_mcfg)
{
	struct skl_base_cfg *base_cfg = (struct skl_base_cfg *)algo_mcfg;

	skl_set_base_module_format(ctx, mconfig, base_cfg);

	if (mconfig->formats_config.caps_size == 0)
		return;
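
	/*
	 * The algo parameter blob from the topology is appended as-is
	 * after the base config.
	 */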
	memcpy(algo_mcfg->params,
			mconfig->formats_config.caps,
			mconfig->formats_config.caps_size);
}

/*
 * Mic select module allows selecting one or more input channels, thus
 * acting as a demux.
 *
 * Mic select module takes the base module configuration and out-format
 * configuration.
 */
static void skl_set_base_outfmt_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_base_outfmt_cfg *base_outfmt_mcfg)
{
	struct skl_audio_data_format *out_fmt = &base_outfmt_mcfg->out_fmt;
	struct skl_base_cfg *base_cfg =
				(struct skl_base_cfg *)base_outfmt_mcfg;

	skl_set_base_module_format(ctx, mconfig, base_cfg);
	skl_setup_out_format(ctx, mconfig, out_fmt);
}

static u16 skl_get_module_param_size(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig)
{
	u16 param_size;

	switch (mconfig->m_type) {
	case SKL_MODULE_TYPE_COPIER:
		param_size = sizeof(struct skl_cpr_cfg);
		param_size += mconfig->formats_config.caps_size;
		return param_size;

	case SKL_MODULE_TYPE_SRCINT:
		return sizeof(struct skl_src_module_cfg);

	case SKL_MODULE_TYPE_UPDWMIX:
		return sizeof(struct skl_up_down_mixer_cfg);

	case SKL_MODULE_TYPE_ALGO:
		param_size = sizeof(struct skl_base_cfg);
		param_size += mconfig->formats_config.caps_size;
		return param_size;

	case SKL_MODULE_TYPE_BASE_OUTFMT:
	case SKL_MODULE_TYPE_MIC_SELECT:
	case SKL_MODULE_TYPE_KPB:
		return sizeof(struct skl_base_outfmt_cfg);

	default:
		/*
		 * return only base cfg when no specific module type is
		 * specified
		 */
		return sizeof(struct skl_base_cfg);
	}

	return 0;
}

/*
 * DSP firmware supports various modules like copier, SRC, updown etc.
 * These modules require various parameters to be calculated and sent for
 * the module initialization to the DSP. By default a generic module needs
 * only the base module format configuration.
 */

static int skl_set_module_format(struct skl_sst *ctx,
			struct skl_module_cfg *module_config,
			u16 *module_config_size,
			void **param_data)
{
	u16 param_size;

	param_size = skl_get_module_param_size(ctx, module_config);

	*param_data = kzalloc(param_size, GFP_KERNEL);
	if (*param_data == NULL)
		return -ENOMEM;

	*module_config_size = param_size;

	switch (module_config->m_type) {
	case SKL_MODULE_TYPE_COPIER:
		skl_set_copier_format(ctx, module_config, *param_data);
		break;

	case SKL_MODULE_TYPE_SRCINT:
		skl_set_src_format(ctx, module_config, *param_data);
		break;

	case SKL_MODULE_TYPE_UPDWMIX:
		skl_set_updown_mixer_format(ctx, module_config, *param_data);
		break;

	case SKL_MODULE_TYPE_ALGO:
		skl_set_algo_format(ctx, module_config, *param_data);
		break;

	case SKL_MODULE_TYPE_BASE_OUTFMT:
	case SKL_MODULE_TYPE_MIC_SELECT:
	case SKL_MODULE_TYPE_KPB:
		skl_set_base_outfmt_format(ctx, module_config, *param_data);
		break;

	default:
		skl_set_base_module_format(ctx, module_config, *param_data);
		break;

	}

	dev_dbg(ctx->dev, "Module id=%d config size: %d bytes\n",
			module_config->id.module_id, param_size);
	print_hex_dump_debug("Module params:", DUMP_PREFIX_OFFSET, 8, 4,
			*param_data, param_size, false);
	return 0;
}
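
/*
 * Look up the pin already bound to the given module instance and return
 * its index (the queue id used in bind/unbind IPCs), or -EINVAL if none
 * matches.
 */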
static int skl_get_queue_index(struct skl_module_pin *mpin,
			struct skl_module_inst_id id, int max)
{
	int i;

	for (i = 0; i < max; i++) {
		if (mpin[i].id.module_id == id.module_id &&
			mpin[i].id.instance_id == id.instance_id)
			return i;
	}

	return -EINVAL;
}

/*
 * Allocates a queue for each module.
 * If dynamic, the pin_index is allocated from 0 to max_pin.
 * If static, the pin_index is fixed based on module_id and instance id.
 */
static int skl_alloc_queue(struct skl_module_pin *mpin,
			struct skl_module_cfg *tgt_cfg, int max)
{
	int i;
	struct skl_module_inst_id id = tgt_cfg->id;
	/*
	 * if the pin is dynamic, find the first free pin;
	 * otherwise find the pin matching the module and instance id, as
	 * the topology will ensure a unique pin is assigned to this, so
	 * there is no need to allocate/free
	 */
	for (i = 0; i < max; i++) {
		if (mpin[i].is_dynamic) {
			if (!mpin[i].in_use &&
				mpin[i].pin_state == SKL_PIN_UNBIND) {

				mpin[i].in_use = true;
				mpin[i].id.module_id = id.module_id;
				mpin[i].id.instance_id = id.instance_id;
				mpin[i].id.pvt_id = id.pvt_id;
				mpin[i].tgt_mcfg = tgt_cfg;
				return i;
			}
		} else {
			if (mpin[i].id.module_id == id.module_id &&
				mpin[i].id.instance_id == id.instance_id &&
				mpin[i].pin_state == SKL_PIN_UNBIND) {

				mpin[i].tgt_mcfg = tgt_cfg;
				return i;
			}
		}
	}

	return -EINVAL;
}

static void skl_free_queue(struct skl_module_pin *mpin, int q_index)
{
	if (mpin[q_index].is_dynamic) {
		mpin[q_index].in_use = false;
		mpin[q_index].id.module_id = 0;
		mpin[q_index].id.instance_id = 0;
		mpin[q_index].id.pvt_id = 0;
	}
	mpin[q_index].pin_state = SKL_PIN_UNBIND;
	mpin[q_index].tgt_mcfg = NULL;
}

/*
 * Module state is set back to INIT_DONE if all of its output pins are in
 * the UNBIND state.
 */

static void skl_clear_module_state(struct skl_module_pin *mpin, int max,
						struct skl_module_cfg *mcfg)
{
	int i;
	bool found = false;

	for (i = 0; i < max; i++) {
		if (mpin[i].pin_state == SKL_PIN_UNBIND)
			continue;
		found = true;
		break;
	}

	if (!found)
		mcfg->m_state = SKL_MODULE_INIT_DONE;
	return;
}
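
/*
 * Module state handled here: a module moves to INIT_DONE after a successful
 * INIT_INSTANCE and to BIND_DONE after a successful bind; once all of its
 * output pins are unbound again it drops back to INIT_DONE. Pins toggle
 * between UNBIND and BIND_DONE correspondingly.
 */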

/*
 * A module needs to be instantiated in the DSP. A module is present in a
 * collection of modules referred to as a PIPE.
 * We first calculate the module format, based on the module type, and then
 * invoke the DSP by sending the IPC INIT_INSTANCE using the ipc helper.
 */
int skl_init_module(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig)
{
	u16 module_config_size = 0;
	void *param_data = NULL;
	int ret;
	struct skl_ipc_init_instance_msg msg;

	dev_dbg(ctx->dev, "%s: module_id = %d instance=%d\n", __func__,
		 mconfig->id.module_id, mconfig->id.pvt_id);

	if (mconfig->pipe->state != SKL_PIPE_CREATED) {
		dev_err(ctx->dev, "Pipe not created state= %d pipe_id= %d\n",
				 mconfig->pipe->state, mconfig->pipe->ppl_id);
		return -EIO;
	}

	ret = skl_set_module_format(ctx, mconfig,
			&module_config_size, &param_data);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to set module format ret=%d\n", ret);
		return ret;
	}

	msg.module_id = mconfig->id.module_id;
	msg.instance_id = mconfig->id.pvt_id;
	msg.ppl_instance_id = mconfig->pipe->ppl_id;
	msg.param_data_size = module_config_size;
	msg.core_id = mconfig->core_id;
	msg.domain = mconfig->domain;

	ret = skl_ipc_init_instance(&ctx->ipc, &msg, param_data);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to init instance ret=%d\n", ret);
		kfree(param_data);
		return ret;
	}
	mconfig->m_state = SKL_MODULE_INIT_DONE;
	kfree(param_data);
	return ret;
}

static void skl_dump_bind_info(struct skl_sst *ctx, struct skl_module_cfg
	*src_module, struct skl_module_cfg *dst_module)
{
	dev_dbg(ctx->dev, "%s: src module_id = %d src_instance=%d\n",
		__func__, src_module->id.module_id, src_module->id.pvt_id);
	dev_dbg(ctx->dev, "%s: dst_module=%d dst_instance=%d\n", __func__,
		 dst_module->id.module_id, dst_module->id.pvt_id);

	dev_dbg(ctx->dev, "src_module state = %d dst module state = %d\n",
		src_module->m_state, dst_module->m_state);
}

/*
 * On module freeup, we need to unbind the module from the modules it is
 * already bound to.
 * Find the pins allocated and unbind them using the bind_unbind IPC.
 */
int skl_unbind_modules(struct skl_sst *ctx,
			struct skl_module_cfg *src_mcfg,
			struct skl_module_cfg *dst_mcfg)
{
	int ret;
	struct skl_ipc_bind_unbind_msg msg;
	struct skl_module_inst_id src_id = src_mcfg->id;
	struct skl_module_inst_id dst_id = dst_mcfg->id;
	int in_max = dst_mcfg->max_in_queue;
	int out_max = src_mcfg->max_out_queue;
	int src_index, dst_index, src_pin_state, dst_pin_state;

	skl_dump_bind_info(ctx, src_mcfg, dst_mcfg);

	/* get src queue index */
	src_index = skl_get_queue_index(src_mcfg->m_out_pin, dst_id, out_max);
	if (src_index < 0)
		return 0;

	msg.src_queue = src_index;

	/* get dst queue index */
	dst_index = skl_get_queue_index(dst_mcfg->m_in_pin, src_id, in_max);
	if (dst_index < 0)
		return 0;

	msg.dst_queue = dst_index;

	src_pin_state = src_mcfg->m_out_pin[src_index].pin_state;
	dst_pin_state = dst_mcfg->m_in_pin[dst_index].pin_state;

	if (src_pin_state != SKL_PIN_BIND_DONE ||
		dst_pin_state != SKL_PIN_BIND_DONE)
		return 0;

	msg.module_id = src_mcfg->id.module_id;
	msg.instance_id = src_mcfg->id.pvt_id;
	msg.dst_module_id = dst_mcfg->id.module_id;
	msg.dst_instance_id = dst_mcfg->id.pvt_id;
	msg.bind = false;

	ret = skl_ipc_bind_unbind(&ctx->ipc, &msg);
	if (!ret) {
		/* free queue only if unbind is success */
		skl_free_queue(src_mcfg->m_out_pin, src_index);
		skl_free_queue(dst_mcfg->m_in_pin, dst_index);

		/*
		 * check only the src module bind state, as bind is
		 * always from src -> sink
		 */
		skl_clear_module_state(src_mcfg->m_out_pin, out_max, src_mcfg);
	}

	return ret;
}

/*
 * Once a module is instantiated it needs to be 'bound' with other modules
 * in the pipeline. For binding we need to find the module pins that are
 * bound together.
 * This function finds the pins and then sends the bind_unbind IPC message
 * to the DSP using the IPC helper.
 */
int skl_bind_modules(struct skl_sst *ctx,
			struct skl_module_cfg *src_mcfg,
			struct skl_module_cfg *dst_mcfg)
{
	int ret;
	struct skl_ipc_bind_unbind_msg msg;
	int in_max = dst_mcfg->max_in_queue;
	int out_max = src_mcfg->max_out_queue;
	int src_index, dst_index;

	skl_dump_bind_info(ctx, src_mcfg, dst_mcfg);

	if (src_mcfg->m_state < SKL_MODULE_INIT_DONE ||
		dst_mcfg->m_state < SKL_MODULE_INIT_DONE)
		return 0;

	src_index = skl_alloc_queue(src_mcfg->m_out_pin, dst_mcfg, out_max);
	if (src_index < 0)
		return -EINVAL;

	msg.src_queue = src_index;
	dst_index = skl_alloc_queue(dst_mcfg->m_in_pin, src_mcfg, in_max);
	if (dst_index < 0) {
		skl_free_queue(src_mcfg->m_out_pin, src_index);
		return -EINVAL;
	}

	msg.dst_queue = dst_index;

	dev_dbg(ctx->dev, "src queue = %d dst queue =%d\n",
			 msg.src_queue, msg.dst_queue);

	msg.module_id = src_mcfg->id.module_id;
	msg.instance_id = src_mcfg->id.pvt_id;
	msg.dst_module_id = dst_mcfg->id.module_id;
	msg.dst_instance_id = dst_mcfg->id.pvt_id;
	msg.bind = true;

	ret = skl_ipc_bind_unbind(&ctx->ipc, &msg);

	if (!ret) {
		src_mcfg->m_state = SKL_MODULE_BIND_DONE;
		src_mcfg->m_out_pin[src_index].pin_state = SKL_PIN_BIND_DONE;
		dst_mcfg->m_in_pin[dst_index].pin_state = SKL_PIN_BIND_DONE;
	} else {
		/* error case: if the IPC fails, clear the queue index */
		skl_free_queue(src_mcfg->m_out_pin, src_index);
		skl_free_queue(dst_mcfg->m_in_pin, dst_index);
	}

	return ret;
}

static int skl_set_pipe_state(struct skl_sst *ctx, struct skl_pipe *pipe,
	enum skl_ipc_pipeline_state state)
{
	dev_dbg(ctx->dev, "%s: pipe_state = %d\n", __func__, state);

	return skl_ipc_set_pipeline_state(&ctx->ipc, pipe->ppl_id, state);
}

/*
 * A pipeline is a collection of modules. Before a module is instantiated,
 * a pipeline needs to be created for it.
 * This function creates a pipeline by sending the create pipeline IPC
 * message to the FW.
 */
int skl_create_pipeline(struct skl_sst *ctx, struct skl_pipe *pipe)
{
	int ret;

	dev_dbg(ctx->dev, "%s: pipe_id = %d\n", __func__, pipe->ppl_id);

	ret = skl_ipc_create_pipeline(&ctx->ipc, pipe->memory_pages,
				pipe->pipe_priority, pipe->ppl_id,
				pipe->lp_mode);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to create pipeline\n");
		return ret;
	}

	pipe->state = SKL_PIPE_CREATED;

	return 0;
}
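
/*
 * Pipeline lifecycle as driven from this file: create -> pause -> run for
 * streaming, pause to stop, reset to tear down the DMA on the DSP side,
 * and finally delete. A running pipe is paused before it is deleted.
 */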

/*
 * A pipeline needs to be deleted on cleanup. If a pipeline is running,
 * then pause the pipeline first and then delete it.
 * The pipe delete is done by sending the delete pipeline IPC. The DSP will
 * stop the DMA engines and release resources.
 */
int skl_delete_pipe(struct skl_sst *ctx, struct skl_pipe *pipe)
{
	int ret;

	dev_dbg(ctx->dev, "%s: pipe = %d\n", __func__, pipe->ppl_id);

	/* If pipe is started, stop the pipe in FW first */
	if (pipe->state >= SKL_PIPE_STARTED) {
		ret = skl_set_pipe_state(ctx, pipe, PPL_PAUSED);
		if (ret < 0) {
			dev_err(ctx->dev, "Failed to stop pipeline\n");
			return ret;
		}

		pipe->state = SKL_PIPE_PAUSED;
	}

	/* If pipe was not created in FW, do not try to delete it */
	if (pipe->state < SKL_PIPE_CREATED)
		return 0;

	ret = skl_ipc_delete_pipeline(&ctx->ipc, pipe->ppl_id);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to delete pipeline\n");
		return ret;
	}

	pipe->state = SKL_PIPE_INVALID;

	return ret;
}

/*
 * A pipeline is also a scheduling entity in the DSP which can be run and
 * stopped. For processing data the pipe needs to be run by sending the
 * set pipe state IPC to the DSP.
 */
int skl_run_pipe(struct skl_sst *ctx, struct skl_pipe *pipe)
{
	int ret;

	dev_dbg(ctx->dev, "%s: pipe = %d\n", __func__, pipe->ppl_id);

	/* If pipe was not created in FW, do not try to pause or delete */
	if (pipe->state < SKL_PIPE_CREATED)
		return 0;

	/* Pipe has to be paused before it is started */
	ret = skl_set_pipe_state(ctx, pipe, PPL_PAUSED);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to pause pipe\n");
		return ret;
	}

	pipe->state = SKL_PIPE_PAUSED;

	ret = skl_set_pipe_state(ctx, pipe, PPL_RUNNING);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to start pipe\n");
		return ret;
	}

	pipe->state = SKL_PIPE_STARTED;

	return 0;
}

/*
 * Stop the pipeline by sending the set pipe state IPC.
 * The DSP doesn't implement stop, so we always send the pause message.
 */
int skl_stop_pipe(struct skl_sst *ctx, struct skl_pipe *pipe)
{
	int ret;

	dev_dbg(ctx->dev, "In %s pipe=%d\n", __func__, pipe->ppl_id);

	/* If pipe was not created in FW, do not try to pause or delete */
	if (pipe->state < SKL_PIPE_PAUSED)
		return 0;

	ret = skl_set_pipe_state(ctx, pipe, PPL_PAUSED);
	if (ret < 0) {
		dev_dbg(ctx->dev, "Failed to stop pipe\n");
		return ret;
	}

	pipe->state = SKL_PIPE_PAUSED;

	return 0;
}

/*
 * Reset the pipeline by sending the set pipe state IPC; this will reset
 * the DMA from the DSP side.
 */
int skl_reset_pipe(struct skl_sst *ctx, struct skl_pipe *pipe)
{
	int ret;

	/* If pipe was not created in FW, do not try to pause or delete */
	if (pipe->state < SKL_PIPE_PAUSED)
		return 0;

	ret = skl_set_pipe_state(ctx, pipe, PPL_RESET);
	if (ret < 0) {
		dev_dbg(ctx->dev, "Failed to reset pipe ret=%d\n", ret);
		return ret;
	}

	pipe->state = SKL_PIPE_RESET;

	return 0;
}

/* Algo parameter set helper function */
int skl_set_module_params(struct skl_sst *ctx, u32 *params, int size,
				u32 param_id, struct skl_module_cfg *mcfg)
{
	struct skl_ipc_large_config_msg msg;

	msg.module_id = mcfg->id.module_id;
	msg.instance_id = mcfg->id.pvt_id;
	msg.param_data_size = size;
	msg.large_param_id = param_id;

	return skl_ipc_set_large_config(&ctx->ipc, &msg, params);
}
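
/* Algo parameter get helper function, counterpart of the set helper above */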
int skl_get_module_params(struct skl_sst *ctx, u32 *params, int size,
			u32 param_id, struct skl_module_cfg *mcfg)
{
	struct skl_ipc_large_config_msg msg;

	msg.module_id = mcfg->id.module_id;
	msg.instance_id = mcfg->id.pvt_id;
	msg.param_data_size = size;
	msg.large_param_id = param_id;

	return skl_ipc_get_large_config(&ctx->ipc, &msg, params);
}