// SPDX-License-Identifier: GPL-2.0-only
//
// Copyright(c) 2021 Intel Corporation. All rights reserved.
//
// Authors: Cezary Rojewski <cezary.rojewski@intel.com>
//          Amadeusz Slawinski <amadeuszx.slawinski@linux.intel.com>
//

#include <sound/intel-nhlt.h>
#include <sound/pcm_params.h>
#include <sound/soc.h>
#include "avs.h"
#include "path.h"
#include "topology.h"

/* Must be called with adev->comp_list_mutex held. */
static struct avs_tplg *
avs_path_find_tplg(struct avs_dev *adev, const char *name)
{
	struct avs_soc_component *acomp;

	list_for_each_entry(acomp, &adev->comp_list, node)
		if (!strcmp(acomp->tplg->name, name))
			return acomp->tplg;
	return NULL;
}

static struct avs_path_module *
avs_path_find_module(struct avs_path_pipeline *ppl, u32 template_id)
{
	struct avs_path_module *mod;

	list_for_each_entry(mod, &ppl->mod_list, node)
		if (mod->template->id == template_id)
			return mod;
	return NULL;
}

static struct avs_path_pipeline *
avs_path_find_pipeline(struct avs_path *path, u32 template_id)
{
	struct avs_path_pipeline *ppl;

	list_for_each_entry(ppl, &path->ppl_list, node)
		if (ppl->template->id == template_id)
			return ppl;
	return NULL;
}

static struct avs_path *
avs_path_find_path(struct avs_dev *adev, const char *name, u32 template_id)
{
	struct avs_tplg_path_template *pos, *template = NULL;
	struct avs_tplg *tplg;
	struct avs_path *path;

	tplg = avs_path_find_tplg(adev, name);
	if (!tplg)
		return NULL;

	list_for_each_entry(pos, &tplg->path_tmpl_list, node) {
		if (pos->id == template_id) {
			template = pos;
			break;
		}
	}
	if (!template)
		return NULL;

	spin_lock(&adev->path_list_lock);
	/* Only one variant of given path template may be instantiated at a time. */
	list_for_each_entry(path, &adev->path_list, node) {
		if (path->template->owner == template) {
			spin_unlock(&adev->path_list_lock);
			return path;
		}
	}

	spin_unlock(&adev->path_list_lock);
	return NULL;
}

static bool avs_test_hw_params(struct snd_pcm_hw_params *params,
			       struct avs_audio_format *fmt)
{
	return (params_rate(params) == fmt->sampling_freq &&
		params_channels(params) == fmt->num_channels &&
		params_physical_width(params) == fmt->bit_depth &&
		params_width(params) == fmt->valid_bit_depth);
}

static struct avs_tplg_path *
avs_path_find_variant(struct avs_dev *adev,
		      struct avs_tplg_path_template *template,
		      struct snd_pcm_hw_params *fe_params,
		      struct snd_pcm_hw_params *be_params)
{
	struct avs_tplg_path *variant;

	list_for_each_entry(variant, &template->path_list, node) {
		dev_dbg(adev->dev, "check FE rate %d chn %d vbd %d bd %d\n",
			variant->fe_fmt->sampling_freq, variant->fe_fmt->num_channels,
			variant->fe_fmt->valid_bit_depth, variant->fe_fmt->bit_depth);
		dev_dbg(adev->dev, "check BE rate %d chn %d vbd %d bd %d\n",
			variant->be_fmt->sampling_freq, variant->be_fmt->num_channels,
			variant->be_fmt->valid_bit_depth, variant->be_fmt->bit_depth);

		if (variant->fe_fmt && avs_test_hw_params(fe_params, variant->fe_fmt) &&
		    variant->be_fmt && avs_test_hw_params(be_params, variant->be_fmt))
			return variant;
	}

	return NULL;
}

__maybe_unused
static bool avs_dma_type_is_host(u32 dma_type)
{
	return dma_type == AVS_DMA_HDA_HOST_OUTPUT ||
	       dma_type == AVS_DMA_HDA_HOST_INPUT;
}

__maybe_unused
static bool avs_dma_type_is_link(u32 dma_type)
{
	return !avs_dma_type_is_host(dma_type);
}

__maybe_unused
static bool avs_dma_type_is_output(u32 dma_type)
{
	return dma_type == AVS_DMA_HDA_HOST_OUTPUT ||
	       dma_type == AVS_DMA_HDA_LINK_OUTPUT ||
	       dma_type == AVS_DMA_I2S_LINK_OUTPUT;
}

__maybe_unused
static bool avs_dma_type_is_input(u32 dma_type)
{
	return !avs_dma_type_is_output(dma_type);
}
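
/*
 * Copier modules front the DSP gateways. The connector node_id a copier is
 * created with pairs the gateway type (dma_type) with a virtual index: the
 * index is assigned statically by topology for I2S and DMIC gateways, taken
 * from the stream's DMA id for HDA host gateways, and is a combination of
 * both for HDA link gateways; see avs_copier_create() below.
 */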
static int avs_copier_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct nhlt_acpi_table *nhlt = adev->nhlt;
	struct avs_tplg_module *t = mod->template;
	struct avs_copier_cfg *cfg;
	struct nhlt_specific_cfg *ep_blob;
	union avs_connector_node_id node_id = {0};
	size_t cfg_size, data_size = 0;
	void *data = NULL;
	u32 dma_type;
	int ret;

	dma_type = t->cfg_ext->copier.dma_type;
	node_id.dma_type = dma_type;

	switch (dma_type) {
		struct avs_audio_format *fmt;
		int direction;

	case AVS_DMA_I2S_LINK_OUTPUT:
	case AVS_DMA_I2S_LINK_INPUT:
		if (avs_dma_type_is_input(dma_type))
			direction = SNDRV_PCM_STREAM_CAPTURE;
		else
			direction = SNDRV_PCM_STREAM_PLAYBACK;

		if (t->cfg_ext->copier.blob_fmt)
			fmt = t->cfg_ext->copier.blob_fmt;
		else if (direction == SNDRV_PCM_STREAM_CAPTURE)
			fmt = t->in_fmt;
		else
			fmt = t->cfg_ext->copier.out_fmt;

		ep_blob = intel_nhlt_get_endpoint_blob(adev->dev,
				nhlt, t->cfg_ext->copier.vindex.i2s.instance,
				NHLT_LINK_SSP, fmt->valid_bit_depth, fmt->bit_depth,
				fmt->num_channels, fmt->sampling_freq, direction,
				NHLT_DEVICE_I2S);
		if (!ep_blob) {
			dev_err(adev->dev, "no I2S ep_blob found\n");
			return -ENOENT;
		}

		data = ep_blob->caps;
		data_size = ep_blob->size;
		/* I2S gateway's vindex is statically assigned in topology */
		node_id.vindex = t->cfg_ext->copier.vindex.val;

		break;

	case AVS_DMA_DMIC_LINK_INPUT:
		direction = SNDRV_PCM_STREAM_CAPTURE;

		if (t->cfg_ext->copier.blob_fmt)
			fmt = t->cfg_ext->copier.blob_fmt;
		else
			fmt = t->in_fmt;

		ep_blob = intel_nhlt_get_endpoint_blob(adev->dev, nhlt, 0,
				NHLT_LINK_DMIC, fmt->valid_bit_depth,
				fmt->bit_depth, fmt->num_channels,
				fmt->sampling_freq, direction, NHLT_DEVICE_DMIC);
		if (!ep_blob) {
			dev_err(adev->dev, "no DMIC ep_blob found\n");
			return -ENOENT;
		}

		data = ep_blob->caps;
		data_size = ep_blob->size;
		/* DMIC gateway's vindex is statically assigned in topology */
		node_id.vindex = t->cfg_ext->copier.vindex.val;

		break;

	case AVS_DMA_HDA_HOST_OUTPUT:
	case AVS_DMA_HDA_HOST_INPUT:
		/* HOST gateway's vindex is dynamically assigned with DMA id */
		node_id.vindex = mod->owner->owner->dma_id;
		break;

	case AVS_DMA_HDA_LINK_OUTPUT:
	case AVS_DMA_HDA_LINK_INPUT:
		node_id.vindex = t->cfg_ext->copier.vindex.val |
				 mod->owner->owner->dma_id;
		break;

	case INVALID_OBJECT_ID:
	default:
		node_id = INVALID_NODE_ID;
		break;
	}

	cfg_size = sizeof(*cfg) + data_size;
	/* Every config-BLOB contains gateway attributes. */
	if (data_size)
		cfg_size -= sizeof(cfg->gtw_cfg.config.attrs);

	cfg = kzalloc(cfg_size, GFP_KERNEL);
	if (!cfg)
		return -ENOMEM;

	cfg->base.cpc = t->cfg_base->cpc;
	cfg->base.ibs = t->cfg_base->ibs;
	cfg->base.obs = t->cfg_base->obs;
	cfg->base.is_pages = t->cfg_base->is_pages;
	cfg->base.audio_fmt = *t->in_fmt;
	cfg->out_fmt = *t->cfg_ext->copier.out_fmt;
	cfg->feature_mask = t->cfg_ext->copier.feature_mask;
	cfg->gtw_cfg.node_id = node_id;
	cfg->gtw_cfg.dma_buffer_size = t->cfg_ext->copier.dma_buffer_size;
	/* config_length in DWORDs */
	cfg->gtw_cfg.config_length = DIV_ROUND_UP(data_size, 4);
	if (data)
		memcpy(&cfg->gtw_cfg.config, data, data_size);

	mod->gtw_attrs = cfg->gtw_cfg.config.attrs;

	ret = avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				  t->core_id, t->domain, cfg, cfg_size,
				  &mod->instance_id);
	kfree(cfg);
	return ret;
}

static int avs_updown_mix_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_updown_mixer_cfg cfg;
	int i;

	cfg.base.cpc = t->cfg_base->cpc;
	cfg.base.ibs = t->cfg_base->ibs;
	cfg.base.obs = t->cfg_base->obs;
	cfg.base.is_pages = t->cfg_base->is_pages;
	cfg.base.audio_fmt = *t->in_fmt;
	cfg.out_channel_config = t->cfg_ext->updown_mix.out_channel_config;
	cfg.coefficients_select = t->cfg_ext->updown_mix.coefficients_select;
	for (i = 0; i < AVS_CHANNELS_MAX; i++)
		cfg.coefficients[i] = t->cfg_ext->updown_mix.coefficients[i];
	cfg.channel_map = t->cfg_ext->updown_mix.channel_map;

	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				   t->core_id, t->domain, &cfg, sizeof(cfg),
				   &mod->instance_id);
}

static int avs_src_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_src_cfg cfg;

	cfg.base.cpc = t->cfg_base->cpc;
	cfg.base.ibs = t->cfg_base->ibs;
	cfg.base.obs = t->cfg_base->obs;
	cfg.base.is_pages = t->cfg_base->is_pages;
	cfg.base.audio_fmt = *t->in_fmt;
	cfg.out_freq = t->cfg_ext->src.out_freq;

	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				   t->core_id, t->domain, &cfg, sizeof(cfg),
				   &mod->instance_id);
}

static int avs_asrc_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_asrc_cfg cfg;

	cfg.base.cpc = t->cfg_base->cpc;
	cfg.base.ibs = t->cfg_base->ibs;
	cfg.base.obs = t->cfg_base->obs;
	cfg.base.is_pages = t->cfg_base->is_pages;
	cfg.base.audio_fmt = *t->in_fmt;
	cfg.out_freq = t->cfg_ext->asrc.out_freq;
	cfg.mode = t->cfg_ext->asrc.mode;
	cfg.disable_jitter_buffer = t->cfg_ext->asrc.disable_jitter_buffer;

	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				   t->core_id, t->domain, &cfg, sizeof(cfg),
				   &mod->instance_id);
}

static int avs_aec_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_aec_cfg cfg;

	cfg.base.cpc = t->cfg_base->cpc;
	cfg.base.ibs = t->cfg_base->ibs;
	cfg.base.obs = t->cfg_base->obs;
	cfg.base.is_pages = t->cfg_base->is_pages;
	cfg.base.audio_fmt = *t->in_fmt;
	cfg.ref_fmt = *t->cfg_ext->aec.ref_fmt;
	cfg.out_fmt = *t->cfg_ext->aec.out_fmt;
	cfg.cpc_lp_mode = t->cfg_ext->aec.cpc_lp_mode;

	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				   t->core_id, t->domain, &cfg, sizeof(cfg),
				   &mod->instance_id);
}

static int avs_mux_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_mux_cfg cfg;

	cfg.base.cpc = t->cfg_base->cpc;
	cfg.base.ibs = t->cfg_base->ibs;
	cfg.base.obs = t->cfg_base->obs;
	cfg.base.is_pages = t->cfg_base->is_pages;
	cfg.base.audio_fmt = *t->in_fmt;
	cfg.ref_fmt = *t->cfg_ext->mux.ref_fmt;
	cfg.out_fmt = *t->cfg_ext->mux.out_fmt;

	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				   t->core_id, t->domain, &cfg, sizeof(cfg),
				   &mod->instance_id);
}

static int avs_wov_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_wov_cfg cfg;

	cfg.base.cpc = t->cfg_base->cpc;
	cfg.base.ibs = t->cfg_base->ibs;
	cfg.base.obs = t->cfg_base->obs;
	cfg.base.is_pages = t->cfg_base->is_pages;
	cfg.base.audio_fmt = *t->in_fmt;
	cfg.cpc_lp_mode = t->cfg_ext->wov.cpc_lp_mode;

	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				   t->core_id, t->domain, &cfg, sizeof(cfg),
				   &mod->instance_id);
}

static int avs_micsel_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_micsel_cfg cfg;

	cfg.base.cpc = t->cfg_base->cpc;
	cfg.base.ibs = t->cfg_base->ibs;
	cfg.base.obs = t->cfg_base->obs;
	cfg.base.is_pages = t->cfg_base->is_pages;
	cfg.base.audio_fmt = *t->in_fmt;
	cfg.out_fmt = *t->cfg_ext->micsel.out_fmt;

	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				   t->core_id, t->domain, &cfg, sizeof(cfg),
				   &mod->instance_id);
}
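
/*
 * Module types with no type-specific parameters (mixin, mixout, kpb) are
 * initialized with just the base config below; types not recognized by
 * avs_path_module_type_create() fall back to the extended config carrying
 * explicit per-pin formats, see avs_modext_create().
 */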
static int avs_modbase_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_modcfg_base cfg;

	cfg.cpc = t->cfg_base->cpc;
	cfg.ibs = t->cfg_base->ibs;
	cfg.obs = t->cfg_base->obs;
	cfg.is_pages = t->cfg_base->is_pages;
	cfg.audio_fmt = *t->in_fmt;

	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				   t->core_id, t->domain, &cfg, sizeof(cfg),
				   &mod->instance_id);
}

static int avs_modext_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_tplg_modcfg_ext *tcfg = t->cfg_ext;
	struct avs_modcfg_ext *cfg;
	size_t cfg_size, num_pins;
	int ret, i;

	num_pins = tcfg->generic.num_input_pins + tcfg->generic.num_output_pins;
	cfg_size = sizeof(*cfg) + sizeof(*cfg->pin_fmts) * num_pins;

	cfg = kzalloc(cfg_size, GFP_KERNEL);
	if (!cfg)
		return -ENOMEM;

	cfg->base.cpc = t->cfg_base->cpc;
	cfg->base.ibs = t->cfg_base->ibs;
	cfg->base.obs = t->cfg_base->obs;
	cfg->base.is_pages = t->cfg_base->is_pages;
	cfg->base.audio_fmt = *t->in_fmt;
	cfg->num_input_pins = tcfg->generic.num_input_pins;
	cfg->num_output_pins = tcfg->generic.num_output_pins;

	/* configure pin formats */
	for (i = 0; i < num_pins; i++) {
		struct avs_tplg_pin_format *tpin = &tcfg->generic.pin_fmts[i];
		struct avs_pin_format *pin = &cfg->pin_fmts[i];

		pin->pin_index = tpin->pin_index;
		pin->iobs = tpin->iobs;
		pin->audio_fmt = *tpin->fmt;
	}

	ret = avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				  t->core_id, t->domain, cfg, cfg_size,
				  &mod->instance_id);
	kfree(cfg);
	return ret;
}

static int avs_path_module_type_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	const guid_t *type = &mod->template->cfg_ext->type;

	if (guid_equal(type, &AVS_MIXIN_MOD_UUID) ||
	    guid_equal(type, &AVS_MIXOUT_MOD_UUID) ||
	    guid_equal(type, &AVS_KPBUFF_MOD_UUID))
		return avs_modbase_create(adev, mod);
	if (guid_equal(type, &AVS_COPIER_MOD_UUID))
		return avs_copier_create(adev, mod);
	if (guid_equal(type, &AVS_MICSEL_MOD_UUID))
		return avs_micsel_create(adev, mod);
	if (guid_equal(type, &AVS_MUX_MOD_UUID))
		return avs_mux_create(adev, mod);
	if (guid_equal(type, &AVS_UPDWMIX_MOD_UUID))
		return avs_updown_mix_create(adev, mod);
	if (guid_equal(type, &AVS_SRCINTC_MOD_UUID))
		return avs_src_create(adev, mod);
	if (guid_equal(type, &AVS_AEC_MOD_UUID))
		return avs_aec_create(adev, mod);
	if (guid_equal(type, &AVS_ASRC_MOD_UUID))
		return avs_asrc_create(adev, mod);
	if (guid_equal(type, &AVS_INTELWOV_MOD_UUID))
		return avs_wov_create(adev, mod);

	if (guid_equal(type, &AVS_PROBE_MOD_UUID)) {
		dev_err(adev->dev, "Probe module can't be instantiated by topology");
		return -EINVAL;
	}

	return avs_modext_create(adev, mod);
}

static void avs_path_module_free(struct avs_dev *adev, struct avs_path_module *mod)
{
	kfree(mod);
}

static struct avs_path_module *
avs_path_module_create(struct avs_dev *adev,
		       struct avs_path_pipeline *owner,
		       struct avs_tplg_module *template)
{
	struct avs_path_module *mod;
	int module_id, ret;

	module_id = avs_get_module_id(adev, &template->cfg_ext->type);
	if (module_id < 0)
		return ERR_PTR(module_id);

	mod = kzalloc(sizeof(*mod), GFP_KERNEL);
	if (!mod)
		return ERR_PTR(-ENOMEM);

	mod->template = template;
	mod->module_id = module_id;
	mod->owner = owner;
	INIT_LIST_HEAD(&mod->node);

	ret = avs_path_module_type_create(adev, mod);
	if (ret) {
		dev_err(adev->dev, "module-type create failed: %d\n", ret);
		kfree(mod);
		return ERR_PTR(ret);
	}

	return mod;
}

static int avs_path_binding_arm(struct avs_dev *adev, struct avs_path_binding *binding)
{
	struct avs_path_module *this_mod, *target_mod;
	struct avs_path_pipeline *target_ppl;
	struct avs_path *target_path;
	struct avs_tplg_binding *t;

	t = binding->template;
	this_mod = avs_path_find_module(binding->owner, t->mod_id);
	if (!this_mod) {
		dev_err(adev->dev, "path mod %d not found\n", t->mod_id);
		return -EINVAL;
	}

	/* update with target_tplg_name too */
	target_path = avs_path_find_path(adev, t->target_tplg_name,
					 t->target_path_tmpl_id);
	if (!target_path) {
		dev_err(adev->dev, "target path %s:%d not found\n",
			t->target_tplg_name, t->target_path_tmpl_id);
		return -EINVAL;
	}

	target_ppl = avs_path_find_pipeline(target_path, t->target_ppl_id);
	if (!target_ppl) {
		dev_err(adev->dev, "target ppl %d not found\n", t->target_ppl_id);
		return -EINVAL;
	}

	target_mod = avs_path_find_module(target_ppl, t->target_mod_id);
	if (!target_mod) {
		dev_err(adev->dev, "target mod %d not found\n", t->target_mod_id);
		return -EINVAL;
	}

	if (t->is_sink) {
		binding->sink = this_mod;
		binding->sink_pin = t->mod_pin;
		binding->source = target_mod;
		binding->source_pin = t->target_mod_pin;
	} else {
		binding->sink = target_mod;
		binding->sink_pin = t->target_mod_pin;
		binding->source = this_mod;
		binding->source_pin = t->mod_pin;
	}

	return 0;
}

static void avs_path_binding_free(struct avs_dev *adev, struct avs_path_binding *binding)
{
	kfree(binding);
}

static struct avs_path_binding *avs_path_binding_create(struct avs_dev *adev,
							struct avs_path_pipeline *owner,
							struct avs_tplg_binding *t)
{
	struct avs_path_binding *binding;

	binding = kzalloc(sizeof(*binding), GFP_KERNEL);
	if (!binding)
		return ERR_PTR(-ENOMEM);

	binding->template = t;
	binding->owner = owner;
	INIT_LIST_HEAD(&binding->node);

	return binding;
}

static int avs_path_pipeline_arm(struct avs_dev *adev,
				 struct avs_path_pipeline *ppl)
{
	struct avs_path_module *mod;

	list_for_each_entry(mod, &ppl->mod_list, node) {
		struct avs_path_module *source, *sink;
		int ret;

		/*
		 * This is either the only module in the pipeline or the last
		 * one; either way, there is no next module to bind it to.
		 */
		if (mod == list_last_entry(&ppl->mod_list,
					   struct avs_path_module, node))
			break;

		/* bind current module to next module on list */
		source = mod;
		sink = list_next_entry(mod, node);
		if (!source || !sink)
			return -EINVAL;

		ret = avs_ipc_bind(adev, source->module_id, source->instance_id,
				   sink->module_id, sink->instance_id, 0, 0);
		if (ret)
			return AVS_IPC_RET(ret);
	}

	return 0;
}

static void avs_path_pipeline_free(struct avs_dev *adev,
				   struct avs_path_pipeline *ppl)
{
	struct avs_path_binding *binding, *bsave;
	struct avs_path_module *mod, *save;

	list_for_each_entry_safe(binding, bsave, &ppl->binding_list, node) {
		list_del(&binding->node);
		avs_path_binding_free(adev, binding);
	}

	avs_dsp_delete_pipeline(adev, ppl->instance_id);

	/* Unload resources occupied by owned modules */
	list_for_each_entry_safe(mod, save, &ppl->mod_list, node) {
		avs_dsp_delete_module(adev, mod->module_id, mod->instance_id,
				      mod->owner->instance_id,
				      mod->template->core_id);
		avs_path_module_free(adev, mod);
	}

	list_del(&ppl->node);
	kfree(ppl);
}

static struct avs_path_pipeline *
avs_path_pipeline_create(struct avs_dev *adev, struct avs_path *owner,
			 struct avs_tplg_pipeline *template)
{
	struct avs_path_pipeline *ppl;
	struct avs_tplg_pplcfg *cfg = template->cfg;
	struct avs_tplg_module *tmod;
	int ret, i;

	ppl = kzalloc(sizeof(*ppl), GFP_KERNEL);
	if (!ppl)
		return ERR_PTR(-ENOMEM);

	ppl->template = template;
	ppl->owner = owner;
	INIT_LIST_HEAD(&ppl->binding_list);
	INIT_LIST_HEAD(&ppl->mod_list);
	INIT_LIST_HEAD(&ppl->node);

	ret = avs_dsp_create_pipeline(adev, cfg->req_size, cfg->priority,
				      cfg->lp, cfg->attributes,
				      &ppl->instance_id);
	if (ret) {
		dev_err(adev->dev, "error creating pipeline %d\n", ret);
		kfree(ppl);
		return ERR_PTR(ret);
	}

	list_for_each_entry(tmod, &template->mod_list, node) {
		struct avs_path_module *mod;

		mod = avs_path_module_create(adev, ppl, tmod);
		if (IS_ERR(mod)) {
			ret = PTR_ERR(mod);
			dev_err(adev->dev, "error creating module %d\n", ret);
			goto init_err;
		}

		list_add_tail(&mod->node, &ppl->mod_list);
	}

	for (i = 0; i < template->num_bindings; i++) {
		struct avs_path_binding *binding;

		binding = avs_path_binding_create(adev, ppl, template->bindings[i]);
		if (IS_ERR(binding)) {
			ret = PTR_ERR(binding);
			dev_err(adev->dev, "error creating binding %d\n", ret);
			goto init_err;
		}

		list_add_tail(&binding->node, &ppl->binding_list);
	}

	return ppl;

init_err:
	avs_path_pipeline_free(adev, ppl);
	return ERR_PTR(ret);
}

static int avs_path_init(struct avs_dev *adev, struct avs_path *path,
			 struct avs_tplg_path *template, u32 dma_id)
{
	struct avs_tplg_pipeline *tppl;

	path->owner = adev;
	path->template = template;
	path->dma_id = dma_id;
	INIT_LIST_HEAD(&path->ppl_list);
	INIT_LIST_HEAD(&path->node);

	/* create all the pipelines */
	list_for_each_entry(tppl, &template->ppl_list, node) {
		struct avs_path_pipeline *ppl;

		ppl = avs_path_pipeline_create(adev, path, tppl);
		if (IS_ERR(ppl))
			return PTR_ERR(ppl);

		list_add_tail(&ppl->node, &path->ppl_list);
	}

	spin_lock(&adev->path_list_lock);
	list_add_tail(&path->node, &adev->path_list);
	spin_unlock(&adev->path_list_lock);

	return 0;
}
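
/*
 * Path instantiation is split in two phases: avs_path_init() above creates
 * the pipelines and their modules on the DSP, while avs_path_arm() below
 * resolves binding endpoints and connects the modules with bind IPCs.
 */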
static int avs_path_arm(struct avs_dev *adev, struct avs_path *path)
{
	struct avs_path_pipeline *ppl;
	struct avs_path_binding *binding;
	int ret;

	list_for_each_entry(ppl, &path->ppl_list, node) {
		/*
		 * Arm all ppl bindings before binding internal modules;
		 * the former costs no IPCs, unlike the latter.
		 */
		list_for_each_entry(binding, &ppl->binding_list, node) {
			ret = avs_path_binding_arm(adev, binding);
			if (ret < 0)
				return ret;
		}

		ret = avs_path_pipeline_arm(adev, ppl);
		if (ret < 0)
			return ret;
	}

	return 0;
}

static void avs_path_free_unlocked(struct avs_path *path)
{
	struct avs_path_pipeline *ppl, *save;

	spin_lock(&path->owner->path_list_lock);
	list_del(&path->node);
	spin_unlock(&path->owner->path_list_lock);

	list_for_each_entry_safe(ppl, save, &path->ppl_list, node)
		avs_path_pipeline_free(path->owner, ppl);

	kfree(path);
}

static struct avs_path *avs_path_create_unlocked(struct avs_dev *adev, u32 dma_id,
						 struct avs_tplg_path *template)
{
	struct avs_path *path;
	int ret;

	path = kzalloc(sizeof(*path), GFP_KERNEL);
	if (!path)
		return ERR_PTR(-ENOMEM);

	ret = avs_path_init(adev, path, template, dma_id);
	if (ret < 0)
		goto err;

	ret = avs_path_arm(adev, path);
	if (ret < 0)
		goto err;

	path->state = AVS_PPL_STATE_INVALID;
	return path;
err:
	avs_path_free_unlocked(path);
	return ERR_PTR(ret);
}

void avs_path_free(struct avs_path *path)
{
	struct avs_dev *adev = path->owner;

	mutex_lock(&adev->path_mutex);
	avs_path_free_unlocked(path);
	mutex_unlock(&adev->path_mutex);
}

struct avs_path *avs_path_create(struct avs_dev *adev, u32 dma_id,
				 struct avs_tplg_path_template *template,
				 struct snd_pcm_hw_params *fe_params,
				 struct snd_pcm_hw_params *be_params)
{
	struct avs_tplg_path *variant;
	struct avs_path *path;

	variant = avs_path_find_variant(adev, template, fe_params, be_params);
	if (!variant) {
		dev_err(adev->dev, "no matching variant found\n");
		return ERR_PTR(-ENOENT);
	}

	/* Serialize creation of the path and its components. */
	mutex_lock(&adev->path_mutex);
	/* Satisfy needs of avs_path_find_tplg(). */
	mutex_lock(&adev->comp_list_mutex);

	path = avs_path_create_unlocked(adev, dma_id, variant);

	mutex_unlock(&adev->comp_list_mutex);
	mutex_unlock(&adev->path_mutex);

	return path;
}

static int avs_path_bind_prepare(struct avs_dev *adev,
				 struct avs_path_binding *binding)
{
	const struct avs_audio_format *src_fmt, *sink_fmt;
	struct avs_tplg_module *tsource = binding->source->template;
	struct avs_path_module *source = binding->source;
	int ret;

	/*
	 * only copier modules about to be bound
	 * to output pin other than 0 need preparation
	 */
	if (!binding->source_pin)
		return 0;
	if (!guid_equal(&tsource->cfg_ext->type, &AVS_COPIER_MOD_UUID))
		return 0;

	src_fmt = tsource->in_fmt;
	sink_fmt = binding->sink->template->in_fmt;

	ret = avs_ipc_copier_set_sink_format(adev, source->module_id,
					     source->instance_id, binding->source_pin,
					     src_fmt, sink_fmt);
	if (ret) {
		dev_err(adev->dev, "config copier failed: %d\n", ret);
		return AVS_IPC_RET(ret);
	}

	return 0;
}

int avs_path_bind(struct avs_path *path)
{
	struct avs_path_pipeline *ppl;
	struct avs_dev *adev = path->owner;
	int ret;

	list_for_each_entry(ppl, &path->ppl_list, node) {
		struct avs_path_binding *binding;

		list_for_each_entry(binding, &ppl->binding_list, node) {
			struct avs_path_module *source, *sink;

			source = binding->source;
			sink = binding->sink;

			ret = avs_path_bind_prepare(adev, binding);
			if (ret < 0)
				return ret;

			ret = avs_ipc_bind(adev, source->module_id,
					   source->instance_id, sink->module_id,
					   sink->instance_id, binding->sink_pin,
					   binding->source_pin);
			if (ret) {
				dev_err(adev->dev, "bind path failed: %d\n", ret);
				return AVS_IPC_RET(ret);
			}
		}
	}

	return 0;
}

int avs_path_unbind(struct avs_path *path)
{
	struct avs_path_pipeline *ppl;
	struct avs_dev *adev = path->owner;
	int ret;

	list_for_each_entry(ppl, &path->ppl_list, node) {
		struct avs_path_binding *binding;

		list_for_each_entry(binding, &ppl->binding_list, node) {
			struct avs_path_module *source, *sink;

			source = binding->source;
			sink = binding->sink;

			ret = avs_ipc_unbind(adev, source->module_id,
					     source->instance_id, sink->module_id,
					     sink->instance_id, binding->sink_pin,
					     binding->source_pin);
			if (ret) {
				dev_err(adev->dev, "unbind path failed: %d\n", ret);
				return AVS_IPC_RET(ret);
			}
		}
	}

	return 0;
}

int avs_path_reset(struct avs_path *path)
{
	struct avs_path_pipeline *ppl;
	struct avs_dev *adev = path->owner;
	int ret;

	if (path->state == AVS_PPL_STATE_RESET)
		return 0;

	list_for_each_entry(ppl, &path->ppl_list, node) {
		ret = avs_ipc_set_pipeline_state(adev, ppl->instance_id,
						 AVS_PPL_STATE_RESET);
		if (ret) {
			dev_err(adev->dev, "reset path failed: %d\n", ret);
			path->state = AVS_PPL_STATE_INVALID;
			return AVS_IPC_RET(ret);
		}
	}

	path->state = AVS_PPL_STATE_RESET;
	return 0;
}
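
/*
 * Note the traversal order below: avs_path_pause() walks the pipeline list
 * in reverse of the order used by avs_path_run(), and avs_path_run() only
 * starts pipelines whose topology-assigned trigger matches the requested
 * one.
 */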
int avs_path_pause(struct avs_path *path)
{
	struct avs_path_pipeline *ppl;
	struct avs_dev *adev = path->owner;
	int ret;

	if (path->state == AVS_PPL_STATE_PAUSED)
		return 0;

	list_for_each_entry_reverse(ppl, &path->ppl_list, node) {
		ret = avs_ipc_set_pipeline_state(adev, ppl->instance_id,
						 AVS_PPL_STATE_PAUSED);
		if (ret) {
			dev_err(adev->dev, "pause path failed: %d\n", ret);
			path->state = AVS_PPL_STATE_INVALID;
			return AVS_IPC_RET(ret);
		}
	}

	path->state = AVS_PPL_STATE_PAUSED;
	return 0;
}

int avs_path_run(struct avs_path *path, int trigger)
{
	struct avs_path_pipeline *ppl;
	struct avs_dev *adev = path->owner;
	int ret;

	if (path->state == AVS_PPL_STATE_RUNNING && trigger == AVS_TPLG_TRIGGER_AUTO)
		return 0;

	list_for_each_entry(ppl, &path->ppl_list, node) {
		if (ppl->template->cfg->trigger != trigger)
			continue;

		ret = avs_ipc_set_pipeline_state(adev, ppl->instance_id,
						 AVS_PPL_STATE_RUNNING);
		if (ret) {
			dev_err(adev->dev, "run path failed: %d\n", ret);
			path->state = AVS_PPL_STATE_INVALID;
			return AVS_IPC_RET(ret);
		}
	}

	path->state = AVS_PPL_STATE_RUNNING;
	return 0;
}