// SPDX-License-Identifier: GPL-2.0-only
//
// Copyright(c) 2021 Intel Corporation. All rights reserved.
//
// Authors: Cezary Rojewski <cezary.rojewski@intel.com>
//          Amadeusz Slawinski <amadeuszx.slawinski@linux.intel.com>
//

#include <sound/intel-nhlt.h>
#include <sound/pcm_params.h>
#include <sound/soc.h>
#include "avs.h"
#include "control.h"
#include "path.h"
#include "topology.h"

/* Must be called with adev->comp_list_mutex held. */
static struct avs_tplg *
avs_path_find_tplg(struct avs_dev *adev, const char *name)
{
	struct avs_soc_component *acomp;

	list_for_each_entry(acomp, &adev->comp_list, node)
		if (!strcmp(acomp->tplg->name, name))
			return acomp->tplg;
	return NULL;
}

static struct avs_path_module *
avs_path_find_module(struct avs_path_pipeline *ppl, u32 template_id)
{
	struct avs_path_module *mod;

	list_for_each_entry(mod, &ppl->mod_list, node)
		if (mod->template->id == template_id)
			return mod;
	return NULL;
}

static struct avs_path_pipeline *
avs_path_find_pipeline(struct avs_path *path, u32 template_id)
{
	struct avs_path_pipeline *ppl;

	list_for_each_entry(ppl, &path->ppl_list, node)
		if (ppl->template->id == template_id)
			return ppl;
	return NULL;
}

static struct avs_path *
avs_path_find_path(struct avs_dev *adev, const char *name, u32 template_id)
{
	struct avs_tplg_path_template *pos, *template = NULL;
	struct avs_tplg *tplg;
	struct avs_path *path;

	tplg = avs_path_find_tplg(adev, name);
	if (!tplg)
		return NULL;

	list_for_each_entry(pos, &tplg->path_tmpl_list, node) {
		if (pos->id == template_id) {
			template = pos;
			break;
		}
	}
	if (!template)
		return NULL;

	spin_lock(&adev->path_list_lock);
	/* Only one variant of a given path template may be instantiated at a time. */
	list_for_each_entry(path, &adev->path_list, node) {
		if (path->template->owner == template) {
			spin_unlock(&adev->path_list_lock);
			return path;
		}
	}

	spin_unlock(&adev->path_list_lock);
	return NULL;
}
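
/* Compare PCM hw_params against a topology-described audio format. */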
static bool avs_test_hw_params(struct snd_pcm_hw_params *params,
			       struct avs_audio_format *fmt)
{
	return (params_rate(params) == fmt->sampling_freq &&
		params_channels(params) == fmt->num_channels &&
		params_physical_width(params) == fmt->bit_depth &&
		params_width(params) == fmt->valid_bit_depth);
}

static struct avs_tplg_path *
avs_path_find_variant(struct avs_dev *adev,
		      struct avs_tplg_path_template *template,
		      struct snd_pcm_hw_params *fe_params,
		      struct snd_pcm_hw_params *be_params)
{
	struct avs_tplg_path *variant;

	list_for_each_entry(variant, &template->path_list, node) {
		/* Skip variants lacking format descriptors before dereferencing them. */
		if (!variant->fe_fmt || !variant->be_fmt)
			continue;

		dev_dbg(adev->dev, "check FE rate %d chn %d vbd %d bd %d\n",
			variant->fe_fmt->sampling_freq, variant->fe_fmt->num_channels,
			variant->fe_fmt->valid_bit_depth, variant->fe_fmt->bit_depth);
		dev_dbg(adev->dev, "check BE rate %d chn %d vbd %d bd %d\n",
			variant->be_fmt->sampling_freq, variant->be_fmt->num_channels,
			variant->be_fmt->valid_bit_depth, variant->be_fmt->bit_depth);

		if (avs_test_hw_params(fe_params, variant->fe_fmt) &&
		    avs_test_hw_params(be_params, variant->be_fmt))
			return variant;
	}

	return NULL;
}

__maybe_unused
static bool avs_dma_type_is_host(u32 dma_type)
{
	return dma_type == AVS_DMA_HDA_HOST_OUTPUT ||
	       dma_type == AVS_DMA_HDA_HOST_INPUT;
}

__maybe_unused
static bool avs_dma_type_is_link(u32 dma_type)
{
	return !avs_dma_type_is_host(dma_type);
}

__maybe_unused
static bool avs_dma_type_is_output(u32 dma_type)
{
	return dma_type == AVS_DMA_HDA_HOST_OUTPUT ||
	       dma_type == AVS_DMA_HDA_LINK_OUTPUT ||
	       dma_type == AVS_DMA_I2S_LINK_OUTPUT;
}

__maybe_unused
static bool avs_dma_type_is_input(u32 dma_type)
{
	return !avs_dma_type_is_output(dma_type);
}
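
/*
 * Copier modules front the DSP gateways. The gateway node_id pairs the DMA
 * type with a virtual index: taken from topology for I2S and DMIC, derived
 * from the stream's DMA id for HDAudio host and link gateways.
 */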
static int avs_copier_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct nhlt_acpi_table *nhlt = adev->nhlt;
	struct avs_tplg_module *t = mod->template;
	struct avs_copier_cfg *cfg;
	struct nhlt_specific_cfg *ep_blob;
	union avs_connector_node_id node_id = {0};
	size_t cfg_size, data_size = 0;
	void *data = NULL;
	u32 dma_type;
	int ret;

	dma_type = t->cfg_ext->copier.dma_type;
	node_id.dma_type = dma_type;

	switch (dma_type) {
	struct avs_audio_format *fmt;
	int direction;

	case AVS_DMA_I2S_LINK_OUTPUT:
	case AVS_DMA_I2S_LINK_INPUT:
		if (avs_dma_type_is_input(dma_type))
			direction = SNDRV_PCM_STREAM_CAPTURE;
		else
			direction = SNDRV_PCM_STREAM_PLAYBACK;

		if (t->cfg_ext->copier.blob_fmt)
			fmt = t->cfg_ext->copier.blob_fmt;
		else if (direction == SNDRV_PCM_STREAM_CAPTURE)
			fmt = t->in_fmt;
		else
			fmt = t->cfg_ext->copier.out_fmt;

		ep_blob = intel_nhlt_get_endpoint_blob(adev->dev,
			nhlt, t->cfg_ext->copier.vindex.i2s.instance,
			NHLT_LINK_SSP, fmt->valid_bit_depth, fmt->bit_depth,
			fmt->num_channels, fmt->sampling_freq, direction,
			NHLT_DEVICE_I2S);
		if (!ep_blob) {
			dev_err(adev->dev, "no I2S ep_blob found\n");
			return -ENOENT;
		}

		data = ep_blob->caps;
		data_size = ep_blob->size;
		/* I2S gateway's vindex is statically assigned in topology */
		node_id.vindex = t->cfg_ext->copier.vindex.val;

		break;

	case AVS_DMA_DMIC_LINK_INPUT:
		direction = SNDRV_PCM_STREAM_CAPTURE;

		if (t->cfg_ext->copier.blob_fmt)
			fmt = t->cfg_ext->copier.blob_fmt;
		else
			fmt = t->in_fmt;

		ep_blob = intel_nhlt_get_endpoint_blob(adev->dev, nhlt, 0,
				NHLT_LINK_DMIC, fmt->valid_bit_depth,
				fmt->bit_depth, fmt->num_channels,
				fmt->sampling_freq, direction, NHLT_DEVICE_DMIC);
		if (!ep_blob) {
			dev_err(adev->dev, "no DMIC ep_blob found\n");
			return -ENOENT;
		}

		data = ep_blob->caps;
		data_size = ep_blob->size;
		/* DMIC gateway's vindex is statically assigned in topology */
		node_id.vindex = t->cfg_ext->copier.vindex.val;

		break;

	case AVS_DMA_HDA_HOST_OUTPUT:
	case AVS_DMA_HDA_HOST_INPUT:
		/* HOST gateway's vindex is dynamically assigned with DMA id */
		node_id.vindex = mod->owner->owner->dma_id;
		break;

	case AVS_DMA_HDA_LINK_OUTPUT:
	case AVS_DMA_HDA_LINK_INPUT:
		node_id.vindex = t->cfg_ext->copier.vindex.val |
				 mod->owner->owner->dma_id;
		break;

	case INVALID_OBJECT_ID:
	default:
		node_id = INVALID_NODE_ID;
		break;
	}

	cfg_size = sizeof(*cfg) + data_size;
	/* Every config-BLOB contains gateway attributes. */
	if (data_size)
		cfg_size -= sizeof(cfg->gtw_cfg.config.attrs);

	cfg = kzalloc(cfg_size, GFP_KERNEL);
	if (!cfg)
		return -ENOMEM;

	cfg->base.cpc = t->cfg_base->cpc;
	cfg->base.ibs = t->cfg_base->ibs;
	cfg->base.obs = t->cfg_base->obs;
	cfg->base.is_pages = t->cfg_base->is_pages;
	cfg->base.audio_fmt = *t->in_fmt;
	cfg->out_fmt = *t->cfg_ext->copier.out_fmt;
	cfg->feature_mask = t->cfg_ext->copier.feature_mask;
	cfg->gtw_cfg.node_id = node_id;
	cfg->gtw_cfg.dma_buffer_size = t->cfg_ext->copier.dma_buffer_size;
	/* config_length in DWORDs */
	cfg->gtw_cfg.config_length = DIV_ROUND_UP(data_size, 4);
	if (data)
		memcpy(&cfg->gtw_cfg.config, data, data_size);

	mod->gtw_attrs = cfg->gtw_cfg.config.attrs;

	ret = avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				  t->core_id, t->domain, cfg, cfg_size,
				  &mod->instance_id);
	kfree(cfg);
	return ret;
}

static struct avs_control_data *avs_get_module_control(struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_tplg_path_template *path_tmpl;
	struct snd_soc_dapm_widget *w;
	int i;

	path_tmpl = t->owner->owner->owner;
	w = path_tmpl->w;

	for (i = 0; i < w->num_kcontrols; i++) {
		struct avs_control_data *ctl_data;
		struct soc_mixer_control *mc;

		mc = (struct soc_mixer_control *)w->kcontrols[i]->private_value;
		ctl_data = (struct avs_control_data *)mc->dobj.private;
		if (ctl_data->id == t->ctl_id)
			return ctl_data;
	}

	return NULL;
}
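
/*
 * Initial gain for peakvol/gain modules comes from the kcontrol attached to
 * the owning path's widget, if any; otherwise the maximum target volume
 * (S32_MAX) is used.
 */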
static int avs_peakvol_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_control_data *ctl_data;
	struct avs_peakvol_cfg *cfg;
	int volume = S32_MAX;
	size_t size;
	int ret;

	ctl_data = avs_get_module_control(mod);
	if (ctl_data)
		volume = ctl_data->volume;

	/* As 2+ channel controls are unsupported, use a single block for all channels. */
	size = struct_size(cfg, vols, 1);
	cfg = kzalloc(size, GFP_KERNEL);
	if (!cfg)
		return -ENOMEM;

	cfg->base.cpc = t->cfg_base->cpc;
	cfg->base.ibs = t->cfg_base->ibs;
	cfg->base.obs = t->cfg_base->obs;
	cfg->base.is_pages = t->cfg_base->is_pages;
	cfg->base.audio_fmt = *t->in_fmt;
	cfg->vols[0].target_volume = volume;
	cfg->vols[0].channel_id = AVS_ALL_CHANNELS_MASK;
	cfg->vols[0].curve_type = AVS_AUDIO_CURVE_NONE;
	cfg->vols[0].curve_duration = 0;

	ret = avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id, t->core_id,
				  t->domain, cfg, size, &mod->instance_id);

	kfree(cfg);
	return ret;
}

static int avs_updown_mix_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_updown_mixer_cfg cfg;
	int i;

	cfg.base.cpc = t->cfg_base->cpc;
	cfg.base.ibs = t->cfg_base->ibs;
	cfg.base.obs = t->cfg_base->obs;
	cfg.base.is_pages = t->cfg_base->is_pages;
	cfg.base.audio_fmt = *t->in_fmt;
	cfg.out_channel_config = t->cfg_ext->updown_mix.out_channel_config;
	cfg.coefficients_select = t->cfg_ext->updown_mix.coefficients_select;
	for (i = 0; i < AVS_CHANNELS_MAX; i++)
		cfg.coefficients[i] = t->cfg_ext->updown_mix.coefficients[i];
	cfg.channel_map = t->cfg_ext->updown_mix.channel_map;

	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				   t->core_id, t->domain, &cfg, sizeof(cfg),
				   &mod->instance_id);
}

static int avs_src_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_src_cfg cfg;

	cfg.base.cpc = t->cfg_base->cpc;
	cfg.base.ibs = t->cfg_base->ibs;
	cfg.base.obs = t->cfg_base->obs;
	cfg.base.is_pages = t->cfg_base->is_pages;
	cfg.base.audio_fmt = *t->in_fmt;
	cfg.out_freq = t->cfg_ext->src.out_freq;

	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				   t->core_id, t->domain, &cfg, sizeof(cfg),
				   &mod->instance_id);
}

static int avs_asrc_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_asrc_cfg cfg;

	memset(&cfg, 0, sizeof(cfg));
	cfg.base.cpc = t->cfg_base->cpc;
	cfg.base.ibs = t->cfg_base->ibs;
	cfg.base.obs = t->cfg_base->obs;
	cfg.base.is_pages = t->cfg_base->is_pages;
	cfg.base.audio_fmt = *t->in_fmt;
	cfg.out_freq = t->cfg_ext->asrc.out_freq;
	cfg.mode = t->cfg_ext->asrc.mode;
	cfg.disable_jitter_buffer = t->cfg_ext->asrc.disable_jitter_buffer;

	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				   t->core_id, t->domain, &cfg, sizeof(cfg),
				   &mod->instance_id);
}

static int avs_aec_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_aec_cfg cfg;

	cfg.base.cpc = t->cfg_base->cpc;
	cfg.base.ibs = t->cfg_base->ibs;
	cfg.base.obs = t->cfg_base->obs;
	cfg.base.is_pages = t->cfg_base->is_pages;
	cfg.base.audio_fmt = *t->in_fmt;
	cfg.ref_fmt = *t->cfg_ext->aec.ref_fmt;
	cfg.out_fmt = *t->cfg_ext->aec.out_fmt;
	cfg.cpc_lp_mode = t->cfg_ext->aec.cpc_lp_mode;

	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				   t->core_id, t->domain, &cfg, sizeof(cfg),
				   &mod->instance_id);
}
static int avs_mux_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_mux_cfg cfg;

	cfg.base.cpc = t->cfg_base->cpc;
	cfg.base.ibs = t->cfg_base->ibs;
	cfg.base.obs = t->cfg_base->obs;
	cfg.base.is_pages = t->cfg_base->is_pages;
	cfg.base.audio_fmt = *t->in_fmt;
	cfg.ref_fmt = *t->cfg_ext->mux.ref_fmt;
	cfg.out_fmt = *t->cfg_ext->mux.out_fmt;

	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				   t->core_id, t->domain, &cfg, sizeof(cfg),
				   &mod->instance_id);
}

static int avs_wov_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_wov_cfg cfg;

	cfg.base.cpc = t->cfg_base->cpc;
	cfg.base.ibs = t->cfg_base->ibs;
	cfg.base.obs = t->cfg_base->obs;
	cfg.base.is_pages = t->cfg_base->is_pages;
	cfg.base.audio_fmt = *t->in_fmt;
	cfg.cpc_lp_mode = t->cfg_ext->wov.cpc_lp_mode;

	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				   t->core_id, t->domain, &cfg, sizeof(cfg),
				   &mod->instance_id);
}

static int avs_micsel_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_micsel_cfg cfg;

	cfg.base.cpc = t->cfg_base->cpc;
	cfg.base.ibs = t->cfg_base->ibs;
	cfg.base.obs = t->cfg_base->obs;
	cfg.base.is_pages = t->cfg_base->is_pages;
	cfg.base.audio_fmt = *t->in_fmt;
	cfg.out_fmt = *t->cfg_ext->micsel.out_fmt;

	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				   t->core_id, t->domain, &cfg, sizeof(cfg),
				   &mod->instance_id);
}

static int avs_modbase_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_modcfg_base cfg;

	cfg.cpc = t->cfg_base->cpc;
	cfg.ibs = t->cfg_base->ibs;
	cfg.obs = t->cfg_base->obs;
	cfg.is_pages = t->cfg_base->is_pages;
	cfg.audio_fmt = *t->in_fmt;

	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				   t->core_id, t->domain, &cfg, sizeof(cfg),
				   &mod->instance_id);
}

static int avs_modext_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_tplg_modcfg_ext *tcfg = t->cfg_ext;
	struct avs_modcfg_ext *cfg;
	size_t cfg_size, num_pins;
	int ret, i;

	num_pins = tcfg->generic.num_input_pins + tcfg->generic.num_output_pins;
	cfg_size = struct_size(cfg, pin_fmts, num_pins);

	cfg = kzalloc(cfg_size, GFP_KERNEL);
	if (!cfg)
		return -ENOMEM;

	cfg->base.cpc = t->cfg_base->cpc;
	cfg->base.ibs = t->cfg_base->ibs;
	cfg->base.obs = t->cfg_base->obs;
	cfg->base.is_pages = t->cfg_base->is_pages;
	cfg->base.audio_fmt = *t->in_fmt;
	cfg->num_input_pins = tcfg->generic.num_input_pins;
	cfg->num_output_pins = tcfg->generic.num_output_pins;

	/* configure pin formats */
	for (i = 0; i < num_pins; i++) {
		struct avs_tplg_pin_format *tpin = &tcfg->generic.pin_fmts[i];
		struct avs_pin_format *pin = &cfg->pin_fmts[i];

		pin->pin_index = tpin->pin_index;
		pin->iobs = tpin->iobs;
		pin->audio_fmt = *tpin->fmt;
	}

	ret = avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				  t->core_id, t->domain, cfg, cfg_size,
				  &mod->instance_id);
	kfree(cfg);
	return ret;
}
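
/* Probe modules are never instantiated from topology paths; reject the attempt. */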
static int avs_probe_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	dev_err(adev->dev, "Probe module can't be instantiated by topology\n");
	return -EINVAL;
}

struct avs_module_create {
	guid_t *guid;
	int (*create)(struct avs_dev *adev, struct avs_path_module *mod);
};

static struct avs_module_create avs_module_create[] = {
	{ &AVS_MIXIN_MOD_UUID, avs_modbase_create },
	{ &AVS_MIXOUT_MOD_UUID, avs_modbase_create },
	{ &AVS_KPBUFF_MOD_UUID, avs_modbase_create },
	{ &AVS_COPIER_MOD_UUID, avs_copier_create },
	{ &AVS_PEAKVOL_MOD_UUID, avs_peakvol_create },
	{ &AVS_GAIN_MOD_UUID, avs_peakvol_create },
	{ &AVS_MICSEL_MOD_UUID, avs_micsel_create },
	{ &AVS_MUX_MOD_UUID, avs_mux_create },
	{ &AVS_UPDWMIX_MOD_UUID, avs_updown_mix_create },
	{ &AVS_SRCINTC_MOD_UUID, avs_src_create },
	{ &AVS_AEC_MOD_UUID, avs_aec_create },
	{ &AVS_ASRC_MOD_UUID, avs_asrc_create },
	{ &AVS_INTELWOV_MOD_UUID, avs_wov_create },
	{ &AVS_PROBE_MOD_UUID, avs_probe_create },
};

static int avs_path_module_type_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	const guid_t *type = &mod->template->cfg_ext->type;

	for (int i = 0; i < ARRAY_SIZE(avs_module_create); i++)
		if (guid_equal(type, avs_module_create[i].guid))
			return avs_module_create[i].create(adev, mod);

	return avs_modext_create(adev, mod);
}

static void avs_path_module_free(struct avs_dev *adev, struct avs_path_module *mod)
{
	kfree(mod);
}

static struct avs_path_module *
avs_path_module_create(struct avs_dev *adev,
		       struct avs_path_pipeline *owner,
		       struct avs_tplg_module *template)
{
	struct avs_path_module *mod;
	int module_id, ret;

	module_id = avs_get_module_id(adev, &template->cfg_ext->type);
	if (module_id < 0)
		return ERR_PTR(module_id);

	mod = kzalloc(sizeof(*mod), GFP_KERNEL);
	if (!mod)
		return ERR_PTR(-ENOMEM);

	mod->template = template;
	mod->module_id = module_id;
	mod->owner = owner;
	INIT_LIST_HEAD(&mod->node);

	ret = avs_path_module_type_create(adev, mod);
	if (ret) {
		dev_err(adev->dev, "module-type create failed: %d\n", ret);
		kfree(mod);
		return ERR_PTR(ret);
	}

	return mod;
}
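
/*
 * Resolve a topology binding into live module instances. The local module
 * always belongs to the binding's owner pipeline while the remote one may
 * live in another, already instantiated path found by topology name and ids.
 */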
static int avs_path_binding_arm(struct avs_dev *adev, struct avs_path_binding *binding)
{
	struct avs_path_module *this_mod, *target_mod;
	struct avs_path_pipeline *target_ppl;
	struct avs_path *target_path;
	struct avs_tplg_binding *t;

	t = binding->template;
	this_mod = avs_path_find_module(binding->owner, t->mod_id);
	if (!this_mod) {
		dev_err(adev->dev, "path mod %d not found\n", t->mod_id);
		return -EINVAL;
	}

	/* update with target_tplg_name too */
	target_path = avs_path_find_path(adev, t->target_tplg_name,
					 t->target_path_tmpl_id);
	if (!target_path) {
		dev_err(adev->dev, "target path %s:%d not found\n",
			t->target_tplg_name, t->target_path_tmpl_id);
		return -EINVAL;
	}

	target_ppl = avs_path_find_pipeline(target_path, t->target_ppl_id);
	if (!target_ppl) {
		dev_err(adev->dev, "target ppl %d not found\n", t->target_ppl_id);
		return -EINVAL;
	}

	target_mod = avs_path_find_module(target_ppl, t->target_mod_id);
	if (!target_mod) {
		dev_err(adev->dev, "target mod %d not found\n", t->target_mod_id);
		return -EINVAL;
	}

	if (t->is_sink) {
		binding->sink = this_mod;
		binding->sink_pin = t->mod_pin;
		binding->source = target_mod;
		binding->source_pin = t->target_mod_pin;
	} else {
		binding->sink = target_mod;
		binding->sink_pin = t->target_mod_pin;
		binding->source = this_mod;
		binding->source_pin = t->mod_pin;
	}

	return 0;
}

static void avs_path_binding_free(struct avs_dev *adev, struct avs_path_binding *binding)
{
	kfree(binding);
}

static struct avs_path_binding *avs_path_binding_create(struct avs_dev *adev,
							struct avs_path_pipeline *owner,
							struct avs_tplg_binding *t)
{
	struct avs_path_binding *binding;

	binding = kzalloc(sizeof(*binding), GFP_KERNEL);
	if (!binding)
		return ERR_PTR(-ENOMEM);

	binding->template = t;
	binding->owner = owner;
	INIT_LIST_HEAD(&binding->node);

	return binding;
}

static int avs_path_pipeline_arm(struct avs_dev *adev,
				 struct avs_path_pipeline *ppl)
{
	struct avs_path_module *mod;

	list_for_each_entry(mod, &ppl->mod_list, node) {
		struct avs_path_module *source, *sink;
		int ret;

		/*
		 * The last module (or the only one) has no next module to
		 * bind to, so stop here.
		 */
		if (mod == list_last_entry(&ppl->mod_list,
					   struct avs_path_module, node))
			break;

		/* bind current module to next module on list */
		source = mod;
		sink = list_next_entry(mod, node);
		if (!source || !sink)
			return -EINVAL;

		ret = avs_ipc_bind(adev, source->module_id, source->instance_id,
				   sink->module_id, sink->instance_id, 0, 0);
		if (ret)
			return AVS_IPC_RET(ret);
	}

	return 0;
}

static void avs_path_pipeline_free(struct avs_dev *adev,
				   struct avs_path_pipeline *ppl)
{
	struct avs_path_binding *binding, *bsave;
	struct avs_path_module *mod, *save;

	list_for_each_entry_safe(binding, bsave, &ppl->binding_list, node) {
		list_del(&binding->node);
		avs_path_binding_free(adev, binding);
	}

	avs_dsp_delete_pipeline(adev, ppl->instance_id);

	/* Unload resources occupied by owned modules */
	list_for_each_entry_safe(mod, save, &ppl->mod_list, node) {
		avs_dsp_delete_module(adev, mod->module_id, mod->instance_id,
				      mod->owner->instance_id,
				      mod->template->core_id);
		avs_path_module_free(adev, mod);
	}

	list_del(&ppl->node);
	kfree(ppl);
}
static struct avs_path_pipeline *
avs_path_pipeline_create(struct avs_dev *adev, struct avs_path *owner,
			 struct avs_tplg_pipeline *template)
{
	struct avs_path_pipeline *ppl;
	struct avs_tplg_pplcfg *cfg = template->cfg;
	struct avs_tplg_module *tmod;
	int ret, i;

	ppl = kzalloc(sizeof(*ppl), GFP_KERNEL);
	if (!ppl)
		return ERR_PTR(-ENOMEM);

	ppl->template = template;
	ppl->owner = owner;
	INIT_LIST_HEAD(&ppl->binding_list);
	INIT_LIST_HEAD(&ppl->mod_list);
	INIT_LIST_HEAD(&ppl->node);

	ret = avs_dsp_create_pipeline(adev, cfg->req_size, cfg->priority,
				      cfg->lp, cfg->attributes,
				      &ppl->instance_id);
	if (ret) {
		dev_err(adev->dev, "error creating pipeline %d\n", ret);
		kfree(ppl);
		return ERR_PTR(ret);
	}

	list_for_each_entry(tmod, &template->mod_list, node) {
		struct avs_path_module *mod;

		mod = avs_path_module_create(adev, ppl, tmod);
		if (IS_ERR(mod)) {
			ret = PTR_ERR(mod);
			dev_err(adev->dev, "error creating module %d\n", ret);
			goto init_err;
		}

		list_add_tail(&mod->node, &ppl->mod_list);
	}

	for (i = 0; i < template->num_bindings; i++) {
		struct avs_path_binding *binding;

		binding = avs_path_binding_create(adev, ppl, template->bindings[i]);
		if (IS_ERR(binding)) {
			ret = PTR_ERR(binding);
			dev_err(adev->dev, "error creating binding %d\n", ret);
			goto init_err;
		}

		list_add_tail(&binding->node, &ppl->binding_list);
	}

	return ppl;

init_err:
	avs_path_pipeline_free(adev, ppl);
	return ERR_PTR(ret);
}

static int avs_path_init(struct avs_dev *adev, struct avs_path *path,
			 struct avs_tplg_path *template, u32 dma_id)
{
	struct avs_tplg_pipeline *tppl;

	path->owner = adev;
	path->template = template;
	path->dma_id = dma_id;
	INIT_LIST_HEAD(&path->ppl_list);
	INIT_LIST_HEAD(&path->node);

	/* create all the pipelines */
	list_for_each_entry(tppl, &template->ppl_list, node) {
		struct avs_path_pipeline *ppl;

		ppl = avs_path_pipeline_create(adev, path, tppl);
		if (IS_ERR(ppl))
			return PTR_ERR(ppl);

		list_add_tail(&ppl->node, &path->ppl_list);
	}

	spin_lock(&adev->path_list_lock);
	list_add_tail(&path->node, &adev->path_list);
	spin_unlock(&adev->path_list_lock);

	return 0;
}

static int avs_path_arm(struct avs_dev *adev, struct avs_path *path)
{
	struct avs_path_pipeline *ppl;
	struct avs_path_binding *binding;
	int ret;

	list_for_each_entry(ppl, &path->ppl_list, node) {
		/*
		 * Arm all pipeline bindings before binding internal modules;
		 * the former costs no IPCs, which isn't true for the latter.
		 */
		list_for_each_entry(binding, &ppl->binding_list, node) {
			ret = avs_path_binding_arm(adev, binding);
			if (ret < 0)
				return ret;
		}

		ret = avs_path_pipeline_arm(adev, ppl);
		if (ret < 0)
			return ret;
	}

	return 0;
}

static void avs_path_free_unlocked(struct avs_path *path)
{
	struct avs_path_pipeline *ppl, *save;

	spin_lock(&path->owner->path_list_lock);
	list_del(&path->node);
	spin_unlock(&path->owner->path_list_lock);

	list_for_each_entry_safe(ppl, save, &path->ppl_list, node)
		avs_path_pipeline_free(path->owner, ppl);

	kfree(path);
}

static struct avs_path *avs_path_create_unlocked(struct avs_dev *adev, u32 dma_id,
						 struct avs_tplg_path *template)
{
	struct avs_path *path;
	int ret;

	path = kzalloc(sizeof(*path), GFP_KERNEL);
	if (!path)
		return ERR_PTR(-ENOMEM);

	ret = avs_path_init(adev, path, template, dma_id);
	if (ret < 0)
		goto err;

	ret = avs_path_arm(adev, path);
	if (ret < 0)
		goto err;

	path->state = AVS_PPL_STATE_INVALID;
	return path;
err:
	avs_path_free_unlocked(path);
	return ERR_PTR(ret);
}

void avs_path_free(struct avs_path *path)
{
	struct avs_dev *adev = path->owner;

	mutex_lock(&adev->path_mutex);
	avs_path_free_unlocked(path);
	mutex_unlock(&adev->path_mutex);
}
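
/*
 * Instantiate a path for a PCM stream: pick the variant whose FE and BE
 * formats match the supplied hw_params, then create its pipelines and
 * modules on the DSP.
 */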
struct avs_path *avs_path_create(struct avs_dev *adev, u32 dma_id,
				 struct avs_tplg_path_template *template,
				 struct snd_pcm_hw_params *fe_params,
				 struct snd_pcm_hw_params *be_params)
{
	struct avs_tplg_path *variant;
	struct avs_path *path;

	variant = avs_path_find_variant(adev, template, fe_params, be_params);
	if (!variant) {
		dev_err(adev->dev, "no matching variant found\n");
		return ERR_PTR(-ENOENT);
	}

	/* Serialize path and its components creation. */
	mutex_lock(&adev->path_mutex);
	/* Satisfy needs of avs_path_find_tplg(). */
	mutex_lock(&adev->comp_list_mutex);

	path = avs_path_create_unlocked(adev, dma_id, variant);

	mutex_unlock(&adev->comp_list_mutex);
	mutex_unlock(&adev->path_mutex);

	return path;
}

static int avs_path_bind_prepare(struct avs_dev *adev,
				 struct avs_path_binding *binding)
{
	const struct avs_audio_format *src_fmt, *sink_fmt;
	struct avs_tplg_module *tsource = binding->source->template;
	struct avs_path_module *source = binding->source;
	int ret;

	/*
	 * only copier modules about to be bound
	 * to output pin other than 0 need preparation
	 */
	if (!binding->source_pin)
		return 0;
	if (!guid_equal(&tsource->cfg_ext->type, &AVS_COPIER_MOD_UUID))
		return 0;

	src_fmt = tsource->in_fmt;
	sink_fmt = binding->sink->template->in_fmt;

	ret = avs_ipc_copier_set_sink_format(adev, source->module_id,
					     source->instance_id, binding->source_pin,
					     src_fmt, sink_fmt);
	if (ret) {
		dev_err(adev->dev, "config copier failed: %d\n", ret);
		return AVS_IPC_RET(ret);
	}

	return 0;
}

int avs_path_bind(struct avs_path *path)
{
	struct avs_path_pipeline *ppl;
	struct avs_dev *adev = path->owner;
	int ret;

	list_for_each_entry(ppl, &path->ppl_list, node) {
		struct avs_path_binding *binding;

		list_for_each_entry(binding, &ppl->binding_list, node) {
			struct avs_path_module *source, *sink;

			source = binding->source;
			sink = binding->sink;

			ret = avs_path_bind_prepare(adev, binding);
			if (ret < 0)
				return ret;

			ret = avs_ipc_bind(adev, source->module_id,
					   source->instance_id, sink->module_id,
					   sink->instance_id, binding->sink_pin,
					   binding->source_pin);
			if (ret) {
				dev_err(adev->dev, "bind path failed: %d\n", ret);
				return AVS_IPC_RET(ret);
			}
		}
	}

	return 0;
}

int avs_path_unbind(struct avs_path *path)
{
	struct avs_path_pipeline *ppl;
	struct avs_dev *adev = path->owner;
	int ret;

	list_for_each_entry(ppl, &path->ppl_list, node) {
		struct avs_path_binding *binding;

		list_for_each_entry(binding, &ppl->binding_list, node) {
			struct avs_path_module *source, *sink;

			source = binding->source;
			sink = binding->sink;

			ret = avs_ipc_unbind(adev, source->module_id,
					     source->instance_id, sink->module_id,
					     sink->instance_id, binding->sink_pin,
					     binding->source_pin);
			if (ret) {
				dev_err(adev->dev, "unbind path failed: %d\n", ret);
				return AVS_IPC_RET(ret);
			}
		}
	}

	return 0;
}

int avs_path_reset(struct avs_path *path)
{
	struct avs_path_pipeline *ppl;
	struct avs_dev *adev = path->owner;
	int ret;

	if (path->state == AVS_PPL_STATE_RESET)
		return 0;

	list_for_each_entry(ppl, &path->ppl_list, node) {
		ret = avs_ipc_set_pipeline_state(adev, ppl->instance_id,
						 AVS_PPL_STATE_RESET);
		if (ret) {
			dev_err(adev->dev, "reset path failed: %d\n", ret);
			path->state = AVS_PPL_STATE_INVALID;
			return AVS_IPC_RET(ret);
		}
	}

	path->state = AVS_PPL_STATE_RESET;
	return 0;
}
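
/* Pause walks the pipeline list in reverse order, opposite to avs_path_run(). */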
int avs_path_pause(struct avs_path *path)
{
	struct avs_path_pipeline *ppl;
	struct avs_dev *adev = path->owner;
	int ret;

	if (path->state == AVS_PPL_STATE_PAUSED)
		return 0;

	list_for_each_entry_reverse(ppl, &path->ppl_list, node) {
		ret = avs_ipc_set_pipeline_state(adev, ppl->instance_id,
						 AVS_PPL_STATE_PAUSED);
		if (ret) {
			dev_err(adev->dev, "pause path failed: %d\n", ret);
			path->state = AVS_PPL_STATE_INVALID;
			return AVS_IPC_RET(ret);
		}
	}

	path->state = AVS_PPL_STATE_PAUSED;
	return 0;
}

int avs_path_run(struct avs_path *path, int trigger)
{
	struct avs_path_pipeline *ppl;
	struct avs_dev *adev = path->owner;
	int ret;

	if (path->state == AVS_PPL_STATE_RUNNING && trigger == AVS_TPLG_TRIGGER_AUTO)
		return 0;

	list_for_each_entry(ppl, &path->ppl_list, node) {
		if (ppl->template->cfg->trigger != trigger)
			continue;

		ret = avs_ipc_set_pipeline_state(adev, ppl->instance_id,
						 AVS_PPL_STATE_RUNNING);
		if (ret) {
			dev_err(adev->dev, "run path failed: %d\n", ret);
			path->state = AVS_PPL_STATE_INVALID;
			return AVS_IPC_RET(ret);
		}
	}

	path->state = AVS_PPL_STATE_RUNNING;
	return 0;
}
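
/*
 * A minimal usage sketch of the exported helpers above. This is only an
 * illustration of one plausible call sequence, not code taken from the
 * driver's PCM handlers:
 *
 *	path = avs_path_create(adev, dma_id, template, fe_params, be_params);
 *	if (IS_ERR(path))
 *		return PTR_ERR(path);
 *
 *	ret = avs_path_bind(path);
 *	ret = avs_path_reset(path);
 *	ret = avs_path_pause(path);
 *	ret = avs_path_run(path, AVS_TPLG_TRIGGER_AUTO);
 *
 *	// teardown
 *	ret = avs_path_unbind(path);
 *	avs_path_free(path);
 */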