// SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
//
// This file is provided under a dual BSD/GPLv2 license. When using or
// redistributing this file, you may do so under either license.
//
// Copyright(c) 2018 Intel Corporation. All rights reserved.
//
// Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
//          Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
//          Rander Wang <rander.wang@intel.com>
//          Keyon Jie <yang.jie@linux.intel.com>
//

/*
 * Hardware interface for generic Intel audio DSP HDA IP
 */

#include <linux/module.h>
#include <sound/hdaudio_ext.h>
#include <sound/hda_register.h>
#include "../sof-audio.h"
#include "../ops.h"
#include "hda.h"
#include "hda-ipc.h"

static bool hda_enable_trace_D0I3_S0;
#if IS_ENABLED(CONFIG_SND_SOC_SOF_DEBUG)
module_param_named(enable_trace_D0I3_S0, hda_enable_trace_D0I3_S0, bool, 0444);
MODULE_PARM_DESC(enable_trace_D0I3_S0,
                 "SOF HDA enable trace when the DSP is in D0I3 in S0");
#endif

/*
 * DSP Core control.
 */

static int hda_dsp_core_reset_enter(struct snd_sof_dev *sdev, unsigned int core_mask)
{
        u32 adspcs;
        u32 reset;
        int ret;

        /* set reset bits for cores */
        reset = HDA_DSP_ADSPCS_CRST_MASK(core_mask);
        snd_sof_dsp_update_bits_unlocked(sdev, HDA_DSP_BAR,
                                         HDA_DSP_REG_ADSPCS,
                                         reset, reset);

        /* poll with timeout to check if operation successful */
        ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_BAR,
                                            HDA_DSP_REG_ADSPCS, adspcs,
                                            ((adspcs & reset) == reset),
                                            HDA_DSP_REG_POLL_INTERVAL_US,
                                            HDA_DSP_RESET_TIMEOUT_US);
        if (ret < 0) {
                dev_err(sdev->dev,
                        "error: %s: timeout on HDA_DSP_REG_ADSPCS read\n",
                        __func__);
                return ret;
        }

        /* has core entered reset ? */
        adspcs = snd_sof_dsp_read(sdev, HDA_DSP_BAR,
                                  HDA_DSP_REG_ADSPCS);
        if ((adspcs & HDA_DSP_ADSPCS_CRST_MASK(core_mask)) !=
            HDA_DSP_ADSPCS_CRST_MASK(core_mask)) {
                dev_err(sdev->dev,
                        "error: reset enter failed: core_mask %x adspcs 0x%x\n",
                        core_mask, adspcs);
                ret = -EIO;
        }

        return ret;
}

static int hda_dsp_core_reset_leave(struct snd_sof_dev *sdev, unsigned int core_mask)
{
        unsigned int crst;
        u32 adspcs;
        int ret;

        /* clear reset bits for cores */
        snd_sof_dsp_update_bits_unlocked(sdev, HDA_DSP_BAR,
                                         HDA_DSP_REG_ADSPCS,
                                         HDA_DSP_ADSPCS_CRST_MASK(core_mask),
                                         0);

        /* poll with timeout to check if operation successful */
        crst = HDA_DSP_ADSPCS_CRST_MASK(core_mask);
        ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_BAR,
                                            HDA_DSP_REG_ADSPCS, adspcs,
                                            !(adspcs & crst),
                                            HDA_DSP_REG_POLL_INTERVAL_US,
                                            HDA_DSP_RESET_TIMEOUT_US);

        if (ret < 0) {
                dev_err(sdev->dev,
                        "error: %s: timeout on HDA_DSP_REG_ADSPCS read\n",
                        __func__);
                return ret;
        }

        /* has core left reset ? */
        adspcs = snd_sof_dsp_read(sdev, HDA_DSP_BAR,
                                  HDA_DSP_REG_ADSPCS);
        if ((adspcs & HDA_DSP_ADSPCS_CRST_MASK(core_mask)) != 0) {
                dev_err(sdev->dev,
                        "error: reset leave failed: core_mask %x adspcs 0x%x\n",
                        core_mask, adspcs);
                ret = -EIO;
        }

        return ret;
}

static int hda_dsp_core_stall_reset(struct snd_sof_dev *sdev, unsigned int core_mask)
{
        /* stall core */
        snd_sof_dsp_update_bits_unlocked(sdev, HDA_DSP_BAR,
                                         HDA_DSP_REG_ADSPCS,
                                         HDA_DSP_ADSPCS_CSTALL_MASK(core_mask),
                                         HDA_DSP_ADSPCS_CSTALL_MASK(core_mask));

        /* set reset state */
        return hda_dsp_core_reset_enter(sdev, core_mask);
}

static bool hda_dsp_core_is_enabled(struct snd_sof_dev *sdev, unsigned int core_mask)
{
        int val;
        bool is_enable;

        val = snd_sof_dsp_read(sdev, HDA_DSP_BAR, HDA_DSP_REG_ADSPCS);

#define MASK_IS_EQUAL(v, m, field) ({ \
        u32 _m = field(m); \
        ((v) & _m) == _m; \
})

        is_enable = MASK_IS_EQUAL(val, core_mask, HDA_DSP_ADSPCS_CPA_MASK) &&
                MASK_IS_EQUAL(val, core_mask, HDA_DSP_ADSPCS_SPA_MASK) &&
                !(val & HDA_DSP_ADSPCS_CRST_MASK(core_mask)) &&
                !(val & HDA_DSP_ADSPCS_CSTALL_MASK(core_mask));

#undef MASK_IS_EQUAL

        dev_dbg(sdev->dev, "DSP core(s) enabled? %d : core_mask %x\n",
                is_enable, core_mask);

        return is_enable;
}

int hda_dsp_core_run(struct snd_sof_dev *sdev, unsigned int core_mask)
{
        int ret;

        /* leave reset state */
        ret = hda_dsp_core_reset_leave(sdev, core_mask);
        if (ret < 0)
                return ret;

        /* run core */
        dev_dbg(sdev->dev, "unstall/run core: core_mask = %x\n", core_mask);
        snd_sof_dsp_update_bits_unlocked(sdev, HDA_DSP_BAR,
                                         HDA_DSP_REG_ADSPCS,
                                         HDA_DSP_ADSPCS_CSTALL_MASK(core_mask),
                                         0);

        /* is core now running ? */
        if (!hda_dsp_core_is_enabled(sdev, core_mask)) {
                hda_dsp_core_stall_reset(sdev, core_mask);
                dev_err(sdev->dev, "error: DSP start core failed: core_mask %x\n",
                        core_mask);
                ret = -EIO;
        }

        return ret;
}

/*
 * Power Management.
 */

static int hda_dsp_core_power_up(struct snd_sof_dev *sdev, unsigned int core_mask)
{
        unsigned int cpa;
        u32 adspcs;
        int ret;

        /* update bits */
        snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, HDA_DSP_REG_ADSPCS,
                                HDA_DSP_ADSPCS_SPA_MASK(core_mask),
                                HDA_DSP_ADSPCS_SPA_MASK(core_mask));

        /* poll with timeout to check if operation successful */
        cpa = HDA_DSP_ADSPCS_CPA_MASK(core_mask);
        ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_BAR,
                                            HDA_DSP_REG_ADSPCS, adspcs,
                                            (adspcs & cpa) == cpa,
                                            HDA_DSP_REG_POLL_INTERVAL_US,
                                            HDA_DSP_RESET_TIMEOUT_US);
        if (ret < 0) {
                dev_err(sdev->dev,
                        "error: %s: timeout on HDA_DSP_REG_ADSPCS read\n",
                        __func__);
                return ret;
        }

        /* did core power up ? */
        adspcs = snd_sof_dsp_read(sdev, HDA_DSP_BAR,
                                  HDA_DSP_REG_ADSPCS);
        if ((adspcs & HDA_DSP_ADSPCS_CPA_MASK(core_mask)) !=
            HDA_DSP_ADSPCS_CPA_MASK(core_mask)) {
                dev_err(sdev->dev,
                        "error: power up core failed: core_mask %x adspcs 0x%x\n",
                        core_mask, adspcs);
                ret = -EIO;
        }

        return ret;
}

static int hda_dsp_core_power_down(struct snd_sof_dev *sdev, unsigned int core_mask)
{
        u32 adspcs;
        int ret;

        /* update bits */
        snd_sof_dsp_update_bits_unlocked(sdev, HDA_DSP_BAR,
                                         HDA_DSP_REG_ADSPCS,
                                         HDA_DSP_ADSPCS_SPA_MASK(core_mask), 0);

        ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_BAR,
                        HDA_DSP_REG_ADSPCS, adspcs,
                        !(adspcs & HDA_DSP_ADSPCS_CPA_MASK(core_mask)),
                        HDA_DSP_REG_POLL_INTERVAL_US,
                        HDA_DSP_PD_TIMEOUT * USEC_PER_MSEC);
        if (ret < 0)
                dev_err(sdev->dev,
                        "error: %s: timeout on HDA_DSP_REG_ADSPCS read\n",
                        __func__);

        return ret;
}

int hda_dsp_enable_core(struct snd_sof_dev *sdev, unsigned int core_mask)
{
        struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
        const struct sof_intel_dsp_desc *chip = hda->desc;
        int ret;

        /* restrict core_mask to host managed cores mask */
        core_mask &= chip->host_managed_cores_mask;

        /* return if core_mask is not valid or cores are already enabled */
        if (!core_mask || hda_dsp_core_is_enabled(sdev, core_mask))
                return 0;

        /* power up */
        ret = hda_dsp_core_power_up(sdev, core_mask);
        if (ret < 0) {
                dev_err(sdev->dev, "error: dsp core power up failed: core_mask %x\n",
                        core_mask);
                return ret;
        }

        return hda_dsp_core_run(sdev, core_mask);
}

int hda_dsp_core_reset_power_down(struct snd_sof_dev *sdev,
                                  unsigned int core_mask)
{
        struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
        const struct sof_intel_dsp_desc *chip = hda->desc;
        int ret;

        /* restrict core_mask to host managed cores mask */
        core_mask &= chip->host_managed_cores_mask;

        /* return if core_mask is not valid */
        if (!core_mask)
                return 0;

        /* place core in reset prior to power down */
        ret = hda_dsp_core_stall_reset(sdev, core_mask);
        if (ret < 0) {
                dev_err(sdev->dev, "error: dsp core reset failed: core_mask %x\n",
                        core_mask);
                return ret;
        }

        /* power down core */
        ret = hda_dsp_core_power_down(sdev, core_mask);
        if (ret < 0) {
                dev_err(sdev->dev, "error: dsp core power down fail mask %x: %d\n",
                        core_mask, ret);
                return ret;
        }

        /* make sure we are in OFF state */
        if (hda_dsp_core_is_enabled(sdev, core_mask)) {
                dev_err(sdev->dev, "error: dsp core disable fail mask %x: %d\n",
                        core_mask, ret);
                ret = -EIO;
        }

        return ret;
}

void hda_dsp_ipc_int_enable(struct snd_sof_dev *sdev)
{
        struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
        const struct sof_intel_dsp_desc *chip = hda->desc;

        /* enable IPC DONE and BUSY interrupts */
        snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, chip->ipc_ctl,
                        HDA_DSP_REG_HIPCCTL_DONE | HDA_DSP_REG_HIPCCTL_BUSY,
                        HDA_DSP_REG_HIPCCTL_DONE | HDA_DSP_REG_HIPCCTL_BUSY);

        /* enable IPC interrupt */
        snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, HDA_DSP_REG_ADSPIC,
                                HDA_DSP_ADSPIC_IPC, HDA_DSP_ADSPIC_IPC);
}

void hda_dsp_ipc_int_disable(struct snd_sof_dev *sdev)
{
        struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
        const struct sof_intel_dsp_desc *chip = hda->desc;

        /* disable IPC interrupt */
        snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, HDA_DSP_REG_ADSPIC,
                                HDA_DSP_ADSPIC_IPC, 0);

        /* disable IPC BUSY and DONE interrupt */
        snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, chip->ipc_ctl,
                        HDA_DSP_REG_HIPCCTL_BUSY | HDA_DSP_REG_HIPCCTL_DONE, 0);
}

static int hda_dsp_wait_d0i3c_done(struct snd_sof_dev *sdev)
{
        struct hdac_bus *bus = sof_to_bus(sdev);
        int retry = HDA_DSP_REG_POLL_RETRY_COUNT;

        while (snd_hdac_chip_readb(bus, VS_D0I3C) & SOF_HDA_VS_D0I3C_CIP) {
                if (!retry--)
                        return -ETIMEDOUT;
                usleep_range(10, 15);
        }

        return 0;
}

static int hda_dsp_send_pm_gate_ipc(struct snd_sof_dev *sdev, u32 flags)
{
        struct sof_ipc_pm_gate pm_gate;
        struct sof_ipc_reply reply;

        memset(&pm_gate, 0, sizeof(pm_gate));

        /* configure pm_gate ipc message */
        pm_gate.hdr.size = sizeof(pm_gate);
        pm_gate.hdr.cmd = SOF_IPC_GLB_PM_MSG | SOF_IPC_PM_GATE;
        pm_gate.flags = flags;

        /* send pm_gate ipc to dsp */
        return sof_ipc_tx_message_no_pm(sdev->ipc, pm_gate.hdr.cmd,
                                        &pm_gate, sizeof(pm_gate), &reply,
                                        sizeof(reply));
}

static int hda_dsp_update_d0i3c_register(struct snd_sof_dev *sdev, u8 value)
{
        struct hdac_bus *bus = sof_to_bus(sdev);
        int ret;

        /* Write to D0I3C after Command-In-Progress bit is cleared */
        ret = hda_dsp_wait_d0i3c_done(sdev);
        if (ret < 0) {
                dev_err(bus->dev, "CIP timeout before D0I3C update!\n");
                return ret;
        }

        /* Update D0I3C register */
        snd_hdac_chip_updateb(bus, VS_D0I3C, SOF_HDA_VS_D0I3C_I3, value);

        /* Wait for cmd in progress to be cleared before exiting the function */
        ret = hda_dsp_wait_d0i3c_done(sdev);
        if (ret < 0) {
                dev_err(bus->dev, "CIP timeout after D0I3C update!\n");
                return ret;
        }

        dev_vdbg(bus->dev, "D0I3C updated, register = 0x%x\n",
                 snd_hdac_chip_readb(bus, VS_D0I3C));

        return 0;
}

static int hda_dsp_set_D0_state(struct snd_sof_dev *sdev,
                                const struct sof_dsp_power_state *target_state)
{
        u32 flags = 0;
        int ret;
        u8 value = 0;

        /*
         * Sanity check for illegal state transitions
         * The only allowed transitions are:
         * 1. D3 -> D0I0
         * 2. D0I0 -> D0I3
         * 3. D0I3 -> D0I0
         */
        switch (sdev->dsp_power_state.state) {
        case SOF_DSP_PM_D0:
                /* Follow the sequence below for D0 substate transitions */
                break;
        case SOF_DSP_PM_D3:
                /* Follow regular flow for D3 -> D0 transition */
                return 0;
        default:
                dev_err(sdev->dev, "error: transition from %d to %d not allowed\n",
                        sdev->dsp_power_state.state, target_state->state);
                return -EINVAL;
        }

        /* Set flags and register value for D0 target substate */
        if (target_state->substate == SOF_HDA_DSP_PM_D0I3) {
                value = SOF_HDA_VS_D0I3C_I3;

                /*
                 * Trace DMA needs to be disabled when the DSP enters
                 * D0I3 for S0Ix suspend, but it can be kept enabled
                 * when the DSP enters D0I3 while the system is in S0
                 * for debug purposes.
                 */
                if (!sdev->dtrace_is_supported ||
                    !hda_enable_trace_D0I3_S0 ||
                    sdev->system_suspend_target != SOF_SUSPEND_NONE)
                        flags = HDA_PM_NO_DMA_TRACE;
        } else {
                /* prevent power gating in D0I0 */
                flags = HDA_PM_PPG;
        }

        /* update D0I3C register */
        ret = hda_dsp_update_d0i3c_register(sdev, value);
        if (ret < 0)
                return ret;

        /*
         * Notify the DSP of the state change.
452 * If this IPC fails, revert the D0I3C register update in order 453 * to prevent partial state change. 454 */ 455 ret = hda_dsp_send_pm_gate_ipc(sdev, flags); 456 if (ret < 0) { 457 dev_err(sdev->dev, 458 "error: PM_GATE ipc error %d\n", ret); 459 goto revert; 460 } 461 462 return ret; 463 464 revert: 465 /* fallback to the previous register value */ 466 value = value ? 0 : SOF_HDA_VS_D0I3C_I3; 467 468 /* 469 * This can fail but return the IPC error to signal that 470 * the state change failed. 471 */ 472 hda_dsp_update_d0i3c_register(sdev, value); 473 474 return ret; 475 } 476 477 /* helper to log DSP state */ 478 static void hda_dsp_state_log(struct snd_sof_dev *sdev) 479 { 480 switch (sdev->dsp_power_state.state) { 481 case SOF_DSP_PM_D0: 482 switch (sdev->dsp_power_state.substate) { 483 case SOF_HDA_DSP_PM_D0I0: 484 dev_dbg(sdev->dev, "Current DSP power state: D0I0\n"); 485 break; 486 case SOF_HDA_DSP_PM_D0I3: 487 dev_dbg(sdev->dev, "Current DSP power state: D0I3\n"); 488 break; 489 default: 490 dev_dbg(sdev->dev, "Unknown DSP D0 substate: %d\n", 491 sdev->dsp_power_state.substate); 492 break; 493 } 494 break; 495 case SOF_DSP_PM_D1: 496 dev_dbg(sdev->dev, "Current DSP power state: D1\n"); 497 break; 498 case SOF_DSP_PM_D2: 499 dev_dbg(sdev->dev, "Current DSP power state: D2\n"); 500 break; 501 case SOF_DSP_PM_D3_HOT: 502 dev_dbg(sdev->dev, "Current DSP power state: D3_HOT\n"); 503 break; 504 case SOF_DSP_PM_D3: 505 dev_dbg(sdev->dev, "Current DSP power state: D3\n"); 506 break; 507 case SOF_DSP_PM_D3_COLD: 508 dev_dbg(sdev->dev, "Current DSP power state: D3_COLD\n"); 509 break; 510 default: 511 dev_dbg(sdev->dev, "Unknown DSP power state: %d\n", 512 sdev->dsp_power_state.state); 513 break; 514 } 515 } 516 517 /* 518 * All DSP power state transitions are initiated by the driver. 519 * If the requested state change fails, the error is simply returned. 520 * Further state transitions are attempted only when the set_power_save() op 521 * is called again either because of a new IPC sent to the DSP or 522 * during system suspend/resume. 523 */ 524 int hda_dsp_set_power_state(struct snd_sof_dev *sdev, 525 const struct sof_dsp_power_state *target_state) 526 { 527 int ret = 0; 528 529 /* 530 * When the DSP is already in D0I3 and the target state is D0I3, 531 * it could be the case that the DSP is in D0I3 during S0 532 * and the system is suspending to S0Ix. Therefore, 533 * hda_dsp_set_D0_state() must be called to disable trace DMA 534 * by sending the PM_GATE IPC to the FW. 535 */ 536 if (target_state->substate == SOF_HDA_DSP_PM_D0I3 && 537 sdev->system_suspend_target == SOF_SUSPEND_S0IX) 538 goto set_state; 539 540 /* 541 * For all other cases, return without doing anything if 542 * the DSP is already in the target state. 
543 */ 544 if (target_state->state == sdev->dsp_power_state.state && 545 target_state->substate == sdev->dsp_power_state.substate) 546 return 0; 547 548 set_state: 549 switch (target_state->state) { 550 case SOF_DSP_PM_D0: 551 ret = hda_dsp_set_D0_state(sdev, target_state); 552 break; 553 case SOF_DSP_PM_D3: 554 /* The only allowed transition is: D0I0 -> D3 */ 555 if (sdev->dsp_power_state.state == SOF_DSP_PM_D0 && 556 sdev->dsp_power_state.substate == SOF_HDA_DSP_PM_D0I0) 557 break; 558 559 dev_err(sdev->dev, 560 "error: transition from %d to %d not allowed\n", 561 sdev->dsp_power_state.state, target_state->state); 562 return -EINVAL; 563 default: 564 dev_err(sdev->dev, "error: target state unsupported %d\n", 565 target_state->state); 566 return -EINVAL; 567 } 568 if (ret < 0) { 569 dev_err(sdev->dev, 570 "failed to set requested target DSP state %d substate %d\n", 571 target_state->state, target_state->substate); 572 return ret; 573 } 574 575 sdev->dsp_power_state = *target_state; 576 hda_dsp_state_log(sdev); 577 return ret; 578 } 579 580 /* 581 * Audio DSP states may transform as below:- 582 * 583 * Opportunistic D0I3 in S0 584 * Runtime +---------------------+ Delayed D0i3 work timeout 585 * suspend | +--------------------+ 586 * +------------+ D0I0(active) | | 587 * | | <---------------+ | 588 * | +--------> | New IPC | | 589 * | |Runtime +--^--+---------^--+--+ (via mailbox) | | 590 * | |resume | | | | | | 591 * | | | | | | | | 592 * | | System| | | | | | 593 * | | resume| | S3/S0IX | | | | 594 * | | | | suspend | | S0IX | | 595 * | | | | | |suspend | | 596 * | | | | | | | | 597 * | | | | | | | | 598 * +-v---+-----------+--v-------+ | | +------+----v----+ 599 * | | | +-----------> | 600 * | D3 (suspended) | | | D0I3 | 601 * | | +--------------+ | 602 * | | System resume | | 603 * +----------------------------+ +----------------+ 604 * 605 * S0IX suspend: The DSP is in D0I3 if any D0I3-compatible streams 606 * ignored the suspend trigger. Otherwise the DSP 607 * is in D3. 
608 */ 609 610 static int hda_suspend(struct snd_sof_dev *sdev, bool runtime_suspend) 611 { 612 struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata; 613 const struct sof_intel_dsp_desc *chip = hda->desc; 614 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA) 615 struct hdac_bus *bus = sof_to_bus(sdev); 616 #endif 617 int ret, j; 618 619 hda_sdw_int_enable(sdev, false); 620 621 /* disable IPC interrupts */ 622 hda_dsp_ipc_int_disable(sdev); 623 624 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA) 625 hda_codec_jack_wake_enable(sdev, runtime_suspend); 626 627 /* power down all hda link */ 628 snd_hdac_ext_bus_link_power_down_all(bus); 629 #endif 630 631 /* power down DSP */ 632 ret = hda_dsp_core_reset_power_down(sdev, chip->host_managed_cores_mask); 633 if (ret < 0) { 634 dev_err(sdev->dev, 635 "error: failed to power down core during suspend\n"); 636 return ret; 637 } 638 639 /* reset ref counts for all cores */ 640 for (j = 0; j < chip->cores_num; j++) 641 sdev->dsp_core_ref_count[j] = 0; 642 643 /* disable ppcap interrupt */ 644 hda_dsp_ctrl_ppcap_enable(sdev, false); 645 hda_dsp_ctrl_ppcap_int_enable(sdev, false); 646 647 /* disable hda bus irq and streams */ 648 hda_dsp_ctrl_stop_chip(sdev); 649 650 /* disable LP retention mode */ 651 snd_sof_pci_update_bits(sdev, PCI_PGCTL, 652 PCI_PGCTL_LSRMD_MASK, PCI_PGCTL_LSRMD_MASK); 653 654 /* reset controller */ 655 ret = hda_dsp_ctrl_link_reset(sdev, true); 656 if (ret < 0) { 657 dev_err(sdev->dev, 658 "error: failed to reset controller during suspend\n"); 659 return ret; 660 } 661 662 /* display codec can powered off after link reset */ 663 hda_codec_i915_display_power(sdev, false); 664 665 return 0; 666 } 667 668 static int hda_resume(struct snd_sof_dev *sdev, bool runtime_resume) 669 { 670 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA) 671 struct hdac_bus *bus = sof_to_bus(sdev); 672 struct hdac_ext_link *hlink = NULL; 673 #endif 674 int ret; 675 676 /* display codec must be powered before link reset */ 677 hda_codec_i915_display_power(sdev, true); 678 679 /* 680 * clear TCSEL to clear playback on some HD Audio 681 * codecs. PCI TCSEL is defined in the Intel manuals. 
682 */ 683 snd_sof_pci_update_bits(sdev, PCI_TCSEL, 0x07, 0); 684 685 /* reset and start hda controller */ 686 ret = hda_dsp_ctrl_init_chip(sdev, true); 687 if (ret < 0) { 688 dev_err(sdev->dev, 689 "error: failed to start controller after resume\n"); 690 goto cleanup; 691 } 692 693 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA) 694 /* check jack status */ 695 if (runtime_resume) { 696 hda_codec_jack_wake_enable(sdev, false); 697 if (sdev->system_suspend_target == SOF_SUSPEND_NONE) 698 hda_codec_jack_check(sdev); 699 } 700 701 /* turn off the links that were off before suspend */ 702 list_for_each_entry(hlink, &bus->hlink_list, list) { 703 if (!hlink->ref_count) 704 snd_hdac_ext_bus_link_power_down(hlink); 705 } 706 707 /* check dma status and clean up CORB/RIRB buffers */ 708 if (!bus->cmd_dma_state) 709 snd_hdac_bus_stop_cmd_io(bus); 710 #endif 711 712 /* enable ppcap interrupt */ 713 hda_dsp_ctrl_ppcap_enable(sdev, true); 714 hda_dsp_ctrl_ppcap_int_enable(sdev, true); 715 716 cleanup: 717 /* display codec can powered off after controller init */ 718 hda_codec_i915_display_power(sdev, false); 719 720 return 0; 721 } 722 723 int hda_dsp_resume(struct snd_sof_dev *sdev) 724 { 725 struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata; 726 struct pci_dev *pci = to_pci_dev(sdev->dev); 727 const struct sof_dsp_power_state target_state = { 728 .state = SOF_DSP_PM_D0, 729 .substate = SOF_HDA_DSP_PM_D0I0, 730 }; 731 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA) 732 struct hdac_bus *bus = sof_to_bus(sdev); 733 struct hdac_ext_link *hlink = NULL; 734 #endif 735 int ret; 736 737 /* resume from D0I3 */ 738 if (sdev->dsp_power_state.state == SOF_DSP_PM_D0) { 739 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA) 740 /* power up links that were active before suspend */ 741 list_for_each_entry(hlink, &bus->hlink_list, list) { 742 if (hlink->ref_count) { 743 ret = snd_hdac_ext_bus_link_power_up(hlink); 744 if (ret < 0) { 745 dev_dbg(sdev->dev, 746 "error %d in %s: failed to power up links", 747 ret, __func__); 748 return ret; 749 } 750 } 751 } 752 753 /* set up CORB/RIRB buffers if was on before suspend */ 754 if (bus->cmd_dma_state) 755 snd_hdac_bus_init_cmd_io(bus); 756 #endif 757 758 /* Set DSP power state */ 759 ret = snd_sof_dsp_set_power_state(sdev, &target_state); 760 if (ret < 0) { 761 dev_err(sdev->dev, "error: setting dsp state %d substate %d\n", 762 target_state.state, target_state.substate); 763 return ret; 764 } 765 766 /* restore L1SEN bit */ 767 if (hda->l1_support_changed) 768 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 769 HDA_VS_INTEL_EM2, 770 HDA_VS_INTEL_EM2_L1SEN, 0); 771 772 /* restore and disable the system wakeup */ 773 pci_restore_state(pci); 774 disable_irq_wake(pci->irq); 775 return 0; 776 } 777 778 /* init hda controller. DSP cores will be powered up during fw boot */ 779 ret = hda_resume(sdev, false); 780 if (ret < 0) 781 return ret; 782 783 return snd_sof_dsp_set_power_state(sdev, &target_state); 784 } 785 786 int hda_dsp_runtime_resume(struct snd_sof_dev *sdev) 787 { 788 const struct sof_dsp_power_state target_state = { 789 .state = SOF_DSP_PM_D0, 790 }; 791 int ret; 792 793 /* init hda controller. 
        ret = hda_resume(sdev, true);
        if (ret < 0)
                return ret;

        return snd_sof_dsp_set_power_state(sdev, &target_state);
}

int hda_dsp_runtime_idle(struct snd_sof_dev *sdev)
{
        struct hdac_bus *hbus = sof_to_bus(sdev);

        if (hbus->codec_powered) {
                dev_dbg(sdev->dev, "some codecs still powered (%08X), not idle\n",
                        (unsigned int)hbus->codec_powered);
                return -EBUSY;
        }

        return 0;
}

int hda_dsp_runtime_suspend(struct snd_sof_dev *sdev)
{
        struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
        const struct sof_dsp_power_state target_state = {
                .state = SOF_DSP_PM_D3,
        };
        int ret;

        /* cancel any attempt for DSP D0I3 */
        cancel_delayed_work_sync(&hda->d0i3_work);

        /* stop hda controller and power dsp off */
        ret = hda_suspend(sdev, true);
        if (ret < 0)
                return ret;

        return snd_sof_dsp_set_power_state(sdev, &target_state);
}

int hda_dsp_suspend(struct snd_sof_dev *sdev, u32 target_state)
{
        struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
        struct hdac_bus *bus = sof_to_bus(sdev);
        struct pci_dev *pci = to_pci_dev(sdev->dev);
        const struct sof_dsp_power_state target_dsp_state = {
                .state = target_state,
                .substate = target_state == SOF_DSP_PM_D0 ?
                                SOF_HDA_DSP_PM_D0I3 : 0,
        };
        int ret;

        /* cancel any attempt for DSP D0I3 */
        cancel_delayed_work_sync(&hda->d0i3_work);

        if (target_state == SOF_DSP_PM_D0) {
                /* Set DSP power state */
                ret = snd_sof_dsp_set_power_state(sdev, &target_dsp_state);
                if (ret < 0) {
                        dev_err(sdev->dev, "error: setting dsp state %d substate %d\n",
                                target_dsp_state.state,
                                target_dsp_state.substate);
                        return ret;
                }

                /* enable L1SEN to make sure the system can enter S0Ix */
                hda->l1_support_changed =
                        snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
                                                HDA_VS_INTEL_EM2,
                                                HDA_VS_INTEL_EM2_L1SEN,
                                                HDA_VS_INTEL_EM2_L1SEN);

#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
                /* stop the CORB/RIRB DMA if it is on */
                if (bus->cmd_dma_state)
                        snd_hdac_bus_stop_cmd_io(bus);

                /* no link can be powered in s0ix state */
                ret = snd_hdac_ext_bus_link_power_down_all(bus);
                if (ret < 0) {
                        dev_dbg(sdev->dev,
                                "error %d in %s: failed to power down links",
                                ret, __func__);
                        return ret;
                }
#endif

                /* enable the system waking up via IPC IRQ */
                enable_irq_wake(pci->irq);
                pci_save_state(pci);
                return 0;
        }

        /* stop hda controller and power dsp off */
        ret = hda_suspend(sdev, false);
        if (ret < 0) {
                dev_err(bus->dev, "error: suspending dsp\n");
                return ret;
        }

        return snd_sof_dsp_set_power_state(sdev, &target_dsp_state);
}

int hda_dsp_shutdown(struct snd_sof_dev *sdev)
{
        sdev->system_suspend_target = SOF_SUSPEND_S3;
        return snd_sof_suspend(sdev->dev);
}

int hda_dsp_set_hw_params_upon_resume(struct snd_sof_dev *sdev)
{
#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
        struct hdac_bus *bus = sof_to_bus(sdev);
        struct snd_soc_pcm_runtime *rtd;
        struct hdac_ext_stream *stream;
        struct hdac_ext_link *link;
        struct hdac_stream *s;
        const char *name;
        int stream_tag;

        /* set internal flag for BE */
        list_for_each_entry(s, &bus->stream_list, list) {
                stream = stream_to_hdac_ext_stream(s);

                /*
                 * clear stream.
                 * This should already be taken care of for running
                 * streams when the SUSPEND trigger is called. But paused
                 * streams do not get suspended, so this needs to be done
                 * explicitly during suspend.
                 */
                if (stream->link_substream) {
                        rtd = asoc_substream_to_rtd(stream->link_substream);
                        name = asoc_rtd_to_codec(rtd, 0)->component->name;
                        link = snd_hdac_ext_bus_get_link(bus, name);
                        if (!link)
                                return -EINVAL;

                        stream->link_prepared = 0;

                        if (hdac_stream(stream)->direction ==
                            SNDRV_PCM_STREAM_CAPTURE)
                                continue;

                        stream_tag = hdac_stream(stream)->stream_tag;
                        snd_hdac_ext_link_clear_stream_id(link, stream_tag);
                }
        }
#endif
        return 0;
}

void hda_dsp_d0i3_work(struct work_struct *work)
{
        struct sof_intel_hda_dev *hdev = container_of(work,
                                                      struct sof_intel_hda_dev,
                                                      d0i3_work.work);
        struct hdac_bus *bus = &hdev->hbus.core;
        struct snd_sof_dev *sdev = dev_get_drvdata(bus->dev);
        struct sof_dsp_power_state target_state = {
                .state = SOF_DSP_PM_D0,
                .substate = SOF_HDA_DSP_PM_D0I3,
        };
        int ret;

        /* DSP can enter D0I3 iff only D0I3-compatible streams are active */
        if (!snd_sof_dsp_only_d0i3_compatible_stream_active(sdev))
                /* remain in D0I0 */
                return;

        /* This can fail but error cannot be propagated */
        ret = snd_sof_dsp_set_power_state(sdev, &target_state);
        if (ret < 0)
                dev_err_ratelimited(sdev->dev,
                                    "error: failed to set DSP state %d substate %d\n",
                                    target_state.state, target_state.substate);
}

int hda_dsp_core_get(struct snd_sof_dev *sdev, int core)
{
        struct sof_ipc_pm_core_config pm_core_config = {
                .hdr = {
                        .cmd = SOF_IPC_GLB_PM_MSG | SOF_IPC_PM_CORE_ENABLE,
                        .size = sizeof(pm_core_config),
                },
                .enable_mask = sdev->enabled_cores_mask | BIT(core),
        };
        int ret, ret1;

        /* power up core */
        ret = hda_dsp_enable_core(sdev, BIT(core));
        if (ret < 0) {
                dev_err(sdev->dev, "failed to power up core %d with err: %d\n",
                        core, ret);
                return ret;
        }

        /* No need to send IPC for primary core or if FW boot is not complete */
        if (sdev->fw_state != SOF_FW_BOOT_COMPLETE || core == SOF_DSP_PRIMARY_CORE)
                return 0;

        /* Now notify DSP for secondary cores */
        ret = sof_ipc_tx_message(sdev->ipc, pm_core_config.hdr.cmd,
                                 &pm_core_config, sizeof(pm_core_config),
                                 &pm_core_config, sizeof(pm_core_config));
        if (ret < 0) {
                dev_err(sdev->dev, "failed to enable secondary core '%d': %d\n",
                        core, ret);
                goto power_down;
        }

        return ret;

power_down:
        /* power down core if it is host managed and return the original error if this fails too */
        ret1 = hda_dsp_core_reset_power_down(sdev, BIT(core));
        if (ret1 < 0)
                dev_err(sdev->dev, "failed to power down core: %d with err: %d\n", core, ret1);

        return ret;
}