1 // SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause) 2 // 3 // This file is provided under a dual BSD/GPLv2 license. When using or 4 // redistributing this file, you may do so under either license. 5 // 6 // Copyright(c) 2018 Intel Corporation. All rights reserved. 7 // 8 // Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com> 9 // Ranjani Sridharan <ranjani.sridharan@linux.intel.com> 10 // Rander Wang <rander.wang@intel.com> 11 // Keyon Jie <yang.jie@linux.intel.com> 12 // 13 14 /* 15 * Hardware interface for generic Intel audio DSP HDA IP 16 */ 17 18 #include <linux/module.h> 19 #include <sound/hdaudio_ext.h> 20 #include <sound/hda_register.h> 21 #include "../sof-audio.h" 22 #include "../ops.h" 23 #include "hda.h" 24 #include "hda-ipc.h" 25 26 static bool hda_enable_trace_D0I3_S0; 27 #if IS_ENABLED(CONFIG_SND_SOC_SOF_DEBUG) 28 module_param_named(enable_trace_D0I3_S0, hda_enable_trace_D0I3_S0, bool, 0444); 29 MODULE_PARM_DESC(enable_trace_D0I3_S0, 30 "SOF HDA enable trace when the DSP is in D0I3 in S0"); 31 #endif 32 33 /* 34 * DSP Core control. 35 */ 36 37 int hda_dsp_core_reset_enter(struct snd_sof_dev *sdev, unsigned int core_mask) 38 { 39 u32 adspcs; 40 u32 reset; 41 int ret; 42 43 /* set reset bits for cores */ 44 reset = HDA_DSP_ADSPCS_CRST_MASK(core_mask); 45 snd_sof_dsp_update_bits_unlocked(sdev, HDA_DSP_BAR, 46 HDA_DSP_REG_ADSPCS, 47 reset, reset), 48 49 /* poll with timeout to check if operation successful */ 50 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_BAR, 51 HDA_DSP_REG_ADSPCS, adspcs, 52 ((adspcs & reset) == reset), 53 HDA_DSP_REG_POLL_INTERVAL_US, 54 HDA_DSP_RESET_TIMEOUT_US); 55 if (ret < 0) { 56 dev_err(sdev->dev, 57 "error: %s: timeout on HDA_DSP_REG_ADSPCS read\n", 58 __func__); 59 return ret; 60 } 61 62 /* has core entered reset ? */ 63 adspcs = snd_sof_dsp_read(sdev, HDA_DSP_BAR, 64 HDA_DSP_REG_ADSPCS); 65 if ((adspcs & HDA_DSP_ADSPCS_CRST_MASK(core_mask)) != 66 HDA_DSP_ADSPCS_CRST_MASK(core_mask)) { 67 dev_err(sdev->dev, 68 "error: reset enter failed: core_mask %x adspcs 0x%x\n", 69 core_mask, adspcs); 70 ret = -EIO; 71 } 72 73 return ret; 74 } 75 76 int hda_dsp_core_reset_leave(struct snd_sof_dev *sdev, unsigned int core_mask) 77 { 78 unsigned int crst; 79 u32 adspcs; 80 int ret; 81 82 /* clear reset bits for cores */ 83 snd_sof_dsp_update_bits_unlocked(sdev, HDA_DSP_BAR, 84 HDA_DSP_REG_ADSPCS, 85 HDA_DSP_ADSPCS_CRST_MASK(core_mask), 86 0); 87 88 /* poll with timeout to check if operation successful */ 89 crst = HDA_DSP_ADSPCS_CRST_MASK(core_mask); 90 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_BAR, 91 HDA_DSP_REG_ADSPCS, adspcs, 92 !(adspcs & crst), 93 HDA_DSP_REG_POLL_INTERVAL_US, 94 HDA_DSP_RESET_TIMEOUT_US); 95 96 if (ret < 0) { 97 dev_err(sdev->dev, 98 "error: %s: timeout on HDA_DSP_REG_ADSPCS read\n", 99 __func__); 100 return ret; 101 } 102 103 /* has core left reset ? */ 104 adspcs = snd_sof_dsp_read(sdev, HDA_DSP_BAR, 105 HDA_DSP_REG_ADSPCS); 106 if ((adspcs & HDA_DSP_ADSPCS_CRST_MASK(core_mask)) != 0) { 107 dev_err(sdev->dev, 108 "error: reset leave failed: core_mask %x adspcs 0x%x\n", 109 core_mask, adspcs); 110 ret = -EIO; 111 } 112 113 return ret; 114 } 115 116 int hda_dsp_core_stall_reset(struct snd_sof_dev *sdev, unsigned int core_mask) 117 { 118 /* stall core */ 119 snd_sof_dsp_update_bits_unlocked(sdev, HDA_DSP_BAR, 120 HDA_DSP_REG_ADSPCS, 121 HDA_DSP_ADSPCS_CSTALL_MASK(core_mask), 122 HDA_DSP_ADSPCS_CSTALL_MASK(core_mask)); 123 124 /* set reset state */ 125 return hda_dsp_core_reset_enter(sdev, core_mask); 126 } 127 128 int hda_dsp_core_run(struct snd_sof_dev *sdev, unsigned int core_mask) 129 { 130 int ret; 131 132 /* leave reset state */ 133 ret = hda_dsp_core_reset_leave(sdev, core_mask); 134 if (ret < 0) 135 return ret; 136 137 /* run core */ 138 dev_dbg(sdev->dev, "unstall/run core: core_mask = %x\n", core_mask); 139 snd_sof_dsp_update_bits_unlocked(sdev, HDA_DSP_BAR, 140 HDA_DSP_REG_ADSPCS, 141 HDA_DSP_ADSPCS_CSTALL_MASK(core_mask), 142 0); 143 144 /* is core now running ? */ 145 if (!hda_dsp_core_is_enabled(sdev, core_mask)) { 146 hda_dsp_core_stall_reset(sdev, core_mask); 147 dev_err(sdev->dev, "error: DSP start core failed: core_mask %x\n", 148 core_mask); 149 ret = -EIO; 150 } 151 152 return ret; 153 } 154 155 /* 156 * Power Management. 157 */ 158 159 int hda_dsp_core_power_up(struct snd_sof_dev *sdev, unsigned int core_mask) 160 { 161 unsigned int cpa; 162 u32 adspcs; 163 int ret; 164 165 /* update bits */ 166 snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, HDA_DSP_REG_ADSPCS, 167 HDA_DSP_ADSPCS_SPA_MASK(core_mask), 168 HDA_DSP_ADSPCS_SPA_MASK(core_mask)); 169 170 /* poll with timeout to check if operation successful */ 171 cpa = HDA_DSP_ADSPCS_CPA_MASK(core_mask); 172 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_BAR, 173 HDA_DSP_REG_ADSPCS, adspcs, 174 (adspcs & cpa) == cpa, 175 HDA_DSP_REG_POLL_INTERVAL_US, 176 HDA_DSP_RESET_TIMEOUT_US); 177 if (ret < 0) { 178 dev_err(sdev->dev, 179 "error: %s: timeout on HDA_DSP_REG_ADSPCS read\n", 180 __func__); 181 return ret; 182 } 183 184 /* did core power up ? */ 185 adspcs = snd_sof_dsp_read(sdev, HDA_DSP_BAR, 186 HDA_DSP_REG_ADSPCS); 187 if ((adspcs & HDA_DSP_ADSPCS_CPA_MASK(core_mask)) != 188 HDA_DSP_ADSPCS_CPA_MASK(core_mask)) { 189 dev_err(sdev->dev, 190 "error: power up core failed core_mask %xadspcs 0x%x\n", 191 core_mask, adspcs); 192 ret = -EIO; 193 } 194 195 return ret; 196 } 197 198 int hda_dsp_core_power_down(struct snd_sof_dev *sdev, unsigned int core_mask) 199 { 200 u32 adspcs; 201 int ret; 202 203 /* update bits */ 204 snd_sof_dsp_update_bits_unlocked(sdev, HDA_DSP_BAR, 205 HDA_DSP_REG_ADSPCS, 206 HDA_DSP_ADSPCS_SPA_MASK(core_mask), 0); 207 208 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_BAR, 209 HDA_DSP_REG_ADSPCS, adspcs, 210 !(adspcs & HDA_DSP_ADSPCS_SPA_MASK(core_mask)), 211 HDA_DSP_REG_POLL_INTERVAL_US, 212 HDA_DSP_PD_TIMEOUT * USEC_PER_MSEC); 213 if (ret < 0) 214 dev_err(sdev->dev, 215 "error: %s: timeout on HDA_DSP_REG_ADSPCS read\n", 216 __func__); 217 218 return ret; 219 } 220 221 bool hda_dsp_core_is_enabled(struct snd_sof_dev *sdev, 222 unsigned int core_mask) 223 { 224 int val; 225 bool is_enable; 226 227 val = snd_sof_dsp_read(sdev, HDA_DSP_BAR, HDA_DSP_REG_ADSPCS); 228 229 is_enable = (val & HDA_DSP_ADSPCS_CPA_MASK(core_mask)) && 230 (val & HDA_DSP_ADSPCS_SPA_MASK(core_mask)) && 231 !(val & HDA_DSP_ADSPCS_CRST_MASK(core_mask)) && 232 !(val & HDA_DSP_ADSPCS_CSTALL_MASK(core_mask)); 233 234 dev_dbg(sdev->dev, "DSP core(s) enabled? %d : core_mask %x\n", 235 is_enable, core_mask); 236 237 return is_enable; 238 } 239 240 int hda_dsp_enable_core(struct snd_sof_dev *sdev, unsigned int core_mask) 241 { 242 int ret; 243 244 /* return if core is already enabled */ 245 if (hda_dsp_core_is_enabled(sdev, core_mask)) 246 return 0; 247 248 /* power up */ 249 ret = hda_dsp_core_power_up(sdev, core_mask); 250 if (ret < 0) { 251 dev_err(sdev->dev, "error: dsp core power up failed: core_mask %x\n", 252 core_mask); 253 return ret; 254 } 255 256 return hda_dsp_core_run(sdev, core_mask); 257 } 258 259 int hda_dsp_core_reset_power_down(struct snd_sof_dev *sdev, 260 unsigned int core_mask) 261 { 262 int ret; 263 264 /* place core in reset prior to power down */ 265 ret = hda_dsp_core_stall_reset(sdev, core_mask); 266 if (ret < 0) { 267 dev_err(sdev->dev, "error: dsp core reset failed: core_mask %x\n", 268 core_mask); 269 return ret; 270 } 271 272 /* power down core */ 273 ret = hda_dsp_core_power_down(sdev, core_mask); 274 if (ret < 0) { 275 dev_err(sdev->dev, "error: dsp core power down fail mask %x: %d\n", 276 core_mask, ret); 277 return ret; 278 } 279 280 /* make sure we are in OFF state */ 281 if (hda_dsp_core_is_enabled(sdev, core_mask)) { 282 dev_err(sdev->dev, "error: dsp core disable fail mask %x: %d\n", 283 core_mask, ret); 284 ret = -EIO; 285 } 286 287 return ret; 288 } 289 290 void hda_dsp_ipc_int_enable(struct snd_sof_dev *sdev) 291 { 292 struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata; 293 const struct sof_intel_dsp_desc *chip = hda->desc; 294 295 /* enable IPC DONE and BUSY interrupts */ 296 snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, chip->ipc_ctl, 297 HDA_DSP_REG_HIPCCTL_DONE | HDA_DSP_REG_HIPCCTL_BUSY, 298 HDA_DSP_REG_HIPCCTL_DONE | HDA_DSP_REG_HIPCCTL_BUSY); 299 300 /* enable IPC interrupt */ 301 snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, HDA_DSP_REG_ADSPIC, 302 HDA_DSP_ADSPIC_IPC, HDA_DSP_ADSPIC_IPC); 303 } 304 305 void hda_dsp_ipc_int_disable(struct snd_sof_dev *sdev) 306 { 307 struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata; 308 const struct sof_intel_dsp_desc *chip = hda->desc; 309 310 /* disable IPC interrupt */ 311 snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, HDA_DSP_REG_ADSPIC, 312 HDA_DSP_ADSPIC_IPC, 0); 313 314 /* disable IPC BUSY and DONE interrupt */ 315 snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, chip->ipc_ctl, 316 HDA_DSP_REG_HIPCCTL_BUSY | HDA_DSP_REG_HIPCCTL_DONE, 0); 317 } 318 319 static int hda_dsp_wait_d0i3c_done(struct snd_sof_dev *sdev) 320 { 321 struct hdac_bus *bus = sof_to_bus(sdev); 322 int retry = HDA_DSP_REG_POLL_RETRY_COUNT; 323 324 while (snd_hdac_chip_readb(bus, VS_D0I3C) & SOF_HDA_VS_D0I3C_CIP) { 325 if (!retry--) 326 return -ETIMEDOUT; 327 usleep_range(10, 15); 328 } 329 330 return 0; 331 } 332 333 static int hda_dsp_send_pm_gate_ipc(struct snd_sof_dev *sdev, u32 flags) 334 { 335 struct sof_ipc_pm_gate pm_gate; 336 struct sof_ipc_reply reply; 337 338 memset(&pm_gate, 0, sizeof(pm_gate)); 339 340 /* configure pm_gate ipc message */ 341 pm_gate.hdr.size = sizeof(pm_gate); 342 pm_gate.hdr.cmd = SOF_IPC_GLB_PM_MSG | SOF_IPC_PM_GATE; 343 pm_gate.flags = flags; 344 345 /* send pm_gate ipc to dsp */ 346 return sof_ipc_tx_message_no_pm(sdev->ipc, pm_gate.hdr.cmd, 347 &pm_gate, sizeof(pm_gate), &reply, 348 sizeof(reply)); 349 } 350 351 static int hda_dsp_update_d0i3c_register(struct snd_sof_dev *sdev, u8 value) 352 { 353 struct hdac_bus *bus = sof_to_bus(sdev); 354 int ret; 355 356 /* Write to D0I3C after Command-In-Progress bit is cleared */ 357 ret = hda_dsp_wait_d0i3c_done(sdev); 358 if (ret < 0) { 359 dev_err(bus->dev, "CIP timeout before D0I3C update!\n"); 360 return ret; 361 } 362 363 /* Update D0I3C register */ 364 snd_hdac_chip_updateb(bus, VS_D0I3C, SOF_HDA_VS_D0I3C_I3, value); 365 366 /* Wait for cmd in progress to be cleared before exiting the function */ 367 ret = hda_dsp_wait_d0i3c_done(sdev); 368 if (ret < 0) { 369 dev_err(bus->dev, "CIP timeout after D0I3C update!\n"); 370 return ret; 371 } 372 373 dev_vdbg(bus->dev, "D0I3C updated, register = 0x%x\n", 374 snd_hdac_chip_readb(bus, VS_D0I3C)); 375 376 return 0; 377 } 378 379 static int hda_dsp_set_D0_state(struct snd_sof_dev *sdev, 380 const struct sof_dsp_power_state *target_state) 381 { 382 u32 flags = 0; 383 int ret; 384 u8 value = 0; 385 386 /* 387 * Sanity check for illegal state transitions 388 * The only allowed transitions are: 389 * 1. D3 -> D0I0 390 * 2. D0I0 -> D0I3 391 * 3. D0I3 -> D0I0 392 */ 393 switch (sdev->dsp_power_state.state) { 394 case SOF_DSP_PM_D0: 395 /* Follow the sequence below for D0 substate transitions */ 396 break; 397 case SOF_DSP_PM_D3: 398 /* Follow regular flow for D3 -> D0 transition */ 399 return 0; 400 default: 401 dev_err(sdev->dev, "error: transition from %d to %d not allowed\n", 402 sdev->dsp_power_state.state, target_state->state); 403 return -EINVAL; 404 } 405 406 /* Set flags and register value for D0 target substate */ 407 if (target_state->substate == SOF_HDA_DSP_PM_D0I3) { 408 value = SOF_HDA_VS_D0I3C_I3; 409 410 /* 411 * Trace DMA need to be disabled when the DSP enters 412 * D0I3 for S0Ix suspend, but it can be kept enabled 413 * when the DSP enters D0I3 while the system is in S0 414 * for debug purpose. 415 */ 416 if (!sdev->dtrace_is_supported || 417 !hda_enable_trace_D0I3_S0 || 418 sdev->system_suspend_target != SOF_SUSPEND_NONE) 419 flags = HDA_PM_NO_DMA_TRACE; 420 } else { 421 /* prevent power gating in D0I0 */ 422 flags = HDA_PM_PPG; 423 } 424 425 /* update D0I3C register */ 426 ret = hda_dsp_update_d0i3c_register(sdev, value); 427 if (ret < 0) 428 return ret; 429 430 /* 431 * Notify the DSP of the state change. 432 * If this IPC fails, revert the D0I3C register update in order 433 * to prevent partial state change. 434 */ 435 ret = hda_dsp_send_pm_gate_ipc(sdev, flags); 436 if (ret < 0) { 437 dev_err(sdev->dev, 438 "error: PM_GATE ipc error %d\n", ret); 439 goto revert; 440 } 441 442 return ret; 443 444 revert: 445 /* fallback to the previous register value */ 446 value = value ? 0 : SOF_HDA_VS_D0I3C_I3; 447 448 /* 449 * This can fail but return the IPC error to signal that 450 * the state change failed. 451 */ 452 hda_dsp_update_d0i3c_register(sdev, value); 453 454 return ret; 455 } 456 457 /* helper to log DSP state */ 458 static void hda_dsp_state_log(struct snd_sof_dev *sdev) 459 { 460 switch (sdev->dsp_power_state.state) { 461 case SOF_DSP_PM_D0: 462 switch (sdev->dsp_power_state.substate) { 463 case SOF_HDA_DSP_PM_D0I0: 464 dev_dbg(sdev->dev, "Current DSP power state: D0I0\n"); 465 break; 466 case SOF_HDA_DSP_PM_D0I3: 467 dev_dbg(sdev->dev, "Current DSP power state: D0I3\n"); 468 break; 469 default: 470 dev_dbg(sdev->dev, "Unknown DSP D0 substate: %d\n", 471 sdev->dsp_power_state.substate); 472 break; 473 } 474 break; 475 case SOF_DSP_PM_D1: 476 dev_dbg(sdev->dev, "Current DSP power state: D1\n"); 477 break; 478 case SOF_DSP_PM_D2: 479 dev_dbg(sdev->dev, "Current DSP power state: D2\n"); 480 break; 481 case SOF_DSP_PM_D3_HOT: 482 dev_dbg(sdev->dev, "Current DSP power state: D3_HOT\n"); 483 break; 484 case SOF_DSP_PM_D3: 485 dev_dbg(sdev->dev, "Current DSP power state: D3\n"); 486 break; 487 case SOF_DSP_PM_D3_COLD: 488 dev_dbg(sdev->dev, "Current DSP power state: D3_COLD\n"); 489 break; 490 default: 491 dev_dbg(sdev->dev, "Unknown DSP power state: %d\n", 492 sdev->dsp_power_state.state); 493 break; 494 } 495 } 496 497 /* 498 * All DSP power state transitions are initiated by the driver. 499 * If the requested state change fails, the error is simply returned. 500 * Further state transitions are attempted only when the set_power_save() op 501 * is called again either because of a new IPC sent to the DSP or 502 * during system suspend/resume. 503 */ 504 int hda_dsp_set_power_state(struct snd_sof_dev *sdev, 505 const struct sof_dsp_power_state *target_state) 506 { 507 int ret = 0; 508 509 /* 510 * When the DSP is already in D0I3 and the target state is D0I3, 511 * it could be the case that the DSP is in D0I3 during S0 512 * and the system is suspending to S0Ix. Therefore, 513 * hda_dsp_set_D0_state() must be called to disable trace DMA 514 * by sending the PM_GATE IPC to the FW. 515 */ 516 if (target_state->substate == SOF_HDA_DSP_PM_D0I3 && 517 sdev->system_suspend_target == SOF_SUSPEND_S0IX) 518 goto set_state; 519 520 /* 521 * For all other cases, return without doing anything if 522 * the DSP is already in the target state. 523 */ 524 if (target_state->state == sdev->dsp_power_state.state && 525 target_state->substate == sdev->dsp_power_state.substate) 526 return 0; 527 528 set_state: 529 switch (target_state->state) { 530 case SOF_DSP_PM_D0: 531 ret = hda_dsp_set_D0_state(sdev, target_state); 532 break; 533 case SOF_DSP_PM_D3: 534 /* The only allowed transition is: D0I0 -> D3 */ 535 if (sdev->dsp_power_state.state == SOF_DSP_PM_D0 && 536 sdev->dsp_power_state.substate == SOF_HDA_DSP_PM_D0I0) 537 break; 538 539 dev_err(sdev->dev, 540 "error: transition from %d to %d not allowed\n", 541 sdev->dsp_power_state.state, target_state->state); 542 return -EINVAL; 543 default: 544 dev_err(sdev->dev, "error: target state unsupported %d\n", 545 target_state->state); 546 return -EINVAL; 547 } 548 if (ret < 0) { 549 dev_err(sdev->dev, 550 "failed to set requested target DSP state %d substate %d\n", 551 target_state->state, target_state->substate); 552 return ret; 553 } 554 555 sdev->dsp_power_state = *target_state; 556 hda_dsp_state_log(sdev); 557 return ret; 558 } 559 560 /* 561 * Audio DSP states may transform as below:- 562 * 563 * Opportunistic D0I3 in S0 564 * Runtime +---------------------+ Delayed D0i3 work timeout 565 * suspend | +--------------------+ 566 * +------------+ D0I0(active) | | 567 * | | <---------------+ | 568 * | +--------> | New IPC | | 569 * | |Runtime +--^--+---------^--+--+ (via mailbox) | | 570 * | |resume | | | | | | 571 * | | | | | | | | 572 * | | System| | | | | | 573 * | | resume| | S3/S0IX | | | | 574 * | | | | suspend | | S0IX | | 575 * | | | | | |suspend | | 576 * | | | | | | | | 577 * | | | | | | | | 578 * +-v---+-----------+--v-------+ | | +------+----v----+ 579 * | | | +-----------> | 580 * | D3 (suspended) | | | D0I3 | 581 * | | +--------------+ | 582 * | | System resume | | 583 * +----------------------------+ +----------------+ 584 * 585 * S0IX suspend: The DSP is in D0I3 if any D0I3-compatible streams 586 * ignored the suspend trigger. Otherwise the DSP 587 * is in D3. 588 */ 589 590 static int hda_suspend(struct snd_sof_dev *sdev, bool runtime_suspend) 591 { 592 struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata; 593 const struct sof_intel_dsp_desc *chip = hda->desc; 594 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA) 595 struct hdac_bus *bus = sof_to_bus(sdev); 596 #endif 597 int ret; 598 599 hda_sdw_int_enable(sdev, false); 600 601 /* disable IPC interrupts */ 602 hda_dsp_ipc_int_disable(sdev); 603 604 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA) 605 if (runtime_suspend) 606 hda_codec_jack_wake_enable(sdev); 607 608 /* power down all hda link */ 609 snd_hdac_ext_bus_link_power_down_all(bus); 610 #endif 611 612 /* power down DSP */ 613 ret = hda_dsp_core_reset_power_down(sdev, chip->cores_mask); 614 if (ret < 0) { 615 dev_err(sdev->dev, 616 "error: failed to power down core during suspend\n"); 617 return ret; 618 } 619 620 /* disable ppcap interrupt */ 621 hda_dsp_ctrl_ppcap_enable(sdev, false); 622 hda_dsp_ctrl_ppcap_int_enable(sdev, false); 623 624 /* disable hda bus irq and streams */ 625 hda_dsp_ctrl_stop_chip(sdev); 626 627 /* disable LP retention mode */ 628 snd_sof_pci_update_bits(sdev, PCI_PGCTL, 629 PCI_PGCTL_LSRMD_MASK, PCI_PGCTL_LSRMD_MASK); 630 631 /* reset controller */ 632 ret = hda_dsp_ctrl_link_reset(sdev, true); 633 if (ret < 0) { 634 dev_err(sdev->dev, 635 "error: failed to reset controller during suspend\n"); 636 return ret; 637 } 638 639 /* display codec can powered off after link reset */ 640 hda_codec_i915_display_power(sdev, false); 641 642 return 0; 643 } 644 645 static int hda_resume(struct snd_sof_dev *sdev, bool runtime_resume) 646 { 647 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA) 648 struct hdac_bus *bus = sof_to_bus(sdev); 649 struct hdac_ext_link *hlink = NULL; 650 #endif 651 int ret; 652 653 /* display codec must be powered before link reset */ 654 hda_codec_i915_display_power(sdev, true); 655 656 /* 657 * clear TCSEL to clear playback on some HD Audio 658 * codecs. PCI TCSEL is defined in the Intel manuals. 659 */ 660 snd_sof_pci_update_bits(sdev, PCI_TCSEL, 0x07, 0); 661 662 /* reset and start hda controller */ 663 ret = hda_dsp_ctrl_init_chip(sdev, true); 664 if (ret < 0) { 665 dev_err(sdev->dev, 666 "error: failed to start controller after resume\n"); 667 return ret; 668 } 669 670 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA) 671 /* check jack status */ 672 if (runtime_resume) 673 hda_codec_jack_check(sdev); 674 675 /* turn off the links that were off before suspend */ 676 list_for_each_entry(hlink, &bus->hlink_list, list) { 677 if (!hlink->ref_count) 678 snd_hdac_ext_bus_link_power_down(hlink); 679 } 680 681 /* check dma status and clean up CORB/RIRB buffers */ 682 if (!bus->cmd_dma_state) 683 snd_hdac_bus_stop_cmd_io(bus); 684 #endif 685 686 /* enable ppcap interrupt */ 687 hda_dsp_ctrl_ppcap_enable(sdev, true); 688 hda_dsp_ctrl_ppcap_int_enable(sdev, true); 689 690 return 0; 691 } 692 693 int hda_dsp_resume(struct snd_sof_dev *sdev) 694 { 695 struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata; 696 struct pci_dev *pci = to_pci_dev(sdev->dev); 697 const struct sof_dsp_power_state target_state = { 698 .state = SOF_DSP_PM_D0, 699 .substate = SOF_HDA_DSP_PM_D0I0, 700 }; 701 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA) 702 struct hdac_bus *bus = sof_to_bus(sdev); 703 struct hdac_ext_link *hlink = NULL; 704 #endif 705 int ret; 706 707 /* resume from D0I3 */ 708 if (sdev->dsp_power_state.state == SOF_DSP_PM_D0) { 709 hda_codec_i915_display_power(sdev, true); 710 711 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA) 712 /* power up links that were active before suspend */ 713 list_for_each_entry(hlink, &bus->hlink_list, list) { 714 if (hlink->ref_count) { 715 ret = snd_hdac_ext_bus_link_power_up(hlink); 716 if (ret < 0) { 717 dev_dbg(sdev->dev, 718 "error %x in %s: failed to power up links", 719 ret, __func__); 720 return ret; 721 } 722 } 723 } 724 725 /* set up CORB/RIRB buffers if was on before suspend */ 726 if (bus->cmd_dma_state) 727 snd_hdac_bus_init_cmd_io(bus); 728 #endif 729 730 /* Set DSP power state */ 731 ret = snd_sof_dsp_set_power_state(sdev, &target_state); 732 if (ret < 0) { 733 dev_err(sdev->dev, "error: setting dsp state %d substate %d\n", 734 target_state.state, target_state.substate); 735 return ret; 736 } 737 738 /* restore L1SEN bit */ 739 if (hda->l1_support_changed) 740 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 741 HDA_VS_INTEL_EM2, 742 HDA_VS_INTEL_EM2_L1SEN, 0); 743 744 /* restore and disable the system wakeup */ 745 pci_restore_state(pci); 746 disable_irq_wake(pci->irq); 747 return 0; 748 } 749 750 /* init hda controller. DSP cores will be powered up during fw boot */ 751 ret = hda_resume(sdev, false); 752 if (ret < 0) 753 return ret; 754 755 return snd_sof_dsp_set_power_state(sdev, &target_state); 756 } 757 758 int hda_dsp_runtime_resume(struct snd_sof_dev *sdev) 759 { 760 const struct sof_dsp_power_state target_state = { 761 .state = SOF_DSP_PM_D0, 762 }; 763 int ret; 764 765 /* init hda controller. DSP cores will be powered up during fw boot */ 766 ret = hda_resume(sdev, true); 767 if (ret < 0) 768 return ret; 769 770 return snd_sof_dsp_set_power_state(sdev, &target_state); 771 } 772 773 int hda_dsp_runtime_idle(struct snd_sof_dev *sdev) 774 { 775 struct hdac_bus *hbus = sof_to_bus(sdev); 776 777 if (hbus->codec_powered) { 778 dev_dbg(sdev->dev, "some codecs still powered (%08X), not idle\n", 779 (unsigned int)hbus->codec_powered); 780 return -EBUSY; 781 } 782 783 return 0; 784 } 785 786 int hda_dsp_runtime_suspend(struct snd_sof_dev *sdev) 787 { 788 const struct sof_dsp_power_state target_state = { 789 .state = SOF_DSP_PM_D3, 790 }; 791 int ret; 792 793 /* stop hda controller and power dsp off */ 794 ret = hda_suspend(sdev, true); 795 if (ret < 0) 796 return ret; 797 798 return snd_sof_dsp_set_power_state(sdev, &target_state); 799 } 800 801 int hda_dsp_suspend(struct snd_sof_dev *sdev, u32 target_state) 802 { 803 struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata; 804 struct hdac_bus *bus = sof_to_bus(sdev); 805 struct pci_dev *pci = to_pci_dev(sdev->dev); 806 const struct sof_dsp_power_state target_dsp_state = { 807 .state = target_state, 808 .substate = target_state == SOF_DSP_PM_D0 ? 809 SOF_HDA_DSP_PM_D0I3 : 0, 810 }; 811 int ret; 812 813 /* cancel any attempt for DSP D0I3 */ 814 cancel_delayed_work_sync(&hda->d0i3_work); 815 816 if (target_state == SOF_DSP_PM_D0) { 817 /* we can't keep a wakeref to display driver at suspend */ 818 hda_codec_i915_display_power(sdev, false); 819 820 /* Set DSP power state */ 821 ret = snd_sof_dsp_set_power_state(sdev, &target_dsp_state); 822 if (ret < 0) { 823 dev_err(sdev->dev, "error: setting dsp state %d substate %d\n", 824 target_dsp_state.state, 825 target_dsp_state.substate); 826 return ret; 827 } 828 829 /* enable L1SEN to make sure the system can enter S0Ix */ 830 hda->l1_support_changed = 831 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 832 HDA_VS_INTEL_EM2, 833 HDA_VS_INTEL_EM2_L1SEN, 834 HDA_VS_INTEL_EM2_L1SEN); 835 836 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA) 837 /* stop the CORB/RIRB DMA if it is On */ 838 if (bus->cmd_dma_state) 839 snd_hdac_bus_stop_cmd_io(bus); 840 841 /* no link can be powered in s0ix state */ 842 ret = snd_hdac_ext_bus_link_power_down_all(bus); 843 if (ret < 0) { 844 dev_dbg(sdev->dev, 845 "error %d in %s: failed to power down links", 846 ret, __func__); 847 return ret; 848 } 849 #endif 850 851 /* enable the system waking up via IPC IRQ */ 852 enable_irq_wake(pci->irq); 853 pci_save_state(pci); 854 return 0; 855 } 856 857 /* stop hda controller and power dsp off */ 858 ret = hda_suspend(sdev, false); 859 if (ret < 0) { 860 dev_err(bus->dev, "error: suspending dsp\n"); 861 return ret; 862 } 863 864 return snd_sof_dsp_set_power_state(sdev, &target_dsp_state); 865 } 866 867 int hda_dsp_set_hw_params_upon_resume(struct snd_sof_dev *sdev) 868 { 869 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA) 870 struct hdac_bus *bus = sof_to_bus(sdev); 871 struct snd_soc_pcm_runtime *rtd; 872 struct hdac_ext_stream *stream; 873 struct hdac_ext_link *link; 874 struct hdac_stream *s; 875 const char *name; 876 int stream_tag; 877 878 /* set internal flag for BE */ 879 list_for_each_entry(s, &bus->stream_list, list) { 880 stream = stream_to_hdac_ext_stream(s); 881 882 /* 883 * clear stream. This should already be taken care for running 884 * streams when the SUSPEND trigger is called. But paused 885 * streams do not get suspended, so this needs to be done 886 * explicitly during suspend. 887 */ 888 if (stream->link_substream) { 889 rtd = asoc_substream_to_rtd(stream->link_substream); 890 name = asoc_rtd_to_codec(rtd, 0)->component->name; 891 link = snd_hdac_ext_bus_get_link(bus, name); 892 if (!link) 893 return -EINVAL; 894 895 stream->link_prepared = 0; 896 897 if (hdac_stream(stream)->direction == 898 SNDRV_PCM_STREAM_CAPTURE) 899 continue; 900 901 stream_tag = hdac_stream(stream)->stream_tag; 902 snd_hdac_ext_link_clear_stream_id(link, stream_tag); 903 } 904 } 905 #endif 906 return 0; 907 } 908 909 void hda_dsp_d0i3_work(struct work_struct *work) 910 { 911 struct sof_intel_hda_dev *hdev = container_of(work, 912 struct sof_intel_hda_dev, 913 d0i3_work.work); 914 struct hdac_bus *bus = &hdev->hbus.core; 915 struct snd_sof_dev *sdev = dev_get_drvdata(bus->dev); 916 struct sof_dsp_power_state target_state; 917 int ret; 918 919 target_state.state = SOF_DSP_PM_D0; 920 921 /* DSP can enter D0I3 iff only D0I3-compatible streams are active */ 922 if (snd_sof_dsp_only_d0i3_compatible_stream_active(sdev)) 923 target_state.substate = SOF_HDA_DSP_PM_D0I3; 924 else 925 target_state.substate = SOF_HDA_DSP_PM_D0I0; 926 927 /* remain in D0I0 */ 928 if (target_state.substate == SOF_HDA_DSP_PM_D0I0) 929 return; 930 931 /* This can fail but error cannot be propagated */ 932 ret = snd_sof_dsp_set_power_state(sdev, &target_state); 933 if (ret < 0) 934 dev_err_ratelimited(sdev->dev, 935 "error: failed to set DSP state %d substate %d\n", 936 target_state.state, target_state.substate); 937 } 938