// SPDX-License-Identifier: GPL-2.0
// Copyright 2019 NXP

#include <linux/bitrev.h>
#include <linux/clk.h>
#include <linux/firmware.h>
#include <linux/interrupt.h>
#include <linux/module.h>
#include <linux/of_platform.h>
#include <linux/pm_runtime.h>
#include <linux/regmap.h>
#include <linux/reset.h>
#include <sound/dmaengine_pcm.h>
#include <sound/pcm_iec958.h>
#include <sound/pcm_params.h>

#include "fsl_xcvr.h"
#include "imx-pcm.h"

#define FSL_XCVR_CAPDS_SIZE	256

struct fsl_xcvr_soc_data {
	const char *fw_name;
};

struct fsl_xcvr {
	const struct fsl_xcvr_soc_data *soc_data;
	struct platform_device *pdev;
	struct regmap *regmap;
	struct clk *ipg_clk;
	struct clk *pll_ipg_clk;
	struct clk *phy_clk;
	struct clk *spba_clk;
	struct reset_control *reset;
	u8 streams;
	u32 mode;
	u32 arc_mode;
	void __iomem *ram_addr;
	struct snd_dmaengine_dai_dma_data dma_prms_rx;
	struct snd_dmaengine_dai_dma_data dma_prms_tx;
	struct snd_aes_iec958 rx_iec958;
	struct snd_aes_iec958 tx_iec958;
	u8 cap_ds[FSL_XCVR_CAPDS_SIZE];
};

static const struct fsl_xcvr_pll_conf {
	u8 mfi;   /* min=0x18, max=0x38 */
	u32 mfn;  /* signed int, 2's compl., min=0x3FFF0000, max=0x00010000 */
	u32 mfd;  /* unsigned int */
	u32 fout; /* Fout = Fref*(MFI + MFN/MFD), Fref is 24MHz */
} fsl_xcvr_pll_cfg[] = {
	{ .mfi = 54, .mfn = 1,  .mfd = 6,   .fout = 1300000000, }, /* 1.3 GHz */
	{ .mfi = 32, .mfn = 96, .mfd = 125, .fout = 786432000, },  /* 8000 Hz */
	{ .mfi = 30, .mfn = 66, .mfd = 625, .fout = 722534400, },  /* 11025 Hz */
	{ .mfi = 29, .mfn = 1,  .mfd = 6,   .fout = 700000000, },  /* 700 MHz */
};

/*
 * HDMI2.1 spec defines 6- and 12-channels layout for one bit audio
 * stream.
 * Todo: to check how this case can be considered below
 */
static const u32 fsl_xcvr_earc_channels[] = { 1, 2, 8, 16, 32, };
static const struct snd_pcm_hw_constraint_list fsl_xcvr_earc_channels_constr = {
	.count = ARRAY_SIZE(fsl_xcvr_earc_channels),
	.list = fsl_xcvr_earc_channels,
};

static const u32 fsl_xcvr_earc_rates[] = {
	32000, 44100, 48000, 64000, 88200, 96000,
	128000, 176400, 192000, 256000, 352800, 384000,
	512000, 705600, 768000, 1024000, 1411200, 1536000,
};
static const struct snd_pcm_hw_constraint_list fsl_xcvr_earc_rates_constr = {
	.count = ARRAY_SIZE(fsl_xcvr_earc_rates),
	.list = fsl_xcvr_earc_rates,
};

static const u32 fsl_xcvr_spdif_channels[] = { 2, };
static const struct snd_pcm_hw_constraint_list fsl_xcvr_spdif_channels_constr = {
	.count = ARRAY_SIZE(fsl_xcvr_spdif_channels),
	.list = fsl_xcvr_spdif_channels,
};

static const u32 fsl_xcvr_spdif_rates[] = {
	32000, 44100, 48000, 88200, 96000, 176400, 192000,
};
static const struct snd_pcm_hw_constraint_list fsl_xcvr_spdif_rates_constr = {
	.count = ARRAY_SIZE(fsl_xcvr_spdif_rates),
	.list = fsl_xcvr_spdif_rates,
};

static int fsl_xcvr_arc_mode_put(struct snd_kcontrol *kcontrol,
				 struct snd_ctl_elem_value *ucontrol)
{
	struct snd_soc_dai *dai = snd_kcontrol_chip(kcontrol);
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);
	struct soc_enum *e = (struct soc_enum *)kcontrol->private_value;
	unsigned int *item = ucontrol->value.enumerated.item;

	xcvr->arc_mode = snd_soc_enum_item_to_val(e, item[0]);

	return 0;
}

static int fsl_xcvr_arc_mode_get(struct snd_kcontrol *kcontrol,
				 struct snd_ctl_elem_value *ucontrol)
{
	struct snd_soc_dai *dai = snd_kcontrol_chip(kcontrol);
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);

	ucontrol->value.enumerated.item[0] = xcvr->arc_mode;

	return 0;
}

static const u32 fsl_xcvr_phy_arc_cfg[] = {
	FSL_XCVR_PHY_CTRL_ARC_MODE_SE_EN, FSL_XCVR_PHY_CTRL_ARC_MODE_CM_EN,
};

static const char * const fsl_xcvr_arc_mode[] = { "Single Ended", "Common", };
static const struct soc_enum fsl_xcvr_arc_mode_enum =
	SOC_ENUM_SINGLE_EXT(ARRAY_SIZE(fsl_xcvr_arc_mode), fsl_xcvr_arc_mode);
static struct snd_kcontrol_new fsl_xcvr_arc_mode_kctl =
	SOC_ENUM_EXT("ARC Mode", fsl_xcvr_arc_mode_enum,
		     fsl_xcvr_arc_mode_get, fsl_xcvr_arc_mode_put);

/* Capabilities data structure, bytes */
static int fsl_xcvr_type_capds_bytes_info(struct snd_kcontrol *kcontrol,
					  struct snd_ctl_elem_info *uinfo)
{
	uinfo->type = SNDRV_CTL_ELEM_TYPE_BYTES;
	uinfo->count = FSL_XCVR_CAPDS_SIZE;

	return 0;
}

static int fsl_xcvr_capds_get(struct snd_kcontrol *kcontrol,
			      struct snd_ctl_elem_value *ucontrol)
{
	struct snd_soc_dai *dai = snd_kcontrol_chip(kcontrol);
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);

	memcpy(ucontrol->value.bytes.data, xcvr->cap_ds, FSL_XCVR_CAPDS_SIZE);

	return 0;
}

static int fsl_xcvr_capds_put(struct snd_kcontrol *kcontrol,
			      struct snd_ctl_elem_value *ucontrol)
{
	struct snd_soc_dai *dai = snd_kcontrol_chip(kcontrol);
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);

	memcpy(xcvr->cap_ds, ucontrol->value.bytes.data, FSL_XCVR_CAPDS_SIZE);

	return 0;
}

static struct snd_kcontrol_new fsl_xcvr_earc_capds_kctl = {
	.iface = SNDRV_CTL_ELEM_IFACE_PCM,
	.name = "Capabilities Data Structure",
Structure", 162 .access = SNDRV_CTL_ELEM_ACCESS_READWRITE, 163 .info = fsl_xcvr_type_capds_bytes_info, 164 .get = fsl_xcvr_capds_get, 165 .put = fsl_xcvr_capds_put, 166 }; 167 168 static int fsl_xcvr_activate_ctl(struct snd_soc_dai *dai, const char *name, 169 bool active) 170 { 171 struct snd_soc_card *card = dai->component->card; 172 struct snd_kcontrol *kctl; 173 bool enabled; 174 175 kctl = snd_soc_card_get_kcontrol(card, name); 176 if (kctl == NULL) 177 return -ENOENT; 178 179 enabled = ((kctl->vd[0].access & SNDRV_CTL_ELEM_ACCESS_WRITE) != 0); 180 if (active == enabled) 181 return 0; /* nothing to do */ 182 183 if (active) 184 kctl->vd[0].access |= SNDRV_CTL_ELEM_ACCESS_WRITE; 185 else 186 kctl->vd[0].access &= ~SNDRV_CTL_ELEM_ACCESS_WRITE; 187 188 snd_ctl_notify(card->snd_card, SNDRV_CTL_EVENT_MASK_INFO, &kctl->id); 189 190 return 1; 191 } 192 193 static int fsl_xcvr_mode_put(struct snd_kcontrol *kcontrol, 194 struct snd_ctl_elem_value *ucontrol) 195 { 196 struct snd_soc_dai *dai = snd_kcontrol_chip(kcontrol); 197 struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai); 198 struct soc_enum *e = (struct soc_enum *)kcontrol->private_value; 199 unsigned int *item = ucontrol->value.enumerated.item; 200 struct snd_soc_card *card = dai->component->card; 201 struct snd_soc_pcm_runtime *rtd; 202 203 xcvr->mode = snd_soc_enum_item_to_val(e, item[0]); 204 205 fsl_xcvr_activate_ctl(dai, fsl_xcvr_arc_mode_kctl.name, 206 (xcvr->mode == FSL_XCVR_MODE_ARC)); 207 fsl_xcvr_activate_ctl(dai, fsl_xcvr_earc_capds_kctl.name, 208 (xcvr->mode == FSL_XCVR_MODE_EARC)); 209 /* Allow playback for SPDIF only */ 210 rtd = snd_soc_get_pcm_runtime(card, card->dai_link); 211 rtd->pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream_count = 212 (xcvr->mode == FSL_XCVR_MODE_SPDIF ? 1 : 0); 213 return 0; 214 } 215 216 static int fsl_xcvr_mode_get(struct snd_kcontrol *kcontrol, 217 struct snd_ctl_elem_value *ucontrol) 218 { 219 struct snd_soc_dai *dai = snd_kcontrol_chip(kcontrol); 220 struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai); 221 222 ucontrol->value.enumerated.item[0] = xcvr->mode; 223 224 return 0; 225 } 226 227 static const char * const fsl_xcvr_mode[] = { "SPDIF", "ARC RX", "eARC", }; 228 static const struct soc_enum fsl_xcvr_mode_enum = 229 SOC_ENUM_SINGLE_EXT(ARRAY_SIZE(fsl_xcvr_mode), fsl_xcvr_mode); 230 static struct snd_kcontrol_new fsl_xcvr_mode_kctl = 231 SOC_ENUM_EXT("XCVR Mode", fsl_xcvr_mode_enum, 232 fsl_xcvr_mode_get, fsl_xcvr_mode_put); 233 234 /** phy: true => phy, false => pll */ 235 static int fsl_xcvr_ai_write(struct fsl_xcvr *xcvr, u8 reg, u32 data, bool phy) 236 { 237 struct device *dev = &xcvr->pdev->dev; 238 u32 val, idx, tidx; 239 int ret; 240 241 idx = BIT(phy ? 26 : 24); 242 tidx = BIT(phy ? 27 : 25); 243 244 regmap_write(xcvr->regmap, FSL_XCVR_PHY_AI_CTRL_CLR, 0xFF); 245 regmap_write(xcvr->regmap, FSL_XCVR_PHY_AI_CTRL_SET, reg); 246 regmap_write(xcvr->regmap, FSL_XCVR_PHY_AI_WDATA, data); 247 regmap_write(xcvr->regmap, FSL_XCVR_PHY_AI_CTRL_TOG, idx); 248 249 ret = regmap_read_poll_timeout(xcvr->regmap, FSL_XCVR_PHY_AI_CTRL, val, 250 (val & idx) == ((val & tidx) >> 1), 251 10, 10000); 252 if (ret) 253 dev_err(dev, "AI timeout: failed to set %s reg 0x%02x=0x%08x\n", 254 phy ? 
"PHY" : "PLL", reg, data); 255 return ret; 256 } 257 258 static int fsl_xcvr_en_phy_pll(struct fsl_xcvr *xcvr, u32 freq, bool tx) 259 { 260 struct device *dev = &xcvr->pdev->dev; 261 u32 i, div = 0, log2; 262 int ret; 263 264 for (i = 0; i < ARRAY_SIZE(fsl_xcvr_pll_cfg); i++) { 265 if (fsl_xcvr_pll_cfg[i].fout % freq == 0) { 266 div = fsl_xcvr_pll_cfg[i].fout / freq; 267 break; 268 } 269 } 270 271 if (!div || i >= ARRAY_SIZE(fsl_xcvr_pll_cfg)) 272 return -EINVAL; 273 274 log2 = ilog2(div); 275 276 /* Release AI interface from reset */ 277 ret = regmap_write(xcvr->regmap, FSL_XCVR_PHY_AI_CTRL_SET, 278 FSL_XCVR_PHY_AI_CTRL_AI_RESETN); 279 if (ret < 0) { 280 dev_err(dev, "Error while setting IER0: %d\n", ret); 281 return ret; 282 } 283 284 /* PLL: BANDGAP_SET: EN_VBG (enable bandgap) */ 285 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_BANDGAP_SET, 286 FSL_XCVR_PLL_BANDGAP_EN_VBG, 0); 287 288 /* PLL: CTRL0: DIV_INTEGER */ 289 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_CTRL0, fsl_xcvr_pll_cfg[i].mfi, 0); 290 /* PLL: NUMERATOR: MFN */ 291 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_NUM, fsl_xcvr_pll_cfg[i].mfn, 0); 292 /* PLL: DENOMINATOR: MFD */ 293 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_DEN, fsl_xcvr_pll_cfg[i].mfd, 0); 294 /* PLL: CTRL0_SET: HOLD_RING_OFF, POWER_UP */ 295 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_CTRL0_SET, 296 FSL_XCVR_PLL_CTRL0_HROFF | FSL_XCVR_PLL_CTRL0_PWP, 0); 297 udelay(25); 298 /* PLL: CTRL0: Clear Hold Ring Off */ 299 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_CTRL0_CLR, 300 FSL_XCVR_PLL_CTRL0_HROFF, 0); 301 udelay(100); 302 if (tx) { /* TX is enabled for SPDIF only */ 303 /* PLL: POSTDIV: PDIV0 */ 304 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_PDIV, 305 FSL_XCVR_PLL_PDIVx(log2, 0), 0); 306 /* PLL: CTRL_SET: CLKMUX0_EN */ 307 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_CTRL0_SET, 308 FSL_XCVR_PLL_CTRL0_CM0_EN, 0); 309 } else if (xcvr->mode == FSL_XCVR_MODE_EARC) { /* eARC RX */ 310 /* PLL: POSTDIV: PDIV1 */ 311 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_PDIV, 312 FSL_XCVR_PLL_PDIVx(log2, 1), 0); 313 /* PLL: CTRL_SET: CLKMUX1_EN */ 314 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_CTRL0_SET, 315 FSL_XCVR_PLL_CTRL0_CM1_EN, 0); 316 } else { /* SPDIF / ARC RX */ 317 /* PLL: POSTDIV: PDIV2 */ 318 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_PDIV, 319 FSL_XCVR_PLL_PDIVx(log2, 2), 0); 320 /* PLL: CTRL_SET: CLKMUX2_EN */ 321 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_CTRL0_SET, 322 FSL_XCVR_PLL_CTRL0_CM2_EN, 0); 323 } 324 325 if (xcvr->mode == FSL_XCVR_MODE_EARC) { /* eARC mode */ 326 /* PHY: CTRL_SET: TX_DIFF_OE, PHY_EN */ 327 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PHY_CTRL_SET, 328 FSL_XCVR_PHY_CTRL_TSDIFF_OE | 329 FSL_XCVR_PHY_CTRL_PHY_EN, 1); 330 /* PHY: CTRL2_SET: EARC_TX_MODE */ 331 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PHY_CTRL2_SET, 332 FSL_XCVR_PHY_CTRL2_EARC_TXMS, 1); 333 } else if (!tx) { /* SPDIF / ARC RX mode */ 334 if (xcvr->mode == FSL_XCVR_MODE_SPDIF) 335 /* PHY: CTRL_SET: SPDIF_EN */ 336 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PHY_CTRL_SET, 337 FSL_XCVR_PHY_CTRL_SPDIF_EN, 1); 338 else /* PHY: CTRL_SET: ARC RX setup */ 339 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PHY_CTRL_SET, 340 FSL_XCVR_PHY_CTRL_PHY_EN | 341 FSL_XCVR_PHY_CTRL_RX_CM_EN | 342 fsl_xcvr_phy_arc_cfg[xcvr->arc_mode], 1); 343 } 344 345 dev_dbg(dev, "PLL Fexp: %u, Fout: %u, mfi: %u, mfn: %u, mfd: %d, div: %u, pdiv0: %u\n", 346 freq, fsl_xcvr_pll_cfg[i].fout, fsl_xcvr_pll_cfg[i].mfi, 347 fsl_xcvr_pll_cfg[i].mfn, fsl_xcvr_pll_cfg[i].mfd, div, log2); 348 return 0; 349 } 350 351 static int fsl_xcvr_en_aud_pll(struct fsl_xcvr *xcvr, u32 freq) 352 { 353 struct device *dev = &xcvr->pdev->dev; 354 int ret; 

	clk_disable_unprepare(xcvr->phy_clk);
	ret = clk_set_rate(xcvr->phy_clk, freq);
	if (ret < 0) {
		dev_err(dev, "Error while setting AUD PLL rate: %d\n", ret);
		return ret;
	}
	ret = clk_prepare_enable(xcvr->phy_clk);
	if (ret) {
		dev_err(dev, "failed to start PHY clock: %d\n", ret);
		return ret;
	}

	/* Release AI interface from reset */
	ret = regmap_write(xcvr->regmap, FSL_XCVR_PHY_AI_CTRL_SET,
			   FSL_XCVR_PHY_AI_CTRL_AI_RESETN);
	if (ret < 0) {
		dev_err(dev, "Error while setting IER0: %d\n", ret);
		return ret;
	}

	if (xcvr->mode == FSL_XCVR_MODE_EARC) { /* eARC mode */
		/* PHY: CTRL_SET: TX_DIFF_OE, PHY_EN */
		fsl_xcvr_ai_write(xcvr, FSL_XCVR_PHY_CTRL_SET,
				  FSL_XCVR_PHY_CTRL_TSDIFF_OE |
				  FSL_XCVR_PHY_CTRL_PHY_EN, 1);
		/* PHY: CTRL2_SET: EARC_TX_MODE */
		fsl_xcvr_ai_write(xcvr, FSL_XCVR_PHY_CTRL2_SET,
				  FSL_XCVR_PHY_CTRL2_EARC_TXMS, 1);
	} else { /* SPDIF mode */
		/* PHY: CTRL_SET: TX_CLK_AUD_SS | SPDIF_EN */
		fsl_xcvr_ai_write(xcvr, FSL_XCVR_PHY_CTRL_SET,
				  FSL_XCVR_PHY_CTRL_TX_CLK_AUD_SS |
				  FSL_XCVR_PHY_CTRL_SPDIF_EN, 1);
	}

	dev_dbg(dev, "PLL Fexp: %u\n", freq);

	return 0;
}

#define FSL_XCVR_SPDIF_RX_FREQ	175000000
static int fsl_xcvr_prepare(struct snd_pcm_substream *substream,
			    struct snd_soc_dai *dai)
{
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);
	bool tx = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
	u32 m_ctl = 0, v_ctl = 0;
	u32 r = substream->runtime->rate, ch = substream->runtime->channels;
	u32 fout = 32 * r * ch * 10 * 2;
	int ret = 0;

	switch (xcvr->mode) {
	case FSL_XCVR_MODE_SPDIF:
	case FSL_XCVR_MODE_ARC:
		if (tx) {
			ret = fsl_xcvr_en_aud_pll(xcvr, fout);
			if (ret < 0) {
				dev_err(dai->dev, "Failed to set TX freq %u: %d\n",
					fout, ret);
				return ret;
			}

			ret = regmap_write(xcvr->regmap, FSL_XCVR_TX_DPTH_CTRL_SET,
					   FSL_XCVR_TX_DPTH_CTRL_FRM_FMT);
			if (ret < 0) {
				dev_err(dai->dev, "Failed to set TX_DPTH: %d\n", ret);
				return ret;
			}

			/**
			 * set SPDIF MODE - this flag is used to gate
			 * SPDIF output, useless for SPDIF RX
			 */
			m_ctl |= FSL_XCVR_EXT_CTRL_SPDIF_MODE;
			v_ctl |= FSL_XCVR_EXT_CTRL_SPDIF_MODE;
		} else {
			/**
			 * Clear RX FIFO, flip RX FIFO bits,
			 * disable eARC related HW mode detects
			 */
			ret = regmap_write(xcvr->regmap, FSL_XCVR_RX_DPTH_CTRL_SET,
					   FSL_XCVR_RX_DPTH_CTRL_STORE_FMT |
					   FSL_XCVR_RX_DPTH_CTRL_CLR_RX_FIFO |
					   FSL_XCVR_RX_DPTH_CTRL_COMP |
					   FSL_XCVR_RX_DPTH_CTRL_LAYB_CTRL);
			if (ret < 0) {
				dev_err(dai->dev, "Failed to set RX_DPTH: %d\n", ret);
				return ret;
			}

			ret = fsl_xcvr_en_phy_pll(xcvr, FSL_XCVR_SPDIF_RX_FREQ, tx);
			if (ret < 0) {
				dev_err(dai->dev, "Failed to set RX freq %u: %d\n",
					FSL_XCVR_SPDIF_RX_FREQ, ret);
				return ret;
			}
		}
		break;
	case FSL_XCVR_MODE_EARC:
		if (!tx) {
			/** Clear RX FIFO, flip RX FIFO bits */
			ret = regmap_write(xcvr->regmap, FSL_XCVR_RX_DPTH_CTRL_SET,
					   FSL_XCVR_RX_DPTH_CTRL_STORE_FMT |
					   FSL_XCVR_RX_DPTH_CTRL_CLR_RX_FIFO);
			if (ret < 0) {
				dev_err(dai->dev, "Failed to set RX_DPTH: %d\n", ret);
				return ret;
			}

			/** Enable eARC related HW mode detects */
			ret = regmap_write(xcvr->regmap, FSL_XCVR_RX_DPTH_CTRL_CLR,
					   FSL_XCVR_RX_DPTH_CTRL_COMP |
					   FSL_XCVR_RX_DPTH_CTRL_LAYB_CTRL);
			if (ret < 0) {
				dev_err(dai->dev, "Failed to clr TX_DPTH: %d\n", ret);
				return ret;
			}
		}

		/* clear CMDC RESET */
		m_ctl |= FSL_XCVR_EXT_CTRL_CMDC_RESET(tx);
		/* set TX_RX_MODE */
		m_ctl |= FSL_XCVR_EXT_CTRL_TX_RX_MODE;
		v_ctl |= (tx ? FSL_XCVR_EXT_CTRL_TX_RX_MODE : 0);
		break;
	}

	ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_IER0,
				 FSL_XCVR_IRQ_EARC_ALL, FSL_XCVR_IRQ_EARC_ALL);
	if (ret < 0) {
		dev_err(dai->dev, "Error while setting IER0: %d\n", ret);
		return ret;
	}

	/* set DPATH RESET */
	m_ctl |= FSL_XCVR_EXT_CTRL_DPTH_RESET(tx);
	v_ctl |= FSL_XCVR_EXT_CTRL_DPTH_RESET(tx);
	ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_CTRL, m_ctl, v_ctl);
	if (ret < 0) {
		dev_err(dai->dev, "Error while setting EXT_CTRL: %d\n", ret);
		return ret;
	}

	return 0;
}

static int fsl_xcvr_constr(const struct snd_pcm_substream *substream,
			   const struct snd_pcm_hw_constraint_list *channels,
			   const struct snd_pcm_hw_constraint_list *rates)
{
	struct snd_pcm_runtime *rt = substream->runtime;
	int ret;

	ret = snd_pcm_hw_constraint_list(rt, 0, SNDRV_PCM_HW_PARAM_CHANNELS,
					 channels);
	if (ret < 0)
		return ret;

	ret = snd_pcm_hw_constraint_list(rt, 0, SNDRV_PCM_HW_PARAM_RATE,
					 rates);
	if (ret < 0)
		return ret;

	return 0;
}

static int fsl_xcvr_startup(struct snd_pcm_substream *substream,
			    struct snd_soc_dai *dai)
{
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);
	bool tx = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
	int ret = 0;

	if (xcvr->streams & BIT(substream->stream)) {
		dev_err(dai->dev, "%sX busy\n", tx ? "T" : "R");
		return -EBUSY;
	}

	switch (xcvr->mode) {
	case FSL_XCVR_MODE_SPDIF:
	case FSL_XCVR_MODE_ARC:
		ret = fsl_xcvr_constr(substream, &fsl_xcvr_spdif_channels_constr,
				      &fsl_xcvr_spdif_rates_constr);
		break;
	case FSL_XCVR_MODE_EARC:
		ret = fsl_xcvr_constr(substream, &fsl_xcvr_earc_channels_constr,
				      &fsl_xcvr_earc_rates_constr);
		break;
	}
	if (ret < 0)
		return ret;

	xcvr->streams |= BIT(substream->stream);

	/* Disable XCVR controls if there is stream started */
	fsl_xcvr_activate_ctl(dai, fsl_xcvr_mode_kctl.name, false);
	fsl_xcvr_activate_ctl(dai, fsl_xcvr_arc_mode_kctl.name, false);
	fsl_xcvr_activate_ctl(dai, fsl_xcvr_earc_capds_kctl.name, false);

	return 0;
}

static void fsl_xcvr_shutdown(struct snd_pcm_substream *substream,
			      struct snd_soc_dai *dai)
{
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);
	bool tx = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
	u32 mask = 0, val = 0;
	int ret;

	xcvr->streams &= ~BIT(substream->stream);

	/* Enable XCVR controls if there is no stream started */
	if (!xcvr->streams) {
		fsl_xcvr_activate_ctl(dai, fsl_xcvr_mode_kctl.name, true);
		fsl_xcvr_activate_ctl(dai, fsl_xcvr_arc_mode_kctl.name,
				      (xcvr->mode == FSL_XCVR_MODE_ARC));
		fsl_xcvr_activate_ctl(dai, fsl_xcvr_earc_capds_kctl.name,
				      (xcvr->mode == FSL_XCVR_MODE_EARC));

		ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_IER0,
					 FSL_XCVR_IRQ_EARC_ALL, 0);
		if (ret < 0) {
			dev_err(dai->dev, "Failed to set IER0: %d\n", ret);
			return;
		}

		/* clear SPDIF MODE */
		if (xcvr->mode == FSL_XCVR_MODE_SPDIF)
			mask |= FSL_XCVR_EXT_CTRL_SPDIF_MODE;
	}

	if (xcvr->mode == FSL_XCVR_MODE_EARC) {
		/* set CMDC RESET */
		mask |= FSL_XCVR_EXT_CTRL_CMDC_RESET(tx);
		val |= FSL_XCVR_EXT_CTRL_CMDC_RESET(tx);
	}

	ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_CTRL, mask, val);
	if (ret < 0) {
		dev_err(dai->dev, "Err setting DPATH RESET: %d\n", ret);
		return;
	}
}

static int fsl_xcvr_trigger(struct snd_pcm_substream *substream, int cmd,
			    struct snd_soc_dai *dai)
{
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);
	bool tx = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
	int ret;

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		if (tx) {
			switch (xcvr->mode) {
			case FSL_XCVR_MODE_EARC:
				/* set isr_cmdc_tx_en, w1c */
				ret = regmap_write(xcvr->regmap,
						   FSL_XCVR_ISR_SET,
						   FSL_XCVR_ISR_CMDC_TX_EN);
				if (ret < 0) {
					dev_err(dai->dev, "err updating isr %d\n", ret);
					return ret;
				}
				fallthrough;
			case FSL_XCVR_MODE_SPDIF:
				ret = regmap_write(xcvr->regmap,
						   FSL_XCVR_TX_DPTH_CTRL_SET,
						   FSL_XCVR_TX_DPTH_CTRL_STRT_DATA_TX);
				if (ret < 0) {
					dev_err(dai->dev, "Failed to start DATA_TX: %d\n", ret);
					return ret;
				}
				break;
			}
		}

		/* enable DMA RD/WR */
		ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_CTRL,
					 FSL_XCVR_EXT_CTRL_DMA_DIS(tx), 0);
		if (ret < 0) {
			dev_err(dai->dev, "Failed to enable DMA: %d\n", ret);
			return ret;
		}

		/* clear DPATH RESET */
		ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_CTRL,
					 FSL_XCVR_EXT_CTRL_DPTH_RESET(tx),
					 0);
		if (ret < 0) {
			dev_err(dai->dev, "Failed to clear DPATH RESET: %d\n", ret);
			return ret;
		}

		break;
	case SNDRV_PCM_TRIGGER_STOP:
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		/* disable DMA RD/WR */
		ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_CTRL,
					 FSL_XCVR_EXT_CTRL_DMA_DIS(tx),
					 FSL_XCVR_EXT_CTRL_DMA_DIS(tx));
		if (ret < 0) {
			dev_err(dai->dev, "Failed to disable DMA: %d\n", ret);
			return ret;
		}

		if (tx) {
			switch (xcvr->mode) {
			case FSL_XCVR_MODE_SPDIF:
				ret = regmap_write(xcvr->regmap,
						   FSL_XCVR_TX_DPTH_CTRL_CLR,
						   FSL_XCVR_TX_DPTH_CTRL_STRT_DATA_TX);
				if (ret < 0) {
					dev_err(dai->dev, "Failed to stop DATA_TX: %d\n", ret);
					return ret;
				}
				fallthrough;
			case FSL_XCVR_MODE_EARC:
				/* clear ISR_CMDC_TX_EN, W1C */
				ret = regmap_write(xcvr->regmap,
						   FSL_XCVR_ISR_CLR,
						   FSL_XCVR_ISR_CMDC_TX_EN);
				if (ret < 0) {
					dev_err(dai->dev,
						"Err updating ISR %d\n", ret);
					return ret;
				}
				break;
			}
		}
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int fsl_xcvr_load_firmware(struct fsl_xcvr *xcvr)
{
	struct device *dev = &xcvr->pdev->dev;
	const struct firmware *fw;
	int ret = 0, rem, off, out, page = 0, size = FSL_XCVR_REG_OFFSET;
	u32 mask, val;

	ret = request_firmware(&fw, xcvr->soc_data->fw_name, dev);
	if (ret) {
		dev_err(dev, "failed to request firmware.\n");
		return ret;
	}

	rem = fw->size;

	/* RAM is 20KiB = 16KiB code + 4KiB data => max 10 pages 2KiB each */
	if (rem > 16384) {
		dev_err(dev, "FW size %d is bigger than 16KiB.\n", rem);
		release_firmware(fw);
		return -ENOMEM;
	}

	for (page = 0; page < 10; page++) {
		ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_CTRL,
					 FSL_XCVR_EXT_CTRL_PAGE_MASK,
					 FSL_XCVR_EXT_CTRL_PAGE(page));
		if (ret < 0) {
			dev_err(dev, "FW: failed to set page %d, err=%d\n",
err=%d\n", 726 page, ret); 727 goto err_firmware; 728 } 729 730 off = page * size; 731 out = min(rem, size); 732 /* IPG clock is assumed to be running, otherwise it will hang */ 733 if (out > 0) { 734 /* write firmware into code memory */ 735 memcpy_toio(xcvr->ram_addr, fw->data + off, out); 736 rem -= out; 737 if (rem == 0) { 738 /* last part of firmware written */ 739 /* clean remaining part of code memory page */ 740 memset_io(xcvr->ram_addr + out, 0, size - out); 741 } 742 } else { 743 /* clean current page, including data memory */ 744 memset_io(xcvr->ram_addr, 0, size); 745 } 746 } 747 748 err_firmware: 749 release_firmware(fw); 750 if (ret < 0) 751 return ret; 752 753 /* configure watermarks */ 754 mask = FSL_XCVR_EXT_CTRL_RX_FWM_MASK | FSL_XCVR_EXT_CTRL_TX_FWM_MASK; 755 val = FSL_XCVR_EXT_CTRL_RX_FWM(FSL_XCVR_FIFO_WMK_RX); 756 val |= FSL_XCVR_EXT_CTRL_TX_FWM(FSL_XCVR_FIFO_WMK_TX); 757 /* disable DMA RD/WR */ 758 mask |= FSL_XCVR_EXT_CTRL_DMA_RD_DIS | FSL_XCVR_EXT_CTRL_DMA_WR_DIS; 759 val |= FSL_XCVR_EXT_CTRL_DMA_RD_DIS | FSL_XCVR_EXT_CTRL_DMA_WR_DIS; 760 /* Data RAM is 4KiB, last two pages: 8 and 9. Select page 8. */ 761 mask |= FSL_XCVR_EXT_CTRL_PAGE_MASK; 762 val |= FSL_XCVR_EXT_CTRL_PAGE(8); 763 764 ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_CTRL, mask, val); 765 if (ret < 0) { 766 dev_err(dev, "Failed to set watermarks: %d\n", ret); 767 return ret; 768 } 769 770 /* Store Capabilities Data Structure into Data RAM */ 771 memcpy_toio(xcvr->ram_addr + FSL_XCVR_CAP_DATA_STR, xcvr->cap_ds, 772 FSL_XCVR_CAPDS_SIZE); 773 return 0; 774 } 775 776 static int fsl_xcvr_type_iec958_info(struct snd_kcontrol *kcontrol, 777 struct snd_ctl_elem_info *uinfo) 778 { 779 uinfo->type = SNDRV_CTL_ELEM_TYPE_IEC958; 780 uinfo->count = 1; 781 782 return 0; 783 } 784 785 static int fsl_xcvr_type_iec958_bytes_info(struct snd_kcontrol *kcontrol, 786 struct snd_ctl_elem_info *uinfo) 787 { 788 uinfo->type = SNDRV_CTL_ELEM_TYPE_BYTES; 789 uinfo->count = sizeof_field(struct snd_aes_iec958, status); 790 791 return 0; 792 } 793 794 static int fsl_xcvr_rx_cs_get(struct snd_kcontrol *kcontrol, 795 struct snd_ctl_elem_value *ucontrol) 796 { 797 struct snd_soc_dai *dai = snd_kcontrol_chip(kcontrol); 798 struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai); 799 800 memcpy(ucontrol->value.iec958.status, xcvr->rx_iec958.status, 24); 801 802 return 0; 803 } 804 805 static int fsl_xcvr_tx_cs_get(struct snd_kcontrol *kcontrol, 806 struct snd_ctl_elem_value *ucontrol) 807 { 808 struct snd_soc_dai *dai = snd_kcontrol_chip(kcontrol); 809 struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai); 810 811 memcpy(ucontrol->value.iec958.status, xcvr->tx_iec958.status, 24); 812 813 return 0; 814 } 815 816 static int fsl_xcvr_tx_cs_put(struct snd_kcontrol *kcontrol, 817 struct snd_ctl_elem_value *ucontrol) 818 { 819 struct snd_soc_dai *dai = snd_kcontrol_chip(kcontrol); 820 struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai); 821 822 memcpy(xcvr->tx_iec958.status, ucontrol->value.iec958.status, 24); 823 824 return 0; 825 } 826 827 static struct snd_kcontrol_new fsl_xcvr_rx_ctls[] = { 828 /* Channel status controller */ 829 { 830 .iface = SNDRV_CTL_ELEM_IFACE_PCM, 831 .name = SNDRV_CTL_NAME_IEC958("", CAPTURE, DEFAULT), 832 .access = SNDRV_CTL_ELEM_ACCESS_READ, 833 .info = fsl_xcvr_type_iec958_info, 834 .get = fsl_xcvr_rx_cs_get, 835 }, 836 /* Capture channel status, bytes */ 837 { 838 .iface = SNDRV_CTL_ELEM_IFACE_PCM, 839 .name = "Capture Channel Status", 840 .access = SNDRV_CTL_ELEM_ACCESS_READ, 841 .info = 
		.get = fsl_xcvr_rx_cs_get,
	},
};

static struct snd_kcontrol_new fsl_xcvr_tx_ctls[] = {
	/* Channel status controller */
	{
		.iface = SNDRV_CTL_ELEM_IFACE_PCM,
		.name = SNDRV_CTL_NAME_IEC958("", PLAYBACK, DEFAULT),
		.access = SNDRV_CTL_ELEM_ACCESS_READWRITE,
		.info = fsl_xcvr_type_iec958_info,
		.get = fsl_xcvr_tx_cs_get,
		.put = fsl_xcvr_tx_cs_put,
	},
	/* Playback channel status, bytes */
	{
		.iface = SNDRV_CTL_ELEM_IFACE_PCM,
		.name = "Playback Channel Status",
		.access = SNDRV_CTL_ELEM_ACCESS_READWRITE,
		.info = fsl_xcvr_type_iec958_bytes_info,
		.get = fsl_xcvr_tx_cs_get,
		.put = fsl_xcvr_tx_cs_put,
	},
};

static const struct snd_soc_dai_ops fsl_xcvr_dai_ops = {
	.prepare = fsl_xcvr_prepare,
	.startup = fsl_xcvr_startup,
	.shutdown = fsl_xcvr_shutdown,
	.trigger = fsl_xcvr_trigger,
};

static int fsl_xcvr_dai_probe(struct snd_soc_dai *dai)
{
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);

	snd_soc_dai_init_dma_data(dai, &xcvr->dma_prms_tx, &xcvr->dma_prms_rx);

	snd_soc_add_dai_controls(dai, &fsl_xcvr_mode_kctl, 1);
	snd_soc_add_dai_controls(dai, &fsl_xcvr_arc_mode_kctl, 1);
	snd_soc_add_dai_controls(dai, &fsl_xcvr_earc_capds_kctl, 1);
	snd_soc_add_dai_controls(dai, fsl_xcvr_tx_ctls,
				 ARRAY_SIZE(fsl_xcvr_tx_ctls));
	snd_soc_add_dai_controls(dai, fsl_xcvr_rx_ctls,
				 ARRAY_SIZE(fsl_xcvr_rx_ctls));
	return 0;
}

static struct snd_soc_dai_driver fsl_xcvr_dai = {
	.probe = fsl_xcvr_dai_probe,
	.ops = &fsl_xcvr_dai_ops,
	.playback = {
		.stream_name = "CPU-Playback",
		.channels_min = 1,
		.channels_max = 32,
		.rate_min = 32000,
		.rate_max = 1536000,
		.rates = SNDRV_PCM_RATE_KNOT,
		.formats = SNDRV_PCM_FMTBIT_IEC958_SUBFRAME_LE,
	},
	.capture = {
		.stream_name = "CPU-Capture",
		.channels_min = 1,
		.channels_max = 32,
		.rate_min = 32000,
		.rate_max = 1536000,
		.rates = SNDRV_PCM_RATE_KNOT,
		.formats = SNDRV_PCM_FMTBIT_IEC958_SUBFRAME_LE,
	},
};

static const struct snd_soc_component_driver fsl_xcvr_comp = {
	.name = "fsl-xcvr-dai",
	.legacy_dai_naming = 1,
};

static const struct reg_default fsl_xcvr_reg_defaults[] = {
	{ FSL_XCVR_VERSION, 0x00000000 },
	{ FSL_XCVR_EXT_CTRL, 0xF8204040 },
	{ FSL_XCVR_EXT_STATUS, 0x00000000 },
	{ FSL_XCVR_EXT_IER0, 0x00000000 },
	{ FSL_XCVR_EXT_IER1, 0x00000000 },
	{ FSL_XCVR_EXT_ISR, 0x00000000 },
	{ FSL_XCVR_EXT_ISR_SET, 0x00000000 },
	{ FSL_XCVR_EXT_ISR_CLR, 0x00000000 },
	{ FSL_XCVR_EXT_ISR_TOG, 0x00000000 },
	{ FSL_XCVR_IER, 0x00000000 },
	{ FSL_XCVR_ISR, 0x00000000 },
	{ FSL_XCVR_ISR_SET, 0x00000000 },
	{ FSL_XCVR_ISR_CLR, 0x00000000 },
	{ FSL_XCVR_ISR_TOG, 0x00000000 },
	{ FSL_XCVR_RX_DPTH_CTRL, 0x00002C89 },
	{ FSL_XCVR_RX_DPTH_CTRL_SET, 0x00002C89 },
	{ FSL_XCVR_RX_DPTH_CTRL_CLR, 0x00002C89 },
	{ FSL_XCVR_RX_DPTH_CTRL_TOG, 0x00002C89 },
	{ FSL_XCVR_TX_DPTH_CTRL, 0x00000000 },
	{ FSL_XCVR_TX_DPTH_CTRL_SET, 0x00000000 },
	{ FSL_XCVR_TX_DPTH_CTRL_CLR, 0x00000000 },
	{ FSL_XCVR_TX_DPTH_CTRL_TOG, 0x00000000 },
	{ FSL_XCVR_TX_CS_DATA_0, 0x00000000 },
	{ FSL_XCVR_TX_CS_DATA_1, 0x00000000 },
	{ FSL_XCVR_TX_CS_DATA_2, 0x00000000 },
	{ FSL_XCVR_TX_CS_DATA_3, 0x00000000 },
	{ FSL_XCVR_TX_CS_DATA_4, 0x00000000 },
	{ FSL_XCVR_TX_CS_DATA_5, 0x00000000 },
	{ FSL_XCVR_DEBUG_REG_0, 0x00000000 },
	{ FSL_XCVR_DEBUG_REG_1, 0x00000000 },
};

static bool fsl_xcvr_readable_reg(struct device *dev, unsigned int reg)
{
	switch (reg) {
	case FSL_XCVR_VERSION:
	case FSL_XCVR_EXT_CTRL:
	case FSL_XCVR_EXT_STATUS:
	case FSL_XCVR_EXT_IER0:
	case FSL_XCVR_EXT_IER1:
	case FSL_XCVR_EXT_ISR:
	case FSL_XCVR_EXT_ISR_SET:
	case FSL_XCVR_EXT_ISR_CLR:
	case FSL_XCVR_EXT_ISR_TOG:
	case FSL_XCVR_IER:
	case FSL_XCVR_ISR:
	case FSL_XCVR_ISR_SET:
	case FSL_XCVR_ISR_CLR:
	case FSL_XCVR_ISR_TOG:
	case FSL_XCVR_PHY_AI_CTRL:
	case FSL_XCVR_PHY_AI_CTRL_SET:
	case FSL_XCVR_PHY_AI_CTRL_CLR:
	case FSL_XCVR_PHY_AI_CTRL_TOG:
	case FSL_XCVR_PHY_AI_RDATA:
	case FSL_XCVR_CLK_CTRL:
	case FSL_XCVR_RX_DPTH_CTRL:
	case FSL_XCVR_RX_DPTH_CTRL_SET:
	case FSL_XCVR_RX_DPTH_CTRL_CLR:
	case FSL_XCVR_RX_DPTH_CTRL_TOG:
	case FSL_XCVR_TX_DPTH_CTRL:
	case FSL_XCVR_TX_DPTH_CTRL_SET:
	case FSL_XCVR_TX_DPTH_CTRL_CLR:
	case FSL_XCVR_TX_DPTH_CTRL_TOG:
	case FSL_XCVR_TX_CS_DATA_0:
	case FSL_XCVR_TX_CS_DATA_1:
	case FSL_XCVR_TX_CS_DATA_2:
	case FSL_XCVR_TX_CS_DATA_3:
	case FSL_XCVR_TX_CS_DATA_4:
	case FSL_XCVR_TX_CS_DATA_5:
	case FSL_XCVR_DEBUG_REG_0:
	case FSL_XCVR_DEBUG_REG_1:
		return true;
	default:
		return false;
	}
}

static bool fsl_xcvr_writeable_reg(struct device *dev, unsigned int reg)
{
	switch (reg) {
	case FSL_XCVR_EXT_CTRL:
	case FSL_XCVR_EXT_IER0:
	case FSL_XCVR_EXT_IER1:
	case FSL_XCVR_EXT_ISR:
	case FSL_XCVR_EXT_ISR_SET:
	case FSL_XCVR_EXT_ISR_CLR:
	case FSL_XCVR_EXT_ISR_TOG:
	case FSL_XCVR_IER:
	case FSL_XCVR_ISR_SET:
	case FSL_XCVR_ISR_CLR:
	case FSL_XCVR_ISR_TOG:
	case FSL_XCVR_PHY_AI_CTRL:
	case FSL_XCVR_PHY_AI_CTRL_SET:
	case FSL_XCVR_PHY_AI_CTRL_CLR:
	case FSL_XCVR_PHY_AI_CTRL_TOG:
	case FSL_XCVR_PHY_AI_WDATA:
	case FSL_XCVR_CLK_CTRL:
	case FSL_XCVR_RX_DPTH_CTRL:
	case FSL_XCVR_RX_DPTH_CTRL_SET:
	case FSL_XCVR_RX_DPTH_CTRL_CLR:
	case FSL_XCVR_RX_DPTH_CTRL_TOG:
	case FSL_XCVR_TX_DPTH_CTRL_SET:
	case FSL_XCVR_TX_DPTH_CTRL_CLR:
	case FSL_XCVR_TX_DPTH_CTRL_TOG:
	case FSL_XCVR_TX_CS_DATA_0:
	case FSL_XCVR_TX_CS_DATA_1:
	case FSL_XCVR_TX_CS_DATA_2:
	case FSL_XCVR_TX_CS_DATA_3:
	case FSL_XCVR_TX_CS_DATA_4:
	case FSL_XCVR_TX_CS_DATA_5:
		return true;
	default:
		return false;
	}
}

static bool fsl_xcvr_volatile_reg(struct device *dev, unsigned int reg)
{
	return fsl_xcvr_readable_reg(dev, reg);
}

static const struct regmap_config fsl_xcvr_regmap_cfg = {
	.reg_bits = 32,
	.reg_stride = 4,
	.val_bits = 32,
	.max_register = FSL_XCVR_MAX_REG,
	.reg_defaults = fsl_xcvr_reg_defaults,
	.num_reg_defaults = ARRAY_SIZE(fsl_xcvr_reg_defaults),
	.readable_reg = fsl_xcvr_readable_reg,
	.volatile_reg = fsl_xcvr_volatile_reg,
	.writeable_reg = fsl_xcvr_writeable_reg,
	.cache_type = REGCACHE_FLAT,
};

static irqreturn_t irq0_isr(int irq, void *devid)
{
	struct fsl_xcvr *xcvr = (struct fsl_xcvr *)devid;
	struct device *dev = &xcvr->pdev->dev;
	struct regmap *regmap = xcvr->regmap;
	void __iomem *reg_ctrl, *reg_buff;
	u32 isr, isr_clr = 0, val, i;

	regmap_read(regmap, FSL_XCVR_EXT_ISR, &isr);

	if (isr & FSL_XCVR_IRQ_NEW_CS) {
		dev_dbg(dev, "Received new CS block\n");
		isr_clr |= FSL_XCVR_IRQ_NEW_CS;
		/* Data RAM is 4KiB, last two pages: 8 and 9. Select page 8. */
		regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_CTRL,
				   FSL_XCVR_EXT_CTRL_PAGE_MASK,
				   FSL_XCVR_EXT_CTRL_PAGE(8));

		/* Find updated CS buffer */
		reg_ctrl = xcvr->ram_addr + FSL_XCVR_RX_CS_CTRL_0;
		reg_buff = xcvr->ram_addr + FSL_XCVR_RX_CS_BUFF_0;
		memcpy_fromio(&val, reg_ctrl, sizeof(val));
		if (!val) {
			reg_ctrl = xcvr->ram_addr + FSL_XCVR_RX_CS_CTRL_1;
			reg_buff = xcvr->ram_addr + FSL_XCVR_RX_CS_BUFF_1;
			memcpy_fromio(&val, reg_ctrl, sizeof(val));
		}

		if (val) {
			/* copy CS buffer */
			memcpy_fromio(&xcvr->rx_iec958.status, reg_buff,
				      sizeof(xcvr->rx_iec958.status));
			for (i = 0; i < 6; i++) {
				val = *(u32 *)(xcvr->rx_iec958.status + i*4);
				*(u32 *)(xcvr->rx_iec958.status + i*4) =
					bitrev32(val);
			}
			/* clear CS control register */
			memset_io(reg_ctrl, 0, sizeof(val));
		}
	}
	if (isr & FSL_XCVR_IRQ_NEW_UD) {
		dev_dbg(dev, "Received new UD block\n");
		isr_clr |= FSL_XCVR_IRQ_NEW_UD;
	}
	if (isr & FSL_XCVR_IRQ_MUTE) {
		dev_dbg(dev, "HW mute bit detected\n");
		isr_clr |= FSL_XCVR_IRQ_MUTE;
	}
	if (isr & FSL_XCVR_IRQ_FIFO_UOFL_ERR) {
		dev_dbg(dev, "RX/TX FIFO full/empty\n");
		isr_clr |= FSL_XCVR_IRQ_FIFO_UOFL_ERR;
	}
	if (isr & FSL_XCVR_IRQ_ARC_MODE) {
		dev_dbg(dev, "CMDC SM falls out of eARC mode\n");
		isr_clr |= FSL_XCVR_IRQ_ARC_MODE;
	}
	if (isr & FSL_XCVR_IRQ_DMA_RD_REQ) {
		dev_dbg(dev, "DMA read request\n");
		isr_clr |= FSL_XCVR_IRQ_DMA_RD_REQ;
	}
	if (isr & FSL_XCVR_IRQ_DMA_WR_REQ) {
		dev_dbg(dev, "DMA write request\n");
		isr_clr |= FSL_XCVR_IRQ_DMA_WR_REQ;
	}

	if (isr_clr) {
		regmap_write(regmap, FSL_XCVR_EXT_ISR_CLR, isr_clr);
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}

static const struct fsl_xcvr_soc_data fsl_xcvr_imx8mp_data = {
	.fw_name = "imx/xcvr/xcvr-imx8mp.bin",
};

static const struct of_device_id fsl_xcvr_dt_ids[] = {
	{ .compatible = "fsl,imx8mp-xcvr", .data = &fsl_xcvr_imx8mp_data },
	{ /* sentinel */ }
};
MODULE_DEVICE_TABLE(of, fsl_xcvr_dt_ids);

static int fsl_xcvr_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct fsl_xcvr *xcvr;
	struct resource *rx_res, *tx_res;
	void __iomem *regs;
	int ret, irq;

	xcvr = devm_kzalloc(dev, sizeof(*xcvr), GFP_KERNEL);
	if (!xcvr)
		return -ENOMEM;

	xcvr->pdev = pdev;
	xcvr->soc_data = of_device_get_match_data(&pdev->dev);

	xcvr->ipg_clk = devm_clk_get(dev, "ipg");
	if (IS_ERR(xcvr->ipg_clk)) {
		dev_err(dev, "failed to get ipg clock\n");
		return PTR_ERR(xcvr->ipg_clk);
	}

	xcvr->phy_clk = devm_clk_get(dev, "phy");
	if (IS_ERR(xcvr->phy_clk)) {
		dev_err(dev, "failed to get phy clock\n");
		return PTR_ERR(xcvr->phy_clk);
	}

	xcvr->spba_clk = devm_clk_get(dev, "spba");
	if (IS_ERR(xcvr->spba_clk)) {
		dev_err(dev, "failed to get spba clock\n");
		return PTR_ERR(xcvr->spba_clk);
	}

	xcvr->pll_ipg_clk = devm_clk_get(dev, "pll_ipg");
	if (IS_ERR(xcvr->pll_ipg_clk)) {
		dev_err(dev, "failed to get pll_ipg clock\n");
		return PTR_ERR(xcvr->pll_ipg_clk);
	}

	xcvr->ram_addr = devm_platform_ioremap_resource_byname(pdev, "ram");
	if (IS_ERR(xcvr->ram_addr))
		return PTR_ERR(xcvr->ram_addr);

	regs = devm_platform_ioremap_resource_byname(pdev, "regs");
devm_platform_ioremap_resource_byname(pdev, "regs"); 1181 if (IS_ERR(regs)) 1182 return PTR_ERR(regs); 1183 1184 xcvr->regmap = devm_regmap_init_mmio_clk(dev, NULL, regs, 1185 &fsl_xcvr_regmap_cfg); 1186 if (IS_ERR(xcvr->regmap)) { 1187 dev_err(dev, "failed to init XCVR regmap: %ld\n", 1188 PTR_ERR(xcvr->regmap)); 1189 return PTR_ERR(xcvr->regmap); 1190 } 1191 1192 xcvr->reset = devm_reset_control_get_exclusive(dev, NULL); 1193 if (IS_ERR(xcvr->reset)) { 1194 dev_err(dev, "failed to get XCVR reset control\n"); 1195 return PTR_ERR(xcvr->reset); 1196 } 1197 1198 /* get IRQs */ 1199 irq = platform_get_irq(pdev, 0); 1200 if (irq < 0) 1201 return irq; 1202 1203 ret = devm_request_irq(dev, irq, irq0_isr, 0, pdev->name, xcvr); 1204 if (ret) { 1205 dev_err(dev, "failed to claim IRQ0: %i\n", ret); 1206 return ret; 1207 } 1208 1209 rx_res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "rxfifo"); 1210 tx_res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "txfifo"); 1211 if (!rx_res || !tx_res) { 1212 dev_err(dev, "could not find rxfifo or txfifo resource\n"); 1213 return -EINVAL; 1214 } 1215 xcvr->dma_prms_rx.chan_name = "rx"; 1216 xcvr->dma_prms_tx.chan_name = "tx"; 1217 xcvr->dma_prms_rx.addr = rx_res->start; 1218 xcvr->dma_prms_tx.addr = tx_res->start; 1219 xcvr->dma_prms_rx.maxburst = FSL_XCVR_MAXBURST_RX; 1220 xcvr->dma_prms_tx.maxburst = FSL_XCVR_MAXBURST_TX; 1221 1222 platform_set_drvdata(pdev, xcvr); 1223 pm_runtime_enable(dev); 1224 regcache_cache_only(xcvr->regmap, true); 1225 1226 /* 1227 * Register platform component before registering cpu dai for there 1228 * is not defer probe for platform component in snd_soc_add_pcm_runtime(). 1229 */ 1230 ret = devm_snd_dmaengine_pcm_register(dev, NULL, 0); 1231 if (ret) { 1232 pm_runtime_disable(dev); 1233 dev_err(dev, "failed to pcm register\n"); 1234 return ret; 1235 } 1236 1237 ret = devm_snd_soc_register_component(dev, &fsl_xcvr_comp, 1238 &fsl_xcvr_dai, 1); 1239 if (ret) { 1240 pm_runtime_disable(dev); 1241 dev_err(dev, "failed to register component %s\n", 1242 fsl_xcvr_comp.name); 1243 } 1244 1245 return ret; 1246 } 1247 1248 static int fsl_xcvr_remove(struct platform_device *pdev) 1249 { 1250 pm_runtime_disable(&pdev->dev); 1251 return 0; 1252 } 1253 1254 static __maybe_unused int fsl_xcvr_runtime_suspend(struct device *dev) 1255 { 1256 struct fsl_xcvr *xcvr = dev_get_drvdata(dev); 1257 int ret; 1258 1259 /* 1260 * Clear interrupts, when streams starts or resumes after 1261 * suspend, interrupts are enabled in prepare(), so no need 1262 * to enable interrupts in resume(). 
	ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_IER0,
				 FSL_XCVR_IRQ_EARC_ALL, 0);
	if (ret < 0)
		dev_err(dev, "Failed to clear IER0: %d\n", ret);

	/* Assert M0+ reset */
	ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_CTRL,
				 FSL_XCVR_EXT_CTRL_CORE_RESET,
				 FSL_XCVR_EXT_CTRL_CORE_RESET);
	if (ret < 0)
		dev_err(dev, "Failed to assert M0+ core: %d\n", ret);

	regcache_cache_only(xcvr->regmap, true);

	clk_disable_unprepare(xcvr->spba_clk);
	clk_disable_unprepare(xcvr->phy_clk);
	clk_disable_unprepare(xcvr->pll_ipg_clk);
	clk_disable_unprepare(xcvr->ipg_clk);

	return 0;
}

static __maybe_unused int fsl_xcvr_runtime_resume(struct device *dev)
{
	struct fsl_xcvr *xcvr = dev_get_drvdata(dev);
	int ret;

	ret = reset_control_assert(xcvr->reset);
	if (ret < 0) {
		dev_err(dev, "Failed to assert M0+ reset: %d\n", ret);
		return ret;
	}

	ret = clk_prepare_enable(xcvr->ipg_clk);
	if (ret) {
		dev_err(dev, "failed to start IPG clock.\n");
		return ret;
	}

	ret = clk_prepare_enable(xcvr->pll_ipg_clk);
	if (ret) {
		dev_err(dev, "failed to start PLL IPG clock.\n");
		goto stop_ipg_clk;
	}

	ret = clk_prepare_enable(xcvr->phy_clk);
	if (ret) {
		dev_err(dev, "failed to start PHY clock: %d\n", ret);
		goto stop_pll_ipg_clk;
	}

	ret = clk_prepare_enable(xcvr->spba_clk);
	if (ret) {
		dev_err(dev, "failed to start SPBA clock.\n");
		goto stop_phy_clk;
	}

	regcache_cache_only(xcvr->regmap, false);
	regcache_mark_dirty(xcvr->regmap);
	ret = regcache_sync(xcvr->regmap);

	if (ret) {
		dev_err(dev, "failed to sync regcache.\n");
		goto stop_spba_clk;
	}

	ret = reset_control_deassert(xcvr->reset);
	if (ret) {
		dev_err(dev, "failed to deassert M0+ reset.\n");
		goto stop_spba_clk;
	}

	ret = fsl_xcvr_load_firmware(xcvr);
	if (ret) {
		dev_err(dev, "failed to load firmware.\n");
		goto stop_spba_clk;
	}

	/* Release M0+ reset */
	ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_CTRL,
				 FSL_XCVR_EXT_CTRL_CORE_RESET, 0);
	if (ret < 0) {
		dev_err(dev, "M0+ core release failed: %d\n", ret);
		goto stop_spba_clk;
	}

	/* Let M0+ core complete firmware initialization */
	msleep(50);

	return 0;

stop_spba_clk:
	clk_disable_unprepare(xcvr->spba_clk);
stop_phy_clk:
	clk_disable_unprepare(xcvr->phy_clk);
stop_pll_ipg_clk:
	clk_disable_unprepare(xcvr->pll_ipg_clk);
stop_ipg_clk:
	clk_disable_unprepare(xcvr->ipg_clk);

	return ret;
}

static const struct dev_pm_ops fsl_xcvr_pm_ops = {
	SET_RUNTIME_PM_OPS(fsl_xcvr_runtime_suspend,
			   fsl_xcvr_runtime_resume,
			   NULL)
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
};

static struct platform_driver fsl_xcvr_driver = {
	.probe = fsl_xcvr_probe,
	.driver = {
		.name = "fsl,imx8mp-audio-xcvr",
		.pm = &fsl_xcvr_pm_ops,
		.of_match_table = fsl_xcvr_dt_ids,
	},
	.remove = fsl_xcvr_remove,
};
module_platform_driver(fsl_xcvr_driver);

MODULE_AUTHOR("Viorel Suman <viorel.suman@nxp.com>");
MODULE_DESCRIPTION("NXP Audio Transceiver (XCVR) driver");
MODULE_LICENSE("GPL v2");