// SPDX-License-Identifier: GPL-2.0
// Copyright 2019 NXP

#include <linux/bitrev.h>
#include <linux/clk.h>
#include <linux/firmware.h>
#include <linux/interrupt.h>
#include <linux/module.h>
#include <linux/of_platform.h>
#include <linux/pm_runtime.h>
#include <linux/regmap.h>
#include <linux/reset.h>
#include <sound/dmaengine_pcm.h>
#include <sound/pcm_iec958.h>
#include <sound/pcm_params.h>

#include "fsl_xcvr.h"
#include "imx-pcm.h"

#define FSL_XCVR_CAPDS_SIZE	256

struct fsl_xcvr_soc_data {
	const char *fw_name;
};

struct fsl_xcvr {
	const struct fsl_xcvr_soc_data *soc_data;
	struct platform_device *pdev;
	struct regmap *regmap;
	struct clk *ipg_clk;
	struct clk *pll_ipg_clk;
	struct clk *phy_clk;
	struct clk *spba_clk;
	struct reset_control *reset;
	u8 streams;
	u32 mode;
	u32 arc_mode;
	void __iomem *ram_addr;
	struct snd_dmaengine_dai_dma_data dma_prms_rx;
	struct snd_dmaengine_dai_dma_data dma_prms_tx;
	struct snd_aes_iec958 rx_iec958;
	struct snd_aes_iec958 tx_iec958;
	u8 cap_ds[FSL_XCVR_CAPDS_SIZE];
};

static const struct fsl_xcvr_pll_conf {
	u8 mfi;    /* min=0x18, max=0x38 */
	u32 mfn;   /* signed int, 2's compl., min=0x3FFF0000, max=0x00010000 */
	u32 mfd;   /* unsigned int */
	u32 fout;  /* Fout = Fref*(MFI + MFN/MFD), Fref is 24MHz */
} fsl_xcvr_pll_cfg[] = {
	{ .mfi = 54, .mfn = 1,  .mfd = 6,   .fout = 1300000000, }, /* 1.3 GHz */
	{ .mfi = 32, .mfn = 96, .mfd = 125, .fout = 786432000, },  /* 8000 Hz * 98304 */
	{ .mfi = 30, .mfn = 66, .mfd = 625, .fout = 722534400, },  /* 11025 Hz * 65536 */
	{ .mfi = 29, .mfn = 1,  .mfd = 6,   .fout = 700000000, },  /* 700 MHz */
};

/*
 * The HDMI 2.1 spec defines 6- and 12-channel layouts for one-bit audio
 * streams. TODO: check how those layouts can be supported below.
 */
static const u32 fsl_xcvr_earc_channels[] = { 1, 2, 8, 16, 32, };
static const struct snd_pcm_hw_constraint_list fsl_xcvr_earc_channels_constr = {
	.count = ARRAY_SIZE(fsl_xcvr_earc_channels),
	.list = fsl_xcvr_earc_channels,
};

static const u32 fsl_xcvr_earc_rates[] = {
	32000, 44100, 48000, 64000, 88200, 96000,
	128000, 176400, 192000, 256000, 352800, 384000,
	512000, 705600, 768000, 1024000, 1411200, 1536000,
};
static const struct snd_pcm_hw_constraint_list fsl_xcvr_earc_rates_constr = {
	.count = ARRAY_SIZE(fsl_xcvr_earc_rates),
	.list = fsl_xcvr_earc_rates,
};

static const u32 fsl_xcvr_spdif_channels[] = { 2, };
static const struct snd_pcm_hw_constraint_list fsl_xcvr_spdif_channels_constr = {
	.count = ARRAY_SIZE(fsl_xcvr_spdif_channels),
	.list = fsl_xcvr_spdif_channels,
};

static const u32 fsl_xcvr_spdif_rates[] = {
	32000, 44100, 48000, 88200, 96000, 176400, 192000,
};
static const struct snd_pcm_hw_constraint_list fsl_xcvr_spdif_rates_constr = {
	.count = ARRAY_SIZE(fsl_xcvr_spdif_rates),
	.list = fsl_xcvr_spdif_rates,
};

static int fsl_xcvr_arc_mode_put(struct snd_kcontrol *kcontrol,
				 struct snd_ctl_elem_value *ucontrol)
{
	struct snd_soc_dai *dai = snd_kcontrol_chip(kcontrol);
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);
	struct soc_enum *e = (struct soc_enum *)kcontrol->private_value;
	unsigned int *item = ucontrol->value.enumerated.item;

	xcvr->arc_mode = snd_soc_enum_item_to_val(e, item[0]);

	return 0;
}

static int fsl_xcvr_arc_mode_get(struct snd_kcontrol *kcontrol,
				 struct snd_ctl_elem_value *ucontrol)
{
	struct snd_soc_dai *dai = snd_kcontrol_chip(kcontrol);
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);

	ucontrol->value.enumerated.item[0] = xcvr->arc_mode;

	return 0;
}

static const u32 fsl_xcvr_phy_arc_cfg[] = {
	FSL_XCVR_PHY_CTRL_ARC_MODE_SE_EN, FSL_XCVR_PHY_CTRL_ARC_MODE_CM_EN,
};

static const char * const fsl_xcvr_arc_mode[] = { "Single Ended", "Common", };
static const struct soc_enum fsl_xcvr_arc_mode_enum =
	SOC_ENUM_SINGLE_EXT(ARRAY_SIZE(fsl_xcvr_arc_mode), fsl_xcvr_arc_mode);
static struct snd_kcontrol_new fsl_xcvr_arc_mode_kctl =
	SOC_ENUM_EXT("ARC Mode", fsl_xcvr_arc_mode_enum,
		     fsl_xcvr_arc_mode_get, fsl_xcvr_arc_mode_put);

/* Capabilities data structure, bytes */
static int fsl_xcvr_type_capds_bytes_info(struct snd_kcontrol *kcontrol,
					  struct snd_ctl_elem_info *uinfo)
{
	uinfo->type = SNDRV_CTL_ELEM_TYPE_BYTES;
	uinfo->count = FSL_XCVR_CAPDS_SIZE;

	return 0;
}

static int fsl_xcvr_capds_get(struct snd_kcontrol *kcontrol,
			      struct snd_ctl_elem_value *ucontrol)
{
	struct snd_soc_dai *dai = snd_kcontrol_chip(kcontrol);
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);

	memcpy(ucontrol->value.bytes.data, xcvr->cap_ds, FSL_XCVR_CAPDS_SIZE);

	return 0;
}

static int fsl_xcvr_capds_put(struct snd_kcontrol *kcontrol,
			      struct snd_ctl_elem_value *ucontrol)
{
	struct snd_soc_dai *dai = snd_kcontrol_chip(kcontrol);
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);

	memcpy(xcvr->cap_ds, ucontrol->value.bytes.data, FSL_XCVR_CAPDS_SIZE);

	return 0;
}

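/*
 * eARC Capabilities Data Structure: a 256-byte blob that userspace fills in
 * through this control; fsl_xcvr_load_firmware() copies it into the M0+
 * data RAM at offset FSL_XCVR_CAP_DATA_STR.
 */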
static struct snd_kcontrol_new fsl_xcvr_earc_capds_kctl = {
	.iface = SNDRV_CTL_ELEM_IFACE_PCM,
	.name = "Capabilities Data Structure",
	.access = SNDRV_CTL_ELEM_ACCESS_READWRITE,
	.info = fsl_xcvr_type_capds_bytes_info,
	.get = fsl_xcvr_capds_get,
	.put = fsl_xcvr_capds_put,
};

static int fsl_xcvr_activate_ctl(struct snd_soc_dai *dai, const char *name,
				 bool active)
{
	struct snd_soc_card *card = dai->component->card;
	struct snd_kcontrol *kctl;
	bool enabled;

	kctl = snd_soc_card_get_kcontrol(card, name);
	if (kctl == NULL)
		return -ENOENT;

	enabled = ((kctl->vd[0].access & SNDRV_CTL_ELEM_ACCESS_WRITE) != 0);
	if (active == enabled)
		return 0; /* nothing to do */

	if (active)
		kctl->vd[0].access |= SNDRV_CTL_ELEM_ACCESS_WRITE;
	else
		kctl->vd[0].access &= ~SNDRV_CTL_ELEM_ACCESS_WRITE;

	snd_ctl_notify(card->snd_card, SNDRV_CTL_EVENT_MASK_INFO, &kctl->id);

	return 1;
}

static int fsl_xcvr_mode_put(struct snd_kcontrol *kcontrol,
			     struct snd_ctl_elem_value *ucontrol)
{
	struct snd_soc_dai *dai = snd_kcontrol_chip(kcontrol);
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);
	struct soc_enum *e = (struct soc_enum *)kcontrol->private_value;
	unsigned int *item = ucontrol->value.enumerated.item;
	struct snd_soc_card *card = dai->component->card;
	struct snd_soc_pcm_runtime *rtd;

	xcvr->mode = snd_soc_enum_item_to_val(e, item[0]);

	fsl_xcvr_activate_ctl(dai, fsl_xcvr_arc_mode_kctl.name,
			      (xcvr->mode == FSL_XCVR_MODE_ARC));
	fsl_xcvr_activate_ctl(dai, fsl_xcvr_earc_capds_kctl.name,
			      (xcvr->mode == FSL_XCVR_MODE_EARC));
	/* Allow playback for SPDIF only */
	rtd = snd_soc_get_pcm_runtime(card, card->dai_link);
	rtd->pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream_count =
		(xcvr->mode == FSL_XCVR_MODE_SPDIF ? 1 : 0);
	return 0;
}

static int fsl_xcvr_mode_get(struct snd_kcontrol *kcontrol,
			     struct snd_ctl_elem_value *ucontrol)
{
	struct snd_soc_dai *dai = snd_kcontrol_chip(kcontrol);
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);

	ucontrol->value.enumerated.item[0] = xcvr->mode;

	return 0;
}

static const char * const fsl_xcvr_mode[] = { "SPDIF", "ARC RX", "eARC", };
static const struct soc_enum fsl_xcvr_mode_enum =
	SOC_ENUM_SINGLE_EXT(ARRAY_SIZE(fsl_xcvr_mode), fsl_xcvr_mode);
static struct snd_kcontrol_new fsl_xcvr_mode_kctl =
	SOC_ENUM_EXT("XCVR Mode", fsl_xcvr_mode_enum,
		     fsl_xcvr_mode_get, fsl_xcvr_mode_put);

/* phy: true => access the PHY, false => access the PLL */
static int fsl_xcvr_ai_write(struct fsl_xcvr *xcvr, u8 reg, u32 data, bool phy)
{
	struct device *dev = &xcvr->pdev->dev;
	u32 val, idx, tidx;
	int ret;

	idx = BIT(phy ? 26 : 24);
	tidx = BIT(phy ? 27 : 25);

	regmap_write(xcvr->regmap, FSL_XCVR_PHY_AI_CTRL_CLR, 0xFF);
	regmap_write(xcvr->regmap, FSL_XCVR_PHY_AI_CTRL_SET, reg);
	regmap_write(xcvr->regmap, FSL_XCVR_PHY_AI_WDATA, data);
	regmap_write(xcvr->regmap, FSL_XCVR_PHY_AI_CTRL_TOG, idx);

	ret = regmap_read_poll_timeout(xcvr->regmap, FSL_XCVR_PHY_AI_CTRL, val,
				       (val & idx) == ((val & tidx) >> 1),
				       10, 10000);
	if (ret)
		dev_err(dev, "AI timeout: failed to set %s reg 0x%02x=0x%08x\n",
			phy ? "PHY" : "PLL", reg, data);

	return ret;
}

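/*
 * Enable the internal PLL and the PHY for a given frequency: pick the first
 * fsl_xcvr_pll_cfg[] entry whose Fout is an integer multiple of @freq,
 * program MFI/MFN/MFD, power the PLL up, then route Fout through one of the
 * post-dividers (PDIV0 for TX, PDIV1 for eARC RX, PDIV2 for SPDIF/ARC RX)
 * and enable the PHY in the selected mode.
 */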
"PHY" : "PLL", reg, data); 255 return ret; 256 } 257 258 static int fsl_xcvr_en_phy_pll(struct fsl_xcvr *xcvr, u32 freq, bool tx) 259 { 260 struct device *dev = &xcvr->pdev->dev; 261 u32 i, div = 0, log2; 262 int ret; 263 264 for (i = 0; i < ARRAY_SIZE(fsl_xcvr_pll_cfg); i++) { 265 if (fsl_xcvr_pll_cfg[i].fout % freq == 0) { 266 div = fsl_xcvr_pll_cfg[i].fout / freq; 267 break; 268 } 269 } 270 271 if (!div || i >= ARRAY_SIZE(fsl_xcvr_pll_cfg)) 272 return -EINVAL; 273 274 log2 = ilog2(div); 275 276 /* Release AI interface from reset */ 277 ret = regmap_write(xcvr->regmap, FSL_XCVR_PHY_AI_CTRL_SET, 278 FSL_XCVR_PHY_AI_CTRL_AI_RESETN); 279 if (ret < 0) { 280 dev_err(dev, "Error while setting IER0: %d\n", ret); 281 return ret; 282 } 283 284 /* PLL: BANDGAP_SET: EN_VBG (enable bandgap) */ 285 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_BANDGAP_SET, 286 FSL_XCVR_PLL_BANDGAP_EN_VBG, 0); 287 288 /* PLL: CTRL0: DIV_INTEGER */ 289 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_CTRL0, fsl_xcvr_pll_cfg[i].mfi, 0); 290 /* PLL: NUMERATOR: MFN */ 291 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_NUM, fsl_xcvr_pll_cfg[i].mfn, 0); 292 /* PLL: DENOMINATOR: MFD */ 293 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_DEN, fsl_xcvr_pll_cfg[i].mfd, 0); 294 /* PLL: CTRL0_SET: HOLD_RING_OFF, POWER_UP */ 295 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_CTRL0_SET, 296 FSL_XCVR_PLL_CTRL0_HROFF | FSL_XCVR_PLL_CTRL0_PWP, 0); 297 udelay(25); 298 /* PLL: CTRL0: Clear Hold Ring Off */ 299 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_CTRL0_CLR, 300 FSL_XCVR_PLL_CTRL0_HROFF, 0); 301 udelay(100); 302 if (tx) { /* TX is enabled for SPDIF only */ 303 /* PLL: POSTDIV: PDIV0 */ 304 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_PDIV, 305 FSL_XCVR_PLL_PDIVx(log2, 0), 0); 306 /* PLL: CTRL_SET: CLKMUX0_EN */ 307 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_CTRL0_SET, 308 FSL_XCVR_PLL_CTRL0_CM0_EN, 0); 309 } else if (xcvr->mode == FSL_XCVR_MODE_EARC) { /* eARC RX */ 310 /* PLL: POSTDIV: PDIV1 */ 311 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_PDIV, 312 FSL_XCVR_PLL_PDIVx(log2, 1), 0); 313 /* PLL: CTRL_SET: CLKMUX1_EN */ 314 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_CTRL0_SET, 315 FSL_XCVR_PLL_CTRL0_CM1_EN, 0); 316 } else { /* SPDIF / ARC RX */ 317 /* PLL: POSTDIV: PDIV2 */ 318 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_PDIV, 319 FSL_XCVR_PLL_PDIVx(log2, 2), 0); 320 /* PLL: CTRL_SET: CLKMUX2_EN */ 321 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PLL_CTRL0_SET, 322 FSL_XCVR_PLL_CTRL0_CM2_EN, 0); 323 } 324 325 if (xcvr->mode == FSL_XCVR_MODE_EARC) { /* eARC mode */ 326 /* PHY: CTRL_SET: TX_DIFF_OE, PHY_EN */ 327 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PHY_CTRL_SET, 328 FSL_XCVR_PHY_CTRL_TSDIFF_OE | 329 FSL_XCVR_PHY_CTRL_PHY_EN, 1); 330 /* PHY: CTRL2_SET: EARC_TX_MODE */ 331 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PHY_CTRL2_SET, 332 FSL_XCVR_PHY_CTRL2_EARC_TXMS, 1); 333 } else if (!tx) { /* SPDIF / ARC RX mode */ 334 if (xcvr->mode == FSL_XCVR_MODE_SPDIF) 335 /* PHY: CTRL_SET: SPDIF_EN */ 336 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PHY_CTRL_SET, 337 FSL_XCVR_PHY_CTRL_SPDIF_EN, 1); 338 else /* PHY: CTRL_SET: ARC RX setup */ 339 fsl_xcvr_ai_write(xcvr, FSL_XCVR_PHY_CTRL_SET, 340 FSL_XCVR_PHY_CTRL_PHY_EN | 341 FSL_XCVR_PHY_CTRL_RX_CM_EN | 342 fsl_xcvr_phy_arc_cfg[xcvr->arc_mode], 1); 343 } 344 345 dev_dbg(dev, "PLL Fexp: %u, Fout: %u, mfi: %u, mfn: %u, mfd: %d, div: %u, pdiv0: %u\n", 346 freq, fsl_xcvr_pll_cfg[i].fout, fsl_xcvr_pll_cfg[i].mfi, 347 fsl_xcvr_pll_cfg[i].mfn, fsl_xcvr_pll_cfg[i].mfd, div, log2); 348 return 0; 349 } 350 351 static int fsl_xcvr_en_aud_pll(struct fsl_xcvr *xcvr, u32 freq) 352 { 353 struct device *dev = &xcvr->pdev->dev; 354 int ret; 
static int fsl_xcvr_en_aud_pll(struct fsl_xcvr *xcvr, u32 freq)
{
	struct device *dev = &xcvr->pdev->dev;
	int ret;

	clk_disable_unprepare(xcvr->phy_clk);
	ret = clk_set_rate(xcvr->phy_clk, freq);
	if (ret < 0) {
		dev_err(dev, "Error while setting AUD PLL rate: %d\n", ret);
		return ret;
	}
	ret = clk_prepare_enable(xcvr->phy_clk);
	if (ret) {
		dev_err(dev, "failed to start PHY clock: %d\n", ret);
		return ret;
	}

	/* Release AI interface from reset */
	ret = regmap_write(xcvr->regmap, FSL_XCVR_PHY_AI_CTRL_SET,
			   FSL_XCVR_PHY_AI_CTRL_AI_RESETN);
	if (ret < 0) {
		dev_err(dev, "Error while releasing AI interface from reset: %d\n", ret);
		return ret;
	}

	if (xcvr->mode == FSL_XCVR_MODE_EARC) { /* eARC mode */
		/* PHY: CTRL_SET: TX_DIFF_OE, PHY_EN */
		fsl_xcvr_ai_write(xcvr, FSL_XCVR_PHY_CTRL_SET,
				  FSL_XCVR_PHY_CTRL_TSDIFF_OE |
				  FSL_XCVR_PHY_CTRL_PHY_EN, 1);
		/* PHY: CTRL2_SET: EARC_TX_MODE */
		fsl_xcvr_ai_write(xcvr, FSL_XCVR_PHY_CTRL2_SET,
				  FSL_XCVR_PHY_CTRL2_EARC_TXMS, 1);
	} else { /* SPDIF mode */
		/* PHY: CTRL_SET: TX_CLK_AUD_SS | SPDIF_EN */
		fsl_xcvr_ai_write(xcvr, FSL_XCVR_PHY_CTRL_SET,
				  FSL_XCVR_PHY_CTRL_TX_CLK_AUD_SS |
				  FSL_XCVR_PHY_CTRL_SPDIF_EN, 1);
	}

	dev_dbg(dev, "PLL Fexp: %u\n", freq);

	return 0;
}

#define FSL_XCVR_SPDIF_RX_FREQ	175000000
static int fsl_xcvr_prepare(struct snd_pcm_substream *substream,
			    struct snd_soc_dai *dai)
{
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);
	bool tx = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
	u32 m_ctl = 0, v_ctl = 0;
	u32 r = substream->runtime->rate, ch = substream->runtime->channels;
	u32 fout = 32 * r * ch * 10 * 2;
	int ret = 0;

	switch (xcvr->mode) {
	case FSL_XCVR_MODE_SPDIF:
	case FSL_XCVR_MODE_ARC:
		if (tx) {
			ret = fsl_xcvr_en_aud_pll(xcvr, fout);
			if (ret < 0) {
				dev_err(dai->dev, "Failed to set TX freq %u: %d\n",
					fout, ret);
				return ret;
			}

			ret = regmap_write(xcvr->regmap, FSL_XCVR_TX_DPTH_CTRL_SET,
					   FSL_XCVR_TX_DPTH_CTRL_FRM_FMT);
			if (ret < 0) {
				dev_err(dai->dev, "Failed to set TX_DPTH: %d\n", ret);
				return ret;
			}

			/*
			 * Set SPDIF MODE - this flag is used to gate the
			 * SPDIF output; it is not used for SPDIF RX.
			 */
			m_ctl |= FSL_XCVR_EXT_CTRL_SPDIF_MODE;
			v_ctl |= FSL_XCVR_EXT_CTRL_SPDIF_MODE;
		} else {
			/*
			 * Clear RX FIFO, flip RX FIFO bits,
			 * disable eARC related HW mode detects
			 */
			ret = regmap_write(xcvr->regmap, FSL_XCVR_RX_DPTH_CTRL_SET,
					   FSL_XCVR_RX_DPTH_CTRL_STORE_FMT |
					   FSL_XCVR_RX_DPTH_CTRL_CLR_RX_FIFO |
					   FSL_XCVR_RX_DPTH_CTRL_COMP |
					   FSL_XCVR_RX_DPTH_CTRL_LAYB_CTRL);
			if (ret < 0) {
				dev_err(dai->dev, "Failed to set RX_DPTH: %d\n", ret);
				return ret;
			}

			ret = fsl_xcvr_en_phy_pll(xcvr, FSL_XCVR_SPDIF_RX_FREQ, tx);
			if (ret < 0) {
				dev_err(dai->dev, "Failed to set RX freq %u: %d\n",
					FSL_XCVR_SPDIF_RX_FREQ, ret);
				return ret;
			}
		}
		break;
	case FSL_XCVR_MODE_EARC:
		if (!tx) {
			/* Clear RX FIFO, flip RX FIFO bits */
			ret = regmap_write(xcvr->regmap, FSL_XCVR_RX_DPTH_CTRL_SET,
					   FSL_XCVR_RX_DPTH_CTRL_STORE_FMT |
					   FSL_XCVR_RX_DPTH_CTRL_CLR_RX_FIFO);
			if (ret < 0) {
				dev_err(dai->dev, "Failed to set RX_DPTH: %d\n", ret);
				return ret;
			}

			/* Enable eARC related HW mode detects */
			ret = regmap_write(xcvr->regmap, FSL_XCVR_RX_DPTH_CTRL_CLR,
					   FSL_XCVR_RX_DPTH_CTRL_COMP |
					   FSL_XCVR_RX_DPTH_CTRL_LAYB_CTRL);
			if (ret < 0) {
				dev_err(dai->dev, "Failed to clear RX_DPTH: %d\n", ret);
				return ret;
			}
		}

		/* clear CMDC RESET */
		m_ctl |= FSL_XCVR_EXT_CTRL_CMDC_RESET(tx);
		/* set TX_RX_MODE */
		m_ctl |= FSL_XCVR_EXT_CTRL_TX_RX_MODE;
		v_ctl |= (tx ? FSL_XCVR_EXT_CTRL_TX_RX_MODE : 0);
		break;
	}

	ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_IER0,
				 FSL_XCVR_IRQ_EARC_ALL, FSL_XCVR_IRQ_EARC_ALL);
	if (ret < 0) {
		dev_err(dai->dev, "Error while setting IER0: %d\n", ret);
		return ret;
	}

	/* clear DPATH RESET */
	m_ctl |= FSL_XCVR_EXT_CTRL_DPTH_RESET(tx);
	ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_CTRL, m_ctl, v_ctl);
	if (ret < 0) {
		dev_err(dai->dev, "Error while setting EXT_CTRL: %d\n", ret);
		return ret;
	}

	return 0;
}

static int fsl_xcvr_constr(const struct snd_pcm_substream *substream,
			   const struct snd_pcm_hw_constraint_list *channels,
			   const struct snd_pcm_hw_constraint_list *rates)
{
	struct snd_pcm_runtime *rt = substream->runtime;
	int ret;

	ret = snd_pcm_hw_constraint_list(rt, 0, SNDRV_PCM_HW_PARAM_CHANNELS,
					 channels);
	if (ret < 0)
		return ret;

	ret = snd_pcm_hw_constraint_list(rt, 0, SNDRV_PCM_HW_PARAM_RATE,
					 rates);
	if (ret < 0)
		return ret;

	return 0;
}

static int fsl_xcvr_startup(struct snd_pcm_substream *substream,
			    struct snd_soc_dai *dai)
{
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);
	bool tx = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
	int ret = 0;

	if (xcvr->streams & BIT(substream->stream)) {
		dev_err(dai->dev, "%sX busy\n", tx ? "T" : "R");
		return -EBUSY;
	}

	switch (xcvr->mode) {
	case FSL_XCVR_MODE_SPDIF:
	case FSL_XCVR_MODE_ARC:
		ret = fsl_xcvr_constr(substream, &fsl_xcvr_spdif_channels_constr,
				      &fsl_xcvr_spdif_rates_constr);
		break;
	case FSL_XCVR_MODE_EARC:
		ret = fsl_xcvr_constr(substream, &fsl_xcvr_earc_channels_constr,
				      &fsl_xcvr_earc_rates_constr);
		break;
	}
	if (ret < 0)
		return ret;

	xcvr->streams |= BIT(substream->stream);

	/* Disable XCVR controls while a stream is running */
	fsl_xcvr_activate_ctl(dai, fsl_xcvr_mode_kctl.name, false);
	fsl_xcvr_activate_ctl(dai, fsl_xcvr_arc_mode_kctl.name, false);
	fsl_xcvr_activate_ctl(dai, fsl_xcvr_earc_capds_kctl.name, false);

	return 0;
}

static void fsl_xcvr_shutdown(struct snd_pcm_substream *substream,
			      struct snd_soc_dai *dai)
{
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);
	bool tx = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
	u32 mask = 0, val = 0;
	int ret;

	xcvr->streams &= ~BIT(substream->stream);

	/* Re-enable XCVR controls once no stream is running */
	if (!xcvr->streams) {
		fsl_xcvr_activate_ctl(dai, fsl_xcvr_mode_kctl.name, true);
		fsl_xcvr_activate_ctl(dai, fsl_xcvr_arc_mode_kctl.name,
				      (xcvr->mode == FSL_XCVR_MODE_ARC));
		fsl_xcvr_activate_ctl(dai, fsl_xcvr_earc_capds_kctl.name,
				      (xcvr->mode == FSL_XCVR_MODE_EARC));

		ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_IER0,
					 FSL_XCVR_IRQ_EARC_ALL, 0);
		if (ret < 0) {
			dev_err(dai->dev, "Failed to set IER0: %d\n", ret);
			return;
		}

		/* clear SPDIF MODE */
		if (xcvr->mode == FSL_XCVR_MODE_SPDIF)
			mask |= FSL_XCVR_EXT_CTRL_SPDIF_MODE;
	}

	if (xcvr->mode == FSL_XCVR_MODE_EARC) {
		/* set CMDC RESET */
		mask |= FSL_XCVR_EXT_CTRL_CMDC_RESET(tx);
		val |= FSL_XCVR_EXT_CTRL_CMDC_RESET(tx);
	}

	/* set DPATH RESET */
	mask |= FSL_XCVR_EXT_CTRL_DPTH_RESET(tx);
	val |= FSL_XCVR_EXT_CTRL_DPTH_RESET(tx);

	ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_CTRL, mask, val);
	if (ret < 0) {
		dev_err(dai->dev, "Error setting DPATH RESET: %d\n", ret);
		return;
	}
}

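/*
 * On START the TX datapath (and, in eARC mode, the CMDC TX enable bit) is
 * switched on before the DMA request is ungated in EXT_CTRL; on STOP the
 * DMA request is gated first and the datapath is stopped afterwards.
 */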
static int fsl_xcvr_trigger(struct snd_pcm_substream *substream, int cmd,
			    struct snd_soc_dai *dai)
{
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);
	bool tx = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
	int ret;

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		if (tx) {
			switch (xcvr->mode) {
			case FSL_XCVR_MODE_EARC:
				/* set isr_cmdc_tx_en, w1c */
				ret = regmap_write(xcvr->regmap,
						   FSL_XCVR_ISR_SET,
						   FSL_XCVR_ISR_CMDC_TX_EN);
				if (ret < 0) {
					dev_err(dai->dev, "err updating isr %d\n", ret);
					return ret;
				}
				fallthrough;
			case FSL_XCVR_MODE_SPDIF:
				ret = regmap_write(xcvr->regmap,
						   FSL_XCVR_TX_DPTH_CTRL_SET,
						   FSL_XCVR_TX_DPTH_CTRL_STRT_DATA_TX);
				if (ret < 0) {
					dev_err(dai->dev, "Failed to start DATA_TX: %d\n", ret);
					return ret;
				}
				break;
			}
		}

		/* enable DMA RD/WR */
		ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_CTRL,
					 FSL_XCVR_EXT_CTRL_DMA_DIS(tx), 0);
		if (ret < 0) {
			dev_err(dai->dev, "Failed to enable DMA: %d\n", ret);
			return ret;
		}
		break;
	case SNDRV_PCM_TRIGGER_STOP:
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		/* disable DMA RD/WR */
		ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_CTRL,
					 FSL_XCVR_EXT_CTRL_DMA_DIS(tx),
					 FSL_XCVR_EXT_CTRL_DMA_DIS(tx));
		if (ret < 0) {
			dev_err(dai->dev, "Failed to disable DMA: %d\n", ret);
			return ret;
		}

		if (tx) {
			switch (xcvr->mode) {
			case FSL_XCVR_MODE_SPDIF:
				ret = regmap_write(xcvr->regmap,
						   FSL_XCVR_TX_DPTH_CTRL_CLR,
						   FSL_XCVR_TX_DPTH_CTRL_STRT_DATA_TX);
				if (ret < 0) {
					dev_err(dai->dev, "Failed to stop DATA_TX: %d\n", ret);
					return ret;
				}
				fallthrough;
			case FSL_XCVR_MODE_EARC:
				/* clear ISR_CMDC_TX_EN, W1C */
				ret = regmap_write(xcvr->regmap,
						   FSL_XCVR_ISR_CLR,
						   FSL_XCVR_ISR_CMDC_TX_EN);
				if (ret < 0) {
					dev_err(dai->dev,
						"Err updating ISR %d\n", ret);
					return ret;
				}
				break;
			}
		}
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int fsl_xcvr_load_firmware(struct fsl_xcvr *xcvr)
{
	struct device *dev = &xcvr->pdev->dev;
	const struct firmware *fw;
	int ret = 0, rem, off, out, page = 0, size = FSL_XCVR_REG_OFFSET;
	u32 mask, val;

	ret = request_firmware(&fw, xcvr->soc_data->fw_name, dev);
	if (ret) {
		dev_err(dev, "failed to request firmware.\n");
		return ret;
	}

	rem = fw->size;

	/* RAM is 20KiB = 16KiB code + 4KiB data => max 10 pages 2KiB each */
	if (rem > 16384) {
		dev_err(dev, "FW size %d is bigger than 16KiB.\n", rem);
		release_firmware(fw);
		return -ENOMEM;
	}

	for (page = 0; page < 10; page++) {
		ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_CTRL,
					 FSL_XCVR_EXT_CTRL_PAGE_MASK,
					 FSL_XCVR_EXT_CTRL_PAGE(page));
		if (ret < 0) {
			dev_err(dev, "FW: failed to set page %d, err=%d\n",
				page, ret);
			goto err_firmware;
		}

		off = page * size;
		out = min(rem, size);
		/* IPG clock is assumed to be running, otherwise it will hang */
		if (out > 0) {
			/* write firmware into code memory */
			memcpy_toio(xcvr->ram_addr, fw->data + off, out);
			rem -= out;
			if (rem == 0) {
				/* last part of firmware written */
				/* clean remaining part of code memory page */
				memset_io(xcvr->ram_addr + out, 0, size - out);
			}
		} else {
			/* clean current page, including data memory */
			memset_io(xcvr->ram_addr, 0, size);
		}
	}

err_firmware:
	release_firmware(fw);
	if (ret < 0)
		return ret;

	/* configure watermarks */
	mask = FSL_XCVR_EXT_CTRL_RX_FWM_MASK | FSL_XCVR_EXT_CTRL_TX_FWM_MASK;
	val = FSL_XCVR_EXT_CTRL_RX_FWM(FSL_XCVR_FIFO_WMK_RX);
	val |= FSL_XCVR_EXT_CTRL_TX_FWM(FSL_XCVR_FIFO_WMK_TX);
	/* disable DMA RD/WR */
	mask |= FSL_XCVR_EXT_CTRL_DMA_RD_DIS | FSL_XCVR_EXT_CTRL_DMA_WR_DIS;
	val |= FSL_XCVR_EXT_CTRL_DMA_RD_DIS | FSL_XCVR_EXT_CTRL_DMA_WR_DIS;
	/* Data RAM is 4KiB, last two pages: 8 and 9. Select page 8. */
	mask |= FSL_XCVR_EXT_CTRL_PAGE_MASK;
	val |= FSL_XCVR_EXT_CTRL_PAGE(8);

	ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_CTRL, mask, val);
	if (ret < 0) {
		dev_err(dev, "Failed to set watermarks: %d\n", ret);
		return ret;
	}

	/* Store Capabilities Data Structure into Data RAM */
	memcpy_toio(xcvr->ram_addr + FSL_XCVR_CAP_DATA_STR, xcvr->cap_ds,
		    FSL_XCVR_CAPDS_SIZE);
	return 0;
}

static int fsl_xcvr_type_iec958_info(struct snd_kcontrol *kcontrol,
				     struct snd_ctl_elem_info *uinfo)
{
	uinfo->type = SNDRV_CTL_ELEM_TYPE_IEC958;
	uinfo->count = 1;

	return 0;
}

static int fsl_xcvr_type_iec958_bytes_info(struct snd_kcontrol *kcontrol,
					   struct snd_ctl_elem_info *uinfo)
{
	uinfo->type = SNDRV_CTL_ELEM_TYPE_BYTES;
	uinfo->count = sizeof_field(struct snd_aes_iec958, status);

	return 0;
}

static int fsl_xcvr_rx_cs_get(struct snd_kcontrol *kcontrol,
			      struct snd_ctl_elem_value *ucontrol)
{
	struct snd_soc_dai *dai = snd_kcontrol_chip(kcontrol);
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);

	memcpy(ucontrol->value.iec958.status, xcvr->rx_iec958.status, 24);

	return 0;
}

static int fsl_xcvr_tx_cs_get(struct snd_kcontrol *kcontrol,
			      struct snd_ctl_elem_value *ucontrol)
{
	struct snd_soc_dai *dai = snd_kcontrol_chip(kcontrol);
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);

	memcpy(ucontrol->value.iec958.status, xcvr->tx_iec958.status, 24);

	return 0;
}

static int fsl_xcvr_tx_cs_put(struct snd_kcontrol *kcontrol,
			      struct snd_ctl_elem_value *ucontrol)
{
	struct snd_soc_dai *dai = snd_kcontrol_chip(kcontrol);
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);

	memcpy(xcvr->tx_iec958.status, ucontrol->value.iec958.status, 24);

	return 0;
}

static struct snd_kcontrol_new fsl_xcvr_rx_ctls[] = {
	/* Channel status controller */
	{
		.iface = SNDRV_CTL_ELEM_IFACE_PCM,
		.name = SNDRV_CTL_NAME_IEC958("", CAPTURE, DEFAULT),
		.access = SNDRV_CTL_ELEM_ACCESS_READ,
		.info = fsl_xcvr_type_iec958_info,
		.get = fsl_xcvr_rx_cs_get,
	},
	/* Capture channel status, bytes */
	{
		.iface = SNDRV_CTL_ELEM_IFACE_PCM,
		.name = "Capture Channel Status",
		.access = SNDRV_CTL_ELEM_ACCESS_READ,
		.info = fsl_xcvr_type_iec958_bytes_info,
		.get = fsl_xcvr_rx_cs_get,
	},
};

static struct snd_kcontrol_new fsl_xcvr_tx_ctls[] = {
	/* Channel status controller */
	{
		.iface = SNDRV_CTL_ELEM_IFACE_PCM,
		.name = SNDRV_CTL_NAME_IEC958("", PLAYBACK, DEFAULT),
		.access = SNDRV_CTL_ELEM_ACCESS_READWRITE,
		.info = fsl_xcvr_type_iec958_info,
		.get = fsl_xcvr_tx_cs_get,
		.put = fsl_xcvr_tx_cs_put,
	},
	/* Playback channel status, bytes */
	{
		.iface = SNDRV_CTL_ELEM_IFACE_PCM,
		.name = "Playback Channel Status",
		.access = SNDRV_CTL_ELEM_ACCESS_READWRITE,
		.info = fsl_xcvr_type_iec958_bytes_info,
		.get = fsl_xcvr_tx_cs_get,
		.put = fsl_xcvr_tx_cs_put,
	},
};

static const struct snd_soc_dai_ops fsl_xcvr_dai_ops = {
	.prepare = fsl_xcvr_prepare,
	.startup = fsl_xcvr_startup,
	.shutdown = fsl_xcvr_shutdown,
	.trigger = fsl_xcvr_trigger,
};

static int fsl_xcvr_dai_probe(struct snd_soc_dai *dai)
{
	struct fsl_xcvr *xcvr = snd_soc_dai_get_drvdata(dai);

	snd_soc_dai_init_dma_data(dai, &xcvr->dma_prms_tx, &xcvr->dma_prms_rx);

	snd_soc_add_dai_controls(dai, &fsl_xcvr_mode_kctl, 1);
	snd_soc_add_dai_controls(dai, &fsl_xcvr_arc_mode_kctl, 1);
	snd_soc_add_dai_controls(dai, &fsl_xcvr_earc_capds_kctl, 1);
	snd_soc_add_dai_controls(dai, fsl_xcvr_tx_ctls,
				 ARRAY_SIZE(fsl_xcvr_tx_ctls));
	snd_soc_add_dai_controls(dai, fsl_xcvr_rx_ctls,
				 ARRAY_SIZE(fsl_xcvr_rx_ctls));
	return 0;
}

static struct snd_soc_dai_driver fsl_xcvr_dai = {
	.probe = fsl_xcvr_dai_probe,
	.ops = &fsl_xcvr_dai_ops,
	.playback = {
		.stream_name = "CPU-Playback",
		.channels_min = 1,
		.channels_max = 32,
		.rate_min = 32000,
		.rate_max = 1536000,
		.rates = SNDRV_PCM_RATE_KNOT,
		.formats = SNDRV_PCM_FMTBIT_IEC958_SUBFRAME_LE,
	},
	.capture = {
		.stream_name = "CPU-Capture",
		.channels_min = 1,
		.channels_max = 32,
		.rate_min = 32000,
		.rate_max = 1536000,
		.rates = SNDRV_PCM_RATE_KNOT,
		.formats = SNDRV_PCM_FMTBIT_IEC958_SUBFRAME_LE,
	},
};

static const struct snd_soc_component_driver fsl_xcvr_comp = {
	.name = "fsl-xcvr-dai",
};

static const struct reg_default fsl_xcvr_reg_defaults[] = {
	{ FSL_XCVR_VERSION,		0x00000000 },
	{ FSL_XCVR_EXT_CTRL,		0xF8204040 },
	{ FSL_XCVR_EXT_STATUS,		0x00000000 },
	{ FSL_XCVR_EXT_IER0,		0x00000000 },
	{ FSL_XCVR_EXT_IER1,		0x00000000 },
	{ FSL_XCVR_EXT_ISR,		0x00000000 },
	{ FSL_XCVR_EXT_ISR_SET,		0x00000000 },
	{ FSL_XCVR_EXT_ISR_CLR,		0x00000000 },
	{ FSL_XCVR_EXT_ISR_TOG,		0x00000000 },
	{ FSL_XCVR_IER,			0x00000000 },
	{ FSL_XCVR_ISR,			0x00000000 },
	{ FSL_XCVR_ISR_SET,		0x00000000 },
	{ FSL_XCVR_ISR_CLR,		0x00000000 },
	{ FSL_XCVR_ISR_TOG,		0x00000000 },
	{ FSL_XCVR_RX_DPTH_CTRL,	0x00002C89 },
	{ FSL_XCVR_RX_DPTH_CTRL_SET,	0x00002C89 },
	{ FSL_XCVR_RX_DPTH_CTRL_CLR,	0x00002C89 },
	{ FSL_XCVR_RX_DPTH_CTRL_TOG,	0x00002C89 },
	{ FSL_XCVR_TX_DPTH_CTRL,	0x00000000 },
	{ FSL_XCVR_TX_DPTH_CTRL_SET,	0x00000000 },
	{ FSL_XCVR_TX_DPTH_CTRL_CLR,	0x00000000 },
	{ FSL_XCVR_TX_DPTH_CTRL_TOG,	0x00000000 },
	{ FSL_XCVR_TX_CS_DATA_0,	0x00000000 },
	{ FSL_XCVR_TX_CS_DATA_1,	0x00000000 },
	{ FSL_XCVR_TX_CS_DATA_2,	0x00000000 },
	{ FSL_XCVR_TX_CS_DATA_3,	0x00000000 },
	{ FSL_XCVR_TX_CS_DATA_4,	0x00000000 },
	{ FSL_XCVR_TX_CS_DATA_5,	0x00000000 },
	{ FSL_XCVR_DEBUG_REG_0,		0x00000000 },
	{ FSL_XCVR_DEBUG_REG_1,		0x00000000 },
};

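/*
 * regmap access tables: every readable register is also treated as volatile
 * (see fsl_xcvr_volatile_reg() below), so reads are never served from the
 * register cache.
 */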
static bool fsl_xcvr_readable_reg(struct device *dev, unsigned int reg)
{
	switch (reg) {
	case FSL_XCVR_VERSION:
	case FSL_XCVR_EXT_CTRL:
	case FSL_XCVR_EXT_STATUS:
	case FSL_XCVR_EXT_IER0:
	case FSL_XCVR_EXT_IER1:
	case FSL_XCVR_EXT_ISR:
	case FSL_XCVR_EXT_ISR_SET:
	case FSL_XCVR_EXT_ISR_CLR:
	case FSL_XCVR_EXT_ISR_TOG:
	case FSL_XCVR_IER:
	case FSL_XCVR_ISR:
	case FSL_XCVR_ISR_SET:
	case FSL_XCVR_ISR_CLR:
	case FSL_XCVR_ISR_TOG:
	case FSL_XCVR_PHY_AI_CTRL:
	case FSL_XCVR_PHY_AI_CTRL_SET:
	case FSL_XCVR_PHY_AI_CTRL_CLR:
	case FSL_XCVR_PHY_AI_CTRL_TOG:
	case FSL_XCVR_PHY_AI_RDATA:
	case FSL_XCVR_CLK_CTRL:
	case FSL_XCVR_RX_DPTH_CTRL:
	case FSL_XCVR_RX_DPTH_CTRL_SET:
	case FSL_XCVR_RX_DPTH_CTRL_CLR:
	case FSL_XCVR_RX_DPTH_CTRL_TOG:
	case FSL_XCVR_TX_DPTH_CTRL:
	case FSL_XCVR_TX_DPTH_CTRL_SET:
	case FSL_XCVR_TX_DPTH_CTRL_CLR:
	case FSL_XCVR_TX_DPTH_CTRL_TOG:
	case FSL_XCVR_TX_CS_DATA_0:
	case FSL_XCVR_TX_CS_DATA_1:
	case FSL_XCVR_TX_CS_DATA_2:
	case FSL_XCVR_TX_CS_DATA_3:
	case FSL_XCVR_TX_CS_DATA_4:
	case FSL_XCVR_TX_CS_DATA_5:
	case FSL_XCVR_DEBUG_REG_0:
	case FSL_XCVR_DEBUG_REG_1:
		return true;
	default:
		return false;
	}
}

static bool fsl_xcvr_writeable_reg(struct device *dev, unsigned int reg)
{
	switch (reg) {
	case FSL_XCVR_EXT_CTRL:
	case FSL_XCVR_EXT_IER0:
	case FSL_XCVR_EXT_IER1:
	case FSL_XCVR_EXT_ISR:
	case FSL_XCVR_EXT_ISR_SET:
	case FSL_XCVR_EXT_ISR_CLR:
	case FSL_XCVR_EXT_ISR_TOG:
	case FSL_XCVR_IER:
	case FSL_XCVR_ISR_SET:
	case FSL_XCVR_ISR_CLR:
	case FSL_XCVR_ISR_TOG:
	case FSL_XCVR_PHY_AI_CTRL:
	case FSL_XCVR_PHY_AI_CTRL_SET:
	case FSL_XCVR_PHY_AI_CTRL_CLR:
	case FSL_XCVR_PHY_AI_CTRL_TOG:
	case FSL_XCVR_PHY_AI_WDATA:
	case FSL_XCVR_CLK_CTRL:
	case FSL_XCVR_RX_DPTH_CTRL:
	case FSL_XCVR_RX_DPTH_CTRL_SET:
	case FSL_XCVR_RX_DPTH_CTRL_CLR:
	case FSL_XCVR_RX_DPTH_CTRL_TOG:
	case FSL_XCVR_TX_DPTH_CTRL_SET:
	case FSL_XCVR_TX_DPTH_CTRL_CLR:
	case FSL_XCVR_TX_DPTH_CTRL_TOG:
	case FSL_XCVR_TX_CS_DATA_0:
	case FSL_XCVR_TX_CS_DATA_1:
	case FSL_XCVR_TX_CS_DATA_2:
	case FSL_XCVR_TX_CS_DATA_3:
	case FSL_XCVR_TX_CS_DATA_4:
	case FSL_XCVR_TX_CS_DATA_5:
		return true;
	default:
		return false;
	}
}

static bool fsl_xcvr_volatile_reg(struct device *dev, unsigned int reg)
{
	return fsl_xcvr_readable_reg(dev, reg);
}

static const struct regmap_config fsl_xcvr_regmap_cfg = {
	.reg_bits = 32,
	.reg_stride = 4,
	.val_bits = 32,
	.max_register = FSL_XCVR_MAX_REG,
	.reg_defaults = fsl_xcvr_reg_defaults,
	.num_reg_defaults = ARRAY_SIZE(fsl_xcvr_reg_defaults),
	.readable_reg = fsl_xcvr_readable_reg,
	.volatile_reg = fsl_xcvr_volatile_reg,
	.writeable_reg = fsl_xcvr_writeable_reg,
	.cache_type = REGCACHE_FLAT,
};

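/*
 * IRQ0 handler: acknowledges the events flagged in EXT_ISR and, on a "new
 * channel status" event, copies the updated CS buffer from data RAM page 8
 * into rx_iec958, bit-reversing each 32-bit word on the way.
 */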
static irqreturn_t irq0_isr(int irq, void *devid)
{
	struct fsl_xcvr *xcvr = (struct fsl_xcvr *)devid;
	struct device *dev = &xcvr->pdev->dev;
	struct regmap *regmap = xcvr->regmap;
	void __iomem *reg_ctrl, *reg_buff;
	u32 isr, isr_clr = 0, val, i;

	regmap_read(regmap, FSL_XCVR_EXT_ISR, &isr);

	if (isr & FSL_XCVR_IRQ_NEW_CS) {
		dev_dbg(dev, "Received new CS block\n");
		isr_clr |= FSL_XCVR_IRQ_NEW_CS;
		/* Data RAM is 4KiB, last two pages: 8 and 9. Select page 8. */
		regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_CTRL,
				   FSL_XCVR_EXT_CTRL_PAGE_MASK,
				   FSL_XCVR_EXT_CTRL_PAGE(8));

		/* Find updated CS buffer */
		reg_ctrl = xcvr->ram_addr + FSL_XCVR_RX_CS_CTRL_0;
		reg_buff = xcvr->ram_addr + FSL_XCVR_RX_CS_BUFF_0;
		memcpy_fromio(&val, reg_ctrl, sizeof(val));
		if (!val) {
			reg_ctrl = xcvr->ram_addr + FSL_XCVR_RX_CS_CTRL_1;
			reg_buff = xcvr->ram_addr + FSL_XCVR_RX_CS_BUFF_1;
			memcpy_fromio(&val, reg_ctrl, sizeof(val));
		}

		if (val) {
			/* copy CS buffer */
			memcpy_fromio(&xcvr->rx_iec958.status, reg_buff,
				      sizeof(xcvr->rx_iec958.status));
			for (i = 0; i < 6; i++) {
				val = *(u32 *)(xcvr->rx_iec958.status + i * 4);
				*(u32 *)(xcvr->rx_iec958.status + i * 4) =
					bitrev32(val);
			}
			/* clear CS control register */
			memset_io(reg_ctrl, 0, sizeof(val));
		}
	}
	if (isr & FSL_XCVR_IRQ_NEW_UD) {
		dev_dbg(dev, "Received new UD block\n");
		isr_clr |= FSL_XCVR_IRQ_NEW_UD;
	}
	if (isr & FSL_XCVR_IRQ_MUTE) {
		dev_dbg(dev, "HW mute bit detected\n");
		isr_clr |= FSL_XCVR_IRQ_MUTE;
	}
	if (isr & FSL_XCVR_IRQ_FIFO_UOFL_ERR) {
		dev_dbg(dev, "RX/TX FIFO full/empty\n");
		isr_clr |= FSL_XCVR_IRQ_FIFO_UOFL_ERR;
	}
	if (isr & FSL_XCVR_IRQ_ARC_MODE) {
		dev_dbg(dev, "CMDC SM falls out of eARC mode\n");
		isr_clr |= FSL_XCVR_IRQ_ARC_MODE;
	}
	if (isr & FSL_XCVR_IRQ_DMA_RD_REQ) {
		dev_dbg(dev, "DMA read request\n");
		isr_clr |= FSL_XCVR_IRQ_DMA_RD_REQ;
	}
	if (isr & FSL_XCVR_IRQ_DMA_WR_REQ) {
		dev_dbg(dev, "DMA write request\n");
		isr_clr |= FSL_XCVR_IRQ_DMA_WR_REQ;
	}

	if (isr_clr) {
		regmap_write(regmap, FSL_XCVR_EXT_ISR_CLR, isr_clr);
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}

static const struct fsl_xcvr_soc_data fsl_xcvr_imx8mp_data = {
	.fw_name = "imx/xcvr/xcvr-imx8mp.bin",
};

static const struct of_device_id fsl_xcvr_dt_ids[] = {
	{ .compatible = "fsl,imx8mp-xcvr", .data = &fsl_xcvr_imx8mp_data },
	{ /* sentinel */ }
};
MODULE_DEVICE_TABLE(of, fsl_xcvr_dt_ids);

static int fsl_xcvr_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct fsl_xcvr *xcvr;
	struct resource *rx_res, *tx_res;
	void __iomem *regs;
	int ret, irq;

	xcvr = devm_kzalloc(dev, sizeof(*xcvr), GFP_KERNEL);
	if (!xcvr)
		return -ENOMEM;

	xcvr->pdev = pdev;
	xcvr->soc_data = of_device_get_match_data(&pdev->dev);

	xcvr->ipg_clk = devm_clk_get(dev, "ipg");
	if (IS_ERR(xcvr->ipg_clk)) {
		dev_err(dev, "failed to get ipg clock\n");
		return PTR_ERR(xcvr->ipg_clk);
	}

	xcvr->phy_clk = devm_clk_get(dev, "phy");
	if (IS_ERR(xcvr->phy_clk)) {
		dev_err(dev, "failed to get phy clock\n");
		return PTR_ERR(xcvr->phy_clk);
	}

	xcvr->spba_clk = devm_clk_get(dev, "spba");
	if (IS_ERR(xcvr->spba_clk)) {
		dev_err(dev, "failed to get spba clock\n");
		return PTR_ERR(xcvr->spba_clk);
	}

	xcvr->pll_ipg_clk = devm_clk_get(dev, "pll_ipg");
	if (IS_ERR(xcvr->pll_ipg_clk)) {
		dev_err(dev, "failed to get pll_ipg clock\n");
		return PTR_ERR(xcvr->pll_ipg_clk);
	}

	xcvr->ram_addr = devm_platform_ioremap_resource_byname(pdev, "ram");
	if (IS_ERR(xcvr->ram_addr))
		return PTR_ERR(xcvr->ram_addr);

	regs = devm_platform_ioremap_resource_byname(pdev, "regs");
	if (IS_ERR(regs))
		return PTR_ERR(regs);

	xcvr->regmap = devm_regmap_init_mmio_clk(dev, NULL, regs,
						 &fsl_xcvr_regmap_cfg);
	if (IS_ERR(xcvr->regmap)) {
		dev_err(dev, "failed to init XCVR regmap: %ld\n",
			PTR_ERR(xcvr->regmap));
		return PTR_ERR(xcvr->regmap);
	}

	xcvr->reset = devm_reset_control_get_exclusive(dev, NULL);
	if (IS_ERR(xcvr->reset)) {
		dev_err(dev, "failed to get XCVR reset control\n");
		return PTR_ERR(xcvr->reset);
	}

	/* get IRQs */
	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ret = devm_request_irq(dev, irq, irq0_isr, 0, pdev->name, xcvr);
	if (ret) {
		dev_err(dev, "failed to claim IRQ0: %i\n", ret);
		return ret;
	}

	rx_res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "rxfifo");
	tx_res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "txfifo");
	if (!rx_res || !tx_res) {
		dev_err(dev, "could not find rxfifo or txfifo resource\n");
		return -EINVAL;
	}
	xcvr->dma_prms_rx.chan_name = "rx";
	xcvr->dma_prms_tx.chan_name = "tx";
	xcvr->dma_prms_rx.addr = rx_res->start;
	xcvr->dma_prms_tx.addr = tx_res->start;
	xcvr->dma_prms_rx.maxburst = FSL_XCVR_MAXBURST_RX;
	xcvr->dma_prms_tx.maxburst = FSL_XCVR_MAXBURST_TX;

	platform_set_drvdata(pdev, xcvr);
	pm_runtime_enable(dev);
	regcache_cache_only(xcvr->regmap, true);

	/*
	 * Register the platform component before the CPU DAI: there is no
	 * deferred probe for the platform component in
	 * snd_soc_add_pcm_runtime().
	 */
	ret = devm_snd_dmaengine_pcm_register(dev, NULL, 0);
	if (ret) {
		dev_err(dev, "failed to register PCM\n");
		return ret;
	}

	ret = devm_snd_soc_register_component(dev, &fsl_xcvr_comp,
					      &fsl_xcvr_dai, 1);
	if (ret) {
		dev_err(dev, "failed to register component %s\n",
			fsl_xcvr_comp.name);
	}

	return ret;
}

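/*
 * Runtime PM: suspend masks the eARC interrupts, holds the M0+ core in
 * reset and gates all clocks; resume re-enables the clocks, restores the
 * register cache, reloads the firmware and only then releases the M0+ core.
 */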
static __maybe_unused int fsl_xcvr_runtime_suspend(struct device *dev)
{
	struct fsl_xcvr *xcvr = dev_get_drvdata(dev);
	int ret;

	/*
	 * Clear interrupts: when a stream starts or resumes after suspend,
	 * interrupts are re-enabled in prepare(), so there is no need to
	 * enable them again in resume().
	 */
	ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_IER0,
				 FSL_XCVR_IRQ_EARC_ALL, 0);
	if (ret < 0)
		dev_err(dev, "Failed to clear IER0: %d\n", ret);

	/* Assert M0+ reset */
	ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_CTRL,
				 FSL_XCVR_EXT_CTRL_CORE_RESET,
				 FSL_XCVR_EXT_CTRL_CORE_RESET);
	if (ret < 0)
		dev_err(dev, "Failed to assert M0+ core: %d\n", ret);

	regcache_cache_only(xcvr->regmap, true);

	clk_disable_unprepare(xcvr->spba_clk);
	clk_disable_unprepare(xcvr->phy_clk);
	clk_disable_unprepare(xcvr->pll_ipg_clk);
	clk_disable_unprepare(xcvr->ipg_clk);

	return 0;
}

static __maybe_unused int fsl_xcvr_runtime_resume(struct device *dev)
{
	struct fsl_xcvr *xcvr = dev_get_drvdata(dev);
	int ret;

	ret = reset_control_assert(xcvr->reset);
	if (ret < 0) {
		dev_err(dev, "Failed to assert M0+ reset: %d\n", ret);
		return ret;
	}

	ret = clk_prepare_enable(xcvr->ipg_clk);
	if (ret) {
		dev_err(dev, "failed to start IPG clock.\n");
		return ret;
	}

	ret = clk_prepare_enable(xcvr->pll_ipg_clk);
	if (ret) {
		dev_err(dev, "failed to start PLL IPG clock.\n");
		goto stop_ipg_clk;
	}

	ret = clk_prepare_enable(xcvr->phy_clk);
	if (ret) {
		dev_err(dev, "failed to start PHY clock: %d\n", ret);
		goto stop_pll_ipg_clk;
	}

	ret = clk_prepare_enable(xcvr->spba_clk);
	if (ret) {
		dev_err(dev, "failed to start SPBA clock.\n");
		goto stop_phy_clk;
	}

	regcache_cache_only(xcvr->regmap, false);
	regcache_mark_dirty(xcvr->regmap);
	ret = regcache_sync(xcvr->regmap);
	if (ret) {
		dev_err(dev, "failed to sync regcache.\n");
		goto stop_spba_clk;
	}

	ret = reset_control_deassert(xcvr->reset);
	if (ret) {
		dev_err(dev, "failed to deassert M0+ reset.\n");
		goto stop_spba_clk;
	}

	ret = fsl_xcvr_load_firmware(xcvr);
	if (ret) {
		dev_err(dev, "failed to load firmware.\n");
		goto stop_spba_clk;
	}

	/* Release M0+ reset */
	ret = regmap_update_bits(xcvr->regmap, FSL_XCVR_EXT_CTRL,
				 FSL_XCVR_EXT_CTRL_CORE_RESET, 0);
	if (ret < 0) {
		dev_err(dev, "M0+ core release failed: %d\n", ret);
		goto stop_spba_clk;
	}

	/* Let M0+ core complete firmware initialization */
	msleep(50);

	return 0;

stop_spba_clk:
	clk_disable_unprepare(xcvr->spba_clk);
stop_phy_clk:
	clk_disable_unprepare(xcvr->phy_clk);
stop_pll_ipg_clk:
	clk_disable_unprepare(xcvr->pll_ipg_clk);
stop_ipg_clk:
	clk_disable_unprepare(xcvr->ipg_clk);

	return ret;
}

static const struct dev_pm_ops fsl_xcvr_pm_ops = {
	SET_RUNTIME_PM_OPS(fsl_xcvr_runtime_suspend,
			   fsl_xcvr_runtime_resume,
			   NULL)
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
};

static struct platform_driver fsl_xcvr_driver = {
	.probe = fsl_xcvr_probe,
	.driver = {
		.name = "fsl,imx8mp-audio-xcvr",
		.pm = &fsl_xcvr_pm_ops,
		.of_match_table = fsl_xcvr_dt_ids,
	},
};
module_platform_driver(fsl_xcvr_driver);

MODULE_AUTHOR("Viorel Suman <viorel.suman@nxp.com>");
MODULE_DESCRIPTION("NXP Audio Transceiver (XCVR) driver");
MODULE_LICENSE("GPL v2");