Lines Matching full:i2s
9 * The I2S interface consists of two ring buffers - one for RX and one for
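The header comment above (cut mid-sentence because only the matching line is shown) describes the driver's central data structure: one DMA ring buffer per direction, with the hardware advancing one index and the CPU advancing the other. Purely as an illustration of that contract, and not code taken from chv3-i2s.c, the model looks roughly like this (all names here are hypothetical):

/*
 * Illustrative ring-buffer model, not part of the driver. For RX the
 * hardware advances the producer index as samples arrive and software
 * advances the consumer index as it reads them out; for TX the roles
 * are swapped. Both indices are byte offsets that wrap at the buffer size.
 */
struct chv3_ring_model {
	unsigned int producer;	/* byte index advanced by the producing side */
	unsigned int consumer;	/* byte index advanced by the consuming side */
	unsigned int size;	/* total ring size in bytes */
};

/* Bytes currently queued in the ring, accounting for wrap-around. */
static unsigned int ring_bytes_used(const struct chv3_ring_model *r)
{
	return (r->producer - r->consumer + r->size) % r->size;
}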
70 .name = "chv3-i2s",
101 static inline void chv3_i2s_wr(struct chv3_i2s_dev *i2s, int offset, u32 val) in chv3_i2s_wr()
103 writel(val, i2s->iobase + offset); in chv3_i2s_wr()
106 static inline u32 chv3_i2s_rd(struct chv3_i2s_dev *i2s, int offset) in chv3_i2s_rd()
108 return readl(i2s->iobase + offset); in chv3_i2s_rd()
113 struct chv3_i2s_dev *i2s = data; in chv3_i2s_isr()
116 reg = readl(i2s->iobase_irq + I2S_IRQ_CLR); in chv3_i2s_isr()
121 snd_pcm_period_elapsed(i2s->rx_substream); in chv3_i2s_isr()
124 snd_pcm_period_elapsed(i2s->tx_substream); in chv3_i2s_isr()
126 writel(reg, i2s->iobase_irq + I2S_IRQ_CLR); in chv3_i2s_isr()
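Taken together, the interrupt-handler fragments above follow the standard ALSA pattern: read the interrupt status, report an elapsed period for whichever direction fired, then write the value back to acknowledge it. A minimal sketch of that flow, assuming the private-struct fields visible in the listing (iobase_irq, rx_substream, tx_substream) and that the I2S_IRQ_RX_BIT/I2S_IRQ_TX_BIT masks used further down match the bits returned by the I2S_IRQ_CLR read; this is a reconstruction, not a verbatim copy of the driver:

static irqreturn_t chv3_i2s_isr_sketch(int irq, void *data)
{
	struct chv3_i2s_dev *i2s = data;
	u32 reg;

	/* Read the pending interrupt bits; zero means the IRQ is not ours. */
	reg = readl(i2s->iobase_irq + I2S_IRQ_CLR);
	if (!reg)
		return IRQ_NONE;

	/* One more period of audio has been captured/played. */
	if (reg & I2S_IRQ_RX_BIT)
		snd_pcm_period_elapsed(i2s->rx_substream);
	if (reg & I2S_IRQ_TX_BIT)
		snd_pcm_period_elapsed(i2s->tx_substream);

	/* Write the handled bits back to clear them. */
	writel(reg, i2s->iobase_irq + I2S_IRQ_CLR);

	return IRQ_HANDLED;
}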
135 struct chv3_i2s_dev *i2s = snd_soc_dai_get_drvdata(asoc_rtd_to_cpu(rtd, 0)); in chv3_dma_open()
146 i2s->rx_substream = substream; in chv3_dma_open()
148 i2s->tx_substream = substream; in chv3_dma_open()
156 struct chv3_i2s_dev *i2s = snd_soc_dai_get_drvdata(asoc_rtd_to_cpu(rtd, 0)); in chv3_dma_close()
159 chv3_i2s_wr(i2s, I2S_RX_ENABLE, 0); in chv3_dma_close()
161 chv3_i2s_wr(i2s, I2S_TX_ENABLE, 0); in chv3_dma_close()
169 struct chv3_i2s_dev *i2s = snd_soc_dai_get_drvdata(asoc_rtd_to_cpu(rtd, 0)); in chv3_dma_pcm_construct()
175 res = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, i2s->dev, in chv3_dma_pcm_construct()
183 res = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, i2s->dev, in chv3_dma_pcm_construct()
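The two snd_dma_alloc_pages() calls above preallocate one coherent DMA buffer per direction when the PCM is constructed, which is what later lets the prepare step hand substream->dma_buffer.addr straight to the hardware. A hedged sketch of that step, with the buffer size and error handling made up for the example:

/* CHV3_SKETCH_BUF_SIZE is illustrative; the driver's actual maximum differs. */
#define CHV3_SKETCH_BUF_SIZE	(256 * 1024)

static int chv3_pcm_construct_sketch(struct chv3_i2s_dev *i2s, struct snd_pcm *pcm)
{
	struct snd_pcm_substream *ss;
	int res;

	/* Capture buffer, if the PCM has a capture stream. */
	ss = pcm->streams[SNDRV_PCM_STREAM_CAPTURE].substream;
	if (ss) {
		res = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, i2s->dev,
					  CHV3_SKETCH_BUF_SIZE, &ss->dma_buffer);
		if (res)
			return res;
	}

	/* Playback buffer, if the PCM has a playback stream. */
	ss = pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream;
	if (ss) {
		res = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, i2s->dev,
					  CHV3_SKETCH_BUF_SIZE, &ss->dma_buffer);
		if (res)
			return res;
	}

	return 0;
}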
204 struct chv3_i2s_dev *i2s = snd_soc_dai_get_drvdata(asoc_rtd_to_cpu(rtd, 0)); in chv3_dma_prepare()
212 chv3_i2s_wr(i2s, I2S_SOFT_RESET, I2S_SOFT_RESET_RX_BIT); in chv3_dma_prepare()
213 chv3_i2s_wr(i2s, I2S_RX_BASE_ADDR, substream->dma_buffer.addr); in chv3_dma_prepare()
214 chv3_i2s_wr(i2s, I2S_RX_BUFFER_SIZE, buffer_bytes); in chv3_dma_prepare()
215 chv3_i2s_wr(i2s, I2S_RX_IRQ, (period_size << 8) | 1); in chv3_dma_prepare()
216 chv3_i2s_wr(i2s, I2S_RX_ENABLE, 1); in chv3_dma_prepare()
218 chv3_i2s_wr(i2s, I2S_SOFT_RESET, I2S_SOFT_RESET_TX_BIT); in chv3_dma_prepare()
219 chv3_i2s_wr(i2s, I2S_TX_BASE_ADDR, substream->dma_buffer.addr); in chv3_dma_prepare()
220 chv3_i2s_wr(i2s, I2S_TX_BUFFER_SIZE, buffer_bytes); in chv3_dma_prepare()
221 chv3_i2s_wr(i2s, I2S_TX_IRQ, ((period_bytes / i2s->tx_bytes_to_fetch) << 8) | 1); in chv3_dma_prepare()
222 chv3_i2s_wr(i2s, I2S_TX_ENABLE, 1); in chv3_dma_prepare()
224 writel(I2S_IRQ_RX_BIT | I2S_IRQ_TX_BIT, i2s->iobase_irq + I2S_IRQ_MASK); in chv3_dma_prepare()
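The prepare fragments are where the ring buffers from the header comment get programmed: each direction is soft-reset, pointed at its DMA buffer, told how big the ring is and how often to raise a period interrupt, then enabled, and finally the interrupt sources are unmasked. A condensed sketch of the capture half, assuming buffer_bytes and period_size are precomputed from the ALSA runtime as in the fragments, and assuming (not confirmed by any documentation) that bit 0 of the IRQ register is an enable bit:

static void chv3_prepare_rx_sketch(struct chv3_i2s_dev *i2s,
				   struct snd_pcm_substream *substream,
				   u32 buffer_bytes, u32 period_size)
{
	/* Put the RX engine into a known state before reprogramming it. */
	chv3_i2s_wr(i2s, I2S_SOFT_RESET, I2S_SOFT_RESET_RX_BIT);

	/* Point the hardware at the preallocated DMA ring and size it. */
	chv3_i2s_wr(i2s, I2S_RX_BASE_ADDR, substream->dma_buffer.addr);
	chv3_i2s_wr(i2s, I2S_RX_BUFFER_SIZE, buffer_bytes);

	/* Interrupt every period_size units; bit 0 presumably enables the IRQ. */
	chv3_i2s_wr(i2s, I2S_RX_IRQ, (period_size << 8) | 1);

	/* Start RX and unmask its interrupt (the driver unmasks RX and TX together). */
	chv3_i2s_wr(i2s, I2S_RX_ENABLE, 1);
	writel(I2S_IRQ_RX_BIT, i2s->iobase_irq + I2S_IRQ_MASK);
}

The TX side mirrors this, except that its interrupt interval is expressed in fetch units, which is why line 221 divides period_bytes by the tx_bytes_to_fetch value read at probe time.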
233 struct chv3_i2s_dev *i2s = snd_soc_dai_get_drvdata(asoc_rtd_to_cpu(rtd, 0)); in chv3_dma_pointer()
241 idx_bytes = chv3_i2s_rd(i2s, I2S_RX_PRODUCER_IDX); in chv3_dma_pointer()
243 idx_bytes = chv3_i2s_rd(i2s, I2S_TX_CONSUMER_IDX); in chv3_dma_pointer()
256 struct chv3_i2s_dev *i2s = snd_soc_dai_get_drvdata(asoc_rtd_to_cpu(rtd, 0)); in chv3_dma_ack()
263 chv3_i2s_wr(i2s, I2S_RX_CONSUMER_IDX, idx); in chv3_dma_ack()
265 chv3_i2s_wr(i2s, I2S_TX_PRODUCER_IDX, idx); in chv3_dma_ack()
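The pointer and ack fragments are the software side of the ring: .pointer reads the index the hardware advances (the RX producer or the TX consumer) so ALSA knows how far the device has got, and .ack writes the application's position back to the index the CPU owns (the RX consumer or the TX producer). A rough sketch of the capture direction, using the standard ALSA byte/frame helpers; the driver's exact arithmetic may differ slightly:

static snd_pcm_uframes_t chv3_rx_pointer_sketch(struct chv3_i2s_dev *i2s,
						struct snd_pcm_substream *substream)
{
	/* Byte offset the hardware (the producer) has written up to. */
	u32 idx_bytes = chv3_i2s_rd(i2s, I2S_RX_PRODUCER_IDX);

	return bytes_to_frames(substream->runtime, idx_bytes);
}

static void chv3_rx_ack_sketch(struct chv3_i2s_dev *i2s,
			       struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	u32 idx;

	/* Application pointer converted to a byte offset inside the ring. */
	idx = frames_to_bytes(runtime, runtime->control->appl_ptr) %
	      snd_pcm_lib_buffer_bytes(substream);

	/* Tell the hardware how far software (the consumer) has read. */
	chv3_i2s_wr(i2s, I2S_RX_CONSUMER_IDX, idx);
}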
271 .name = "chv3-i2s-comp",
283 struct chv3_i2s_dev *i2s; in chv3_i2s_probe()
287 i2s = devm_kzalloc(&pdev->dev, sizeof(*i2s), GFP_KERNEL); in chv3_i2s_probe()
288 if (!i2s) in chv3_i2s_probe()
291 i2s->iobase = devm_platform_ioremap_resource(pdev, 0); in chv3_i2s_probe()
292 if (IS_ERR(i2s->iobase)) in chv3_i2s_probe()
293 return PTR_ERR(i2s->iobase); in chv3_i2s_probe()
295 i2s->iobase_irq = devm_platform_ioremap_resource(pdev, 1); in chv3_i2s_probe()
296 if (IS_ERR(i2s->iobase_irq)) in chv3_i2s_probe()
297 return PTR_ERR(i2s->iobase_irq); in chv3_i2s_probe()
299 i2s->tx_bytes_to_fetch = (chv3_i2s_rd(i2s, I2S_TX_IRQ_CONST) >> 8) & 0xffff; in chv3_i2s_probe()
301 i2s->dev = &pdev->dev; in chv3_i2s_probe()
302 dev_set_drvdata(&pdev->dev, i2s); in chv3_i2s_probe()
307 res = devm_request_irq(i2s->dev, irq, chv3_i2s_isr, 0, "chv3-i2s", i2s); in chv3_i2s_probe()
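Probe, per the fragments, is plain resource wiring: allocate the private struct, map the two register regions (the data registers and the separate IRQ block), read the TX fetch size the hardware advertises, store the struct as driver data, and install the interrupt handler. The one non-obvious step is the I2S_TX_IRQ_CONST decode on line 299; restated with the field layout that the shift and mask imply (inferred, not taken from documentation):

static u32 chv3_tx_bytes_to_fetch_sketch(struct chv3_i2s_dev *i2s)
{
	u32 reg = chv3_i2s_rd(i2s, I2S_TX_IRQ_CONST);

	/* Bits [23:8]: bytes the TX engine fetches per transfer. */
	return (reg >> 8) & 0xffff;
}

This value feeds the TX interrupt programming in prepare, where the period length in bytes is divided by it before being written to I2S_TX_IRQ.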
322 { .compatible = "google,chv3-i2s" },
330 .name = "chv3-i2s",
338 MODULE_DESCRIPTION("Chameleon v3 I2S interface");