Lines Matching +full:exynos5433 +full:- +full:spi
1 // SPDX-License-Identifier: GPL-2.0+
10 #include <linux/dma-mapping.h>
16 #include <linux/platform_data/spi-s3c64xx.h>
19 #include <linux/spi/spi.h>
25 /* Registers and bit-fields */
108 #define FIFO_LVL_MASK(i) ((i)->port_conf->fifo_lvl_mask[i->port_id])
110 (1 << (i)->port_conf->tx_st_done)) ? 1 : 0)
111 #define TX_FIFO_LVL(v, sdd) (((v) & (sdd)->tx_fifomask) >> \
112 __ffs((sdd)->tx_fifomask))
113 #define RX_FIFO_LVL(v, sdd) (((v) & (sdd)->rx_fifomask) >> \
114 __ffs((sdd)->rx_fifomask))
125 #define is_polling(x) (x->cntrlr_info->polling)
137 * struct s3c64xx_spi_port_config - SPI Controller hardware info
138 * @fifo_lvl_mask: Bit-mask for {TX|RX}_FIFO_LVL bits in SPI_STATUS register.
153 * The Samsung s3c64xx SPI controllers are used on various Samsung SoCs but
154 * differ in some aspects such as the size of the FIFO and SPI bus clock
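
The per-SoC differences described above are captured in a table of struct s3c64xx_spi_port_config entries. A rough sketch only: the field names are the ones referenced elsewhere in this listing, but the values and the symbol name are illustrative placeholders, not the driver's actual exynos5433 entry.

/*
 * Hedged sketch, not the real table: values are placeholders chosen only
 * to show how the fields referenced in this listing fit together.
 */
static const struct s3c64xx_spi_port_config example_spi_port_config = {
	.fifo_lvl_mask	= { 0x1ff, 0x7f, 0x7f },	/* per-port FIFO level mask */
	.rx_lvl_offset	= 15,				/* RX level bits start here */
	.tx_st_done	= 25,				/* TX state "done" status bit */
	.clk_div	= 2,				/* fixed pre-divider */
	.high_speed	= true,				/* extra RX feedback setting at >= 30 MHz */
	.clk_from_cmu	= true,				/* bus clock set via the clk API */
	.clk_ioclk	= true,				/* extra "spi_ioclk" required */
	.quirks		= S3C64XX_SPI_QUIRK_CS_AUTO,	/* hardware-controlled chip select */
};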
173 * struct s3c64xx_spi_driver_data - Runtime info holder for SPI driver.
174 * @clk: Pointer to the spi clock.
175 * @src_clk: Pointer to the clock used to generate SPI signals.
178 * @host: Pointer to the SPI Protocol host.
182 * @sfr_start: BUS address of SPI controller regs.
190 * @port_conf: Local SPI port configuration data
223 void __iomem *regs = sdd->regs; in s3c64xx_flush_fifo()
242 } while (TX_FIFO_LVL(val, sdd) && --loops); in s3c64xx_flush_fifo()
245 dev_warn(&sdd->pdev->dev, "Timed out flushing TX FIFO\n"); in s3c64xx_flush_fifo()
255 } while (--loops); in s3c64xx_flush_fifo()
258 dev_warn(&sdd->pdev->dev, "Timed out flushing RX FIFO\n"); in s3c64xx_flush_fifo()
275 if (dma->direction == DMA_DEV_TO_MEM) in s3c64xx_spi_dmacb()
282 spin_lock_irqsave(&sdd->lock, flags); in s3c64xx_spi_dmacb()
284 if (dma->direction == DMA_DEV_TO_MEM) { in s3c64xx_spi_dmacb()
285 sdd->state &= ~RXBUSY; in s3c64xx_spi_dmacb()
286 if (!(sdd->state & TXBUSY)) in s3c64xx_spi_dmacb()
287 complete(&sdd->xfer_completion); in s3c64xx_spi_dmacb()
289 sdd->state &= ~TXBUSY; in s3c64xx_spi_dmacb()
290 if (!(sdd->state & RXBUSY)) in s3c64xx_spi_dmacb()
291 complete(&sdd->xfer_completion); in s3c64xx_spi_dmacb()
294 spin_unlock_irqrestore(&sdd->lock, flags); in s3c64xx_spi_dmacb()
307 if (dma->direction == DMA_DEV_TO_MEM) { in prepare_dma()
310 config.direction = dma->direction; in prepare_dma()
311 config.src_addr = sdd->sfr_start + S3C64XX_SPI_RX_DATA; in prepare_dma()
312 config.src_addr_width = sdd->cur_bpw / 8; in prepare_dma()
314 dmaengine_slave_config(dma->ch, &config); in prepare_dma()
318 config.direction = dma->direction; in prepare_dma()
319 config.dst_addr = sdd->sfr_start + S3C64XX_SPI_TX_DATA; in prepare_dma()
320 config.dst_addr_width = sdd->cur_bpw / 8; in prepare_dma()
322 dmaengine_slave_config(dma->ch, &config); in prepare_dma()
325 desc = dmaengine_prep_slave_sg(dma->ch, sgt->sgl, sgt->nents, in prepare_dma()
326 dma->direction, DMA_PREP_INTERRUPT); in prepare_dma()
328 dev_err(&sdd->pdev->dev, "unable to prepare %s scatterlist\n", in prepare_dma()
329 dma->direction == DMA_DEV_TO_MEM ? "rx" : "tx"); in prepare_dma()
330 return -ENOMEM; in prepare_dma()
333 desc->callback = s3c64xx_spi_dmacb; in prepare_dma()
334 desc->callback_param = dma; in prepare_dma()
336 dma->cookie = dmaengine_submit(desc); in prepare_dma()
337 ret = dma_submit_error(dma->cookie); in prepare_dma()
339 dev_err(&sdd->pdev->dev, "DMA submission failed\n"); in prepare_dma()
340 return -EIO; in prepare_dma()
343 dma_async_issue_pending(dma->ch); in prepare_dma()
347 static void s3c64xx_spi_set_cs(struct spi_device *spi, bool enable) in s3c64xx_spi_set_cs() argument
350 spi_controller_get_devdata(spi->controller); in s3c64xx_spi_set_cs()
352 if (sdd->cntrlr_info->no_cs) in s3c64xx_spi_set_cs()
356 if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) { in s3c64xx_spi_set_cs()
357 writel(0, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
359 u32 ssel = readl(sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
363 writel(ssel, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
366 if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) in s3c64xx_spi_set_cs()
368 sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
372 static int s3c64xx_spi_prepare_transfer(struct spi_controller *spi) in s3c64xx_spi_prepare_transfer() argument
374 struct s3c64xx_spi_driver_data *sdd = spi_controller_get_devdata(spi); in s3c64xx_spi_prepare_transfer()
380 sdd->rx_dma.ch = dma_request_chan(&sdd->pdev->dev, "rx"); in s3c64xx_spi_prepare_transfer()
381 if (IS_ERR(sdd->rx_dma.ch)) { in s3c64xx_spi_prepare_transfer()
382 dev_err(&sdd->pdev->dev, "Failed to get RX DMA channel\n"); in s3c64xx_spi_prepare_transfer()
383 sdd->rx_dma.ch = NULL; in s3c64xx_spi_prepare_transfer()
387 sdd->tx_dma.ch = dma_request_chan(&sdd->pdev->dev, "tx"); in s3c64xx_spi_prepare_transfer()
388 if (IS_ERR(sdd->tx_dma.ch)) { in s3c64xx_spi_prepare_transfer()
389 dev_err(&sdd->pdev->dev, "Failed to get TX DMA channel\n"); in s3c64xx_spi_prepare_transfer()
390 dma_release_channel(sdd->rx_dma.ch); in s3c64xx_spi_prepare_transfer()
391 sdd->tx_dma.ch = NULL; in s3c64xx_spi_prepare_transfer()
392 sdd->rx_dma.ch = NULL; in s3c64xx_spi_prepare_transfer()
396 spi->dma_rx = sdd->rx_dma.ch; in s3c64xx_spi_prepare_transfer()
397 spi->dma_tx = sdd->tx_dma.ch; in s3c64xx_spi_prepare_transfer()
402 static int s3c64xx_spi_unprepare_transfer(struct spi_controller *spi) in s3c64xx_spi_unprepare_transfer() argument
404 struct s3c64xx_spi_driver_data *sdd = spi_controller_get_devdata(spi); in s3c64xx_spi_unprepare_transfer()
410 if (sdd->rx_dma.ch && sdd->tx_dma.ch) { in s3c64xx_spi_unprepare_transfer()
411 dma_release_channel(sdd->rx_dma.ch); in s3c64xx_spi_unprepare_transfer()
412 dma_release_channel(sdd->tx_dma.ch); in s3c64xx_spi_unprepare_transfer()
413 sdd->rx_dma.ch = NULL; in s3c64xx_spi_unprepare_transfer()
414 sdd->tx_dma.ch = NULL; in s3c64xx_spi_unprepare_transfer()
421 struct spi_device *spi, in s3c64xx_spi_can_dma() argument
426 if (sdd->rx_dma.ch && sdd->tx_dma.ch) in s3c64xx_spi_can_dma()
427 return xfer->len >= sdd->fifo_depth; in s3c64xx_spi_can_dma()
435 void __iomem *regs = sdd->regs; in s3c64xx_enable_datapath()
453 writel(((xfer->len * 8 / sdd->cur_bpw) & 0xffff) in s3c64xx_enable_datapath()
458 if (xfer->tx_buf != NULL) { in s3c64xx_enable_datapath()
459 sdd->state |= TXBUSY; in s3c64xx_enable_datapath()
463 ret = prepare_dma(&sdd->tx_dma, &xfer->tx_sg); in s3c64xx_enable_datapath()
465 switch (sdd->cur_bpw) { in s3c64xx_enable_datapath()
468 xfer->tx_buf, xfer->len / 4); in s3c64xx_enable_datapath()
472 xfer->tx_buf, xfer->len / 2); in s3c64xx_enable_datapath()
476 xfer->tx_buf, xfer->len); in s3c64xx_enable_datapath()
482 if (xfer->rx_buf != NULL) { in s3c64xx_enable_datapath()
483 sdd->state |= RXBUSY; in s3c64xx_enable_datapath()
485 if (sdd->port_conf->high_speed && sdd->cur_speed >= 30000000UL in s3c64xx_enable_datapath()
486 && !(sdd->cur_mode & SPI_CPHA)) in s3c64xx_enable_datapath()
492 writel(((xfer->len * 8 / sdd->cur_bpw) & 0xffff) in s3c64xx_enable_datapath()
495 ret = prepare_dma(&sdd->rx_dma, &xfer->rx_sg); in s3c64xx_enable_datapath()
511 void __iomem *regs = sdd->regs; in s3c64xx_spi_wait_for_timeout()
514 u32 max_fifo = sdd->fifo_depth; in s3c64xx_spi_wait_for_timeout()
521 } while (RX_FIFO_LVL(status, sdd) < max_fifo && --val); in s3c64xx_spi_wait_for_timeout()
530 void __iomem *regs = sdd->regs; in s3c64xx_wait_for_dma()
536 ms = xfer->len * 8 * 1000 / sdd->cur_speed; in s3c64xx_wait_for_dma()
541 val = wait_for_completion_timeout(&sdd->xfer_completion, val); in s3c64xx_wait_for_dma()
545 * proceed further else return -EIO. in s3c64xx_wait_for_dma()
552 if (val && !xfer->rx_buf) { in s3c64xx_wait_for_dma()
557 && --val) { in s3c64xx_wait_for_dma()
566 return -EIO; in s3c64xx_wait_for_dma()
574 void __iomem *regs = sdd->regs; in s3c64xx_wait_for_pio()
584 time_us = (xfer->len * 8 * 1000 * 1000) / sdd->cur_speed; in s3c64xx_wait_for_pio()
590 if (RX_FIFO_LVL(status, sdd) < xfer->len) in s3c64xx_wait_for_pio()
595 if (!wait_for_completion_timeout(&sdd->xfer_completion, val)) in s3c64xx_wait_for_pio()
596 return -EIO; in s3c64xx_wait_for_pio()
602 } while (RX_FIFO_LVL(status, sdd) < xfer->len && --val); in s3c64xx_wait_for_pio()
605 return -EIO; in s3c64xx_wait_for_pio()
608 if (!xfer->rx_buf) { in s3c64xx_wait_for_pio()
609 sdd->state &= ~TXBUSY; in s3c64xx_wait_for_pio()
621 loops = xfer->len / sdd->fifo_depth; in s3c64xx_wait_for_pio()
622 buf = xfer->rx_buf; in s3c64xx_wait_for_pio()
628 switch (sdd->cur_bpw) { in s3c64xx_wait_for_pio()
644 } while (loops--); in s3c64xx_wait_for_pio()
645 sdd->state &= ~RXBUSY; in s3c64xx_wait_for_pio()
652 void __iomem *regs = sdd->regs; in s3c64xx_spi_config()
655 int div = sdd->port_conf->clk_div; in s3c64xx_spi_config()
658 if (!sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_config()
670 if (sdd->cur_mode & SPI_CPOL) in s3c64xx_spi_config()
673 if (sdd->cur_mode & SPI_CPHA) in s3c64xx_spi_config()
683 switch (sdd->cur_bpw) { in s3c64xx_spi_config()
698 if ((sdd->cur_mode & SPI_LOOP) && sdd->port_conf->has_loopback) in s3c64xx_spi_config()
705 if (sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_config()
706 ret = clk_set_rate(sdd->src_clk, sdd->cur_speed * div); in s3c64xx_spi_config()
709 sdd->cur_speed = clk_get_rate(sdd->src_clk) / div; in s3c64xx_spi_config()
714 val |= ((clk_get_rate(sdd->src_clk) / sdd->cur_speed / div - 1) in s3c64xx_spi_config()
733 struct spi_device *spi = msg->spi; in s3c64xx_spi_prepare_message() local
734 struct s3c64xx_spi_csinfo *cs = spi->controller_data; in s3c64xx_spi_prepare_message()
739 writel(0, sdd->regs + S3C64XX_SPI_FB_CLK); in s3c64xx_spi_prepare_message()
741 writel(cs->fb_delay & 0x3, sdd->regs + S3C64XX_SPI_FB_CLK); in s3c64xx_spi_prepare_message()
746 static size_t s3c64xx_spi_max_transfer_size(struct spi_device *spi) in s3c64xx_spi_max_transfer_size() argument
748 struct spi_controller *ctlr = spi->controller; in s3c64xx_spi_max_transfer_size()
750 return ctlr->can_dma ? S3C64XX_SPI_PACKET_CNT_MASK : SIZE_MAX; in s3c64xx_spi_max_transfer_size()
754 struct spi_device *spi, in s3c64xx_spi_transfer_one() argument
758 const unsigned int fifo_len = sdd->fifo_depth; in s3c64xx_spi_transfer_one()
771 reinit_completion(&sdd->xfer_completion); in s3c64xx_spi_transfer_one()
774 bpw = xfer->bits_per_word; in s3c64xx_spi_transfer_one()
775 speed = xfer->speed_hz; in s3c64xx_spi_transfer_one()
777 if (bpw != sdd->cur_bpw || speed != sdd->cur_speed) { in s3c64xx_spi_transfer_one()
778 sdd->cur_bpw = bpw; in s3c64xx_spi_transfer_one()
779 sdd->cur_speed = speed; in s3c64xx_spi_transfer_one()
780 sdd->cur_mode = spi->mode; in s3c64xx_spi_transfer_one()
786 if (!is_polling(sdd) && xfer->len >= fifo_len && in s3c64xx_spi_transfer_one()
787 sdd->rx_dma.ch && sdd->tx_dma.ch) { in s3c64xx_spi_transfer_one()
789 } else if (xfer->len >= fifo_len) { in s3c64xx_spi_transfer_one()
790 tx_buf = xfer->tx_buf; in s3c64xx_spi_transfer_one()
791 rx_buf = xfer->rx_buf; in s3c64xx_spi_transfer_one()
792 origin_len = xfer->len; in s3c64xx_spi_transfer_one()
793 target_len = xfer->len; in s3c64xx_spi_transfer_one()
794 xfer->len = fifo_len - 1; in s3c64xx_spi_transfer_one()
799 if (!use_dma && xfer->len > S3C64XX_SPI_POLLING_SIZE) in s3c64xx_spi_transfer_one()
803 reinit_completion(&sdd->xfer_completion); in s3c64xx_spi_transfer_one()
805 rdy_lv = xfer->len; in s3c64xx_spi_transfer_one()
808 * fifo_lvl up to 64 byte -> N bytes in s3c64xx_spi_transfer_one()
809 * 128 byte -> RDY_LVL * 2 bytes in s3c64xx_spi_transfer_one()
810 * 256 byte -> RDY_LVL * 4 bytes in s3c64xx_spi_transfer_one()
817 val = readl(sdd->regs + S3C64XX_SPI_MODE_CFG); in s3c64xx_spi_transfer_one()
820 writel(val, sdd->regs + S3C64XX_SPI_MODE_CFG); in s3c64xx_spi_transfer_one()
823 val = readl(sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_transfer_one()
825 sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_transfer_one()
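
The RDY_LVL comment above says the hardware interprets the RX ready level in byte units on FIFOs up to 64 bytes, but in 2-byte and 4-byte units on 128- and 256-byte FIFOs. Below is a hedged sketch of that scaling; the helper name is made up, and the driver does the equivalent inline in code elided from this listing.

/*
 * Hedged sketch of the RDY_LVL scaling described in the comment above.
 * The helper name is hypothetical.
 */
static u32 example_rx_rdy_lvl(u32 bytes, u32 fifo_len)
{
	if (fifo_len == 256)
		return bytes / 4;	/* field counts 4-byte units */
	if (fifo_len == 128)
		return bytes / 2;	/* field counts 2-byte units */
	return bytes;			/* <= 64-byte FIFO: plain byte count */
}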
829 spin_lock_irqsave(&sdd->lock, flags); in s3c64xx_spi_transfer_one()
832 sdd->state &= ~RXBUSY; in s3c64xx_spi_transfer_one()
833 sdd->state &= ~TXBUSY; in s3c64xx_spi_transfer_one()
836 s3c64xx_spi_set_cs(spi, true); in s3c64xx_spi_transfer_one()
840 spin_unlock_irqrestore(&sdd->lock, flags); in s3c64xx_spi_transfer_one()
843 dev_err(&spi->dev, "failed to enable data path for transfer: %d\n", status); in s3c64xx_spi_transfer_one()
853 dev_err(&spi->dev, in s3c64xx_spi_transfer_one()
854 "I/O Error: rx-%d tx-%d rx-%c tx-%c len-%d dma-%d res-(%d)\n", in s3c64xx_spi_transfer_one()
855 xfer->rx_buf ? 1 : 0, xfer->tx_buf ? 1 : 0, in s3c64xx_spi_transfer_one()
856 (sdd->state & RXBUSY) ? 'f' : 'p', in s3c64xx_spi_transfer_one()
857 (sdd->state & TXBUSY) ? 'f' : 'p', in s3c64xx_spi_transfer_one()
858 xfer->len, use_dma ? 1 : 0, status); in s3c64xx_spi_transfer_one()
863 if (xfer->tx_buf && (sdd->state & TXBUSY)) { in s3c64xx_spi_transfer_one()
864 dmaengine_pause(sdd->tx_dma.ch); in s3c64xx_spi_transfer_one()
865 dmaengine_tx_status(sdd->tx_dma.ch, sdd->tx_dma.cookie, &s); in s3c64xx_spi_transfer_one()
866 dmaengine_terminate_all(sdd->tx_dma.ch); in s3c64xx_spi_transfer_one()
867 dev_err(&spi->dev, "TX residue: %d\n", s.residue); in s3c64xx_spi_transfer_one()
870 if (xfer->rx_buf && (sdd->state & RXBUSY)) { in s3c64xx_spi_transfer_one()
871 dmaengine_pause(sdd->rx_dma.ch); in s3c64xx_spi_transfer_one()
872 dmaengine_tx_status(sdd->rx_dma.ch, sdd->rx_dma.cookie, &s); in s3c64xx_spi_transfer_one()
873 dmaengine_terminate_all(sdd->rx_dma.ch); in s3c64xx_spi_transfer_one()
874 dev_err(&spi->dev, "RX residue: %d\n", s.residue); in s3c64xx_spi_transfer_one()
881 target_len -= xfer->len; in s3c64xx_spi_transfer_one()
883 if (xfer->tx_buf) in s3c64xx_spi_transfer_one()
884 xfer->tx_buf += xfer->len; in s3c64xx_spi_transfer_one()
886 if (xfer->rx_buf) in s3c64xx_spi_transfer_one()
887 xfer->rx_buf += xfer->len; in s3c64xx_spi_transfer_one()
890 xfer->len = fifo_len - 1; in s3c64xx_spi_transfer_one()
892 xfer->len = target_len; in s3c64xx_spi_transfer_one()
898 xfer->tx_buf = tx_buf; in s3c64xx_spi_transfer_one()
899 xfer->rx_buf = rx_buf; in s3c64xx_spi_transfer_one()
900 xfer->len = origin_len; in s3c64xx_spi_transfer_one()
907 struct spi_device *spi) in s3c64xx_get_target_ctrldata() argument
913 target_np = spi->dev.of_node; in s3c64xx_get_target_ctrldata()
915 dev_err(&spi->dev, "device node not found\n"); in s3c64xx_get_target_ctrldata()
916 return ERR_PTR(-EINVAL); in s3c64xx_get_target_ctrldata()
921 return ERR_PTR(-ENOMEM); in s3c64xx_get_target_ctrldata()
923 data_np = of_get_child_by_name(target_np, "controller-data"); in s3c64xx_get_target_ctrldata()
925 dev_info(&spi->dev, "feedback delay set to default (0)\n"); in s3c64xx_get_target_ctrldata()
929 of_property_read_u32(data_np, "samsung,spi-feedback-delay", &fb_delay); in s3c64xx_get_target_ctrldata()
930 cs->fb_delay = fb_delay; in s3c64xx_get_target_ctrldata()
937 * and save the configuration in a local data-structure.
941 static int s3c64xx_spi_setup(struct spi_device *spi) in s3c64xx_spi_setup() argument
943 struct s3c64xx_spi_csinfo *cs = spi->controller_data; in s3c64xx_spi_setup()
948 sdd = spi_controller_get_devdata(spi->controller); in s3c64xx_spi_setup()
949 if (spi->dev.of_node) { in s3c64xx_spi_setup()
950 cs = s3c64xx_get_target_ctrldata(spi); in s3c64xx_spi_setup()
951 spi->controller_data = cs; in s3c64xx_spi_setup()
956 dev_err(&spi->dev, "No CS for SPI(%d)\n", spi_get_chipselect(spi, 0)); in s3c64xx_spi_setup()
957 return -ENODEV; in s3c64xx_spi_setup()
960 if (!spi_get_ctldata(spi)) in s3c64xx_spi_setup()
961 spi_set_ctldata(spi, cs); in s3c64xx_spi_setup()
963 pm_runtime_get_sync(&sdd->pdev->dev); in s3c64xx_spi_setup()
965 div = sdd->port_conf->clk_div; in s3c64xx_spi_setup()
968 if (!sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_setup()
972 speed = clk_get_rate(sdd->src_clk) / div / (0 + 1); in s3c64xx_spi_setup()
974 if (spi->max_speed_hz > speed) in s3c64xx_spi_setup()
975 spi->max_speed_hz = speed; in s3c64xx_spi_setup()
977 psr = clk_get_rate(sdd->src_clk) / div / spi->max_speed_hz - 1; in s3c64xx_spi_setup()
980 psr--; in s3c64xx_spi_setup()
982 speed = clk_get_rate(sdd->src_clk) / div / (psr + 1); in s3c64xx_spi_setup()
983 if (spi->max_speed_hz < speed) { in s3c64xx_spi_setup()
987 err = -EINVAL; in s3c64xx_spi_setup()
992 speed = clk_get_rate(sdd->src_clk) / div / (psr + 1); in s3c64xx_spi_setup()
993 if (spi->max_speed_hz >= speed) { in s3c64xx_spi_setup()
994 spi->max_speed_hz = speed; in s3c64xx_spi_setup()
996 dev_err(&spi->dev, "Can't set %dHz transfer speed\n", in s3c64xx_spi_setup()
997 spi->max_speed_hz); in s3c64xx_spi_setup()
998 err = -EINVAL; in s3c64xx_spi_setup()
1003 pm_runtime_mark_last_busy(&sdd->pdev->dev); in s3c64xx_spi_setup()
1004 pm_runtime_put_autosuspend(&sdd->pdev->dev); in s3c64xx_spi_setup()
1005 s3c64xx_spi_set_cs(spi, false); in s3c64xx_spi_setup()
1010 pm_runtime_mark_last_busy(&sdd->pdev->dev); in s3c64xx_spi_setup()
1011 pm_runtime_put_autosuspend(&sdd->pdev->dev); in s3c64xx_spi_setup()
1012 /* setup() returns with device de-selected */ in s3c64xx_spi_setup()
1013 s3c64xx_spi_set_cs(spi, false); in s3c64xx_spi_setup()
1015 spi_set_ctldata(spi, NULL); in s3c64xx_spi_setup()
1018 if (spi->dev.of_node) in s3c64xx_spi_setup()
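
For the non-CMU case, the prescaler and the achieved rate follow the two formulas shown in s3c64xx_spi_setup() above. A worked example with made-up numbers (100 MHz source clock, clk_div = 2, 10 MHz requested), purely for illustration:

/*
 * Hedged worked example of the prescaler math, with invented clock rates:
 *
 *   psr   = 100000000 / 2 / 10000000 - 1 = 4
 *   speed = 100000000 / 2 / (4 + 1)      = 10000000 Hz
 *
 * When the request does not divide evenly, the surrounding checks (partly
 * elided in this listing) adjust psr or fail with "Can't set %dHz".
 */
static unsigned int example_spi_psr(unsigned long src_rate, int div,
				    unsigned int max_speed_hz)
{
	return src_rate / div / max_speed_hz - 1;
}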
1024 static void s3c64xx_spi_cleanup(struct spi_device *spi) in s3c64xx_spi_cleanup() argument
1026 struct s3c64xx_spi_csinfo *cs = spi_get_ctldata(spi); in s3c64xx_spi_cleanup()
1029 if (spi->dev.of_node) in s3c64xx_spi_cleanup()
1032 spi_set_ctldata(spi, NULL); in s3c64xx_spi_cleanup()
1038 struct spi_controller *spi = sdd->host; in s3c64xx_spi_irq() local
1041 val = readl(sdd->regs + S3C64XX_SPI_STATUS); in s3c64xx_spi_irq()
1045 dev_err(&spi->dev, "RX overrun\n"); in s3c64xx_spi_irq()
1049 dev_err(&spi->dev, "RX underrun\n"); in s3c64xx_spi_irq()
1053 dev_err(&spi->dev, "TX overrun\n"); in s3c64xx_spi_irq()
1057 dev_err(&spi->dev, "TX underrun\n"); in s3c64xx_spi_irq()
1061 complete(&sdd->xfer_completion); in s3c64xx_spi_irq()
1062 /* No pending-clear bit for this IRQ; turn off INT_EN_RX_FIFO_RDY instead */ in s3c64xx_spi_irq()
1063 val = readl(sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_irq()
1065 sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_irq()
1069 writel(clr, sdd->regs + S3C64XX_SPI_PENDING_CLR); in s3c64xx_spi_irq()
1070 writel(0, sdd->regs + S3C64XX_SPI_PENDING_CLR); in s3c64xx_spi_irq()
1077 struct s3c64xx_spi_info *sci = sdd->cntrlr_info; in s3c64xx_spi_hwinit()
1078 void __iomem *regs = sdd->regs; in s3c64xx_spi_hwinit()
1081 sdd->cur_speed = 0; in s3c64xx_spi_hwinit()
1083 if (sci->no_cs) in s3c64xx_spi_hwinit()
1084 writel(0, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_hwinit()
1085 else if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) in s3c64xx_spi_hwinit()
1086 writel(S3C64XX_SPI_CS_SIG_INACT, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_hwinit()
1088 /* Disable Interrupts - we use Polling if not DMA mode */ in s3c64xx_spi_hwinit()
1091 if (!sdd->port_conf->clk_from_cmu) in s3c64xx_spi_hwinit()
1092 writel(sci->src_clk_nr << S3C64XX_SPI_CLKSEL_SRCSHFT, in s3c64xx_spi_hwinit()
1124 return ERR_PTR(-ENOMEM); in s3c64xx_spi_parse_dt()
1126 if (of_property_read_u32(dev->of_node, "samsung,spi-src-clk", &temp)) { in s3c64xx_spi_parse_dt()
1127 dev_warn(dev, "spi bus clock parent not specified, using clock at index 0 as parent\n"); in s3c64xx_spi_parse_dt()
1128 sci->src_clk_nr = 0; in s3c64xx_spi_parse_dt()
1130 sci->src_clk_nr = temp; in s3c64xx_spi_parse_dt()
1133 if (of_property_read_u32(dev->of_node, "num-cs", &temp)) { in s3c64xx_spi_parse_dt()
1135 sci->num_cs = 1; in s3c64xx_spi_parse_dt()
1137 sci->num_cs = temp; in s3c64xx_spi_parse_dt()
1140 sci->no_cs = of_property_read_bool(dev->of_node, "no-cs-readback"); in s3c64xx_spi_parse_dt()
1141 sci->polling = !of_property_present(dev->of_node, "dmas"); in s3c64xx_spi_parse_dt()
1156 if (pdev->dev.of_node) in s3c64xx_spi_get_port_config()
1157 return of_device_get_match_data(&pdev->dev); in s3c64xx_spi_get_port_config()
1159 return (const struct s3c64xx_spi_port_config *)platform_get_device_id(pdev)->driver_data; in s3c64xx_spi_get_port_config()
1164 const struct s3c64xx_spi_port_config *port_conf = sdd->port_conf; in s3c64xx_spi_set_fifomask()
1166 if (port_conf->rx_fifomask) in s3c64xx_spi_set_fifomask()
1167 sdd->rx_fifomask = port_conf->rx_fifomask; in s3c64xx_spi_set_fifomask()
1169 sdd->rx_fifomask = FIFO_LVL_MASK(sdd) << in s3c64xx_spi_set_fifomask()
1170 port_conf->rx_lvl_offset; in s3c64xx_spi_set_fifomask()
1172 if (port_conf->tx_fifomask) in s3c64xx_spi_set_fifomask()
1173 sdd->tx_fifomask = port_conf->tx_fifomask; in s3c64xx_spi_set_fifomask()
1175 sdd->tx_fifomask = FIFO_LVL_MASK(sdd) << in s3c64xx_spi_set_fifomask()
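
Given the masks set up above, the TX_FIFO_LVL()/RX_FIFO_LVL() macros near the top of the file recover the fill level by masking the status register and shifting by the mask's lowest set bit. A worked sketch with illustrative numbers, a 0x1ff level mask at bit offset 15 (status bits [23:15]):

/*
 * Hedged sketch only: 0x1ff and 15 are example values, mirroring the
 * fifo_lvl_mask / rx_lvl_offset fallback path shown above.
 */
static u32 example_rx_fifo_lvl(u32 status)
{
	const u32 rx_fifomask = 0x1ff << 15;	/* FIFO_LVL_MASK << rx_lvl_offset */

	return (status & rx_fifomask) >> __ffs(rx_fifomask);
}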
1183 struct s3c64xx_spi_info *sci = dev_get_platdata(&pdev->dev); in s3c64xx_spi_probe()
1188 if (!sci && pdev->dev.of_node) { in s3c64xx_spi_probe()
1189 sci = s3c64xx_spi_parse_dt(&pdev->dev); in s3c64xx_spi_probe()
1195 return dev_err_probe(&pdev->dev, -ENODEV, in s3c64xx_spi_probe()
1202 host = devm_spi_alloc_host(&pdev->dev, sizeof(*sdd)); in s3c64xx_spi_probe()
1204 return dev_err_probe(&pdev->dev, -ENOMEM, in s3c64xx_spi_probe()
1205 "Unable to allocate SPI Host\n"); in s3c64xx_spi_probe()
1210 sdd->port_conf = s3c64xx_spi_get_port_config(pdev); in s3c64xx_spi_probe()
1211 sdd->host = host; in s3c64xx_spi_probe()
1212 sdd->cntrlr_info = sci; in s3c64xx_spi_probe()
1213 sdd->pdev = pdev; in s3c64xx_spi_probe()
1214 if (pdev->dev.of_node) { in s3c64xx_spi_probe()
1215 ret = of_alias_get_id(pdev->dev.of_node, "spi"); in s3c64xx_spi_probe()
1217 return dev_err_probe(&pdev->dev, ret, in s3c64xx_spi_probe()
1219 sdd->port_id = ret; in s3c64xx_spi_probe()
1221 sdd->port_id = pdev->id; in s3c64xx_spi_probe()
1224 sdd->fifo_depth = FIFO_DEPTH(sdd); in s3c64xx_spi_probe()
1228 sdd->cur_bpw = 8; in s3c64xx_spi_probe()
1230 sdd->tx_dma.direction = DMA_MEM_TO_DEV; in s3c64xx_spi_probe()
1231 sdd->rx_dma.direction = DMA_DEV_TO_MEM; in s3c64xx_spi_probe()
1233 host->dev.of_node = pdev->dev.of_node; in s3c64xx_spi_probe()
1234 host->bus_num = sdd->port_id; in s3c64xx_spi_probe()
1235 host->setup = s3c64xx_spi_setup; in s3c64xx_spi_probe()
1236 host->cleanup = s3c64xx_spi_cleanup; in s3c64xx_spi_probe()
1237 host->prepare_transfer_hardware = s3c64xx_spi_prepare_transfer; in s3c64xx_spi_probe()
1238 host->unprepare_transfer_hardware = s3c64xx_spi_unprepare_transfer; in s3c64xx_spi_probe()
1239 host->prepare_message = s3c64xx_spi_prepare_message; in s3c64xx_spi_probe()
1240 host->transfer_one = s3c64xx_spi_transfer_one; in s3c64xx_spi_probe()
1241 host->max_transfer_size = s3c64xx_spi_max_transfer_size; in s3c64xx_spi_probe()
1242 host->num_chipselect = sci->num_cs; in s3c64xx_spi_probe()
1243 host->use_gpio_descriptors = true; in s3c64xx_spi_probe()
1244 host->dma_alignment = 8; in s3c64xx_spi_probe()
1245 host->bits_per_word_mask = SPI_BPW_MASK(32) | SPI_BPW_MASK(16) | in s3c64xx_spi_probe()
1247 /* the spi->mode bits understood by this driver: */ in s3c64xx_spi_probe()
1248 host->mode_bits = SPI_CPOL | SPI_CPHA | SPI_CS_HIGH; in s3c64xx_spi_probe()
1249 if (sdd->port_conf->has_loopback) in s3c64xx_spi_probe()
1250 host->mode_bits |= SPI_LOOP; in s3c64xx_spi_probe()
1251 host->auto_runtime_pm = true; in s3c64xx_spi_probe()
1253 host->can_dma = s3c64xx_spi_can_dma; in s3c64xx_spi_probe()
1255 sdd->regs = devm_platform_get_and_ioremap_resource(pdev, 0, &mem_res); in s3c64xx_spi_probe()
1256 if (IS_ERR(sdd->regs)) in s3c64xx_spi_probe()
1257 return PTR_ERR(sdd->regs); in s3c64xx_spi_probe()
1258 sdd->sfr_start = mem_res->start; in s3c64xx_spi_probe()
1260 if (sci->cfg_gpio && sci->cfg_gpio()) in s3c64xx_spi_probe()
1261 return dev_err_probe(&pdev->dev, -EBUSY, in s3c64xx_spi_probe()
1265 sdd->clk = devm_clk_get_enabled(&pdev->dev, "spi"); in s3c64xx_spi_probe()
1266 if (IS_ERR(sdd->clk)) in s3c64xx_spi_probe()
1267 return dev_err_probe(&pdev->dev, PTR_ERR(sdd->clk), in s3c64xx_spi_probe()
1268 "Unable to acquire clock 'spi'\n"); in s3c64xx_spi_probe()
1270 sprintf(clk_name, "spi_busclk%d", sci->src_clk_nr); in s3c64xx_spi_probe()
1271 sdd->src_clk = devm_clk_get_enabled(&pdev->dev, clk_name); in s3c64xx_spi_probe()
1272 if (IS_ERR(sdd->src_clk)) in s3c64xx_spi_probe()
1273 return dev_err_probe(&pdev->dev, PTR_ERR(sdd->src_clk), in s3c64xx_spi_probe()
1277 if (sdd->port_conf->clk_ioclk) { in s3c64xx_spi_probe()
1278 sdd->ioclk = devm_clk_get_enabled(&pdev->dev, "spi_ioclk"); in s3c64xx_spi_probe()
1279 if (IS_ERR(sdd->ioclk)) in s3c64xx_spi_probe()
1280 return dev_err_probe(&pdev->dev, PTR_ERR(sdd->ioclk), in s3c64xx_spi_probe()
1284 pm_runtime_set_autosuspend_delay(&pdev->dev, AUTOSUSPEND_TIMEOUT); in s3c64xx_spi_probe()
1285 pm_runtime_use_autosuspend(&pdev->dev); in s3c64xx_spi_probe()
1286 pm_runtime_set_active(&pdev->dev); in s3c64xx_spi_probe()
1287 pm_runtime_enable(&pdev->dev); in s3c64xx_spi_probe()
1288 pm_runtime_get_sync(&pdev->dev); in s3c64xx_spi_probe()
1293 spin_lock_init(&sdd->lock); in s3c64xx_spi_probe()
1294 init_completion(&sdd->xfer_completion); in s3c64xx_spi_probe()
1296 ret = devm_request_irq(&pdev->dev, irq, s3c64xx_spi_irq, 0, in s3c64xx_spi_probe()
1297 "spi-s3c64xx", sdd); in s3c64xx_spi_probe()
1299 dev_err(&pdev->dev, "Failed to request IRQ %d: %d\n", in s3c64xx_spi_probe()
1306 sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_probe()
1308 ret = devm_spi_register_controller(&pdev->dev, host); in s3c64xx_spi_probe()
1310 dev_err(&pdev->dev, "cannot register SPI host: %d\n", ret); in s3c64xx_spi_probe()
1314 dev_dbg(&pdev->dev, "Samsung SoC SPI Driver loaded for Bus SPI-%d with %d Targets attached\n", in s3c64xx_spi_probe()
1315 sdd->port_id, host->num_chipselect); in s3c64xx_spi_probe()
1316 dev_dbg(&pdev->dev, "\tIOmem=[%pR]\tFIFO %dbytes\n", in s3c64xx_spi_probe()
1317 mem_res, sdd->fifo_depth); in s3c64xx_spi_probe()
1319 pm_runtime_mark_last_busy(&pdev->dev); in s3c64xx_spi_probe()
1320 pm_runtime_put_autosuspend(&pdev->dev); in s3c64xx_spi_probe()
1325 pm_runtime_put_noidle(&pdev->dev); in s3c64xx_spi_probe()
1326 pm_runtime_disable(&pdev->dev); in s3c64xx_spi_probe()
1327 pm_runtime_set_suspended(&pdev->dev); in s3c64xx_spi_probe()
1337 pm_runtime_get_sync(&pdev->dev); in s3c64xx_spi_remove()
1339 writel(0, sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_remove()
1342 dma_release_channel(sdd->rx_dma.ch); in s3c64xx_spi_remove()
1343 dma_release_channel(sdd->tx_dma.ch); in s3c64xx_spi_remove()
1346 pm_runtime_put_noidle(&pdev->dev); in s3c64xx_spi_remove()
1347 pm_runtime_disable(&pdev->dev); in s3c64xx_spi_remove()
1348 pm_runtime_set_suspended(&pdev->dev); in s3c64xx_spi_remove()
1365 sdd->cur_speed = 0; /* Output Clock is stopped */ in s3c64xx_spi_suspend()
1374 struct s3c64xx_spi_info *sci = sdd->cntrlr_info; in s3c64xx_spi_resume()
1377 if (sci->cfg_gpio) in s3c64xx_spi_resume()
1378 sci->cfg_gpio(); in s3c64xx_spi_resume()
1394 clk_disable_unprepare(sdd->clk); in s3c64xx_spi_runtime_suspend()
1395 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_runtime_suspend()
1396 clk_disable_unprepare(sdd->ioclk); in s3c64xx_spi_runtime_suspend()
1407 if (sdd->port_conf->clk_ioclk) { in s3c64xx_spi_runtime_resume()
1408 ret = clk_prepare_enable(sdd->ioclk); in s3c64xx_spi_runtime_resume()
1413 ret = clk_prepare_enable(sdd->src_clk); in s3c64xx_spi_runtime_resume()
1417 ret = clk_prepare_enable(sdd->clk); in s3c64xx_spi_runtime_resume()
1425 sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_runtime_resume()
1430 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_runtime_resume()
1432 clk_disable_unprepare(sdd->ioclk); in s3c64xx_spi_runtime_resume()
1524 .name = "s3c2443-spi",
1527 .name = "s3c6410-spi",
1534 { .compatible = "samsung,s3c2443-spi",
1537 { .compatible = "samsung,s3c6410-spi",
1540 { .compatible = "samsung,s5pv210-spi",
1543 { .compatible = "samsung,exynos4210-spi",
1546 { .compatible = "samsung,exynos7-spi",
1549 { .compatible = "samsung,exynos5433-spi",
1552 { .compatible = "samsung,exynosautov9-spi",
1555 { .compatible = "tesla,fsd-spi",
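
Each compatible in this match table carries a .data pointer to its per-SoC port configuration, which s3c64xx_spi_get_port_config() above retrieves via of_device_get_match_data(). The .data lines are elided from this listing; an illustrative entry, reusing the hedged config sketched earlier:

/*
 * Illustrative only: shows the .compatible/.data pairing; the real table's
 * config symbols are not visible in this listing.
 */
static const struct of_device_id example_spi_dt_match[] = {
	{ .compatible = "samsung,exynos5433-spi",
	  .data = &example_spi_port_config },
	{ },
};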
1564 .name = "s3c64xx-spi",
1572 MODULE_ALIAS("platform:s3c64xx-spi");
1577 MODULE_DESCRIPTION("S3C64XX SPI Controller Driver");