// SPDX-License-Identifier: GPL-2.0
//
// STMicroelectronics STM32 SPI Controller driver
//
// Copyright (C) 2017, STMicroelectronics - All Rights Reserved
// Author(s): Amelie Delaunay <amelie.delaunay@st.com> for STMicroelectronics.

#include <linux/bitfield.h>
#include <linux/debugfs.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/interrupt.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_platform.h>
#include <linux/pinctrl/consumer.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <linux/spi/spi.h>

#define DRIVER_NAME "spi_stm32"

/* STM32F4 SPI registers */
#define STM32F4_SPI_CR1		0x00
#define STM32F4_SPI_CR2		0x04
#define STM32F4_SPI_SR		0x08
#define STM32F4_SPI_DR		0x0C
#define STM32F4_SPI_I2SCFGR	0x1C

/* STM32F4_SPI_CR1 bit fields */
#define STM32F4_SPI_CR1_CPHA		BIT(0)
#define STM32F4_SPI_CR1_CPOL		BIT(1)
#define STM32F4_SPI_CR1_MSTR		BIT(2)
#define STM32F4_SPI_CR1_BR_SHIFT	3
#define STM32F4_SPI_CR1_BR		GENMASK(5, 3)
#define STM32F4_SPI_CR1_SPE		BIT(6)
#define STM32F4_SPI_CR1_LSBFRST		BIT(7)
#define STM32F4_SPI_CR1_SSI		BIT(8)
#define STM32F4_SPI_CR1_SSM		BIT(9)
#define STM32F4_SPI_CR1_RXONLY		BIT(10)
#define STM32F4_SPI_CR1_DFF		BIT(11)
#define STM32F4_SPI_CR1_CRCNEXT		BIT(12)
#define STM32F4_SPI_CR1_CRCEN		BIT(13)
#define STM32F4_SPI_CR1_BIDIOE		BIT(14)
#define STM32F4_SPI_CR1_BIDIMODE	BIT(15)
#define STM32F4_SPI_CR1_BR_MIN		0
#define STM32F4_SPI_CR1_BR_MAX		(GENMASK(5, 3) >> 3)

/* STM32F4_SPI_CR2 bit fields */
#define STM32F4_SPI_CR2_RXDMAEN		BIT(0)
#define STM32F4_SPI_CR2_TXDMAEN		BIT(1)
#define STM32F4_SPI_CR2_SSOE		BIT(2)
#define STM32F4_SPI_CR2_FRF		BIT(4)
#define STM32F4_SPI_CR2_ERRIE		BIT(5)
#define STM32F4_SPI_CR2_RXNEIE		BIT(6)
#define STM32F4_SPI_CR2_TXEIE		BIT(7)

/* STM32F4_SPI_SR bit fields */
#define STM32F4_SPI_SR_RXNE		BIT(0)
#define STM32F4_SPI_SR_TXE		BIT(1)
#define STM32F4_SPI_SR_CHSIDE		BIT(2)
#define STM32F4_SPI_SR_UDR		BIT(3)
#define STM32F4_SPI_SR_CRCERR		BIT(4)
#define STM32F4_SPI_SR_MODF		BIT(5)
#define STM32F4_SPI_SR_OVR		BIT(6)
#define STM32F4_SPI_SR_BSY		BIT(7)
#define STM32F4_SPI_SR_FRE		BIT(8)

/* STM32F4_SPI_I2SCFGR bit fields */
#define STM32F4_SPI_I2SCFGR_I2SMOD	BIT(11)

/* STM32F4 SPI Baud Rate min/max divisor */
#define STM32F4_SPI_BR_DIV_MIN		(2 << STM32F4_SPI_CR1_BR_MIN)
#define STM32F4_SPI_BR_DIV_MAX		(2 << STM32F4_SPI_CR1_BR_MAX)

/* STM32H7 SPI registers */
#define STM32H7_SPI_CR1		0x00
#define STM32H7_SPI_CR2		0x04
#define STM32H7_SPI_CFG1	0x08
#define STM32H7_SPI_CFG2	0x0C
#define STM32H7_SPI_IER		0x10
#define STM32H7_SPI_SR		0x14
#define STM32H7_SPI_IFCR	0x18
#define STM32H7_SPI_TXDR	0x20
#define STM32H7_SPI_RXDR	0x30
#define STM32H7_SPI_I2SCFGR	0x50

/* STM32H7_SPI_CR1 bit fields */
#define STM32H7_SPI_CR1_SPE		BIT(0)
#define STM32H7_SPI_CR1_MASRX		BIT(8)
#define STM32H7_SPI_CR1_CSTART		BIT(9)
#define STM32H7_SPI_CR1_CSUSP		BIT(10)
#define STM32H7_SPI_CR1_HDDIR		BIT(11)
#define STM32H7_SPI_CR1_SSI		BIT(12)

/* STM32H7_SPI_CR2 bit fields */
#define STM32H7_SPI_CR2_TSIZE		GENMASK(15, 0)
#define STM32H7_SPI_TSIZE_MAX		GENMASK(15, 0)

/* STM32H7_SPI_CFG1 bit fields */
#define STM32H7_SPI_CFG1_DSIZE		GENMASK(4, 0)
#define STM32H7_SPI_CFG1_FTHLV		GENMASK(8, 5)
#define STM32H7_SPI_CFG1_RXDMAEN	BIT(14)
#define STM32H7_SPI_CFG1_TXDMAEN	BIT(15)
#define STM32H7_SPI_CFG1_MBR		GENMASK(30, 28)
#define STM32H7_SPI_CFG1_MBR_SHIFT	28
#define STM32H7_SPI_CFG1_MBR_MIN	0
#define STM32H7_SPI_CFG1_MBR_MAX	(GENMASK(30, 28) >> 28)

/* STM32H7_SPI_CFG2 bit fields */
#define STM32H7_SPI_CFG2_MIDI		GENMASK(7, 4)
#define STM32H7_SPI_CFG2_COMM		GENMASK(18, 17)
#define STM32H7_SPI_CFG2_SP		GENMASK(21, 19)
#define STM32H7_SPI_CFG2_MASTER		BIT(22)
#define STM32H7_SPI_CFG2_LSBFRST	BIT(23)
#define STM32H7_SPI_CFG2_CPHA		BIT(24)
#define STM32H7_SPI_CFG2_CPOL		BIT(25)
#define STM32H7_SPI_CFG2_SSM		BIT(26)
#define STM32H7_SPI_CFG2_SSIOP		BIT(28)
#define STM32H7_SPI_CFG2_AFCNTR		BIT(31)

/* STM32H7_SPI_IER bit fields */
#define STM32H7_SPI_IER_RXPIE		BIT(0)
#define STM32H7_SPI_IER_TXPIE		BIT(1)
#define STM32H7_SPI_IER_DXPIE		BIT(2)
#define STM32H7_SPI_IER_EOTIE		BIT(3)
#define STM32H7_SPI_IER_TXTFIE		BIT(4)
#define STM32H7_SPI_IER_OVRIE		BIT(6)
#define STM32H7_SPI_IER_MODFIE		BIT(9)
#define STM32H7_SPI_IER_ALL		GENMASK(10, 0)

/* STM32H7_SPI_SR bit fields */
#define STM32H7_SPI_SR_RXP		BIT(0)
#define STM32H7_SPI_SR_TXP		BIT(1)
#define STM32H7_SPI_SR_EOT		BIT(3)
#define STM32H7_SPI_SR_OVR		BIT(6)
#define STM32H7_SPI_SR_MODF		BIT(9)
#define STM32H7_SPI_SR_SUSP		BIT(11)
#define STM32H7_SPI_SR_RXPLVL		GENMASK(14, 13)
#define STM32H7_SPI_SR_RXWNE		BIT(15)

/* STM32H7_SPI_IFCR bit fields */
#define STM32H7_SPI_IFCR_ALL		GENMASK(11, 3)

/* STM32H7_SPI_I2SCFGR bit fields */
#define STM32H7_SPI_I2SCFGR_I2SMOD	BIT(0)

/* STM32H7 SPI Master Baud Rate min/max divisor */
#define STM32H7_SPI_MBR_DIV_MIN		(2 << STM32H7_SPI_CFG1_MBR_MIN)
#define STM32H7_SPI_MBR_DIV_MAX		(2 << STM32H7_SPI_CFG1_MBR_MAX)

/* STM32H7 SPI Communication mode */
#define STM32H7_SPI_FULL_DUPLEX		0
#define STM32H7_SPI_SIMPLEX_TX		1
#define STM32H7_SPI_SIMPLEX_RX		2
#define STM32H7_SPI_HALF_DUPLEX		3

/* SPI Communication type */
#define SPI_FULL_DUPLEX		0
#define SPI_SIMPLEX_TX		1
#define SPI_SIMPLEX_RX		2
#define SPI_3WIRE_TX		3
#define SPI_3WIRE_RX		4

#define STM32_SPI_AUTOSUSPEND_DELAY	1	/* 1 ms */

/*
 * use PIO for small transfers, avoiding DMA setup/teardown overhead for drivers
 * without fifo buffers.
 */
#define SPI_DMA_MIN_BYTES	16

/* STM32 SPI driver helpers */
#define STM32_SPI_MASTER_MODE(stm32_spi) (!(stm32_spi)->device_mode)
#define STM32_SPI_DEVICE_MODE(stm32_spi) ((stm32_spi)->device_mode)
/**
 * struct stm32_spi_reg - stm32 SPI register & bitfield desc
 * @reg: register offset
 * @mask: bitfield mask
 * @shift: left shift
 */
struct stm32_spi_reg {
	int reg;
	int mask;
	int shift;
};

/**
 * struct stm32_spi_regspec - stm32 registers definition, compatible dependent data
 * @en: enable register and SPI enable bit
 * @dma_rx_en: SPI DMA RX enable register and SPI DMA RX enable bit
 * @dma_tx_en: SPI DMA TX enable register and SPI DMA TX enable bit
 * @cpol: clock polarity register and polarity bit
 * @cpha: clock phase register and phase bit
 * @lsb_first: LSB transmitted first register and bit
 * @cs_high: chip select active value
 * @br: baud rate register and bitfields
 * @rx: SPI RX data register
 * @tx: SPI TX data register
 */
struct stm32_spi_regspec {
	const struct stm32_spi_reg en;
	const struct stm32_spi_reg dma_rx_en;
	const struct stm32_spi_reg dma_tx_en;
	const struct stm32_spi_reg cpol;
	const struct stm32_spi_reg cpha;
	const struct stm32_spi_reg lsb_first;
	const struct stm32_spi_reg cs_high;
	const struct stm32_spi_reg br;
	const struct stm32_spi_reg rx;
	const struct stm32_spi_reg tx;
};

struct stm32_spi;

/**
 * struct stm32_spi_cfg - stm32 compatible configuration data
 * @regs: registers descriptions
 * @get_fifo_size: routine to get fifo size
 * @get_bpw_mask: routine to get bits per word mask
 * @disable: routine to disable controller
 * @config: routine to configure controller as SPI Master
 * @set_bpw: routine to configure registers for bits per word
 * @set_mode: routine to configure registers to desired mode
 * @set_data_idleness: optional routine to configure registers to desired idle
 *		       time between frames (if driver has this functionality)
 * @set_number_of_data: optional routine to configure registers to desired
 *			number of data (if driver has this functionality)
 * @transfer_one_dma_start: routine to start transfer of a single spi_transfer
 *			    using DMA
 * @dma_rx_cb: routine to call after DMA RX channel operation is complete
 * @dma_tx_cb: routine to call after DMA TX channel operation is complete
 * @transfer_one_irq: routine to configure interrupts for driver
 * @irq_handler_event: Interrupt handler for SPI controller events
 * @irq_handler_thread: thread of interrupt handler for SPI controller
 * @baud_rate_div_min: minimum baud rate divisor
 * @baud_rate_div_max: maximum baud rate divisor
 * @has_fifo: boolean to know if fifo is used for driver
 * @has_device_mode: whether this compatible can be configured in device mode
 * @flags: compatible specific SPI controller flags used at registration time
 */
struct stm32_spi_cfg {
	const struct stm32_spi_regspec *regs;
	int (*get_fifo_size)(struct stm32_spi *spi);
	int (*get_bpw_mask)(struct stm32_spi *spi);
	void (*disable)(struct stm32_spi *spi);
	int (*config)(struct stm32_spi *spi);
	void (*set_bpw)(struct stm32_spi *spi);
	int (*set_mode)(struct stm32_spi *spi, unsigned int comm_type);
	void (*set_data_idleness)(struct stm32_spi *spi, u32 length);
	int (*set_number_of_data)(struct stm32_spi *spi, u32 length);
	void (*transfer_one_dma_start)(struct stm32_spi *spi);
	void (*dma_rx_cb)(void *data);
	void (*dma_tx_cb)(void *data);
	int (*transfer_one_irq)(struct stm32_spi *spi);
	irqreturn_t (*irq_handler_event)(int irq, void *dev_id);
	irqreturn_t (*irq_handler_thread)(int irq, void *dev_id);
	unsigned int baud_rate_div_min;
	unsigned int baud_rate_div_max;
	bool has_fifo;
	bool has_device_mode;
	u16 flags;
};
/**
 * struct stm32_spi - private data of the SPI controller
 * @dev: driver model representation of the controller
 * @ctrl: controller interface
 * @cfg: compatible configuration data
 * @base: virtual memory area
 * @clk: hw kernel clock feeding the SPI clock generator
 * @clk_rate: rate of the hw kernel clock feeding the SPI clock generator
 * @lock: prevent I/O concurrent access
 * @irq: SPI controller interrupt line
 * @fifo_size: size of the embedded fifo in bytes
 * @cur_midi: master inter-data idleness in ns
 * @cur_speed: speed configured in Hz
 * @cur_half_period: time of a half bit in us
 * @cur_bpw: number of bits in a single SPI data frame
 * @cur_fthlv: fifo threshold level (data frames in a single data packet)
 * @cur_comm: SPI communication mode
 * @cur_xferlen: current transfer length in bytes
 * @cur_usedma: boolean to know if dma is used in current transfer
 * @tx_buf: data to be written, or NULL
 * @rx_buf: data to be read, or NULL
 * @tx_len: number of data to be written in bytes
 * @rx_len: number of data to be read in bytes
 * @dma_tx: dma channel for TX transfer
 * @dma_rx: dma channel for RX transfer
 * @phys_addr: SPI registers physical base address
 * @device_mode: the controller is configured as SPI device
 */
struct stm32_spi {
	struct device *dev;
	struct spi_controller *ctrl;
	const struct stm32_spi_cfg *cfg;
	void __iomem *base;
	struct clk *clk;
	u32 clk_rate;
	spinlock_t lock; /* prevent I/O concurrent access */
	int irq;
	unsigned int fifo_size;

	unsigned int cur_midi;
	unsigned int cur_speed;
	unsigned int cur_half_period;
	unsigned int cur_bpw;
	unsigned int cur_fthlv;
	unsigned int cur_comm;
	unsigned int cur_xferlen;
	bool cur_usedma;

	const void *tx_buf;
	void *rx_buf;
	int tx_len;
	int rx_len;
	struct dma_chan *dma_tx;
	struct dma_chan *dma_rx;
	dma_addr_t phys_addr;

	bool device_mode;
};

static const struct stm32_spi_regspec stm32f4_spi_regspec = {
	.en = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_SPE },

	.dma_rx_en = { STM32F4_SPI_CR2, STM32F4_SPI_CR2_RXDMAEN },
	.dma_tx_en = { STM32F4_SPI_CR2, STM32F4_SPI_CR2_TXDMAEN },

	.cpol = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_CPOL },
	.cpha = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_CPHA },
	.lsb_first = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_LSBFRST },
	.cs_high = {},
	.br = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_BR, STM32F4_SPI_CR1_BR_SHIFT },

	.rx = { STM32F4_SPI_DR },
	.tx = { STM32F4_SPI_DR },
};

static const struct stm32_spi_regspec stm32h7_spi_regspec = {
	/* SPI data transfer is enabled but spi_ker_ck is idle.
	 * CFG1 and CFG2 registers are write protected when SPE is enabled.
	 */
	.en = { STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE },

	.dma_rx_en = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_RXDMAEN },
	.dma_tx_en = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_TXDMAEN },

	.cpol = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_CPOL },
	.cpha = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_CPHA },
	.lsb_first = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_LSBFRST },
	.cs_high = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_SSIOP },
	.br = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_MBR,
		STM32H7_SPI_CFG1_MBR_SHIFT },

	.rx = { STM32H7_SPI_RXDR },
	.tx = { STM32H7_SPI_TXDR },
};

static inline void stm32_spi_set_bits(struct stm32_spi *spi,
				      u32 offset, u32 bits)
{
	writel_relaxed(readl_relaxed(spi->base + offset) | bits,
		       spi->base + offset);
}

static inline void stm32_spi_clr_bits(struct stm32_spi *spi,
				      u32 offset, u32 bits)
{
	writel_relaxed(readl_relaxed(spi->base + offset) & ~bits,
		       spi->base + offset);
}

/**
 * stm32h7_spi_get_fifo_size - Return fifo size
 * @spi: pointer to the spi controller data structure
 */
static int stm32h7_spi_get_fifo_size(struct stm32_spi *spi)
{
	unsigned long flags;
	u32 count = 0;

	spin_lock_irqsave(&spi->lock, flags);

	stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE);

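	/*
	 * TXP stays set as long as there is room in the TX FIFO, so writing
	 * one byte at a time until TXP clears counts the FIFO depth in bytes.
	 */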
	while (readl_relaxed(spi->base + STM32H7_SPI_SR) & STM32H7_SPI_SR_TXP)
		writeb_relaxed(++count, spi->base + STM32H7_SPI_TXDR);

	stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE);

	spin_unlock_irqrestore(&spi->lock, flags);

	dev_dbg(spi->dev, "%d x 8-bit fifo size\n", count);

	return count;
}

/**
 * stm32f4_spi_get_bpw_mask - Return bits per word mask
 * @spi: pointer to the spi controller data structure
 */
static int stm32f4_spi_get_bpw_mask(struct stm32_spi *spi)
{
	dev_dbg(spi->dev, "8-bit or 16-bit data frame supported\n");
	return SPI_BPW_MASK(8) | SPI_BPW_MASK(16);
}

/**
 * stm32h7_spi_get_bpw_mask - Return bits per word mask
 * @spi: pointer to the spi controller data structure
 */
static int stm32h7_spi_get_bpw_mask(struct stm32_spi *spi)
{
	unsigned long flags;
	u32 cfg1, max_bpw;

	spin_lock_irqsave(&spi->lock, flags);

	/*
	 * The most significant bit of the DSIZE bit field is reserved when the
	 * maximum data size of peripheral instances is limited to 16-bit
	 */
	stm32_spi_set_bits(spi, STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_DSIZE);

	cfg1 = readl_relaxed(spi->base + STM32H7_SPI_CFG1);
	max_bpw = FIELD_GET(STM32H7_SPI_CFG1_DSIZE, cfg1) + 1;

	spin_unlock_irqrestore(&spi->lock, flags);

	dev_dbg(spi->dev, "%d-bit maximum data frame\n", max_bpw);

	return SPI_BPW_RANGE_MASK(4, max_bpw);
}

/**
 * stm32_spi_prepare_mbr - Determine baud rate divisor value
 * @spi: pointer to the spi controller data structure
 * @speed_hz: requested speed
 * @min_div: minimum baud rate divisor
 * @max_div: maximum baud rate divisor
 *
 * Return baud rate divisor value in case of success or -EINVAL
 */
static int stm32_spi_prepare_mbr(struct stm32_spi *spi, u32 speed_hz,
				 u32 min_div, u32 max_div)
{
	u32 div, mbrdiv;

	/* Ensure spi->clk_rate is even */
	div = DIV_ROUND_CLOSEST(spi->clk_rate & ~0x1, speed_hz);

	/*
	 * The SPI framework sets xfer->speed_hz to ctrl->max_speed_hz if
	 * xfer->speed_hz is greater than ctrl->max_speed_hz, and it returns
	 * an error when xfer->speed_hz is lower than ctrl->min_speed_hz, so
	 * there is no need to check that here. However, the resulting divisor
	 * still has to be checked against the range supported by the hardware.
	 */
	if ((div < min_div) || (div > max_div))
		return -EINVAL;

	/* Determine the first power of 2 greater than or equal to div */
	if (div & (div - 1))
		mbrdiv = fls(div);
	else
		mbrdiv = fls(div) - 1;

	spi->cur_speed = spi->clk_rate / (1 << mbrdiv);

	spi->cur_half_period = DIV_ROUND_CLOSEST(USEC_PER_SEC, 2 * spi->cur_speed);

	return mbrdiv - 1;
}

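/*
 * Illustrative example of the computation above: with clk_rate = 100 MHz and
 * speed_hz = 9 MHz, div = 11, which is not a power of two, so mbrdiv =
 * fls(11) = 4. The effective speed becomes 100 MHz / 2^4 = 6.25 MHz, and the
 * value returned (and later programmed through stm32_spi_set_mbr()) is
 * mbrdiv - 1 = 3.
 */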
/**
 * stm32h7_spi_prepare_fthlv - Determine FIFO threshold level
 * @spi: pointer to the spi controller data structure
 * @xfer_len: length of the message to be transferred
 */
static u32 stm32h7_spi_prepare_fthlv(struct stm32_spi *spi, u32 xfer_len)
{
	u32 packet, bpw;

	/* data packet should not exceed 1/2 of fifo space */
	packet = clamp(xfer_len, 1U, spi->fifo_size / 2);

	/* align packet size with data registers access */
	bpw = DIV_ROUND_UP(spi->cur_bpw, 8);
	return DIV_ROUND_UP(packet, bpw);
}

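/*
 * Illustrative example: with a 16-byte FIFO and cur_bpw = 16 (bpw = 2 bytes
 * per frame), a 64-byte transfer gives packet = clamp(64, 1, 8) = 8 bytes,
 * hence a threshold of 8 / 2 = 4 data frames per packet.
 */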
/**
 * stm32f4_spi_write_tx - Write bytes to Transmit Data Register
 * @spi: pointer to the spi controller data structure
 *
 * The amount read from tx_buf depends on the remaining bytes, so that the
 * read never goes beyond the end of tx_buf.
 */
static void stm32f4_spi_write_tx(struct stm32_spi *spi)
{
	if ((spi->tx_len > 0) && (readl_relaxed(spi->base + STM32F4_SPI_SR) &
				  STM32F4_SPI_SR_TXE)) {
		u32 offs = spi->cur_xferlen - spi->tx_len;

		if (spi->cur_bpw == 16) {
			const u16 *tx_buf16 = (const u16 *)(spi->tx_buf + offs);

			writew_relaxed(*tx_buf16, spi->base + STM32F4_SPI_DR);
			spi->tx_len -= sizeof(u16);
		} else {
			const u8 *tx_buf8 = (const u8 *)(spi->tx_buf + offs);

			writeb_relaxed(*tx_buf8, spi->base + STM32F4_SPI_DR);
			spi->tx_len -= sizeof(u8);
		}
	}

	dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->tx_len);
}

/**
 * stm32h7_spi_write_txfifo - Write bytes in Transmit Data Register
 * @spi: pointer to the spi controller data structure
 *
 * The amount read from tx_buf depends on the remaining bytes, so that the
 * read never goes beyond the end of tx_buf.
 */
static void stm32h7_spi_write_txfifo(struct stm32_spi *spi)
{
	while ((spi->tx_len > 0) &&
	       (readl_relaxed(spi->base + STM32H7_SPI_SR) &
		STM32H7_SPI_SR_TXP)) {
		u32 offs = spi->cur_xferlen - spi->tx_len;

		if (spi->tx_len >= sizeof(u32)) {
			const u32 *tx_buf32 = (const u32 *)(spi->tx_buf + offs);

			writel_relaxed(*tx_buf32, spi->base + STM32H7_SPI_TXDR);
			spi->tx_len -= sizeof(u32);
		} else if (spi->tx_len >= sizeof(u16)) {
			const u16 *tx_buf16 = (const u16 *)(spi->tx_buf + offs);

			writew_relaxed(*tx_buf16, spi->base + STM32H7_SPI_TXDR);
			spi->tx_len -= sizeof(u16);
		} else {
			const u8 *tx_buf8 = (const u8 *)(spi->tx_buf + offs);

			writeb_relaxed(*tx_buf8, spi->base + STM32H7_SPI_TXDR);
			spi->tx_len -= sizeof(u8);
		}
	}

	dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->tx_len);
}

/**
 * stm32f4_spi_read_rx - Read bytes from Receive Data Register
 * @spi: pointer to the spi controller data structure
 *
 * The amount written to rx_buf depends on the remaining bytes, so that the
 * write never goes beyond the end of rx_buf.
 */
static void stm32f4_spi_read_rx(struct stm32_spi *spi)
{
	if ((spi->rx_len > 0) && (readl_relaxed(spi->base + STM32F4_SPI_SR) &
				  STM32F4_SPI_SR_RXNE)) {
		u32 offs = spi->cur_xferlen - spi->rx_len;

		if (spi->cur_bpw == 16) {
			u16 *rx_buf16 = (u16 *)(spi->rx_buf + offs);

			*rx_buf16 = readw_relaxed(spi->base + STM32F4_SPI_DR);
			spi->rx_len -= sizeof(u16);
		} else {
			u8 *rx_buf8 = (u8 *)(spi->rx_buf + offs);

			*rx_buf8 = readb_relaxed(spi->base + STM32F4_SPI_DR);
			spi->rx_len -= sizeof(u8);
		}
	}

	dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->rx_len);
}

/**
 * stm32h7_spi_read_rxfifo - Read bytes in Receive Data Register
 * @spi: pointer to the spi controller data structure
 *
 * The amount written to rx_buf depends on the remaining bytes, so that the
 * write never goes beyond the end of rx_buf.
 */
static void stm32h7_spi_read_rxfifo(struct stm32_spi *spi)
{
	u32 sr = readl_relaxed(spi->base + STM32H7_SPI_SR);
	u32 rxplvl = FIELD_GET(STM32H7_SPI_SR_RXPLVL, sr);

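	/*
	 * Keep draining the RX FIFO while data remains: either RXP is set
	 * (at least one complete data packet is available) or, once EOT is
	 * set, RXWNE or a non-zero RXPLVL indicates a residue of less than
	 * one packet still left in the FIFO.
	 */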
	while ((spi->rx_len > 0) &&
	       ((sr & STM32H7_SPI_SR_RXP) ||
		((sr & STM32H7_SPI_SR_EOT) &&
		 ((sr & STM32H7_SPI_SR_RXWNE) || (rxplvl > 0))))) {
		u32 offs = spi->cur_xferlen - spi->rx_len;

		if ((spi->rx_len >= sizeof(u32)) ||
		    (sr & STM32H7_SPI_SR_RXWNE)) {
			u32 *rx_buf32 = (u32 *)(spi->rx_buf + offs);

			*rx_buf32 = readl_relaxed(spi->base + STM32H7_SPI_RXDR);
			spi->rx_len -= sizeof(u32);
		} else if ((spi->rx_len >= sizeof(u16)) ||
			   (!(sr & STM32H7_SPI_SR_RXWNE) &&
			    (rxplvl >= 2 || spi->cur_bpw > 8))) {
			u16 *rx_buf16 = (u16 *)(spi->rx_buf + offs);

			*rx_buf16 = readw_relaxed(spi->base + STM32H7_SPI_RXDR);
			spi->rx_len -= sizeof(u16);
		} else {
			u8 *rx_buf8 = (u8 *)(spi->rx_buf + offs);

			*rx_buf8 = readb_relaxed(spi->base + STM32H7_SPI_RXDR);
			spi->rx_len -= sizeof(u8);
		}

		sr = readl_relaxed(spi->base + STM32H7_SPI_SR);
		rxplvl = FIELD_GET(STM32H7_SPI_SR_RXPLVL, sr);
	}

	dev_dbg(spi->dev, "%s: %d bytes left (sr=%08x)\n",
		__func__, spi->rx_len, sr);
}

/**
 * stm32_spi_enable - Enable SPI controller
 * @spi: pointer to the spi controller data structure
 */
static void stm32_spi_enable(struct stm32_spi *spi)
{
	dev_dbg(spi->dev, "enable controller\n");

	stm32_spi_set_bits(spi, spi->cfg->regs->en.reg,
			   spi->cfg->regs->en.mask);
}

/**
 * stm32f4_spi_disable - Disable SPI controller
 * @spi: pointer to the spi controller data structure
 */
static void stm32f4_spi_disable(struct stm32_spi *spi)
{
	unsigned long flags;
	u32 sr;

	dev_dbg(spi->dev, "disable controller\n");

	spin_lock_irqsave(&spi->lock, flags);

	if (!(readl_relaxed(spi->base + STM32F4_SPI_CR1) &
	      STM32F4_SPI_CR1_SPE)) {
		spin_unlock_irqrestore(&spi->lock, flags);
		return;
	}

	/* Disable interrupts */
	stm32_spi_clr_bits(spi, STM32F4_SPI_CR2, STM32F4_SPI_CR2_TXEIE |
						 STM32F4_SPI_CR2_RXNEIE |
						 STM32F4_SPI_CR2_ERRIE);

	/* Wait until BSY = 0 */
	if (readl_relaxed_poll_timeout_atomic(spi->base + STM32F4_SPI_SR,
					      sr, !(sr & STM32F4_SPI_SR_BSY),
					      10, 100000) < 0) {
		dev_warn(spi->dev, "disabling condition timeout\n");
	}

	if (spi->cur_usedma && spi->dma_tx)
		dmaengine_terminate_async(spi->dma_tx);
	if (spi->cur_usedma && spi->dma_rx)
		dmaengine_terminate_async(spi->dma_rx);

	stm32_spi_clr_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_SPE);

	stm32_spi_clr_bits(spi, STM32F4_SPI_CR2, STM32F4_SPI_CR2_TXDMAEN |
						 STM32F4_SPI_CR2_RXDMAEN);

	/* Sequence to clear OVR flag */
	readl_relaxed(spi->base + STM32F4_SPI_DR);
	readl_relaxed(spi->base + STM32F4_SPI_SR);

	spin_unlock_irqrestore(&spi->lock, flags);
}

/**
 * stm32h7_spi_disable - Disable SPI controller
 * @spi: pointer to the spi controller data structure
 *
 * RX-Fifo is flushed when SPI controller is disabled.
 */
static void stm32h7_spi_disable(struct stm32_spi *spi)
{
	unsigned long flags;
	u32 cr1;

	dev_dbg(spi->dev, "disable controller\n");

	spin_lock_irqsave(&spi->lock, flags);

	cr1 = readl_relaxed(spi->base + STM32H7_SPI_CR1);

	if (!(cr1 & STM32H7_SPI_CR1_SPE)) {
		spin_unlock_irqrestore(&spi->lock, flags);
		return;
	}

	/* Add a delay to make sure that the transmission has ended. */
	if (spi->cur_half_period)
		udelay(spi->cur_half_period);

	if (spi->cur_usedma && spi->dma_tx)
		dmaengine_terminate_async(spi->dma_tx);
	if (spi->cur_usedma && spi->dma_rx)
		dmaengine_terminate_async(spi->dma_rx);

	stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE);

	stm32_spi_clr_bits(spi, STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_TXDMAEN |
						  STM32H7_SPI_CFG1_RXDMAEN);

	/* Disable interrupts and clear status flags */
	writel_relaxed(0, spi->base + STM32H7_SPI_IER);
	writel_relaxed(STM32H7_SPI_IFCR_ALL, spi->base + STM32H7_SPI_IFCR);

	spin_unlock_irqrestore(&spi->lock, flags);
}
/**
 * stm32_spi_can_dma - Determine if the transfer is eligible for DMA use
 * @ctrl: controller interface
 * @spi_dev: pointer to the spi device
 * @transfer: pointer to spi transfer
 *
 * If driver has fifo and the current transfer size is greater than fifo size,
 * use DMA. Otherwise use DMA for transfer longer than defined DMA min bytes.
 */
static bool stm32_spi_can_dma(struct spi_controller *ctrl,
			      struct spi_device *spi_dev,
			      struct spi_transfer *transfer)
{
	unsigned int dma_size;
	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);

	if (spi->cfg->has_fifo)
		dma_size = spi->fifo_size;
	else
		dma_size = SPI_DMA_MIN_BYTES;

	dev_dbg(spi->dev, "%s: %s\n", __func__,
		(transfer->len > dma_size) ? "true" : "false");

	return (transfer->len > dma_size);
}

/**
 * stm32f4_spi_irq_event - Interrupt handler for SPI controller events
 * @irq: interrupt line
 * @dev_id: SPI controller ctrl interface
 */
static irqreturn_t stm32f4_spi_irq_event(int irq, void *dev_id)
{
	struct spi_controller *ctrl = dev_id;
	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
	u32 sr, mask = 0;
	bool end = false;

	spin_lock(&spi->lock);

	sr = readl_relaxed(spi->base + STM32F4_SPI_SR);
	/*
	 * The BSY flag is not handled in the interrupt: it being set is
	 * normal behavior, so mask it out.
	 */
	sr &= ~STM32F4_SPI_SR_BSY;

	if (!spi->cur_usedma && (spi->cur_comm == SPI_SIMPLEX_TX ||
				 spi->cur_comm == SPI_3WIRE_TX)) {
		/* OVR flag shouldn't be handled for TX only mode */
		sr &= ~(STM32F4_SPI_SR_OVR | STM32F4_SPI_SR_RXNE);
		mask |= STM32F4_SPI_SR_TXE;
	}

	if (!spi->cur_usedma && (spi->cur_comm == SPI_FULL_DUPLEX ||
				 spi->cur_comm == SPI_SIMPLEX_RX ||
				 spi->cur_comm == SPI_3WIRE_RX)) {
		/* TXE flag is set and is handled when RXNE flag occurs */
		sr &= ~STM32F4_SPI_SR_TXE;
		mask |= STM32F4_SPI_SR_RXNE | STM32F4_SPI_SR_OVR;
	}

	if (!(sr & mask)) {
		dev_dbg(spi->dev, "spurious IT (sr=0x%08x)\n", sr);
		spin_unlock(&spi->lock);
		return IRQ_NONE;
	}

	if (sr & STM32F4_SPI_SR_OVR) {
		dev_warn(spi->dev, "Overrun: received value discarded\n");

		/* Sequence to clear OVR flag */
		readl_relaxed(spi->base + STM32F4_SPI_DR);
		readl_relaxed(spi->base + STM32F4_SPI_SR);

		/*
		 * If an overrun is detected, something went wrong, so stop the
		 * current transfer. Otherwise the transfer would keep waiting
		 * for the next RXNE, but DR has already been read and the end
		 * condition would never occur.
		 */
		end = true;
		goto end_irq;
	}

	if (sr & STM32F4_SPI_SR_TXE) {
		if (spi->tx_buf)
			stm32f4_spi_write_tx(spi);
		if (spi->tx_len == 0)
			end = true;
	}

	if (sr & STM32F4_SPI_SR_RXNE) {
		stm32f4_spi_read_rx(spi);
		if (spi->rx_len == 0)
			end = true;
		else if (spi->tx_buf) /* Load data for discontinuous mode */
			stm32f4_spi_write_tx(spi);
	}

end_irq:
	if (end) {
		/* Immediately disable interrupts so that no new ones are generated */
		stm32_spi_clr_bits(spi, STM32F4_SPI_CR2,
				   STM32F4_SPI_CR2_TXEIE |
				   STM32F4_SPI_CR2_RXNEIE |
				   STM32F4_SPI_CR2_ERRIE);
		spin_unlock(&spi->lock);
		return IRQ_WAKE_THREAD;
	}

	spin_unlock(&spi->lock);
	return IRQ_HANDLED;
}
/**
 * stm32f4_spi_irq_thread - Thread of interrupt handler for SPI controller
 * @irq: interrupt line
 * @dev_id: SPI controller interface
 */
static irqreturn_t stm32f4_spi_irq_thread(int irq, void *dev_id)
{
	struct spi_controller *ctrl = dev_id;
	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);

	spi_finalize_current_transfer(ctrl);
	stm32f4_spi_disable(spi);

	return IRQ_HANDLED;
}

/**
 * stm32h7_spi_irq_thread - Thread of interrupt handler for SPI controller
 * @irq: interrupt line
 * @dev_id: SPI controller interface
 */
static irqreturn_t stm32h7_spi_irq_thread(int irq, void *dev_id)
{
	struct spi_controller *ctrl = dev_id;
	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
	u32 sr, ier, mask;
	unsigned long flags;
	bool end = false;

	spin_lock_irqsave(&spi->lock, flags);

	sr = readl_relaxed(spi->base + STM32H7_SPI_SR);
	ier = readl_relaxed(spi->base + STM32H7_SPI_IER);

	mask = ier;
	/*
	 * EOTIE enables IRQs for the EOT, SUSP and TXC events. SUSP has to be
	 * added to the mask so that it can be acknowledged later. TXC is
	 * cleared automatically.
	 */
	mask |= STM32H7_SPI_SR_SUSP;
	/*
	 * DXPIE is set in Full-Duplex: a single interrupt is raised only when
	 * both TXP and RXP are set. So, in Full-Duplex, the TXP and RXP events
	 * have to be polled here.
	 */
	if ((spi->cur_comm == SPI_FULL_DUPLEX) && !spi->cur_usedma)
		mask |= STM32H7_SPI_SR_TXP | STM32H7_SPI_SR_RXP;

	if (!(sr & mask)) {
		dev_vdbg(spi->dev, "spurious IT (sr=0x%08x, ier=0x%08x)\n",
			 sr, ier);
		spin_unlock_irqrestore(&spi->lock, flags);
		return IRQ_NONE;
	}

	if (sr & STM32H7_SPI_SR_SUSP) {
		static DEFINE_RATELIMIT_STATE(rs,
					      DEFAULT_RATELIMIT_INTERVAL * 10,
					      1);
		ratelimit_set_flags(&rs, RATELIMIT_MSG_ON_RELEASE);
		if (__ratelimit(&rs))
			dev_dbg_ratelimited(spi->dev, "Communication suspended\n");
		if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0)))
			stm32h7_spi_read_rxfifo(spi);
		/*
		 * If communication is suspended while using DMA, it means
		 * that something went wrong, so stop the current transfer
		 */
		if (spi->cur_usedma)
			end = true;
	}

	if (sr & STM32H7_SPI_SR_MODF) {
		dev_warn(spi->dev, "Mode fault: transfer aborted\n");
		end = true;
	}

	if (sr & STM32H7_SPI_SR_OVR) {
		dev_err(spi->dev, "Overrun: RX data lost\n");
		end = true;
	}

	if (sr & STM32H7_SPI_SR_EOT) {
		if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0)))
			stm32h7_spi_read_rxfifo(spi);
		if (!spi->cur_usedma ||
		    (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX))
			end = true;
	}

	if (sr & STM32H7_SPI_SR_TXP)
		if (!spi->cur_usedma && (spi->tx_buf && (spi->tx_len > 0)))
			stm32h7_spi_write_txfifo(spi);

	if (sr & STM32H7_SPI_SR_RXP)
		if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0)))
			stm32h7_spi_read_rxfifo(spi);

	writel_relaxed(sr & mask, spi->base + STM32H7_SPI_IFCR);

	spin_unlock_irqrestore(&spi->lock, flags);

	if (end) {
		stm32h7_spi_disable(spi);
		spi_finalize_current_transfer(ctrl);
	}

	return IRQ_HANDLED;
}
/**
 * stm32_spi_prepare_msg - set up the controller to transfer a single message
 * @ctrl: controller interface
 * @msg: pointer to spi message
 */
static int stm32_spi_prepare_msg(struct spi_controller *ctrl,
				 struct spi_message *msg)
{
	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
	struct spi_device *spi_dev = msg->spi;
	struct device_node *np = spi_dev->dev.of_node;
	unsigned long flags;
	u32 clrb = 0, setb = 0;

	/* SPI slave device may need time between data frames */
	spi->cur_midi = 0;
	if (np && !of_property_read_u32(np, "st,spi-midi-ns", &spi->cur_midi))
		dev_dbg(spi->dev, "%dns inter-data idleness\n", spi->cur_midi);

	if (spi_dev->mode & SPI_CPOL)
		setb |= spi->cfg->regs->cpol.mask;
	else
		clrb |= spi->cfg->regs->cpol.mask;

	if (spi_dev->mode & SPI_CPHA)
		setb |= spi->cfg->regs->cpha.mask;
	else
		clrb |= spi->cfg->regs->cpha.mask;

	if (spi_dev->mode & SPI_LSB_FIRST)
		setb |= spi->cfg->regs->lsb_first.mask;
	else
		clrb |= spi->cfg->regs->lsb_first.mask;

	if (STM32_SPI_DEVICE_MODE(spi) && spi_dev->mode & SPI_CS_HIGH)
		setb |= spi->cfg->regs->cs_high.mask;
	else
		clrb |= spi->cfg->regs->cs_high.mask;

	dev_dbg(spi->dev, "cpol=%d cpha=%d lsb_first=%d cs_high=%d\n",
		!!(spi_dev->mode & SPI_CPOL),
		!!(spi_dev->mode & SPI_CPHA),
		!!(spi_dev->mode & SPI_LSB_FIRST),
		!!(spi_dev->mode & SPI_CS_HIGH));

	/*
	 * On STM32H7, messages should not exceed the maximum size set
	 * afterwards via the set_number_of_data function. In order to ensure
	 * this, split large messages into several smaller ones.
	 */
	if (spi->cfg->set_number_of_data) {
		int ret;

		ret = spi_split_transfers_maxwords(ctrl, msg,
						   STM32H7_SPI_TSIZE_MAX,
						   GFP_KERNEL | GFP_DMA);
		if (ret)
			return ret;
	}

	spin_lock_irqsave(&spi->lock, flags);

	/* CPOL, CPHA and LSB FIRST bits share a common register */
	if (clrb || setb)
		writel_relaxed(
			(readl_relaxed(spi->base + spi->cfg->regs->cpol.reg) &
			 ~clrb) | setb,
			spi->base + spi->cfg->regs->cpol.reg);

	spin_unlock_irqrestore(&spi->lock, flags);

	return 0;
}
/**
 * stm32f4_spi_dma_tx_cb - dma callback
 * @data: pointer to the spi controller data structure
 *
 * DMA callback is called when the transfer is complete for DMA TX channel.
 */
static void stm32f4_spi_dma_tx_cb(void *data)
{
	struct stm32_spi *spi = data;

	if (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX) {
		spi_finalize_current_transfer(spi->ctrl);
		stm32f4_spi_disable(spi);
	}
}

/**
 * stm32_spi_dma_rx_cb - dma callback
 * @data: pointer to the spi controller data structure
 *
 * DMA callback is called when the transfer is complete for DMA RX channel.
 */
static void stm32_spi_dma_rx_cb(void *data)
{
	struct stm32_spi *spi = data;

	spi_finalize_current_transfer(spi->ctrl);
	spi->cfg->disable(spi);
}

/**
 * stm32_spi_dma_config - configure dma slave channel depending on current
 *			  transfer bits_per_word.
 * @spi: pointer to the spi controller data structure
 * @dma_conf: pointer to the dma_slave_config structure
 * @dir: direction of the dma transfer
 */
static void stm32_spi_dma_config(struct stm32_spi *spi,
				 struct dma_slave_config *dma_conf,
				 enum dma_transfer_direction dir)
{
	enum dma_slave_buswidth buswidth;
	u32 maxburst;

	if (spi->cur_bpw <= 8)
		buswidth = DMA_SLAVE_BUSWIDTH_1_BYTE;
	else if (spi->cur_bpw <= 16)
		buswidth = DMA_SLAVE_BUSWIDTH_2_BYTES;
	else
		buswidth = DMA_SLAVE_BUSWIDTH_4_BYTES;

	if (spi->cfg->has_fifo) {
		/* Valid for DMA Half or Full Fifo threshold */
		if (spi->cur_fthlv == 2)
			maxburst = 1;
		else
			maxburst = spi->cur_fthlv;
	} else {
		maxburst = 1;
	}

	memset(dma_conf, 0, sizeof(struct dma_slave_config));
	dma_conf->direction = dir;
	if (dma_conf->direction == DMA_DEV_TO_MEM) { /* RX */
		dma_conf->src_addr = spi->phys_addr + spi->cfg->regs->rx.reg;
		dma_conf->src_addr_width = buswidth;
		dma_conf->src_maxburst = maxburst;

		dev_dbg(spi->dev, "Rx DMA config buswidth=%d, maxburst=%d\n",
			buswidth, maxburst);
	} else if (dma_conf->direction == DMA_MEM_TO_DEV) { /* TX */
		dma_conf->dst_addr = spi->phys_addr + spi->cfg->regs->tx.reg;
		dma_conf->dst_addr_width = buswidth;
		dma_conf->dst_maxburst = maxburst;

		dev_dbg(spi->dev, "Tx DMA config buswidth=%d, maxburst=%d\n",
			buswidth, maxburst);
	}
}

/**
 * stm32f4_spi_transfer_one_irq - transfer a single spi_transfer using
 *				  interrupts
 * @spi: pointer to the spi controller data structure
 *
 * It must return 0 if the transfer is finished or 1 if the transfer is still
 * in progress.
 */
static int stm32f4_spi_transfer_one_irq(struct stm32_spi *spi)
{
	unsigned long flags;
	u32 cr2 = 0;

	/* Enable the interrupts relative to the current communication mode */
	if (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX) {
		cr2 |= STM32F4_SPI_CR2_TXEIE;
	} else if (spi->cur_comm == SPI_FULL_DUPLEX ||
		   spi->cur_comm == SPI_SIMPLEX_RX ||
		   spi->cur_comm == SPI_3WIRE_RX) {
		/*
		 * In transmit-only mode, the OVR flag is set in the SR register
		 * since the received data are never read. Therefore set OVR
		 * interrupt only when rx buffer is available.
		 */
		cr2 |= STM32F4_SPI_CR2_RXNEIE | STM32F4_SPI_CR2_ERRIE;
	} else {
		return -EINVAL;
	}

	spin_lock_irqsave(&spi->lock, flags);

	stm32_spi_set_bits(spi, STM32F4_SPI_CR2, cr2);

	stm32_spi_enable(spi);

	/* starting data transfer when buffer is loaded */
	if (spi->tx_buf)
		stm32f4_spi_write_tx(spi);

	spin_unlock_irqrestore(&spi->lock, flags);

	return 1;
}
/**
 * stm32h7_spi_transfer_one_irq - transfer a single spi_transfer using
 *				  interrupts
 * @spi: pointer to the spi controller data structure
 *
 * It must return 0 if the transfer is finished or 1 if the transfer is still
 * in progress.
 */
static int stm32h7_spi_transfer_one_irq(struct stm32_spi *spi)
{
	unsigned long flags;
	u32 ier = 0;

	/* Enable the interrupts relative to the current communication mode */
	if (spi->tx_buf && spi->rx_buf)	/* Full Duplex */
		ier |= STM32H7_SPI_IER_DXPIE;
	else if (spi->tx_buf)		/* Half-Duplex TX dir or Simplex TX */
		ier |= STM32H7_SPI_IER_TXPIE;
	else if (spi->rx_buf)		/* Half-Duplex RX dir or Simplex RX */
		ier |= STM32H7_SPI_IER_RXPIE;

	/* Enable the interrupts relative to the end of transfer */
	ier |= STM32H7_SPI_IER_EOTIE | STM32H7_SPI_IER_TXTFIE |
	       STM32H7_SPI_IER_OVRIE | STM32H7_SPI_IER_MODFIE;

	spin_lock_irqsave(&spi->lock, flags);

	stm32_spi_enable(spi);

	/* Be sure to have data in fifo before starting data transfer */
	if (spi->tx_buf)
		stm32h7_spi_write_txfifo(spi);

	if (STM32_SPI_MASTER_MODE(spi))
		stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_CSTART);

	writel_relaxed(ier, spi->base + STM32H7_SPI_IER);

	spin_unlock_irqrestore(&spi->lock, flags);

	return 1;
}

/**
 * stm32f4_spi_transfer_one_dma_start - Set SPI driver registers to start
 *					transfer using DMA
 * @spi: pointer to the spi controller data structure
 */
static void stm32f4_spi_transfer_one_dma_start(struct stm32_spi *spi)
{
	/* In DMA mode end of transfer is handled by DMA TX or RX callback. */
	if (spi->cur_comm == SPI_SIMPLEX_RX || spi->cur_comm == SPI_3WIRE_RX ||
	    spi->cur_comm == SPI_FULL_DUPLEX) {
		/*
		 * In transmit-only mode, the OVR flag is set in the SR register
		 * since the received data are never read. Therefore set OVR
		 * interrupt only when rx buffer is available.
		 */
		stm32_spi_set_bits(spi, STM32F4_SPI_CR2, STM32F4_SPI_CR2_ERRIE);
	}

	stm32_spi_enable(spi);
}

/**
 * stm32h7_spi_transfer_one_dma_start - Set SPI driver registers to start
 *					transfer using DMA
 * @spi: pointer to the spi controller data structure
 */
static void stm32h7_spi_transfer_one_dma_start(struct stm32_spi *spi)
{
	uint32_t ier = STM32H7_SPI_IER_OVRIE | STM32H7_SPI_IER_MODFIE;

	/* Enable the interrupts */
	if (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX)
		ier |= STM32H7_SPI_IER_EOTIE | STM32H7_SPI_IER_TXTFIE;

	stm32_spi_set_bits(spi, STM32H7_SPI_IER, ier);

	stm32_spi_enable(spi);

	if (STM32_SPI_MASTER_MODE(spi))
		stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_CSTART);
}
/**
 * stm32_spi_transfer_one_dma - transfer a single spi_transfer using DMA
 * @spi: pointer to the spi controller data structure
 * @xfer: pointer to the spi_transfer structure
 *
 * It must return 0 if the transfer is finished or 1 if the transfer is still
 * in progress.
 */
static int stm32_spi_transfer_one_dma(struct stm32_spi *spi,
				      struct spi_transfer *xfer)
{
	struct dma_slave_config tx_dma_conf, rx_dma_conf;
	struct dma_async_tx_descriptor *tx_dma_desc, *rx_dma_desc;
	unsigned long flags;

	spin_lock_irqsave(&spi->lock, flags);

	rx_dma_desc = NULL;
	if (spi->rx_buf && spi->dma_rx) {
		stm32_spi_dma_config(spi, &rx_dma_conf, DMA_DEV_TO_MEM);
		dmaengine_slave_config(spi->dma_rx, &rx_dma_conf);

		/* Enable Rx DMA request */
		stm32_spi_set_bits(spi, spi->cfg->regs->dma_rx_en.reg,
				   spi->cfg->regs->dma_rx_en.mask);

		rx_dma_desc = dmaengine_prep_slave_sg(
					spi->dma_rx, xfer->rx_sg.sgl,
					xfer->rx_sg.nents,
					rx_dma_conf.direction,
					DMA_PREP_INTERRUPT);
	}

	tx_dma_desc = NULL;
	if (spi->tx_buf && spi->dma_tx) {
		stm32_spi_dma_config(spi, &tx_dma_conf, DMA_MEM_TO_DEV);
		dmaengine_slave_config(spi->dma_tx, &tx_dma_conf);

		tx_dma_desc = dmaengine_prep_slave_sg(
					spi->dma_tx, xfer->tx_sg.sgl,
					xfer->tx_sg.nents,
					tx_dma_conf.direction,
					DMA_PREP_INTERRUPT);
	}

	if ((spi->tx_buf && spi->dma_tx && !tx_dma_desc) ||
	    (spi->rx_buf && spi->dma_rx && !rx_dma_desc))
		goto dma_desc_error;

	if (spi->cur_comm == SPI_FULL_DUPLEX && (!tx_dma_desc || !rx_dma_desc))
		goto dma_desc_error;

	if (rx_dma_desc) {
		rx_dma_desc->callback = spi->cfg->dma_rx_cb;
		rx_dma_desc->callback_param = spi;

		if (dma_submit_error(dmaengine_submit(rx_dma_desc))) {
			dev_err(spi->dev, "Rx DMA submit failed\n");
			goto dma_desc_error;
		}
		/* Enable Rx DMA channel */
		dma_async_issue_pending(spi->dma_rx);
	}

	if (tx_dma_desc) {
		if (spi->cur_comm == SPI_SIMPLEX_TX ||
		    spi->cur_comm == SPI_3WIRE_TX) {
			tx_dma_desc->callback = spi->cfg->dma_tx_cb;
			tx_dma_desc->callback_param = spi;
		}

		if (dma_submit_error(dmaengine_submit(tx_dma_desc))) {
			dev_err(spi->dev, "Tx DMA submit failed\n");
			goto dma_submit_error;
		}
		/* Enable Tx DMA channel */
		dma_async_issue_pending(spi->dma_tx);

		/* Enable Tx DMA request */
		stm32_spi_set_bits(spi, spi->cfg->regs->dma_tx_en.reg,
				   spi->cfg->regs->dma_tx_en.mask);
	}

	spi->cfg->transfer_one_dma_start(spi);

	spin_unlock_irqrestore(&spi->lock, flags);

	return 1;

dma_submit_error:
	if (spi->dma_rx)
		dmaengine_terminate_sync(spi->dma_rx);

dma_desc_error:
	stm32_spi_clr_bits(spi, spi->cfg->regs->dma_rx_en.reg,
			   spi->cfg->regs->dma_rx_en.mask);

	spin_unlock_irqrestore(&spi->lock, flags);

	dev_info(spi->dev, "DMA issue: fall back to irq transfer\n");

	spi->cur_usedma = false;
	return spi->cfg->transfer_one_irq(spi);
}
/**
 * stm32f4_spi_set_bpw - Configure bits per word
 * @spi: pointer to the spi controller data structure
 */
static void stm32f4_spi_set_bpw(struct stm32_spi *spi)
{
	if (spi->cur_bpw == 16)
		stm32_spi_set_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_DFF);
	else
		stm32_spi_clr_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_DFF);
}

/**
 * stm32h7_spi_set_bpw - configure bits per word
 * @spi: pointer to the spi controller data structure
 */
static void stm32h7_spi_set_bpw(struct stm32_spi *spi)
{
	u32 bpw, fthlv;
	u32 cfg1_clrb = 0, cfg1_setb = 0;

	bpw = spi->cur_bpw - 1;

	cfg1_clrb |= STM32H7_SPI_CFG1_DSIZE;
	cfg1_setb |= FIELD_PREP(STM32H7_SPI_CFG1_DSIZE, bpw);

	spi->cur_fthlv = stm32h7_spi_prepare_fthlv(spi, spi->cur_xferlen);
	fthlv = spi->cur_fthlv - 1;

	cfg1_clrb |= STM32H7_SPI_CFG1_FTHLV;
	cfg1_setb |= FIELD_PREP(STM32H7_SPI_CFG1_FTHLV, fthlv);

	writel_relaxed(
		(readl_relaxed(spi->base + STM32H7_SPI_CFG1) &
		 ~cfg1_clrb) | cfg1_setb,
		spi->base + STM32H7_SPI_CFG1);
}

/**
 * stm32_spi_set_mbr - Configure baud rate divisor in master mode
 * @spi: pointer to the spi controller data structure
 * @mbrdiv: baud rate divisor value
 */
static void stm32_spi_set_mbr(struct stm32_spi *spi, u32 mbrdiv)
{
	u32 clrb = 0, setb = 0;

	clrb |= spi->cfg->regs->br.mask;
	setb |= (mbrdiv << spi->cfg->regs->br.shift) & spi->cfg->regs->br.mask;

	writel_relaxed((readl_relaxed(spi->base + spi->cfg->regs->br.reg) &
			~clrb) | setb,
		       spi->base + spi->cfg->regs->br.reg);
}

/**
 * stm32_spi_communication_type - return transfer communication type
 * @spi_dev: pointer to the spi device
 * @transfer: pointer to spi transfer
 */
static unsigned int stm32_spi_communication_type(struct spi_device *spi_dev,
						 struct spi_transfer *transfer)
{
	unsigned int type = SPI_FULL_DUPLEX;

	if (spi_dev->mode & SPI_3WIRE) { /* MISO/MOSI signals shared */
		/*
		 * SPI_3WIRE with both xfer->tx_buf and xfer->rx_buf set is
		 * forbidden and rejected by the SPI subsystem, so the
		 * direction of the transfer can be determined from whichever
		 * buffer is valid.
		 */
		if (!transfer->tx_buf)
			type = SPI_3WIRE_RX;
		else
			type = SPI_3WIRE_TX;
	} else {
		if (!transfer->tx_buf)
			type = SPI_SIMPLEX_RX;
		else if (!transfer->rx_buf)
			type = SPI_SIMPLEX_TX;
	}

	return type;
}

/**
 * stm32f4_spi_set_mode - configure communication mode
 * @spi: pointer to the spi controller data structure
 * @comm_type: type of communication to configure
 */
static int stm32f4_spi_set_mode(struct stm32_spi *spi, unsigned int comm_type)
{
	if (comm_type == SPI_3WIRE_TX || comm_type == SPI_SIMPLEX_TX) {
		stm32_spi_set_bits(spi, STM32F4_SPI_CR1,
				   STM32F4_SPI_CR1_BIDIMODE |
				   STM32F4_SPI_CR1_BIDIOE);
	} else if (comm_type == SPI_FULL_DUPLEX ||
		   comm_type == SPI_SIMPLEX_RX) {
		stm32_spi_clr_bits(spi, STM32F4_SPI_CR1,
				   STM32F4_SPI_CR1_BIDIMODE |
				   STM32F4_SPI_CR1_BIDIOE);
	} else if (comm_type == SPI_3WIRE_RX) {
		stm32_spi_set_bits(spi, STM32F4_SPI_CR1,
				   STM32F4_SPI_CR1_BIDIMODE);
		stm32_spi_clr_bits(spi, STM32F4_SPI_CR1,
				   STM32F4_SPI_CR1_BIDIOE);
	} else {
		return -EINVAL;
	}

	return 0;
}

/**
 * stm32h7_spi_set_mode - configure communication mode
 * @spi: pointer to the spi controller data structure
 * @comm_type: type of communication to configure
 */
static int stm32h7_spi_set_mode(struct stm32_spi *spi, unsigned int comm_type)
{
	u32 mode;
	u32 cfg2_clrb = 0, cfg2_setb = 0;

	if (comm_type == SPI_3WIRE_RX) {
		mode = STM32H7_SPI_HALF_DUPLEX;
		stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_HDDIR);
	} else if (comm_type == SPI_3WIRE_TX) {
		mode = STM32H7_SPI_HALF_DUPLEX;
		stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_HDDIR);
	} else if (comm_type == SPI_SIMPLEX_RX) {
		mode = STM32H7_SPI_SIMPLEX_RX;
	} else if (comm_type == SPI_SIMPLEX_TX) {
		mode = STM32H7_SPI_SIMPLEX_TX;
	} else {
		mode = STM32H7_SPI_FULL_DUPLEX;
	}

	cfg2_clrb |= STM32H7_SPI_CFG2_COMM;
	cfg2_setb |= FIELD_PREP(STM32H7_SPI_CFG2_COMM, mode);

	writel_relaxed(
		(readl_relaxed(spi->base + STM32H7_SPI_CFG2) &
		 ~cfg2_clrb) | cfg2_setb,
		spi->base + STM32H7_SPI_CFG2);

	return 0;
}

/**
 * stm32h7_spi_data_idleness - configure minimum time delay inserted between two
 *			       consecutive data frames in master mode
 * @spi: pointer to the spi controller data structure
 * @len: transfer len
 */
static void stm32h7_spi_data_idleness(struct stm32_spi *spi, u32 len)
{
	u32 cfg2_clrb = 0, cfg2_setb = 0;

	cfg2_clrb |= STM32H7_SPI_CFG2_MIDI;
	if ((len > 1) && (spi->cur_midi > 0)) {
		u32 sck_period_ns = DIV_ROUND_UP(NSEC_PER_SEC, spi->cur_speed);
		u32 midi = min_t(u32,
				 DIV_ROUND_UP(spi->cur_midi, sck_period_ns),
				 FIELD_GET(STM32H7_SPI_CFG2_MIDI,
					   STM32H7_SPI_CFG2_MIDI));

		dev_dbg(spi->dev, "period=%dns, midi=%d(=%dns)\n",
			sck_period_ns, midi, midi * sck_period_ns);
		cfg2_setb |= FIELD_PREP(STM32H7_SPI_CFG2_MIDI, midi);
	}

	writel_relaxed((readl_relaxed(spi->base + STM32H7_SPI_CFG2) &
			~cfg2_clrb) | cfg2_setb,
		       spi->base + STM32H7_SPI_CFG2);
}

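/*
 * Illustrative example: at cur_speed = 10 MHz the SCK period is 100 ns, so a
 * requested cur_midi of 450 ns is rounded up to 5 SCK periods (500 ns),
 * below the 15-period maximum that the MIDI bitfield can hold.
 */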
/**
 * stm32h7_spi_number_of_data - configure number of data at current transfer
 * @spi: pointer to the spi controller data structure
 * @nb_words: transfer length (in words)
 */
static int stm32h7_spi_number_of_data(struct stm32_spi *spi, u32 nb_words)
{
	if (nb_words <= STM32H7_SPI_TSIZE_MAX) {
		writel_relaxed(FIELD_PREP(STM32H7_SPI_CR2_TSIZE, nb_words),
			       spi->base + STM32H7_SPI_CR2);
	} else {
		return -EMSGSIZE;
	}

	return 0;
}

/**
 * stm32_spi_transfer_one_setup - common setup to transfer a single
 *				  spi_transfer either using DMA or
 *				  interrupts.
 * @spi: pointer to the spi controller data structure
 * @spi_dev: pointer to the spi device
 * @transfer: pointer to spi transfer
 */
static int stm32_spi_transfer_one_setup(struct stm32_spi *spi,
					struct spi_device *spi_dev,
					struct spi_transfer *transfer)
{
	unsigned long flags;
	unsigned int comm_type;
	int nb_words, ret = 0;
	int mbr;

	spin_lock_irqsave(&spi->lock, flags);

	spi->cur_xferlen = transfer->len;

	spi->cur_bpw = transfer->bits_per_word;
	spi->cfg->set_bpw(spi);

	/* Update spi->cur_speed with real clock speed */
	if (STM32_SPI_MASTER_MODE(spi)) {
		mbr = stm32_spi_prepare_mbr(spi, transfer->speed_hz,
					    spi->cfg->baud_rate_div_min,
					    spi->cfg->baud_rate_div_max);
		if (mbr < 0) {
			ret = mbr;
			goto out;
		}

		transfer->speed_hz = spi->cur_speed;
		stm32_spi_set_mbr(spi, mbr);
	}

	comm_type = stm32_spi_communication_type(spi_dev, transfer);
	ret = spi->cfg->set_mode(spi, comm_type);
	if (ret < 0)
		goto out;

	spi->cur_comm = comm_type;

	if (STM32_SPI_MASTER_MODE(spi) && spi->cfg->set_data_idleness)
		spi->cfg->set_data_idleness(spi, transfer->len);

	if (spi->cur_bpw <= 8)
		nb_words = transfer->len;
	else if (spi->cur_bpw <= 16)
		nb_words = DIV_ROUND_UP(transfer->len * 8, 16);
	else
		nb_words = DIV_ROUND_UP(transfer->len * 8, 32);

	if (spi->cfg->set_number_of_data) {
		ret = spi->cfg->set_number_of_data(spi, nb_words);
		if (ret < 0)
			goto out;
	}

	dev_dbg(spi->dev, "transfer communication mode set to %d\n",
		spi->cur_comm);
	dev_dbg(spi->dev,
		"data frame of %d-bit, data packet of %d data frames\n",
		spi->cur_bpw, spi->cur_fthlv);
	if (STM32_SPI_MASTER_MODE(spi))
		dev_dbg(spi->dev, "speed set to %dHz\n", spi->cur_speed);
	dev_dbg(spi->dev, "transfer of %d bytes (%d data frames)\n",
		spi->cur_xferlen, nb_words);
	dev_dbg(spi->dev, "dma %s\n",
		(spi->cur_usedma) ? "enabled" : "disabled");

out:
	spin_unlock_irqrestore(&spi->lock, flags);

	return ret;
}
/**
 * stm32_spi_transfer_one - transfer a single spi_transfer
 * @ctrl: controller interface
 * @spi_dev: pointer to the spi device
 * @transfer: pointer to spi transfer
 *
 * It must return 0 if the transfer is finished or 1 if the transfer is still
 * in progress.
 */
static int stm32_spi_transfer_one(struct spi_controller *ctrl,
				  struct spi_device *spi_dev,
				  struct spi_transfer *transfer)
{
	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
	int ret;

	spi->tx_buf = transfer->tx_buf;
	spi->rx_buf = transfer->rx_buf;
	spi->tx_len = spi->tx_buf ? transfer->len : 0;
	spi->rx_len = spi->rx_buf ? transfer->len : 0;

	spi->cur_usedma = (ctrl->can_dma &&
			   ctrl->can_dma(ctrl, spi_dev, transfer));

	ret = stm32_spi_transfer_one_setup(spi, spi_dev, transfer);
	if (ret) {
		dev_err(spi->dev, "SPI transfer setup failed\n");
		return ret;
	}

	if (spi->cur_usedma)
		return stm32_spi_transfer_one_dma(spi, transfer);
	else
		return spi->cfg->transfer_one_irq(spi);
}

/**
 * stm32_spi_unprepare_msg - relax the hardware
 * @ctrl: controller interface
 * @msg: pointer to the spi message
 */
static int stm32_spi_unprepare_msg(struct spi_controller *ctrl,
				   struct spi_message *msg)
{
	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);

	spi->cfg->disable(spi);

	return 0;
}

/**
 * stm32f4_spi_config - Configure SPI controller as SPI master
 * @spi: pointer to the spi controller data structure
 */
static int stm32f4_spi_config(struct stm32_spi *spi)
{
	unsigned long flags;

	spin_lock_irqsave(&spi->lock, flags);

	/* Ensure I2SMOD bit is kept cleared */
	stm32_spi_clr_bits(spi, STM32F4_SPI_I2SCFGR,
			   STM32F4_SPI_I2SCFGR_I2SMOD);

	/*
	 * - SS input value high
	 * - transmitter half duplex direction
	 * - Set the master mode (default Motorola mode)
	 * - Consider 1 master/n slaves configuration and
	 *   SS input value is determined by the SSI bit
	 */
	stm32_spi_set_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_SSI |
						 STM32F4_SPI_CR1_BIDIOE |
						 STM32F4_SPI_CR1_MSTR |
						 STM32F4_SPI_CR1_SSM);

	spin_unlock_irqrestore(&spi->lock, flags);

	return 0;
}

/**
 * stm32h7_spi_config - Configure SPI controller
 * @spi: pointer to the spi controller data structure
 */
static int stm32h7_spi_config(struct stm32_spi *spi)
{
	unsigned long flags;
	u32 cr1 = 0, cfg2 = 0;

	spin_lock_irqsave(&spi->lock, flags);

	/* Ensure I2SMOD bit is kept cleared */
	stm32_spi_clr_bits(spi, STM32H7_SPI_I2SCFGR,
			   STM32H7_SPI_I2SCFGR_I2SMOD);

	if (STM32_SPI_DEVICE_MODE(spi)) {
		/* Use native device select */
		cfg2 &= ~STM32H7_SPI_CFG2_SSM;
	} else {
		/*
		 * - Transmitter half duplex direction
		 * - Automatic communication suspend when RX-Fifo is full
		 * - SS input value high
		 */
		cr1 |= STM32H7_SPI_CR1_HDDIR | STM32H7_SPI_CR1_MASRX | STM32H7_SPI_CR1_SSI;

		/*
		 * - Set the master mode (default Motorola mode)
		 * - Consider 1 master/n devices configuration and
		 *   SS input value is determined by the SSI bit
		 * - keep control of all associated GPIOs
		 */
		cfg2 |= STM32H7_SPI_CFG2_MASTER | STM32H7_SPI_CFG2_SSM | STM32H7_SPI_CFG2_AFCNTR;
	}

	stm32_spi_set_bits(spi, STM32H7_SPI_CR1, cr1);
	stm32_spi_set_bits(spi, STM32H7_SPI_CFG2, cfg2);

	spin_unlock_irqrestore(&spi->lock, flags);

	return 0;
}
static const struct stm32_spi_cfg stm32f4_spi_cfg = {
	.regs = &stm32f4_spi_regspec,
	.get_bpw_mask = stm32f4_spi_get_bpw_mask,
	.disable = stm32f4_spi_disable,
	.config = stm32f4_spi_config,
	.set_bpw = stm32f4_spi_set_bpw,
	.set_mode = stm32f4_spi_set_mode,
	.transfer_one_dma_start = stm32f4_spi_transfer_one_dma_start,
	.dma_tx_cb = stm32f4_spi_dma_tx_cb,
	.dma_rx_cb = stm32_spi_dma_rx_cb,
	.transfer_one_irq = stm32f4_spi_transfer_one_irq,
	.irq_handler_event = stm32f4_spi_irq_event,
	.irq_handler_thread = stm32f4_spi_irq_thread,
	.baud_rate_div_min = STM32F4_SPI_BR_DIV_MIN,
	.baud_rate_div_max = STM32F4_SPI_BR_DIV_MAX,
	.has_fifo = false,
	.has_device_mode = false,
	.flags = SPI_CONTROLLER_MUST_TX,
};

static const struct stm32_spi_cfg stm32h7_spi_cfg = {
	.regs = &stm32h7_spi_regspec,
	.get_fifo_size = stm32h7_spi_get_fifo_size,
	.get_bpw_mask = stm32h7_spi_get_bpw_mask,
	.disable = stm32h7_spi_disable,
	.config = stm32h7_spi_config,
	.set_bpw = stm32h7_spi_set_bpw,
	.set_mode = stm32h7_spi_set_mode,
	.set_data_idleness = stm32h7_spi_data_idleness,
	.set_number_of_data = stm32h7_spi_number_of_data,
	.transfer_one_dma_start = stm32h7_spi_transfer_one_dma_start,
	.dma_rx_cb = stm32_spi_dma_rx_cb,
	/*
	 * dma_tx_cb is not necessary since, in case of TX, the DMA transfer is
	 * followed by an SPI access, hence the handling is performed within
	 * the SPI interrupt
	 */
	.transfer_one_irq = stm32h7_spi_transfer_one_irq,
	.irq_handler_thread = stm32h7_spi_irq_thread,
	.baud_rate_div_min = STM32H7_SPI_MBR_DIV_MIN,
	.baud_rate_div_max = STM32H7_SPI_MBR_DIV_MAX,
	.has_fifo = true,
	.has_device_mode = true,
};

static const struct of_device_id stm32_spi_of_match[] = {
	{ .compatible = "st,stm32h7-spi", .data = (void *)&stm32h7_spi_cfg },
	{ .compatible = "st,stm32f4-spi", .data = (void *)&stm32f4_spi_cfg },
	{},
};
MODULE_DEVICE_TABLE(of, stm32_spi_of_match);
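/*
 * Illustrative devicetree fragment matched by this driver (addresses,
 * interrupt/clock/DMA specifiers and the child device below are placeholders,
 * not taken from a real board):
 *
 *	spi@44004000 {
 *		compatible = "st,stm32h7-spi";
 *		reg = <0x44004000 0x400>;
 *		interrupts = <...>;
 *		clocks = <...>;
 *		dmas = <...>, <...>;
 *		dma-names = "rx", "tx";
 *		cs-gpios = <&gpioa 4 0>;
 *		#address-cells = <1>;
 *		#size-cells = <0>;
 *
 *		flash@0 {
 *			compatible = "jedec,spi-nor";
 *			reg = <0>;
 *			spi-max-frequency = <20000000>;
 *			st,spi-midi-ns = <4000>;
 *		};
 *	};
 */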
static int stm32_spi_probe(struct platform_device *pdev)
{
	struct spi_controller *ctrl;
	struct stm32_spi *spi;
	struct resource *res;
	struct reset_control *rst;
	struct device_node *np = pdev->dev.of_node;
	bool device_mode;
	int ret;
	const struct stm32_spi_cfg *cfg = of_device_get_match_data(&pdev->dev);

	device_mode = of_property_read_bool(np, "spi-slave");
	if (!cfg->has_device_mode && device_mode) {
		dev_err(&pdev->dev, "spi-slave not supported\n");
		return -EPERM;
	}

	if (device_mode)
		ctrl = devm_spi_alloc_slave(&pdev->dev, sizeof(struct stm32_spi));
	else
		ctrl = devm_spi_alloc_master(&pdev->dev, sizeof(struct stm32_spi));
	if (!ctrl) {
		dev_err(&pdev->dev, "spi controller allocation failed\n");
		return -ENOMEM;
	}
	platform_set_drvdata(pdev, ctrl);

	spi = spi_controller_get_devdata(ctrl);
	spi->dev = &pdev->dev;
	spi->ctrl = ctrl;
	spi->device_mode = device_mode;
	spin_lock_init(&spi->lock);

	spi->cfg = cfg;

	spi->base = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
	if (IS_ERR(spi->base))
		return PTR_ERR(spi->base);

	spi->phys_addr = (dma_addr_t)res->start;

	spi->irq = platform_get_irq(pdev, 0);
	if (spi->irq <= 0)
		return spi->irq;

	ret = devm_request_threaded_irq(&pdev->dev, spi->irq,
					spi->cfg->irq_handler_event,
					spi->cfg->irq_handler_thread,
					IRQF_ONESHOT, pdev->name, ctrl);
	if (ret) {
		dev_err(&pdev->dev, "irq%d request failed: %d\n", spi->irq,
			ret);
		return ret;
	}

	spi->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(spi->clk)) {
		ret = PTR_ERR(spi->clk);
		dev_err(&pdev->dev, "clk get failed: %d\n", ret);
		return ret;
	}

	ret = clk_prepare_enable(spi->clk);
	if (ret) {
		dev_err(&pdev->dev, "clk enable failed: %d\n", ret);
		return ret;
	}
	spi->clk_rate = clk_get_rate(spi->clk);
	if (!spi->clk_rate) {
		dev_err(&pdev->dev, "clk rate = 0\n");
		ret = -EINVAL;
		goto err_clk_disable;
	}

	rst = devm_reset_control_get_optional_exclusive(&pdev->dev, NULL);
	if (rst) {
		if (IS_ERR(rst)) {
			ret = dev_err_probe(&pdev->dev, PTR_ERR(rst),
					    "failed to get reset\n");
			goto err_clk_disable;
		}

		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	if (spi->cfg->has_fifo)
		spi->fifo_size = spi->cfg->get_fifo_size(spi);

	ret = spi->cfg->config(spi);
	if (ret) {
		dev_err(&pdev->dev, "controller configuration failed: %d\n",
			ret);
		goto err_clk_disable;
	}

	ctrl->dev.of_node = pdev->dev.of_node;
	ctrl->auto_runtime_pm = true;
	ctrl->bus_num = pdev->id;
	ctrl->mode_bits = SPI_CPHA | SPI_CPOL | SPI_CS_HIGH | SPI_LSB_FIRST |
			  SPI_3WIRE;
	ctrl->bits_per_word_mask = spi->cfg->get_bpw_mask(spi);
	ctrl->max_speed_hz = spi->clk_rate / spi->cfg->baud_rate_div_min;
	ctrl->min_speed_hz = spi->clk_rate / spi->cfg->baud_rate_div_max;
	ctrl->use_gpio_descriptors = true;
	ctrl->prepare_message = stm32_spi_prepare_msg;
	ctrl->transfer_one = stm32_spi_transfer_one;
	ctrl->unprepare_message = stm32_spi_unprepare_msg;
	ctrl->flags = spi->cfg->flags;
	if (STM32_SPI_DEVICE_MODE(spi))
		ctrl->slave_abort = stm32h7_spi_device_abort;

	spi->dma_tx = dma_request_chan(spi->dev, "tx");
	if (IS_ERR(spi->dma_tx)) {
		ret = PTR_ERR(spi->dma_tx);
		spi->dma_tx = NULL;
		if (ret == -EPROBE_DEFER)
			goto err_clk_disable;

		dev_warn(&pdev->dev, "failed to request tx dma channel\n");
	} else {
		ctrl->dma_tx = spi->dma_tx;
	}

	spi->dma_rx = dma_request_chan(spi->dev, "rx");
	if (IS_ERR(spi->dma_rx)) {
		ret = PTR_ERR(spi->dma_rx);
		spi->dma_rx = NULL;
		if (ret == -EPROBE_DEFER)
			goto err_dma_release;

		dev_warn(&pdev->dev, "failed to request rx dma channel\n");
	} else {
		ctrl->dma_rx = spi->dma_rx;
	}

	if (spi->dma_tx || spi->dma_rx)
		ctrl->can_dma = stm32_spi_can_dma;

	pm_runtime_set_autosuspend_delay(&pdev->dev,
					 STM32_SPI_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(&pdev->dev);
	pm_runtime_set_active(&pdev->dev);
	pm_runtime_get_noresume(&pdev->dev);
	pm_runtime_enable(&pdev->dev);

	ret = spi_register_controller(ctrl);
	if (ret) {
		dev_err(&pdev->dev, "spi controller registration failed: %d\n",
			ret);
		goto err_pm_disable;
	}

	pm_runtime_mark_last_busy(&pdev->dev);
	pm_runtime_put_autosuspend(&pdev->dev);

	dev_info(&pdev->dev, "driver initialized (%s mode)\n",
		 STM32_SPI_MASTER_MODE(spi) ? "master" : "device");

	return 0;

err_pm_disable:
	pm_runtime_disable(&pdev->dev);
	pm_runtime_put_noidle(&pdev->dev);
	pm_runtime_set_suspended(&pdev->dev);
	pm_runtime_dont_use_autosuspend(&pdev->dev);
err_dma_release:
	if (spi->dma_tx)
		dma_release_channel(spi->dma_tx);
	if (spi->dma_rx)
		dma_release_channel(spi->dma_rx);
err_clk_disable:
	clk_disable_unprepare(spi->clk);

	return ret;
}

static void stm32_spi_remove(struct platform_device *pdev)
{
	struct spi_controller *ctrl = platform_get_drvdata(pdev);
	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);

	pm_runtime_get_sync(&pdev->dev);

	spi_unregister_controller(ctrl);
	spi->cfg->disable(spi);

	pm_runtime_disable(&pdev->dev);
	pm_runtime_put_noidle(&pdev->dev);
	pm_runtime_set_suspended(&pdev->dev);
	pm_runtime_dont_use_autosuspend(&pdev->dev);

	if (ctrl->dma_tx)
		dma_release_channel(ctrl->dma_tx);
	if (ctrl->dma_rx)
		dma_release_channel(ctrl->dma_rx);

	clk_disable_unprepare(spi->clk);

	pinctrl_pm_select_sleep_state(&pdev->dev);
}

static int __maybe_unused stm32_spi_runtime_suspend(struct device *dev)
{
	struct spi_controller *ctrl = dev_get_drvdata(dev);
	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);

	clk_disable_unprepare(spi->clk);

	return pinctrl_pm_select_sleep_state(dev);
}

static int __maybe_unused stm32_spi_runtime_resume(struct device *dev)
{
	struct spi_controller *ctrl = dev_get_drvdata(dev);
	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
	int ret;

	ret = pinctrl_pm_select_default_state(dev);
	if (ret)
		return ret;

	return clk_prepare_enable(spi->clk);
}

static int __maybe_unused stm32_spi_suspend(struct device *dev)
{
	struct spi_controller *ctrl = dev_get_drvdata(dev);
	int ret;

	ret = spi_controller_suspend(ctrl);
	if (ret)
		return ret;

	return pm_runtime_force_suspend(dev);
}

static int __maybe_unused stm32_spi_resume(struct device *dev)
{
	struct spi_controller *ctrl = dev_get_drvdata(dev);
	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
	int ret;

	ret = pm_runtime_force_resume(dev);
	if (ret)
		return ret;

	ret = spi_controller_resume(ctrl);
	if (ret) {
		clk_disable_unprepare(spi->clk);
		return ret;
	}

	ret = pm_runtime_resume_and_get(dev);
	if (ret < 0) {
		dev_err(dev, "Unable to power device: %d\n", ret);
		return ret;
	}

	spi->cfg->config(spi);

	pm_runtime_mark_last_busy(dev);
	pm_runtime_put_autosuspend(dev);

	return 0;
}

static const struct dev_pm_ops stm32_spi_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(stm32_spi_suspend, stm32_spi_resume)
	SET_RUNTIME_PM_OPS(stm32_spi_runtime_suspend,
			   stm32_spi_runtime_resume, NULL)
};

static struct platform_driver stm32_spi_driver = {
	.probe = stm32_spi_probe,
	.remove_new = stm32_spi_remove,
	.driver = {
		.name = DRIVER_NAME,
		.pm = &stm32_spi_pm_ops,
		.of_match_table = stm32_spi_of_match,
	},
};
module_platform_driver(stm32_spi_driver);

MODULE_ALIAS("platform:" DRIVER_NAME);
MODULE_DESCRIPTION("STMicroelectronics STM32 SPI Controller driver");
MODULE_AUTHOR("Amelie Delaunay <amelie.delaunay@st.com>");
MODULE_LICENSE("GPL v2");
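/*
 * Purely illustrative client-side sketch (hypothetical helper, not used by
 * this driver): peripheral drivers never call into this file directly, they
 * go through the SPI core, which eventually invokes stm32_spi_transfer_one()
 * via ctrl->transfer_one.  The command byte below is made up.
 */
static int __maybe_unused stm32_spi_example_client_read(struct spi_device *spi_dev,
							u8 *val)
{
	u8 cmd = 0x0f;	/* hypothetical "read register" opcode */

	/*
	 * spi_write_then_read() bounces small buffers through DMA-safe
	 * memory, so stack variables are fine here.
	 */
	return spi_write_then_read(spi_dev, &cmd, 1, val, 1);
}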