xref: /openbmc/linux/drivers/spi/spi-stm32.c (revision 2a598d0b)
1 // SPDX-License-Identifier: GPL-2.0
2 //
3 // STMicroelectronics STM32 SPI Controller driver
4 //
5 // Copyright (C) 2017, STMicroelectronics - All Rights Reserved
6 // Author(s): Amelie Delaunay <amelie.delaunay@st.com> for STMicroelectronics.
7 
8 #include <linux/bitfield.h>
9 #include <linux/debugfs.h>
10 #include <linux/clk.h>
11 #include <linux/delay.h>
12 #include <linux/dmaengine.h>
13 #include <linux/interrupt.h>
14 #include <linux/iopoll.h>
15 #include <linux/module.h>
16 #include <linux/of_platform.h>
17 #include <linux/pinctrl/consumer.h>
18 #include <linux/pm_runtime.h>
19 #include <linux/reset.h>
20 #include <linux/spi/spi.h>
21 
22 #define DRIVER_NAME "spi_stm32"
23 
24 /* STM32F4 SPI registers */
25 #define STM32F4_SPI_CR1			0x00
26 #define STM32F4_SPI_CR2			0x04
27 #define STM32F4_SPI_SR			0x08
28 #define STM32F4_SPI_DR			0x0C
29 #define STM32F4_SPI_I2SCFGR		0x1C
30 
31 /* STM32F4_SPI_CR1 bit fields */
32 #define STM32F4_SPI_CR1_CPHA		BIT(0)
33 #define STM32F4_SPI_CR1_CPOL		BIT(1)
34 #define STM32F4_SPI_CR1_MSTR		BIT(2)
35 #define STM32F4_SPI_CR1_BR_SHIFT	3
36 #define STM32F4_SPI_CR1_BR		GENMASK(5, 3)
37 #define STM32F4_SPI_CR1_SPE		BIT(6)
38 #define STM32F4_SPI_CR1_LSBFRST		BIT(7)
39 #define STM32F4_SPI_CR1_SSI		BIT(8)
40 #define STM32F4_SPI_CR1_SSM		BIT(9)
41 #define STM32F4_SPI_CR1_RXONLY		BIT(10)
42 #define STM32F4_SPI_CR1_DFF		BIT(11)
43 #define STM32F4_SPI_CR1_CRCNEXT		BIT(12)
44 #define STM32F4_SPI_CR1_CRCEN		BIT(13)
45 #define STM32F4_SPI_CR1_BIDIOE		BIT(14)
46 #define STM32F4_SPI_CR1_BIDIMODE	BIT(15)
47 #define STM32F4_SPI_CR1_BR_MIN		0
48 #define STM32F4_SPI_CR1_BR_MAX		(GENMASK(5, 3) >> 3)
49 
50 /* STM32F4_SPI_CR2 bit fields */
51 #define STM32F4_SPI_CR2_RXDMAEN		BIT(0)
52 #define STM32F4_SPI_CR2_TXDMAEN		BIT(1)
53 #define STM32F4_SPI_CR2_SSOE		BIT(2)
54 #define STM32F4_SPI_CR2_FRF		BIT(4)
55 #define STM32F4_SPI_CR2_ERRIE		BIT(5)
56 #define STM32F4_SPI_CR2_RXNEIE		BIT(6)
57 #define STM32F4_SPI_CR2_TXEIE		BIT(7)
58 
59 /* STM32F4_SPI_SR bit fields */
60 #define STM32F4_SPI_SR_RXNE		BIT(0)
61 #define STM32F4_SPI_SR_TXE		BIT(1)
62 #define STM32F4_SPI_SR_CHSIDE		BIT(2)
63 #define STM32F4_SPI_SR_UDR		BIT(3)
64 #define STM32F4_SPI_SR_CRCERR		BIT(4)
65 #define STM32F4_SPI_SR_MODF		BIT(5)
66 #define STM32F4_SPI_SR_OVR		BIT(6)
67 #define STM32F4_SPI_SR_BSY		BIT(7)
68 #define STM32F4_SPI_SR_FRE		BIT(8)
69 
70 /* STM32F4_SPI_I2SCFGR bit fields */
71 #define STM32F4_SPI_I2SCFGR_I2SMOD	BIT(11)
72 
73 /* STM32F4 SPI Baud Rate min/max divisor */
74 #define STM32F4_SPI_BR_DIV_MIN		(2 << STM32F4_SPI_CR1_BR_MIN)
75 #define STM32F4_SPI_BR_DIV_MAX		(2 << STM32F4_SPI_CR1_BR_MAX)
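
/*
 * Illustrative mapping derived from the definitions above: the SCK divisor
 * is 2 << BR, i.e. 2^(BR + 1), so BR = 0 selects fclk/2 and BR = 7 (the
 * maximum field value) selects fclk/256. The STM32H7 MBR field defined
 * below follows the same pattern.
 */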
76 
77 /* STM32H7 SPI registers */
78 #define STM32H7_SPI_CR1			0x00
79 #define STM32H7_SPI_CR2			0x04
80 #define STM32H7_SPI_CFG1		0x08
81 #define STM32H7_SPI_CFG2		0x0C
82 #define STM32H7_SPI_IER			0x10
83 #define STM32H7_SPI_SR			0x14
84 #define STM32H7_SPI_IFCR		0x18
85 #define STM32H7_SPI_TXDR		0x20
86 #define STM32H7_SPI_RXDR		0x30
87 #define STM32H7_SPI_I2SCFGR		0x50
88 
89 /* STM32H7_SPI_CR1 bit fields */
90 #define STM32H7_SPI_CR1_SPE		BIT(0)
91 #define STM32H7_SPI_CR1_MASRX		BIT(8)
92 #define STM32H7_SPI_CR1_CSTART		BIT(9)
93 #define STM32H7_SPI_CR1_CSUSP		BIT(10)
94 #define STM32H7_SPI_CR1_HDDIR		BIT(11)
95 #define STM32H7_SPI_CR1_SSI		BIT(12)
96 
97 /* STM32H7_SPI_CR2 bit fields */
98 #define STM32H7_SPI_CR2_TSIZE		GENMASK(15, 0)
99 #define STM32H7_SPI_TSIZE_MAX		GENMASK(15, 0)
100 
101 /* STM32H7_SPI_CFG1 bit fields */
102 #define STM32H7_SPI_CFG1_DSIZE		GENMASK(4, 0)
103 #define STM32H7_SPI_CFG1_FTHLV		GENMASK(8, 5)
104 #define STM32H7_SPI_CFG1_RXDMAEN	BIT(14)
105 #define STM32H7_SPI_CFG1_TXDMAEN	BIT(15)
106 #define STM32H7_SPI_CFG1_MBR		GENMASK(30, 28)
107 #define STM32H7_SPI_CFG1_MBR_SHIFT	28
108 #define STM32H7_SPI_CFG1_MBR_MIN	0
109 #define STM32H7_SPI_CFG1_MBR_MAX	(GENMASK(30, 28) >> 28)
110 
111 /* STM32H7_SPI_CFG2 bit fields */
112 #define STM32H7_SPI_CFG2_MIDI		GENMASK(7, 4)
113 #define STM32H7_SPI_CFG2_COMM		GENMASK(18, 17)
114 #define STM32H7_SPI_CFG2_SP		GENMASK(21, 19)
115 #define STM32H7_SPI_CFG2_MASTER		BIT(22)
116 #define STM32H7_SPI_CFG2_LSBFRST	BIT(23)
117 #define STM32H7_SPI_CFG2_CPHA		BIT(24)
118 #define STM32H7_SPI_CFG2_CPOL		BIT(25)
119 #define STM32H7_SPI_CFG2_SSM		BIT(26)
120 #define STM32H7_SPI_CFG2_SSIOP		BIT(28)
121 #define STM32H7_SPI_CFG2_AFCNTR		BIT(31)
122 
123 /* STM32H7_SPI_IER bit fields */
124 #define STM32H7_SPI_IER_RXPIE		BIT(0)
125 #define STM32H7_SPI_IER_TXPIE		BIT(1)
126 #define STM32H7_SPI_IER_DXPIE		BIT(2)
127 #define STM32H7_SPI_IER_EOTIE		BIT(3)
128 #define STM32H7_SPI_IER_TXTFIE		BIT(4)
129 #define STM32H7_SPI_IER_OVRIE		BIT(6)
130 #define STM32H7_SPI_IER_MODFIE		BIT(9)
131 #define STM32H7_SPI_IER_ALL		GENMASK(10, 0)
132 
133 /* STM32H7_SPI_SR bit fields */
134 #define STM32H7_SPI_SR_RXP		BIT(0)
135 #define STM32H7_SPI_SR_TXP		BIT(1)
136 #define STM32H7_SPI_SR_EOT		BIT(3)
137 #define STM32H7_SPI_SR_OVR		BIT(6)
138 #define STM32H7_SPI_SR_MODF		BIT(9)
139 #define STM32H7_SPI_SR_SUSP		BIT(11)
140 #define STM32H7_SPI_SR_RXPLVL		GENMASK(14, 13)
141 #define STM32H7_SPI_SR_RXWNE		BIT(15)
142 
143 /* STM32H7_SPI_IFCR bit fields */
144 #define STM32H7_SPI_IFCR_ALL		GENMASK(11, 3)
145 
146 /* STM32H7_SPI_I2SCFGR bit fields */
147 #define STM32H7_SPI_I2SCFGR_I2SMOD	BIT(0)
148 
149 /* STM32H7 SPI Master Baud Rate min/max divisor */
150 #define STM32H7_SPI_MBR_DIV_MIN		(2 << STM32H7_SPI_CFG1_MBR_MIN)
151 #define STM32H7_SPI_MBR_DIV_MAX		(2 << STM32H7_SPI_CFG1_MBR_MAX)
152 
153 /* STM32H7 SPI Communication mode */
154 #define STM32H7_SPI_FULL_DUPLEX		0
155 #define STM32H7_SPI_SIMPLEX_TX		1
156 #define STM32H7_SPI_SIMPLEX_RX		2
157 #define STM32H7_SPI_HALF_DUPLEX		3
158 
159 /* SPI Communication type */
160 #define SPI_FULL_DUPLEX		0
161 #define SPI_SIMPLEX_TX		1
162 #define SPI_SIMPLEX_RX		2
163 #define SPI_3WIRE_TX		3
164 #define SPI_3WIRE_RX		4
165 
166 #define STM32_SPI_AUTOSUSPEND_DELAY		1	/* 1 ms */
167 
168 /*
169  * Use PIO for small transfers, avoiding DMA setup/teardown overhead on
170  * controllers without FIFO buffers.
171  */
172 #define SPI_DMA_MIN_BYTES	16
173 
174 /* STM32 SPI driver helpers */
175 #define STM32_SPI_MASTER_MODE(stm32_spi) (!(stm32_spi)->device_mode)
176 #define STM32_SPI_DEVICE_MODE(stm32_spi) ((stm32_spi)->device_mode)
177 
178 /**
179  * struct stm32_spi_reg - stm32 SPI register & bitfield desc
180  * @reg:		register offset
181  * @mask:		bitfield mask
182  * @shift:		left shift
183  */
184 struct stm32_spi_reg {
185 	int reg;
186 	int mask;
187 	int shift;
188 };
189 
190 /**
191  * struct stm32_spi_regspec - stm32 registers definition, compatible dependent data
192  * @en: enable register and SPI enable bit
193  * @dma_rx_en: SPI DMA RX enable register and SPI DMA RX enable bit
194  * @dma_tx_en: SPI DMA TX enable register and SPI DMA TX enable bit
195  * @cpol: clock polarity register and polarity bit
196  * @cpha: clock phase register and phase bit
197  * @lsb_first: LSB transmitted first register and bit
198  * @cs_high: chip select active value
199  * @br: baud rate register and bitfields
200  * @rx: SPI RX data register
201  * @tx: SPI TX data register
202  */
203 struct stm32_spi_regspec {
204 	const struct stm32_spi_reg en;
205 	const struct stm32_spi_reg dma_rx_en;
206 	const struct stm32_spi_reg dma_tx_en;
207 	const struct stm32_spi_reg cpol;
208 	const struct stm32_spi_reg cpha;
209 	const struct stm32_spi_reg lsb_first;
210 	const struct stm32_spi_reg cs_high;
211 	const struct stm32_spi_reg br;
212 	const struct stm32_spi_reg rx;
213 	const struct stm32_spi_reg tx;
214 };
215 
216 struct stm32_spi;
217 
218 /**
219  * struct stm32_spi_cfg - stm32 compatible configuration data
220  * @regs: registers descriptions
221  * @get_fifo_size: routine to get fifo size
222  * @get_bpw_mask: routine to get bits per word mask
223  * @disable: routine to disable controller
224  * @config: routine to configure controller as SPI Master
225  * @set_bpw: routine to configure registers for bits per word
226  * @set_mode: routine to configure registers to desired mode
227  * @set_data_idleness: optional routine to configure registers to desired idle
228  * time between frames (if driver has this functionality)
229  * @set_number_of_data: optional routine to configure registers to desired
230  * number of data (if driver has this functionality)
231  * @transfer_one_dma_start: routine to start the transfer of a single
232  * spi_transfer using DMA
233  * @dma_rx_cb: routine to call after DMA RX channel operation is complete
234  * @dma_tx_cb: routine to call after DMA TX channel operation is complete
235  * @transfer_one_irq: routine to configure interrupts for driver
236  * @irq_handler_event: Interrupt handler for SPI controller events
237  * @irq_handler_thread: thread of interrupt handler for SPI controller
238  * @baud_rate_div_min: minimum baud rate divisor
239  * @baud_rate_div_max: maximum baud rate divisor
240  * @has_fifo: boolean to know if fifo is used for driver
241  * @flags: compatible specific SPI controller flags used at registration time
242  */
243 struct stm32_spi_cfg {
244 	const struct stm32_spi_regspec *regs;
245 	int (*get_fifo_size)(struct stm32_spi *spi);
246 	int (*get_bpw_mask)(struct stm32_spi *spi);
247 	void (*disable)(struct stm32_spi *spi);
248 	int (*config)(struct stm32_spi *spi);
249 	void (*set_bpw)(struct stm32_spi *spi);
250 	int (*set_mode)(struct stm32_spi *spi, unsigned int comm_type);
251 	void (*set_data_idleness)(struct stm32_spi *spi, u32 length);
252 	int (*set_number_of_data)(struct stm32_spi *spi, u32 length);
253 	void (*transfer_one_dma_start)(struct stm32_spi *spi);
254 	void (*dma_rx_cb)(void *data);
255 	void (*dma_tx_cb)(void *data);
256 	int (*transfer_one_irq)(struct stm32_spi *spi);
257 	irqreturn_t (*irq_handler_event)(int irq, void *dev_id);
258 	irqreturn_t (*irq_handler_thread)(int irq, void *dev_id);
259 	unsigned int baud_rate_div_min;
260 	unsigned int baud_rate_div_max;
261 	bool has_fifo;
262 	u16 flags;
263 };
264 
265 /**
266  * struct stm32_spi - private data of the SPI controller
267  * @dev: driver model representation of the controller
268  * @ctrl: controller interface
269  * @cfg: compatible configuration data
270  * @base: virtual memory area
271  * @clk: hw kernel clock feeding the SPI clock generator
272  * @clk_rate: rate of the hw kernel clock feeding the SPI clock generator
273  * @lock: prevent I/O concurrent access
274  * @irq: SPI controller interrupt line
275  * @fifo_size: size of the embedded fifo in bytes
276  * @cur_midi: master inter-data idleness in ns
277  * @cur_speed: speed configured in Hz
278  * @cur_bpw: number of bits in a single SPI data frame
279  * @cur_fthlv: fifo threshold level (data frames in a single data packet)
280  * @cur_comm: SPI communication mode
281  * @cur_xferlen: current transfer length in bytes
282  * @cur_usedma: boolean to know if dma is used in current transfer
283  * @tx_buf: data to be written, or NULL
284  * @rx_buf: data to be read, or NULL
285  * @tx_len: number of bytes to be written
286  * @rx_len: number of bytes to be read
287  * @dma_tx: dma channel for TX transfer
288  * @dma_rx: dma channel for RX transfer
289  * @phys_addr: SPI registers physical base address
290  * @device_mode: the controller is configured as SPI device
291  */
292 struct stm32_spi {
293 	struct device *dev;
294 	struct spi_controller *ctrl;
295 	const struct stm32_spi_cfg *cfg;
296 	void __iomem *base;
297 	struct clk *clk;
298 	u32 clk_rate;
299 	spinlock_t lock; /* prevent I/O concurrent access */
300 	int irq;
301 	unsigned int fifo_size;
302 
303 	unsigned int cur_midi;
304 	unsigned int cur_speed;
305 	unsigned int cur_bpw;
306 	unsigned int cur_fthlv;
307 	unsigned int cur_comm;
308 	unsigned int cur_xferlen;
309 	bool cur_usedma;
310 
311 	const void *tx_buf;
312 	void *rx_buf;
313 	int tx_len;
314 	int rx_len;
315 	struct dma_chan *dma_tx;
316 	struct dma_chan *dma_rx;
317 	dma_addr_t phys_addr;
318 
319 	bool device_mode;
320 };
321 
322 static const struct stm32_spi_regspec stm32f4_spi_regspec = {
323 	.en = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_SPE },
324 
325 	.dma_rx_en = { STM32F4_SPI_CR2, STM32F4_SPI_CR2_RXDMAEN },
326 	.dma_tx_en = { STM32F4_SPI_CR2, STM32F4_SPI_CR2_TXDMAEN },
327 
328 	.cpol = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_CPOL },
329 	.cpha = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_CPHA },
330 	.lsb_first = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_LSBFRST },
331 	.cs_high = {},
332 	.br = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_BR, STM32F4_SPI_CR1_BR_SHIFT },
333 
334 	.rx = { STM32F4_SPI_DR },
335 	.tx = { STM32F4_SPI_DR },
336 };
337 
338 static const struct stm32_spi_regspec stm32h7_spi_regspec = {
339 	/* SPI data transfer is enabled but spi_ker_ck is idle.
340 	 * CFG1 and CFG2 registers are write protected when SPE is enabled.
341 	 */
342 	.en = { STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE },
343 
344 	.dma_rx_en = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_RXDMAEN },
345 	.dma_tx_en = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_TXDMAEN },
346 
347 	.cpol = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_CPOL },
348 	.cpha = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_CPHA },
349 	.lsb_first = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_LSBFRST },
350 	.cs_high = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_SSIOP },
351 	.br = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_MBR,
352 		STM32H7_SPI_CFG1_MBR_SHIFT },
353 
354 	.rx = { STM32H7_SPI_RXDR },
355 	.tx = { STM32H7_SPI_TXDR },
356 };
357 
358 static inline void stm32_spi_set_bits(struct stm32_spi *spi,
359 				      u32 offset, u32 bits)
360 {
361 	writel_relaxed(readl_relaxed(spi->base + offset) | bits,
362 		       spi->base + offset);
363 }
364 
365 static inline void stm32_spi_clr_bits(struct stm32_spi *spi,
366 				      u32 offset, u32 bits)
367 {
368 	writel_relaxed(readl_relaxed(spi->base + offset) & ~bits,
369 		       spi->base + offset);
370 }
371 
372 /**
373  * stm32h7_spi_get_fifo_size - Return fifo size
374  * @spi: pointer to the spi controller data structure
375  */
376 static int stm32h7_spi_get_fifo_size(struct stm32_spi *spi)
377 {
378 	unsigned long flags;
379 	u32 count = 0;
380 
381 	spin_lock_irqsave(&spi->lock, flags);
382 
383 	stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE);
384 
385 	while (readl_relaxed(spi->base + STM32H7_SPI_SR) & STM32H7_SPI_SR_TXP)
386 		writeb_relaxed(++count, spi->base + STM32H7_SPI_TXDR);
387 
388 	stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE);
389 
390 	spin_unlock_irqrestore(&spi->lock, flags);
391 
392 	dev_dbg(spi->dev, "%d x 8-bit fifo size\n", count);
393 
394 	return count;
395 }
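
/*
 * Illustrative probe sequence (the FIFO depth is hardware dependent and
 * only assumed here): with the controller enabled but no transfer started,
 * each dummy byte written to TXDR consumes one byte of TX-FIFO space. If
 * TXP deasserts after 16 writes, the function reports a 16 x 8-bit FIFO.
 */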
396 
397 /**
398  * stm32f4_spi_get_bpw_mask - Return bits per word mask
399  * @spi: pointer to the spi controller data structure
400  */
401 static int stm32f4_spi_get_bpw_mask(struct stm32_spi *spi)
402 {
403 	dev_dbg(spi->dev, "8-bit or 16-bit data frame supported\n");
404 	return SPI_BPW_MASK(8) | SPI_BPW_MASK(16);
405 }
406 
407 /**
408  * stm32h7_spi_get_bpw_mask - Return bits per word mask
409  * @spi: pointer to the spi controller data structure
410  */
411 static int stm32h7_spi_get_bpw_mask(struct stm32_spi *spi)
412 {
413 	unsigned long flags;
414 	u32 cfg1, max_bpw;
415 
416 	spin_lock_irqsave(&spi->lock, flags);
417 
418 	/*
419 	 * The most significant bit of the DSIZE bit field is reserved when the
420 	 * maximum data size of the peripheral instance is limited to 16-bit
421 	 */
422 	stm32_spi_set_bits(spi, STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_DSIZE);
423 
424 	cfg1 = readl_relaxed(spi->base + STM32H7_SPI_CFG1);
425 	max_bpw = FIELD_GET(STM32H7_SPI_CFG1_DSIZE, cfg1) + 1;
426 
427 	spin_unlock_irqrestore(&spi->lock, flags);
428 
429 	dev_dbg(spi->dev, "%d-bit maximum data frame\n", max_bpw);
430 
431 	return SPI_BPW_RANGE_MASK(4, max_bpw);
432 }
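
/*
 * Illustrative readback (instance limits are assumed): writing all-ones to
 * DSIZE and reading back 0x1f yields max_bpw = 32, i.e.
 * SPI_BPW_RANGE_MASK(4, 32). On an instance limited to 16-bit frames the
 * MSB of DSIZE reads back as 0, giving 0x0f and max_bpw = 16.
 */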
433 
434 /**
435  * stm32_spi_prepare_mbr - Determine baud rate divisor value
436  * @spi: pointer to the spi controller data structure
437  * @speed_hz: requested speed
438  * @min_div: minimum baud rate divisor
439  * @max_div: maximum baud rate divisor
440  *
441  * Return baud rate divisor value in case of success or -EINVAL
442  */
443 static int stm32_spi_prepare_mbr(struct stm32_spi *spi, u32 speed_hz,
444 				 u32 min_div, u32 max_div)
445 {
446 	u32 div, mbrdiv;
447 
448 	/* Ensure spi->clk_rate is even */
449 	div = DIV_ROUND_CLOSEST(spi->clk_rate & ~0x1, speed_hz);
450 
451 	/*
452 	 * The SPI framework sets xfer->speed_hz to ctrl->max_speed_hz if
453 	 * xfer->speed_hz is greater than ctrl->max_speed_hz, and it returns
454 	 * an error when xfer->speed_hz is lower than ctrl->min_speed_hz, so
455 	 * there is no need to check those bounds here.
456 	 * However, the resulting divisor must still fit the hardware limits.
457 	 */
458 	if ((div < min_div) || (div > max_div))
459 		return -EINVAL;
460 
461 	/* Determine the first power of 2 greater than or equal to div */
462 	if (div & (div - 1))
463 		mbrdiv = fls(div);
464 	else
465 		mbrdiv = fls(div) - 1;
466 
467 	spi->cur_speed = spi->clk_rate / (1 << mbrdiv);
468 
469 	return mbrdiv - 1;
470 }
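
/*
 * Worked example with assumed rates: spi->clk_rate = 100 MHz and
 * speed_hz = 10 MHz give div = DIV_ROUND_CLOSEST(100000000, 10000000) = 10.
 * 10 is not a power of two, so mbrdiv = fls(10) = 4, cur_speed becomes
 * 100 MHz / 2^4 = 6.25 MHz, and the function returns mbrdiv - 1 = 3, the
 * value programmed into the BR/MBR field (divisor = 2^(field + 1) = 16).
 */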
471 
472 /**
473  * stm32h7_spi_prepare_fthlv - Determine FIFO threshold level
474  * @spi: pointer to the spi controller data structure
475  * @xfer_len: length of the message to be transferred
476  */
477 static u32 stm32h7_spi_prepare_fthlv(struct stm32_spi *spi, u32 xfer_len)
478 {
479 	u32 packet, bpw;
480 
481 	/* data packet should not exceed 1/2 of fifo space */
482 	packet = clamp(xfer_len, 1U, spi->fifo_size / 2);
483 
484 	/* align packet size with data registers access */
485 	bpw = DIV_ROUND_UP(spi->cur_bpw, 8);
486 	return DIV_ROUND_UP(packet, bpw);
487 }
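
/*
 * Worked example with an assumed 16-byte FIFO: a 40-byte transfer with
 * cur_bpw = 16 gives packet = clamp(40, 1, 8) = 8 bytes and bpw = 2 bytes
 * per frame, hence a threshold of DIV_ROUND_UP(8, 2) = 4 data frames.
 */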
488 
489 /**
490  * stm32f4_spi_write_tx - Write bytes to Transmit Data Register
491  * @spi: pointer to the spi controller data structure
492  *
493  * The read from tx_buf depends on the remaining bytes, to avoid reading
494  * beyond the end of tx_buf.
495  */
496 static void stm32f4_spi_write_tx(struct stm32_spi *spi)
497 {
498 	if ((spi->tx_len > 0) && (readl_relaxed(spi->base + STM32F4_SPI_SR) &
499 				  STM32F4_SPI_SR_TXE)) {
500 		u32 offs = spi->cur_xferlen - spi->tx_len;
501 
502 		if (spi->cur_bpw == 16) {
503 			const u16 *tx_buf16 = (const u16 *)(spi->tx_buf + offs);
504 
505 			writew_relaxed(*tx_buf16, spi->base + STM32F4_SPI_DR);
506 			spi->tx_len -= sizeof(u16);
507 		} else {
508 			const u8 *tx_buf8 = (const u8 *)(spi->tx_buf + offs);
509 
510 			writeb_relaxed(*tx_buf8, spi->base + STM32F4_SPI_DR);
511 			spi->tx_len -= sizeof(u8);
512 		}
513 	}
514 
515 	dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->tx_len);
516 }
517 
518 /**
519  * stm32h7_spi_write_txfifo - Write bytes in Transmit Data Register
520  * @spi: pointer to the spi controller data structure
521  *
522  * The read from tx_buf depends on the remaining bytes, to avoid reading
523  * beyond the end of tx_buf.
524  */
525 static void stm32h7_spi_write_txfifo(struct stm32_spi *spi)
526 {
527 	while ((spi->tx_len > 0) &&
528 		       (readl_relaxed(spi->base + STM32H7_SPI_SR) &
529 			STM32H7_SPI_SR_TXP)) {
530 		u32 offs = spi->cur_xferlen - spi->tx_len;
531 
532 		if (spi->tx_len >= sizeof(u32)) {
533 			const u32 *tx_buf32 = (const u32 *)(spi->tx_buf + offs);
534 
535 			writel_relaxed(*tx_buf32, spi->base + STM32H7_SPI_TXDR);
536 			spi->tx_len -= sizeof(u32);
537 		} else if (spi->tx_len >= sizeof(u16)) {
538 			const u16 *tx_buf16 = (const u16 *)(spi->tx_buf + offs);
539 
540 			writew_relaxed(*tx_buf16, spi->base + STM32H7_SPI_TXDR);
541 			spi->tx_len -= sizeof(u16);
542 		} else {
543 			const u8 *tx_buf8 = (const u8 *)(spi->tx_buf + offs);
544 
545 			writeb_relaxed(*tx_buf8, spi->base + STM32H7_SPI_TXDR);
546 			spi->tx_len -= sizeof(u8);
547 		}
548 	}
549 
550 	dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->tx_len);
551 }
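
/*
 * Access-width example traced from the loop above: with tx_len = 7 and TXP
 * asserted, the FIFO is fed with one 32-bit word, then one 16-bit word,
 * then one byte, leaving tx_len = 0.
 */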
552 
553 /**
554  * stm32f4_spi_read_rx - Read bytes from Receive Data Register
555  * @spi: pointer to the spi controller data structure
556  *
557  * The write to rx_buf depends on the remaining bytes, to avoid writing
558  * beyond the end of rx_buf.
559  */
560 static void stm32f4_spi_read_rx(struct stm32_spi *spi)
561 {
562 	if ((spi->rx_len > 0) && (readl_relaxed(spi->base + STM32F4_SPI_SR) &
563 				  STM32F4_SPI_SR_RXNE)) {
564 		u32 offs = spi->cur_xferlen - spi->rx_len;
565 
566 		if (spi->cur_bpw == 16) {
567 			u16 *rx_buf16 = (u16 *)(spi->rx_buf + offs);
568 
569 			*rx_buf16 = readw_relaxed(spi->base + STM32F4_SPI_DR);
570 			spi->rx_len -= sizeof(u16);
571 		} else {
572 			u8 *rx_buf8 = (u8 *)(spi->rx_buf + offs);
573 
574 			*rx_buf8 = readb_relaxed(spi->base + STM32F4_SPI_DR);
575 			spi->rx_len -= sizeof(u8);
576 		}
577 	}
578 
579 	dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->rx_len);
580 }
581 
582 /**
583  * stm32h7_spi_read_rxfifo - Read bytes in Receive Data Register
584  * @spi: pointer to the spi controller data structure
585  *
586  * The write to rx_buf depends on the remaining bytes, to avoid writing
587  * beyond the end of rx_buf.
588  */
589 static void stm32h7_spi_read_rxfifo(struct stm32_spi *spi)
590 {
591 	u32 sr = readl_relaxed(spi->base + STM32H7_SPI_SR);
592 	u32 rxplvl = FIELD_GET(STM32H7_SPI_SR_RXPLVL, sr);
593 
594 	while ((spi->rx_len > 0) &&
595 	       ((sr & STM32H7_SPI_SR_RXP) ||
596 		((sr & STM32H7_SPI_SR_EOT) &&
597 		 ((sr & STM32H7_SPI_SR_RXWNE) || (rxplvl > 0))))) {
598 		u32 offs = spi->cur_xferlen - spi->rx_len;
599 
600 		if ((spi->rx_len >= sizeof(u32)) ||
601 		    (sr & STM32H7_SPI_SR_RXWNE)) {
602 			u32 *rx_buf32 = (u32 *)(spi->rx_buf + offs);
603 
604 			*rx_buf32 = readl_relaxed(spi->base + STM32H7_SPI_RXDR);
605 			spi->rx_len -= sizeof(u32);
606 		} else if ((spi->rx_len >= sizeof(u16)) ||
607 			   (!(sr & STM32H7_SPI_SR_RXWNE) &&
608 			    (rxplvl >= 2 || spi->cur_bpw > 8))) {
609 			u16 *rx_buf16 = (u16 *)(spi->rx_buf + offs);
610 
611 			*rx_buf16 = readw_relaxed(spi->base + STM32H7_SPI_RXDR);
612 			spi->rx_len -= sizeof(u16);
613 		} else {
614 			u8 *rx_buf8 = (u8 *)(spi->rx_buf + offs);
615 
616 			*rx_buf8 = readb_relaxed(spi->base + STM32H7_SPI_RXDR);
617 			spi->rx_len -= sizeof(u8);
618 		}
619 
620 		sr = readl_relaxed(spi->base + STM32H7_SPI_SR);
621 		rxplvl = FIELD_GET(STM32H7_SPI_SR_RXPLVL, sr);
622 	}
623 
624 	dev_dbg(spi->dev, "%s: %d bytes left (sr=%08x)\n",
625 		__func__, spi->rx_len, sr);
626 }
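
/*
 * Drain example traced from the loop above: at end of transfer with
 * cur_bpw = 8, rx_len = 3, RXWNE clear and RXPLVL = 2, the first access is
 * a 16-bit read (rx_len >= 2), and the remaining byte is then read 8-bit
 * wide.
 */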
627 
628 /**
629  * stm32_spi_enable - Enable SPI controller
630  * @spi: pointer to the spi controller data structure
631  */
632 static void stm32_spi_enable(struct stm32_spi *spi)
633 {
634 	dev_dbg(spi->dev, "enable controller\n");
635 
636 	stm32_spi_set_bits(spi, spi->cfg->regs->en.reg,
637 			   spi->cfg->regs->en.mask);
638 }
639 
640 /**
641  * stm32f4_spi_disable - Disable SPI controller
642  * @spi: pointer to the spi controller data structure
643  */
644 static void stm32f4_spi_disable(struct stm32_spi *spi)
645 {
646 	unsigned long flags;
647 	u32 sr;
648 
649 	dev_dbg(spi->dev, "disable controller\n");
650 
651 	spin_lock_irqsave(&spi->lock, flags);
652 
653 	if (!(readl_relaxed(spi->base + STM32F4_SPI_CR1) &
654 	      STM32F4_SPI_CR1_SPE)) {
655 		spin_unlock_irqrestore(&spi->lock, flags);
656 		return;
657 	}
658 
659 	/* Disable interrupts */
660 	stm32_spi_clr_bits(spi, STM32F4_SPI_CR2, STM32F4_SPI_CR2_TXEIE |
661 						 STM32F4_SPI_CR2_RXNEIE |
662 						 STM32F4_SPI_CR2_ERRIE);
663 
664 	/* Wait until BSY = 0 */
665 	if (readl_relaxed_poll_timeout_atomic(spi->base + STM32F4_SPI_SR,
666 					      sr, !(sr & STM32F4_SPI_SR_BSY),
667 					      10, 100000) < 0) {
668 		dev_warn(spi->dev, "disabling condition timeout\n");
669 	}
670 
671 	if (spi->cur_usedma && spi->dma_tx)
672 		dmaengine_terminate_async(spi->dma_tx);
673 	if (spi->cur_usedma && spi->dma_rx)
674 		dmaengine_terminate_async(spi->dma_rx);
675 
676 	stm32_spi_clr_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_SPE);
677 
678 	stm32_spi_clr_bits(spi, STM32F4_SPI_CR2, STM32F4_SPI_CR2_TXDMAEN |
679 						 STM32F4_SPI_CR2_RXDMAEN);
680 
681 	/* Sequence to clear OVR flag */
682 	readl_relaxed(spi->base + STM32F4_SPI_DR);
683 	readl_relaxed(spi->base + STM32F4_SPI_SR);
684 
685 	spin_unlock_irqrestore(&spi->lock, flags);
686 }
687 
688 /**
689  * stm32h7_spi_disable - Disable SPI controller
690  * @spi: pointer to the spi controller data structure
691  *
692  * RX-Fifo is flushed when SPI controller is disabled.
693  */
694 static void stm32h7_spi_disable(struct stm32_spi *spi)
695 {
696 	unsigned long flags;
697 	u32 cr1;
698 
699 	dev_dbg(spi->dev, "disable controller\n");
700 
701 	spin_lock_irqsave(&spi->lock, flags);
702 
703 	cr1 = readl_relaxed(spi->base + STM32H7_SPI_CR1);
704 
705 	if (!(cr1 & STM32H7_SPI_CR1_SPE)) {
706 		spin_unlock_irqrestore(&spi->lock, flags);
707 		return;
708 	}
709 
710 	if (spi->cur_usedma && spi->dma_tx)
711 		dmaengine_terminate_async(spi->dma_tx);
712 	if (spi->cur_usedma && spi->dma_rx)
713 		dmaengine_terminate_async(spi->dma_rx);
714 
715 	stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE);
716 
717 	stm32_spi_clr_bits(spi, STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_TXDMAEN |
718 						STM32H7_SPI_CFG1_RXDMAEN);
719 
720 	/* Disable interrupts and clear status flags */
721 	writel_relaxed(0, spi->base + STM32H7_SPI_IER);
722 	writel_relaxed(STM32H7_SPI_IFCR_ALL, spi->base + STM32H7_SPI_IFCR);
723 
724 	spin_unlock_irqrestore(&spi->lock, flags);
725 }
726 
727 /**
728  * stm32_spi_can_dma - Determine if the transfer is eligible for DMA use
729  * @ctrl: controller interface
730  * @spi_dev: pointer to the spi device
731  * @transfer: pointer to spi transfer
732  *
733  * If the controller has a FIFO, use DMA when the transfer is larger than the
734  * FIFO size. Otherwise, use DMA for transfers longer than SPI_DMA_MIN_BYTES.
735  */
736 static bool stm32_spi_can_dma(struct spi_controller *ctrl,
737 			      struct spi_device *spi_dev,
738 			      struct spi_transfer *transfer)
739 {
740 	unsigned int dma_size;
741 	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
742 
743 	if (spi->cfg->has_fifo)
744 		dma_size = spi->fifo_size;
745 	else
746 		dma_size = SPI_DMA_MIN_BYTES;
747 
748 	dev_dbg(spi->dev, "%s: %s\n", __func__,
749 		(transfer->len > dma_size) ? "true" : "false");
750 
751 	return (transfer->len > dma_size);
752 }
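
/*
 * Threshold examples (FIFO depth assumed): on STM32H7 with a 16-byte FIFO
 * a 17-byte transfer is handed to DMA while a 16-byte one stays in IRQ
 * mode; on STM32F4 (no FIFO) the fixed SPI_DMA_MIN_BYTES = 16 limit
 * applies instead.
 */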
753 
754 /**
755  * stm32f4_spi_irq_event - Interrupt handler for SPI controller events
756  * @irq: interrupt line
757  * @dev_id: SPI controller ctrl interface
758  */
759 static irqreturn_t stm32f4_spi_irq_event(int irq, void *dev_id)
760 {
761 	struct spi_controller *ctrl = dev_id;
762 	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
763 	u32 sr, mask = 0;
764 	bool end = false;
765 
766 	spin_lock(&spi->lock);
767 
768 	sr = readl_relaxed(spi->base + STM32F4_SPI_SR);
769 	/*
770 	 * The BSY flag is not handled in the interrupt, but it is normal
771 	 * behavior for this flag to be set.
772 	 */
773 	sr &= ~STM32F4_SPI_SR_BSY;
774 
775 	if (!spi->cur_usedma && (spi->cur_comm == SPI_SIMPLEX_TX ||
776 				 spi->cur_comm == SPI_3WIRE_TX)) {
777 		/* OVR flag shouldn't be handled for TX only mode */
778 		sr &= ~(STM32F4_SPI_SR_OVR | STM32F4_SPI_SR_RXNE);
779 		mask |= STM32F4_SPI_SR_TXE;
780 	}
781 
782 	if (!spi->cur_usedma && (spi->cur_comm == SPI_FULL_DUPLEX ||
783 				spi->cur_comm == SPI_SIMPLEX_RX ||
784 				spi->cur_comm == SPI_3WIRE_RX)) {
785 		/* TXE flag is set and is handled when RXNE flag occurs */
786 		sr &= ~STM32F4_SPI_SR_TXE;
787 		mask |= STM32F4_SPI_SR_RXNE | STM32F4_SPI_SR_OVR;
788 	}
789 
790 	if (!(sr & mask)) {
791 		dev_dbg(spi->dev, "spurious IT (sr=0x%08x)\n", sr);
792 		spin_unlock(&spi->lock);
793 		return IRQ_NONE;
794 	}
795 
796 	if (sr & STM32F4_SPI_SR_OVR) {
797 		dev_warn(spi->dev, "Overrun: received value discarded\n");
798 
799 		/* Sequence to clear OVR flag */
800 		readl_relaxed(spi->base + STM32F4_SPI_DR);
801 		readl_relaxed(spi->base + STM32F4_SPI_SR);
802 
803 		/*
804 		 * If overrun is detected, it means that something went wrong,
805 		 * so stop the current transfer. Otherwise it would wait for the
806 		 * next RXNE, but DR is already read and the end never happens.
807 		 */
808 		end = true;
809 		goto end_irq;
810 	}
811 
812 	if (sr & STM32F4_SPI_SR_TXE) {
813 		if (spi->tx_buf)
814 			stm32f4_spi_write_tx(spi);
815 		if (spi->tx_len == 0)
816 			end = true;
817 	}
818 
819 	if (sr & STM32F4_SPI_SR_RXNE) {
820 		stm32f4_spi_read_rx(spi);
821 		if (spi->rx_len == 0)
822 			end = true;
823 		else if (spi->tx_buf)/* Load data for discontinuous mode */
824 			stm32f4_spi_write_tx(spi);
825 	}
826 
827 end_irq:
828 	if (end) {
829 		/* Immediately disable interrupts so that no new ones are generated */
830 		stm32_spi_clr_bits(spi, STM32F4_SPI_CR2,
831 					STM32F4_SPI_CR2_TXEIE |
832 					STM32F4_SPI_CR2_RXNEIE |
833 					STM32F4_SPI_CR2_ERRIE);
834 		spin_unlock(&spi->lock);
835 		return IRQ_WAKE_THREAD;
836 	}
837 
838 	spin_unlock(&spi->lock);
839 	return IRQ_HANDLED;
840 }
841 
842 /**
843  * stm32f4_spi_irq_thread - Thread of interrupt handler for SPI controller
844  * @irq: interrupt line
845  * @dev_id: SPI controller interface
846  */
847 static irqreturn_t stm32f4_spi_irq_thread(int irq, void *dev_id)
848 {
849 	struct spi_controller *ctrl = dev_id;
850 	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
851 
852 	spi_finalize_current_transfer(ctrl);
853 	stm32f4_spi_disable(spi);
854 
855 	return IRQ_HANDLED;
856 }
857 
858 /**
859  * stm32h7_spi_irq_thread - Thread of interrupt handler for SPI controller
860  * @irq: interrupt line
861  * @dev_id: SPI controller interface
862  */
863 static irqreturn_t stm32h7_spi_irq_thread(int irq, void *dev_id)
864 {
865 	struct spi_controller *ctrl = dev_id;
866 	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
867 	u32 sr, ier, mask;
868 	unsigned long flags;
869 	bool end = false;
870 
871 	spin_lock_irqsave(&spi->lock, flags);
872 
873 	sr = readl_relaxed(spi->base + STM32H7_SPI_SR);
874 	ier = readl_relaxed(spi->base + STM32H7_SPI_IER);
875 
876 	mask = ier;
877 	/*
878 	 * EOTIE enables irq from EOT, SUSP and TXC events. Add SUSP to the
879 	 * mask so it can be acknowledged later. TXC is automatically cleared.
880 	 */
881 
882 	mask |= STM32H7_SPI_SR_SUSP;
883 	/*
884 	 * DXPIE is set in Full-Duplex: one interrupt is raised only if both TXP
885 	 * and RXP are set. So in Full-Duplex, TXP and RXP events must be polled.
886 	 */
887 	if ((spi->cur_comm == SPI_FULL_DUPLEX) && !spi->cur_usedma)
888 		mask |= STM32H7_SPI_SR_TXP | STM32H7_SPI_SR_RXP;
889 
890 	if (!(sr & mask)) {
891 		dev_warn(spi->dev, "spurious IT (sr=0x%08x, ier=0x%08x)\n",
892 			 sr, ier);
893 		spin_unlock_irqrestore(&spi->lock, flags);
894 		return IRQ_NONE;
895 	}
896 
897 	if (sr & STM32H7_SPI_SR_SUSP) {
898 		static DEFINE_RATELIMIT_STATE(rs,
899 					      DEFAULT_RATELIMIT_INTERVAL * 10,
900 					      1);
901 		ratelimit_set_flags(&rs, RATELIMIT_MSG_ON_RELEASE);
902 		if (__ratelimit(&rs))
903 			dev_dbg_ratelimited(spi->dev, "Communication suspended\n");
904 		if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0)))
905 			stm32h7_spi_read_rxfifo(spi);
906 		/*
907 		 * If communication is suspended while using DMA, it means
908 		 * that something went wrong, so stop the current transfer
909 		 */
910 		if (spi->cur_usedma)
911 			end = true;
912 	}
913 
914 	if (sr & STM32H7_SPI_SR_MODF) {
915 		dev_warn(spi->dev, "Mode fault: transfer aborted\n");
916 		end = true;
917 	}
918 
919 	if (sr & STM32H7_SPI_SR_OVR) {
920 		dev_err(spi->dev, "Overrun: RX data lost\n");
921 		end = true;
922 	}
923 
924 	if (sr & STM32H7_SPI_SR_EOT) {
925 		if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0)))
926 			stm32h7_spi_read_rxfifo(spi);
927 		if (!spi->cur_usedma ||
928 		    (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX))
929 			end = true;
930 	}
931 
932 	if (sr & STM32H7_SPI_SR_TXP)
933 		if (!spi->cur_usedma && (spi->tx_buf && (spi->tx_len > 0)))
934 			stm32h7_spi_write_txfifo(spi);
935 
936 	if (sr & STM32H7_SPI_SR_RXP)
937 		if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0)))
938 			stm32h7_spi_read_rxfifo(spi);
939 
940 	writel_relaxed(sr & mask, spi->base + STM32H7_SPI_IFCR);
941 
942 	spin_unlock_irqrestore(&spi->lock, flags);
943 
944 	if (end) {
945 		stm32h7_spi_disable(spi);
946 		spi_finalize_current_transfer(ctrl);
947 	}
948 
949 	return IRQ_HANDLED;
950 }
951 
952 /**
953  * stm32_spi_prepare_msg - set up the controller to transfer a single message
954  * @ctrl: controller interface
955  * @msg: pointer to spi message
956  */
957 static int stm32_spi_prepare_msg(struct spi_controller *ctrl,
958 				 struct spi_message *msg)
959 {
960 	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
961 	struct spi_device *spi_dev = msg->spi;
962 	struct device_node *np = spi_dev->dev.of_node;
963 	unsigned long flags;
964 	u32 clrb = 0, setb = 0;
965 
966 	/* SPI slave device may need time between data frames */
967 	spi->cur_midi = 0;
968 	if (np && !of_property_read_u32(np, "st,spi-midi-ns", &spi->cur_midi))
969 		dev_dbg(spi->dev, "%dns inter-data idleness\n", spi->cur_midi);
970 
971 	if (spi_dev->mode & SPI_CPOL)
972 		setb |= spi->cfg->regs->cpol.mask;
973 	else
974 		clrb |= spi->cfg->regs->cpol.mask;
975 
976 	if (spi_dev->mode & SPI_CPHA)
977 		setb |= spi->cfg->regs->cpha.mask;
978 	else
979 		clrb |= spi->cfg->regs->cpha.mask;
980 
981 	if (spi_dev->mode & SPI_LSB_FIRST)
982 		setb |= spi->cfg->regs->lsb_first.mask;
983 	else
984 		clrb |= spi->cfg->regs->lsb_first.mask;
985 
986 	if (STM32_SPI_DEVICE_MODE(spi) && spi_dev->mode & SPI_CS_HIGH)
987 		setb |= spi->cfg->regs->cs_high.mask;
988 	else
989 		clrb |= spi->cfg->regs->cs_high.mask;
990 
991 	dev_dbg(spi->dev, "cpol=%d cpha=%d lsb_first=%d cs_high=%d\n",
992 		!!(spi_dev->mode & SPI_CPOL),
993 		!!(spi_dev->mode & SPI_CPHA),
994 		!!(spi_dev->mode & SPI_LSB_FIRST),
995 		!!(spi_dev->mode & SPI_CS_HIGH));
996 
997 	/* On STM32H7, messages should not exceed the maximum size set
998 	 * afterwards via the set_number_of_data function. In order to
999 	 * ensure that, split large messages into several messages.
1000 	 */
1001 	if (spi->cfg->set_number_of_data) {
1002 		int ret;
1003 
1004 		ret = spi_split_transfers_maxsize(ctrl, msg,
1005 						  STM32H7_SPI_TSIZE_MAX,
1006 						  GFP_KERNEL | GFP_DMA);
1007 		if (ret)
1008 			return ret;
1009 	}
1010 
1011 	spin_lock_irqsave(&spi->lock, flags);
1012 
1013 	/* CPOL, CPHA and LSB FIRST bits share a common register */
1014 	if (clrb || setb)
1015 		writel_relaxed(
1016 			(readl_relaxed(spi->base + spi->cfg->regs->cpol.reg) &
1017 			 ~clrb) | setb,
1018 			spi->base + spi->cfg->regs->cpol.reg);
1019 
1020 	spin_unlock_irqrestore(&spi->lock, flags);
1021 
1022 	return 0;
1023 }
1024 
1025 /**
1026  * stm32f4_spi_dma_tx_cb - dma callback
1027  * @data: pointer to the spi controller data structure
1028  *
1029  * DMA callback is called when the transfer is complete for DMA TX channel.
1030  */
1031 static void stm32f4_spi_dma_tx_cb(void *data)
1032 {
1033 	struct stm32_spi *spi = data;
1034 
1035 	if (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX) {
1036 		spi_finalize_current_transfer(spi->ctrl);
1037 		stm32f4_spi_disable(spi);
1038 	}
1039 }
1040 
1041 /**
1042  * stm32_spi_dma_rx_cb - dma callback
1043  * @data: pointer to the spi controller data structure
1044  *
1045  * DMA callback is called when the transfer is complete for DMA RX channel.
1046  */
1047 static void stm32_spi_dma_rx_cb(void *data)
1048 {
1049 	struct stm32_spi *spi = data;
1050 
1051 	spi_finalize_current_transfer(spi->ctrl);
1052 	spi->cfg->disable(spi);
1053 }
1054 
1055 /**
1056  * stm32_spi_dma_config - configure dma slave channel depending on current
1057  *			  transfer bits_per_word.
1058  * @spi: pointer to the spi controller data structure
1059  * @dma_conf: pointer to the dma_slave_config structure
1060  * @dir: direction of the dma transfer
1061  */
1062 static void stm32_spi_dma_config(struct stm32_spi *spi,
1063 				 struct dma_slave_config *dma_conf,
1064 				 enum dma_transfer_direction dir)
1065 {
1066 	enum dma_slave_buswidth buswidth;
1067 	u32 maxburst;
1068 
1069 	if (spi->cur_bpw <= 8)
1070 		buswidth = DMA_SLAVE_BUSWIDTH_1_BYTE;
1071 	else if (spi->cur_bpw <= 16)
1072 		buswidth = DMA_SLAVE_BUSWIDTH_2_BYTES;
1073 	else
1074 		buswidth = DMA_SLAVE_BUSWIDTH_4_BYTES;
1075 
1076 	if (spi->cfg->has_fifo) {
1077 		/* Valid for DMA Half or Full Fifo threshold */
1078 		if (spi->cur_fthlv == 2)
1079 			maxburst = 1;
1080 		else
1081 			maxburst = spi->cur_fthlv;
1082 	} else {
1083 		maxburst = 1;
1084 	}
1085 
1086 	memset(dma_conf, 0, sizeof(struct dma_slave_config));
1087 	dma_conf->direction = dir;
1088 	if (dma_conf->direction == DMA_DEV_TO_MEM) { /* RX */
1089 		dma_conf->src_addr = spi->phys_addr + spi->cfg->regs->rx.reg;
1090 		dma_conf->src_addr_width = buswidth;
1091 		dma_conf->src_maxburst = maxburst;
1092 
1093 		dev_dbg(spi->dev, "Rx DMA config buswidth=%d, maxburst=%d\n",
1094 			buswidth, maxburst);
1095 	} else if (dma_conf->direction == DMA_MEM_TO_DEV) { /* TX */
1096 		dma_conf->dst_addr = spi->phys_addr + spi->cfg->regs->tx.reg;
1097 		dma_conf->dst_addr_width = buswidth;
1098 		dma_conf->dst_maxburst = maxburst;
1099 
1100 		dev_dbg(spi->dev, "Tx DMA config buswidth=%d, maxburst=%d\n",
1101 			buswidth, maxburst);
1102 	}
1103 }
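
/*
 * Illustrative configuration: for cur_bpw = 8 on a FIFO-capable instance
 * with cur_fthlv = 4, the slave channel is set up with a 1-byte bus width
 * and maxburst = 4; with cur_fthlv = 2 the burst is forced back to 1 so
 * the half-FIFO threshold case still works.
 */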
1104 
1105 /**
1106  * stm32f4_spi_transfer_one_irq - transfer a single spi_transfer using
1107  *				  interrupts
1108  * @spi: pointer to the spi controller data structure
1109  *
1110  * It must return 0 if the transfer is finished or 1 if the transfer is still
1111  * in progress.
1112  */
1113 static int stm32f4_spi_transfer_one_irq(struct stm32_spi *spi)
1114 {
1115 	unsigned long flags;
1116 	u32 cr2 = 0;
1117 
1118 	/* Enable the interrupts relative to the current communication mode */
1119 	if (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX) {
1120 		cr2 |= STM32F4_SPI_CR2_TXEIE;
1121 	} else if (spi->cur_comm == SPI_FULL_DUPLEX ||
1122 				spi->cur_comm == SPI_SIMPLEX_RX ||
1123 				spi->cur_comm == SPI_3WIRE_RX) {
1124 		/* In transmit-only mode, the OVR flag is set in the SR register
1125 		 * since the received data are never read. Therefore enable the
1126 		 * OVR interrupt only when an rx buffer is available.
1127 		 */
1128 		cr2 |= STM32F4_SPI_CR2_RXNEIE | STM32F4_SPI_CR2_ERRIE;
1129 	} else {
1130 		return -EINVAL;
1131 	}
1132 
1133 	spin_lock_irqsave(&spi->lock, flags);
1134 
1135 	stm32_spi_set_bits(spi, STM32F4_SPI_CR2, cr2);
1136 
1137 	stm32_spi_enable(spi);
1138 
1139 	/* start the data transfer once the buffer is loaded */
1140 	if (spi->tx_buf)
1141 		stm32f4_spi_write_tx(spi);
1142 
1143 	spin_unlock_irqrestore(&spi->lock, flags);
1144 
1145 	return 1;
1146 }
1147 
1148 /**
1149  * stm32h7_spi_transfer_one_irq - transfer a single spi_transfer using
1150  *				  interrupts
1151  * @spi: pointer to the spi controller data structure
1152  *
1153  * It must return 0 if the transfer is finished or 1 if the transfer is still
1154  * in progress.
1155  */
1156 static int stm32h7_spi_transfer_one_irq(struct stm32_spi *spi)
1157 {
1158 	unsigned long flags;
1159 	u32 ier = 0;
1160 
1161 	/* Enable the interrupts relative to the current communication mode */
1162 	if (spi->tx_buf && spi->rx_buf)	/* Full Duplex */
1163 		ier |= STM32H7_SPI_IER_DXPIE;
1164 	else if (spi->tx_buf)		/* Half-Duplex TX dir or Simplex TX */
1165 		ier |= STM32H7_SPI_IER_TXPIE;
1166 	else if (spi->rx_buf)		/* Half-Duplex RX dir or Simplex RX */
1167 		ier |= STM32H7_SPI_IER_RXPIE;
1168 
1169 	/* Enable the interrupts relative to the end of transfer */
1170 	ier |= STM32H7_SPI_IER_EOTIE | STM32H7_SPI_IER_TXTFIE |
1171 	       STM32H7_SPI_IER_OVRIE | STM32H7_SPI_IER_MODFIE;
1172 
1173 	spin_lock_irqsave(&spi->lock, flags);
1174 
1175 	stm32_spi_enable(spi);
1176 
1177 	/* Be sure to have data in fifo before starting data transfer */
1178 	if (spi->tx_buf)
1179 		stm32h7_spi_write_txfifo(spi);
1180 
1181 	if (STM32_SPI_MASTER_MODE(spi))
1182 		stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_CSTART);
1183 
1184 	writel_relaxed(ier, spi->base + STM32H7_SPI_IER);
1185 
1186 	spin_unlock_irqrestore(&spi->lock, flags);
1187 
1188 	return 1;
1189 }
1190 
1191 /**
1192  * stm32f4_spi_transfer_one_dma_start - Set SPI driver registers to start
1193  *					transfer using DMA
1194  * @spi: pointer to the spi controller data structure
1195  */
1196 static void stm32f4_spi_transfer_one_dma_start(struct stm32_spi *spi)
1197 {
1198 	/* In DMA mode end of transfer is handled by DMA TX or RX callback. */
1199 	if (spi->cur_comm == SPI_SIMPLEX_RX || spi->cur_comm == SPI_3WIRE_RX ||
1200 	    spi->cur_comm == SPI_FULL_DUPLEX) {
1201 		/*
1202 		 * In transmit-only mode, the OVR flag is set in the SR register
1203 		 * since the received data are never read. Therefore enable the
1204 		 * OVR interrupt only when an rx buffer is available.
1205 		 */
1206 		stm32_spi_set_bits(spi, STM32F4_SPI_CR2, STM32F4_SPI_CR2_ERRIE);
1207 	}
1208 
1209 	stm32_spi_enable(spi);
1210 }
1211 
1212 /**
1213  * stm32h7_spi_transfer_one_dma_start - Set SPI driver registers to start
1214  *					transfer using DMA
1215  * @spi: pointer to the spi controller data structure
1216  */
1217 static void stm32h7_spi_transfer_one_dma_start(struct stm32_spi *spi)
1218 {
1219 	uint32_t ier = STM32H7_SPI_IER_OVRIE | STM32H7_SPI_IER_MODFIE;
1220 
1221 	/* Enable the interrupts */
1222 	if (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX)
1223 		ier |= STM32H7_SPI_IER_EOTIE | STM32H7_SPI_IER_TXTFIE;
1224 
1225 	stm32_spi_set_bits(spi, STM32H7_SPI_IER, ier);
1226 
1227 	stm32_spi_enable(spi);
1228 
1229 	if (STM32_SPI_MASTER_MODE(spi))
1230 		stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_CSTART);
1231 }
1232 
1233 /**
1234  * stm32_spi_transfer_one_dma - transfer a single spi_transfer using DMA
1235  * @spi: pointer to the spi controller data structure
1236  * @xfer: pointer to the spi_transfer structure
1237  *
1238  * It must return 0 if the transfer is finished or 1 if the transfer is still
1239  * in progress.
1240  */
1241 static int stm32_spi_transfer_one_dma(struct stm32_spi *spi,
1242 				      struct spi_transfer *xfer)
1243 {
1244 	struct dma_slave_config tx_dma_conf, rx_dma_conf;
1245 	struct dma_async_tx_descriptor *tx_dma_desc, *rx_dma_desc;
1246 	unsigned long flags;
1247 
1248 	spin_lock_irqsave(&spi->lock, flags);
1249 
1250 	rx_dma_desc = NULL;
1251 	if (spi->rx_buf && spi->dma_rx) {
1252 		stm32_spi_dma_config(spi, &rx_dma_conf, DMA_DEV_TO_MEM);
1253 		dmaengine_slave_config(spi->dma_rx, &rx_dma_conf);
1254 
1255 		/* Enable Rx DMA request */
1256 		stm32_spi_set_bits(spi, spi->cfg->regs->dma_rx_en.reg,
1257 				   spi->cfg->regs->dma_rx_en.mask);
1258 
1259 		rx_dma_desc = dmaengine_prep_slave_sg(
1260 					spi->dma_rx, xfer->rx_sg.sgl,
1261 					xfer->rx_sg.nents,
1262 					rx_dma_conf.direction,
1263 					DMA_PREP_INTERRUPT);
1264 	}
1265 
1266 	tx_dma_desc = NULL;
1267 	if (spi->tx_buf && spi->dma_tx) {
1268 		stm32_spi_dma_config(spi, &tx_dma_conf, DMA_MEM_TO_DEV);
1269 		dmaengine_slave_config(spi->dma_tx, &tx_dma_conf);
1270 
1271 		tx_dma_desc = dmaengine_prep_slave_sg(
1272 					spi->dma_tx, xfer->tx_sg.sgl,
1273 					xfer->tx_sg.nents,
1274 					tx_dma_conf.direction,
1275 					DMA_PREP_INTERRUPT);
1276 	}
1277 
1278 	if ((spi->tx_buf && spi->dma_tx && !tx_dma_desc) ||
1279 	    (spi->rx_buf && spi->dma_rx && !rx_dma_desc))
1280 		goto dma_desc_error;
1281 
1282 	if (spi->cur_comm == SPI_FULL_DUPLEX && (!tx_dma_desc || !rx_dma_desc))
1283 		goto dma_desc_error;
1284 
1285 	if (rx_dma_desc) {
1286 		rx_dma_desc->callback = spi->cfg->dma_rx_cb;
1287 		rx_dma_desc->callback_param = spi;
1288 
1289 		if (dma_submit_error(dmaengine_submit(rx_dma_desc))) {
1290 			dev_err(spi->dev, "Rx DMA submit failed\n");
1291 			goto dma_desc_error;
1292 		}
1293 		/* Enable Rx DMA channel */
1294 		dma_async_issue_pending(spi->dma_rx);
1295 	}
1296 
1297 	if (tx_dma_desc) {
1298 		if (spi->cur_comm == SPI_SIMPLEX_TX ||
1299 		    spi->cur_comm == SPI_3WIRE_TX) {
1300 			tx_dma_desc->callback = spi->cfg->dma_tx_cb;
1301 			tx_dma_desc->callback_param = spi;
1302 		}
1303 
1304 		if (dma_submit_error(dmaengine_submit(tx_dma_desc))) {
1305 			dev_err(spi->dev, "Tx DMA submit failed\n");
1306 			goto dma_submit_error;
1307 		}
1308 		/* Enable Tx DMA channel */
1309 		dma_async_issue_pending(spi->dma_tx);
1310 
1311 		/* Enable Tx DMA request */
1312 		stm32_spi_set_bits(spi, spi->cfg->regs->dma_tx_en.reg,
1313 				   spi->cfg->regs->dma_tx_en.mask);
1314 	}
1315 
1316 	spi->cfg->transfer_one_dma_start(spi);
1317 
1318 	spin_unlock_irqrestore(&spi->lock, flags);
1319 
1320 	return 1;
1321 
1322 dma_submit_error:
1323 	if (spi->dma_rx)
1324 		dmaengine_terminate_sync(spi->dma_rx);
1325 
1326 dma_desc_error:
1327 	stm32_spi_clr_bits(spi, spi->cfg->regs->dma_rx_en.reg,
1328 			   spi->cfg->regs->dma_rx_en.mask);
1329 
1330 	spin_unlock_irqrestore(&spi->lock, flags);
1331 
1332 	dev_info(spi->dev, "DMA issue: fall back to irq transfer\n");
1333 
1334 	spi->cur_usedma = false;
1335 	return spi->cfg->transfer_one_irq(spi);
1336 }
1337 
1338 /**
1339  * stm32f4_spi_set_bpw - Configure bits per word
1340  * @spi: pointer to the spi controller data structure
1341  */
1342 static void stm32f4_spi_set_bpw(struct stm32_spi *spi)
1343 {
1344 	if (spi->cur_bpw == 16)
1345 		stm32_spi_set_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_DFF);
1346 	else
1347 		stm32_spi_clr_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_DFF);
1348 }
1349 
1350 /**
1351  * stm32h7_spi_set_bpw - configure bits per word
1352  * @spi: pointer to the spi controller data structure
1353  */
1354 static void stm32h7_spi_set_bpw(struct stm32_spi *spi)
1355 {
1356 	u32 bpw, fthlv;
1357 	u32 cfg1_clrb = 0, cfg1_setb = 0;
1358 
1359 	bpw = spi->cur_bpw - 1;
1360 
1361 	cfg1_clrb |= STM32H7_SPI_CFG1_DSIZE;
1362 	cfg1_setb |= FIELD_PREP(STM32H7_SPI_CFG1_DSIZE, bpw);
1363 
1364 	spi->cur_fthlv = stm32h7_spi_prepare_fthlv(spi, spi->cur_xferlen);
1365 	fthlv = spi->cur_fthlv - 1;
1366 
1367 	cfg1_clrb |= STM32H7_SPI_CFG1_FTHLV;
1368 	cfg1_setb |= FIELD_PREP(STM32H7_SPI_CFG1_FTHLV, fthlv);
1369 
1370 	writel_relaxed(
1371 		(readl_relaxed(spi->base + STM32H7_SPI_CFG1) &
1372 		 ~cfg1_clrb) | cfg1_setb,
1373 		spi->base + STM32H7_SPI_CFG1);
1374 }
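
/*
 * Field-value example: cur_bpw = 16 programs DSIZE = 15, and a prepared
 * threshold of cur_fthlv = 4 data frames programs FTHLV = 3 (both fields
 * hold "value - 1").
 */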
1375 
1376 /**
1377  * stm32_spi_set_mbr - Configure baud rate divisor in master mode
1378  * @spi: pointer to the spi controller data structure
1379  * @mbrdiv: baud rate divisor value
1380  */
1381 static void stm32_spi_set_mbr(struct stm32_spi *spi, u32 mbrdiv)
1382 {
1383 	u32 clrb = 0, setb = 0;
1384 
1385 	clrb |= spi->cfg->regs->br.mask;
1386 	setb |= (mbrdiv << spi->cfg->regs->br.shift) & spi->cfg->regs->br.mask;
1387 
1388 	writel_relaxed((readl_relaxed(spi->base + spi->cfg->regs->br.reg) &
1389 			~clrb) | setb,
1390 		       spi->base + spi->cfg->regs->br.reg);
1391 }
1392 
1393 /**
1394  * stm32_spi_communication_type - return transfer communication type
1395  * @spi_dev: pointer to the spi device
1396  * @transfer: pointer to spi transfer
1397  */
1398 static unsigned int stm32_spi_communication_type(struct spi_device *spi_dev,
1399 						 struct spi_transfer *transfer)
1400 {
1401 	unsigned int type = SPI_FULL_DUPLEX;
1402 
1403 	if (spi_dev->mode & SPI_3WIRE) { /* MISO/MOSI signals shared */
1404 		/*
1405 		 * SPI_3WIRE with both xfer->tx_buf and xfer->rx_buf non-NULL
1406 		 * is forbidden and rejected by the SPI subsystem, so the
1407 		 * direction of the transfer can be determined from whichever
1408 		 * buffer is valid.
1409 		 */
1410 		if (!transfer->tx_buf)
1411 			type = SPI_3WIRE_RX;
1412 		else
1413 			type = SPI_3WIRE_TX;
1414 	} else {
1415 		if (!transfer->tx_buf)
1416 			type = SPI_SIMPLEX_RX;
1417 		else if (!transfer->rx_buf)
1418 			type = SPI_SIMPLEX_TX;
1419 	}
1420 
1421 	return type;
1422 }
1423 
1424 /**
1425  * stm32f4_spi_set_mode - configure communication mode
1426  * @spi: pointer to the spi controller data structure
1427  * @comm_type: type of communication to configure
1428  */
1429 static int stm32f4_spi_set_mode(struct stm32_spi *spi, unsigned int comm_type)
1430 {
1431 	if (comm_type == SPI_3WIRE_TX || comm_type == SPI_SIMPLEX_TX) {
1432 		stm32_spi_set_bits(spi, STM32F4_SPI_CR1,
1433 					STM32F4_SPI_CR1_BIDIMODE |
1434 					STM32F4_SPI_CR1_BIDIOE);
1435 	} else if (comm_type == SPI_FULL_DUPLEX ||
1436 				comm_type == SPI_SIMPLEX_RX) {
1437 		stm32_spi_clr_bits(spi, STM32F4_SPI_CR1,
1438 					STM32F4_SPI_CR1_BIDIMODE |
1439 					STM32F4_SPI_CR1_BIDIOE);
1440 	} else if (comm_type == SPI_3WIRE_RX) {
1441 		stm32_spi_set_bits(spi, STM32F4_SPI_CR1,
1442 					STM32F4_SPI_CR1_BIDIMODE);
1443 		stm32_spi_clr_bits(spi, STM32F4_SPI_CR1,
1444 					STM32F4_SPI_CR1_BIDIOE);
1445 	} else {
1446 		return -EINVAL;
1447 	}
1448 
1449 	return 0;
1450 }
1451 
1452 /**
1453  * stm32h7_spi_set_mode - configure communication mode
1454  * @spi: pointer to the spi controller data structure
1455  * @comm_type: type of communication to configure
1456  */
1457 static int stm32h7_spi_set_mode(struct stm32_spi *spi, unsigned int comm_type)
1458 {
1459 	u32 mode;
1460 	u32 cfg2_clrb = 0, cfg2_setb = 0;
1461 
1462 	if (comm_type == SPI_3WIRE_RX) {
1463 		mode = STM32H7_SPI_HALF_DUPLEX;
1464 		stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_HDDIR);
1465 	} else if (comm_type == SPI_3WIRE_TX) {
1466 		mode = STM32H7_SPI_HALF_DUPLEX;
1467 		stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_HDDIR);
1468 	} else if (comm_type == SPI_SIMPLEX_RX) {
1469 		mode = STM32H7_SPI_SIMPLEX_RX;
1470 	} else if (comm_type == SPI_SIMPLEX_TX) {
1471 		mode = STM32H7_SPI_SIMPLEX_TX;
1472 	} else {
1473 		mode = STM32H7_SPI_FULL_DUPLEX;
1474 	}
1475 
1476 	cfg2_clrb |= STM32H7_SPI_CFG2_COMM;
1477 	cfg2_setb |= FIELD_PREP(STM32H7_SPI_CFG2_COMM, mode);
1478 
1479 	writel_relaxed(
1480 		(readl_relaxed(spi->base + STM32H7_SPI_CFG2) &
1481 		 ~cfg2_clrb) | cfg2_setb,
1482 		spi->base + STM32H7_SPI_CFG2);
1483 
1484 	return 0;
1485 }
1486 
1487 /**
1488  * stm32h7_spi_data_idleness - configure minimum time delay inserted between two
1489  *			       consecutive data frames in master mode
1490  * @spi: pointer to the spi controller data structure
1491  * @len: transfer len
1492  */
1493 static void stm32h7_spi_data_idleness(struct stm32_spi *spi, u32 len)
1494 {
1495 	u32 cfg2_clrb = 0, cfg2_setb = 0;
1496 
1497 	cfg2_clrb |= STM32H7_SPI_CFG2_MIDI;
1498 	if ((len > 1) && (spi->cur_midi > 0)) {
1499 		u32 sck_period_ns = DIV_ROUND_UP(NSEC_PER_SEC, spi->cur_speed);
1500 		u32 midi = min_t(u32,
1501 				 DIV_ROUND_UP(spi->cur_midi, sck_period_ns),
1502 				 FIELD_GET(STM32H7_SPI_CFG2_MIDI,
1503 				 STM32H7_SPI_CFG2_MIDI));
1504 
1505 
1506 		dev_dbg(spi->dev, "period=%dns, midi=%d(=%dns)\n",
1507 			sck_period_ns, midi, midi * sck_period_ns);
1508 		cfg2_setb |= FIELD_PREP(STM32H7_SPI_CFG2_MIDI, midi);
1509 	}
1510 
1511 	writel_relaxed((readl_relaxed(spi->base + STM32H7_SPI_CFG2) &
1512 			~cfg2_clrb) | cfg2_setb,
1513 		       spi->base + STM32H7_SPI_CFG2);
1514 }
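
/*
 * Worked example with assumed timings: cur_speed = 10 MHz gives an SCK
 * period of 100 ns, so a requested cur_midi of 350 ns programs
 * MIDI = DIV_ROUND_UP(350, 100) = 4 idle SCK cycles, capped at the field
 * maximum of 15.
 */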
1515 
1516 /**
1517  * stm32h7_spi_number_of_data - configure number of data at current transfer
1518  * @spi: pointer to the spi controller data structure
1519  * @nb_words: transfer length (in words)
1520  */
1521 static int stm32h7_spi_number_of_data(struct stm32_spi *spi, u32 nb_words)
1522 {
1523 	if (nb_words <= STM32H7_SPI_TSIZE_MAX) {
1524 		writel_relaxed(FIELD_PREP(STM32H7_SPI_CR2_TSIZE, nb_words),
1525 			       spi->base + STM32H7_SPI_CR2);
1526 	} else {
1527 		return -EMSGSIZE;
1528 	}
1529 
1530 	return 0;
1531 }
1532 
1533 /**
1534  * stm32_spi_transfer_one_setup - common setup to transfer a single
1535  *				  spi_transfer either using DMA or
1536  *				  interrupts.
1537  * @spi: pointer to the spi controller data structure
1538  * @spi_dev: pointer to the spi device
1539  * @transfer: pointer to spi transfer
1540  */
1541 static int stm32_spi_transfer_one_setup(struct stm32_spi *spi,
1542 					struct spi_device *spi_dev,
1543 					struct spi_transfer *transfer)
1544 {
1545 	unsigned long flags;
1546 	unsigned int comm_type;
1547 	int nb_words, ret = 0;
1548 	int mbr;
1549 
1550 	spin_lock_irqsave(&spi->lock, flags);
1551 
1552 	spi->cur_xferlen = transfer->len;
1553 
1554 	spi->cur_bpw = transfer->bits_per_word;
1555 	spi->cfg->set_bpw(spi);
1556 
1557 	/* Update spi->cur_speed with real clock speed */
1558 	if (STM32_SPI_MASTER_MODE(spi)) {
1559 		mbr = stm32_spi_prepare_mbr(spi, transfer->speed_hz,
1560 					    spi->cfg->baud_rate_div_min,
1561 					    spi->cfg->baud_rate_div_max);
1562 		if (mbr < 0) {
1563 			ret = mbr;
1564 			goto out;
1565 		}
1566 
1567 		transfer->speed_hz = spi->cur_speed;
1568 		stm32_spi_set_mbr(spi, mbr);
1569 	}
1570 
1571 	comm_type = stm32_spi_communication_type(spi_dev, transfer);
1572 	ret = spi->cfg->set_mode(spi, comm_type);
1573 	if (ret < 0)
1574 		goto out;
1575 
1576 	spi->cur_comm = comm_type;
1577 
1578 	if (STM32_SPI_MASTER_MODE(spi) && spi->cfg->set_data_idleness)
1579 		spi->cfg->set_data_idleness(spi, transfer->len);
1580 
1581 	if (spi->cur_bpw <= 8)
1582 		nb_words = transfer->len;
1583 	else if (spi->cur_bpw <= 16)
1584 		nb_words = DIV_ROUND_UP(transfer->len * 8, 16);
1585 	else
1586 		nb_words = DIV_ROUND_UP(transfer->len * 8, 32);
1587 
1588 	if (spi->cfg->set_number_of_data) {
1589 		ret = spi->cfg->set_number_of_data(spi, nb_words);
1590 		if (ret < 0)
1591 			goto out;
1592 	}
1593 
1594 	dev_dbg(spi->dev, "transfer communication mode set to %d\n",
1595 		spi->cur_comm);
1596 	dev_dbg(spi->dev,
1597 		"data frame of %d-bit, data packet of %d data frames\n",
1598 		spi->cur_bpw, spi->cur_fthlv);
1599 	if (STM32_SPI_MASTER_MODE(spi))
1600 		dev_dbg(spi->dev, "speed set to %dHz\n", spi->cur_speed);
1601 	dev_dbg(spi->dev, "transfer of %d bytes (%d data frames)\n",
1602 		spi->cur_xferlen, nb_words);
1603 	dev_dbg(spi->dev, "dma %s\n",
1604 		(spi->cur_usedma) ? "enabled" : "disabled");
1605 
1606 out:
1607 	spin_unlock_irqrestore(&spi->lock, flags);
1608 
1609 	return ret;
1610 }
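
/*
 * nb_words example: a 10-byte transfer counts as 10 data frames at
 * cur_bpw <= 8, DIV_ROUND_UP(80, 16) = 5 frames at cur_bpw = 16, and
 * DIV_ROUND_UP(80, 32) = 3 frames at cur_bpw = 32.
 */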
1611 
1612 /**
1613  * stm32_spi_transfer_one - transfer a single spi_transfer
1614  * @ctrl: controller interface
1615  * @spi_dev: pointer to the spi device
1616  * @transfer: pointer to spi transfer
1617  *
1618  * It must return 0 if the transfer is finished or 1 if the transfer is still
1619  * in progress.
1620  */
1621 static int stm32_spi_transfer_one(struct spi_controller *ctrl,
1622 				  struct spi_device *spi_dev,
1623 				  struct spi_transfer *transfer)
1624 {
1625 	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
1626 	int ret;
1627 
1628 	spi->tx_buf = transfer->tx_buf;
1629 	spi->rx_buf = transfer->rx_buf;
1630 	spi->tx_len = spi->tx_buf ? transfer->len : 0;
1631 	spi->rx_len = spi->rx_buf ? transfer->len : 0;
1632 
1633 	spi->cur_usedma = (ctrl->can_dma &&
1634 			   ctrl->can_dma(ctrl, spi_dev, transfer));
1635 
1636 	ret = stm32_spi_transfer_one_setup(spi, spi_dev, transfer);
1637 	if (ret) {
1638 		dev_err(spi->dev, "SPI transfer setup failed\n");
1639 		return ret;
1640 	}
1641 
1642 	if (spi->cur_usedma)
1643 		return stm32_spi_transfer_one_dma(spi, transfer);
1644 	else
1645 		return spi->cfg->transfer_one_irq(spi);
1646 }
1647 
1648 /**
1649  * stm32_spi_unprepare_msg - relax the hardware
1650  * @ctrl: controller interface
1651  * @msg: pointer to the spi message
1652  */
1653 static int stm32_spi_unprepare_msg(struct spi_controller *ctrl,
1654 				   struct spi_message *msg)
1655 {
1656 	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
1657 
1658 	spi->cfg->disable(spi);
1659 
1660 	return 0;
1661 }
1662 
1663 /**
1664  * stm32f4_spi_config - Configure SPI controller as SPI master
1665  * @spi: pointer to the spi controller data structure
1666  */
1667 static int stm32f4_spi_config(struct stm32_spi *spi)
1668 {
1669 	unsigned long flags;
1670 
1671 	spin_lock_irqsave(&spi->lock, flags);
1672 
1673 	/* Ensure I2SMOD bit is kept cleared */
1674 	stm32_spi_clr_bits(spi, STM32F4_SPI_I2SCFGR,
1675 			   STM32F4_SPI_I2SCFGR_I2SMOD);
1676 
1677 	/*
1678 	 * - SS input value high
1679 	 * - transmitter half duplex direction
1680 	 * - Set the master mode (default Motorola mode)
1681 	 * - Consider 1 master/n slaves configuration and
1682 	 *   SS input value is determined by the SSI bit
1683 	 */
1684 	stm32_spi_set_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_SSI |
1685 						 STM32F4_SPI_CR1_BIDIOE |
1686 						 STM32F4_SPI_CR1_MSTR |
1687 						 STM32F4_SPI_CR1_SSM);
1688 
1689 	spin_unlock_irqrestore(&spi->lock, flags);
1690 
1691 	return 0;
1692 }
1693 
1694 /**
1695  * stm32h7_spi_config - Configure SPI controller
1696  * @spi: pointer to the spi controller data structure
1697  */
1698 static int stm32h7_spi_config(struct stm32_spi *spi)
1699 {
1700 	unsigned long flags;
1701 	u32 cr1 = 0, cfg2 = 0;
1702 
1703 	spin_lock_irqsave(&spi->lock, flags);
1704 
1705 	/* Ensure I2SMOD bit is kept cleared */
1706 	stm32_spi_clr_bits(spi, STM32H7_SPI_I2SCFGR,
1707 			   STM32H7_SPI_I2SCFGR_I2SMOD);
1708 
1709 	if (STM32_SPI_DEVICE_MODE(spi)) {
1710 		/* Use native device select */
1711 		cfg2 &= ~STM32H7_SPI_CFG2_SSM;
1712 	} else {
1713 		/*
1714 		 * - Set transmit direction for half-duplex mode (HDDIR)
1715 		 * - Automatically suspend communication when the RX FIFO is full (MASRX)
1716 		 * - Set SS internal input value high (SSI)
1717 		 */
1718 		cr1 |= STM32H7_SPI_CR1_HDDIR | STM32H7_SPI_CR1_MASRX | STM32H7_SPI_CR1_SSI;
1719 
1720 		/*
1721 		 * - Set master mode (default Motorola mode)
1722 		 * - Enable software management of the device select (1 master /
1723 		 *   n devices): the SS input value is taken from the SSI bit (SSM)
1724 		 * - Keep control of all associated GPIOs (AFCNTR)
1725 		 */
1726 		cfg2 |= STM32H7_SPI_CFG2_MASTER | STM32H7_SPI_CFG2_SSM | STM32H7_SPI_CFG2_AFCNTR;
1727 	}
1728 
1729 	stm32_spi_set_bits(spi, STM32H7_SPI_CR1, cr1);
1730 	stm32_spi_set_bits(spi, STM32H7_SPI_CFG2, cfg2);
1731 
1732 	spin_unlock_irqrestore(&spi->lock, flags);
1733 
1734 	return 0;
1735 }
1736 
1737 static const struct stm32_spi_cfg stm32f4_spi_cfg = {
1738 	.regs = &stm32f4_spi_regspec,
1739 	.get_bpw_mask = stm32f4_spi_get_bpw_mask,
1740 	.disable = stm32f4_spi_disable,
1741 	.config = stm32f4_spi_config,
1742 	.set_bpw = stm32f4_spi_set_bpw,
1743 	.set_mode = stm32f4_spi_set_mode,
1744 	.transfer_one_dma_start = stm32f4_spi_transfer_one_dma_start,
1745 	.dma_tx_cb = stm32f4_spi_dma_tx_cb,
1746 	.dma_rx_cb = stm32_spi_dma_rx_cb,
1747 	.transfer_one_irq = stm32f4_spi_transfer_one_irq,
1748 	.irq_handler_event = stm32f4_spi_irq_event,
1749 	.irq_handler_thread = stm32f4_spi_irq_thread,
1750 	.baud_rate_div_min = STM32F4_SPI_BR_DIV_MIN,
1751 	.baud_rate_div_max = STM32F4_SPI_BR_DIV_MAX,
1752 	.has_fifo = false,
1753 	.flags = SPI_MASTER_MUST_TX,
1754 };
1755 
1756 static const struct stm32_spi_cfg stm32h7_spi_cfg = {
1757 	.regs = &stm32h7_spi_regspec,
1758 	.get_fifo_size = stm32h7_spi_get_fifo_size,
1759 	.get_bpw_mask = stm32h7_spi_get_bpw_mask,
1760 	.disable = stm32h7_spi_disable,
1761 	.config = stm32h7_spi_config,
1762 	.set_bpw = stm32h7_spi_set_bpw,
1763 	.set_mode = stm32h7_spi_set_mode,
1764 	.set_data_idleness = stm32h7_spi_data_idleness,
1765 	.set_number_of_data = stm32h7_spi_number_of_data,
1766 	.transfer_one_dma_start = stm32h7_spi_transfer_one_dma_start,
1767 	.dma_rx_cb = stm32_spi_dma_rx_cb,
1768 	/*
1769 	 * dma_tx_cb is not needed: for TX, the DMA transfer is followed by an
1770 	 * SPI access, so completion handling is performed in the SPI interrupt.
1771 	 */
1772 	.transfer_one_irq = stm32h7_spi_transfer_one_irq,
1773 	.irq_handler_thread = stm32h7_spi_irq_thread,
1774 	.baud_rate_div_min = STM32H7_SPI_MBR_DIV_MIN,
1775 	.baud_rate_div_max = STM32H7_SPI_MBR_DIV_MAX,
1776 	.has_fifo = true,
1777 };
1778 
1779 static const struct of_device_id stm32_spi_of_match[] = {
1780 	{ .compatible = "st,stm32h7-spi", .data = (void *)&stm32h7_spi_cfg },
1781 	{ .compatible = "st,stm32f4-spi", .data = (void *)&stm32f4_spi_cfg },
1782 	{},
1783 };
1784 MODULE_DEVICE_TABLE(of, stm32_spi_of_match);
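
/*
 * Illustrative device-tree usage (a sketch, not the authoritative binding
 * document): the compatible string selects the per-SoC configuration above,
 * the optional "spi-slave" property switches the controller to device mode,
 * and the "rx"/"tx" dma-names enable DMA transfers. Cell values below are
 * placeholders.
 *
 *	spi {
 *		compatible = "st,stm32h7-spi";
 *		reg = <...>;
 *		interrupts = <...>;
 *		clocks = <...>;
 *		resets = <...>;
 *		dmas = <...>, <...>;
 *		dma-names = "rx", "tx";
 *	};
 */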
1785 
1786 static int stm32h7_spi_device_abort(struct spi_controller *ctrl)
1787 {
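	/* Simply complete the current transfer to release the SPI core */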
1788 	spi_finalize_current_transfer(ctrl);
1789 	return 0;
1790 }
1791 
1792 static int stm32_spi_probe(struct platform_device *pdev)
1793 {
1794 	struct spi_controller *ctrl;
1795 	struct stm32_spi *spi;
1796 	struct resource *res;
1797 	struct reset_control *rst;
1798 	struct device_node *np = pdev->dev.of_node;
1799 	bool device_mode;
1800 	int ret;
1801 
1802 	device_mode = of_property_read_bool(np, "spi-slave");
1803 
1804 	if (device_mode)
1805 		ctrl = devm_spi_alloc_slave(&pdev->dev, sizeof(struct stm32_spi));
1806 	else
1807 		ctrl = devm_spi_alloc_master(&pdev->dev, sizeof(struct stm32_spi));
1808 	if (!ctrl) {
1809 		dev_err(&pdev->dev, "spi controller allocation failed\n");
1810 		return -ENOMEM;
1811 	}
1812 	platform_set_drvdata(pdev, ctrl);
1813 
1814 	spi = spi_controller_get_devdata(ctrl);
1815 	spi->dev = &pdev->dev;
1816 	spi->ctrl = ctrl;
1817 	spi->device_mode = device_mode;
1818 	spin_lock_init(&spi->lock);
1819 
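	/*
	 * Pick the SoC-specific configuration (register layout and callbacks)
	 * attached to the matched compatible string.
	 */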
1820 	spi->cfg = (const struct stm32_spi_cfg *)
1821 		of_match_device(pdev->dev.driver->of_match_table,
1822 				&pdev->dev)->data;
1823 
1824 	spi->base = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
1825 	if (IS_ERR(spi->base))
1826 		return PTR_ERR(spi->base);
1827 
1828 	spi->phys_addr = (dma_addr_t)res->start;
1829 
1830 	spi->irq = platform_get_irq(pdev, 0);
1831 	if (spi->irq <= 0)
1832 		return dev_err_probe(&pdev->dev, spi->irq,
1833 				     "failed to get irq\n");
1834 
1835 	ret = devm_request_threaded_irq(&pdev->dev, spi->irq,
1836 					spi->cfg->irq_handler_event,
1837 					spi->cfg->irq_handler_thread,
1838 					IRQF_ONESHOT, pdev->name, ctrl);
1839 	if (ret) {
1840 		dev_err(&pdev->dev, "irq%d request failed: %d\n", spi->irq,
1841 			ret);
1842 		return ret;
1843 	}
1844 
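	/*
	 * The SPI kernel clock drives the baud-rate generator; its rate
	 * bounds the bus speeds advertised below.
	 */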
1845 	spi->clk = devm_clk_get(&pdev->dev, NULL);
1846 	if (IS_ERR(spi->clk)) {
1847 		ret = PTR_ERR(spi->clk);
1848 		dev_err(&pdev->dev, "clk get failed: %d\n", ret);
1849 		return ret;
1850 	}
1851 
1852 	ret = clk_prepare_enable(spi->clk);
1853 	if (ret) {
1854 		dev_err(&pdev->dev, "clk enable failed: %d\n", ret);
1855 		return ret;
1856 	}
1857 	spi->clk_rate = clk_get_rate(spi->clk);
1858 	if (!spi->clk_rate) {
1859 		dev_err(&pdev->dev, "clk rate = 0\n");
1860 		ret = -EINVAL;
1861 		goto err_clk_disable;
1862 	}
1863 
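	/*
	 * If an optional reset line is provided, pulse it to bring the
	 * controller to a known state.
	 */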
1864 	rst = devm_reset_control_get_optional_exclusive(&pdev->dev, NULL);
1865 	if (rst) {
1866 		if (IS_ERR(rst)) {
1867 			ret = dev_err_probe(&pdev->dev, PTR_ERR(rst),
1868 					    "failed to get reset\n");
1869 			goto err_clk_disable;
1870 		}
1871 
1872 		reset_control_assert(rst);
1873 		udelay(2);
1874 		reset_control_deassert(rst);
1875 	}
1876 
1877 	if (spi->cfg->has_fifo)
1878 		spi->fifo_size = spi->cfg->get_fifo_size(spi);
1879 
1880 	ret = spi->cfg->config(spi);
1881 	if (ret) {
1882 		dev_err(&pdev->dev, "controller configuration failed: %d\n",
1883 			ret);
1884 		goto err_clk_disable;
1885 	}
1886 
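	/*
	 * Describe the controller to the SPI core: supported mode bits, word
	 * sizes, and a speed range derived from the kernel clock rate and the
	 * baud-rate divider limits.
	 */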
1887 	ctrl->dev.of_node = pdev->dev.of_node;
1888 	ctrl->auto_runtime_pm = true;
1889 	ctrl->bus_num = pdev->id;
1890 	ctrl->mode_bits = SPI_CPHA | SPI_CPOL | SPI_CS_HIGH | SPI_LSB_FIRST |
1891 			  SPI_3WIRE;
1892 	ctrl->bits_per_word_mask = spi->cfg->get_bpw_mask(spi);
1893 	ctrl->max_speed_hz = spi->clk_rate / spi->cfg->baud_rate_div_min;
1894 	ctrl->min_speed_hz = spi->clk_rate / spi->cfg->baud_rate_div_max;
1895 	ctrl->use_gpio_descriptors = true;
1896 	ctrl->prepare_message = stm32_spi_prepare_msg;
1897 	ctrl->transfer_one = stm32_spi_transfer_one;
1898 	ctrl->unprepare_message = stm32_spi_unprepare_msg;
1899 	ctrl->flags = spi->cfg->flags;
1900 	if (STM32_SPI_DEVICE_MODE(spi))
1901 		ctrl->slave_abort = stm32h7_spi_device_abort;
1902 
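	/*
	 * DMA channels are optional: defer probing on -EPROBE_DEFER, otherwise
	 * warn and fall back to interrupt-driven transfers.
	 */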
1903 	spi->dma_tx = dma_request_chan(spi->dev, "tx");
1904 	if (IS_ERR(spi->dma_tx)) {
1905 		ret = PTR_ERR(spi->dma_tx);
1906 		spi->dma_tx = NULL;
1907 		if (ret == -EPROBE_DEFER)
1908 			goto err_clk_disable;
1909 
1910 		dev_warn(&pdev->dev, "failed to request tx dma channel\n");
1911 	} else {
1912 		ctrl->dma_tx = spi->dma_tx;
1913 	}
1914 
1915 	spi->dma_rx = dma_request_chan(spi->dev, "rx");
1916 	if (IS_ERR(spi->dma_rx)) {
1917 		ret = PTR_ERR(spi->dma_rx);
1918 		spi->dma_rx = NULL;
1919 		if (ret == -EPROBE_DEFER)
1920 			goto err_dma_release;
1921 
1922 		dev_warn(&pdev->dev, "failed to request rx dma channel\n");
1923 	} else {
1924 		ctrl->dma_rx = spi->dma_rx;
1925 	}
1926 
1927 	if (spi->dma_tx || spi->dma_rx)
1928 		ctrl->can_dma = stm32_spi_can_dma;
1929 
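	/*
	 * Enable runtime PM with autosuspend; keep the device active until
	 * controller registration has completed.
	 */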
1930 	pm_runtime_set_autosuspend_delay(&pdev->dev,
1931 					 STM32_SPI_AUTOSUSPEND_DELAY);
1932 	pm_runtime_use_autosuspend(&pdev->dev);
1933 	pm_runtime_set_active(&pdev->dev);
1934 	pm_runtime_get_noresume(&pdev->dev);
1935 	pm_runtime_enable(&pdev->dev);
1936 
1937 	ret = spi_register_controller(ctrl);
1938 	if (ret) {
1939 		dev_err(&pdev->dev, "spi controller registration failed: %d\n",
1940 			ret);
1941 		goto err_pm_disable;
1942 	}
1943 
1944 	pm_runtime_mark_last_busy(&pdev->dev);
1945 	pm_runtime_put_autosuspend(&pdev->dev);
1946 
1947 	dev_info(&pdev->dev, "driver initialized (%s mode)\n",
1948 		 STM32_SPI_MASTER_MODE(spi) ? "master" : "device");
1949 
1950 	return 0;
1951 
1952 err_pm_disable:
1953 	pm_runtime_disable(&pdev->dev);
1954 	pm_runtime_put_noidle(&pdev->dev);
1955 	pm_runtime_set_suspended(&pdev->dev);
1956 	pm_runtime_dont_use_autosuspend(&pdev->dev);
1957 err_dma_release:
1958 	if (spi->dma_tx)
1959 		dma_release_channel(spi->dma_tx);
1960 	if (spi->dma_rx)
1961 		dma_release_channel(spi->dma_rx);
1962 err_clk_disable:
1963 	clk_disable_unprepare(spi->clk);
1964 
1965 	return ret;
1966 }
1967 
1968 static void stm32_spi_remove(struct platform_device *pdev)
1969 {
1970 	struct spi_controller *ctrl = platform_get_drvdata(pdev);
1971 	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
1972 
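	/*
	 * Make sure the controller is powered before unregistering it and
	 * disabling the hardware.
	 */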
1973 	pm_runtime_get_sync(&pdev->dev);
1974 
1975 	spi_unregister_controller(ctrl);
1976 	spi->cfg->disable(spi);
1977 
1978 	pm_runtime_disable(&pdev->dev);
1979 	pm_runtime_put_noidle(&pdev->dev);
1980 	pm_runtime_set_suspended(&pdev->dev);
1981 	pm_runtime_dont_use_autosuspend(&pdev->dev);
1982 
1983 	if (ctrl->dma_tx)
1984 		dma_release_channel(ctrl->dma_tx);
1985 	if (ctrl->dma_rx)
1986 		dma_release_channel(ctrl->dma_rx);
1987 
1988 	clk_disable_unprepare(spi->clk);
1989 
1991 	pinctrl_pm_select_sleep_state(&pdev->dev);
1992 }
1993 
1994 static int __maybe_unused stm32_spi_runtime_suspend(struct device *dev)
1995 {
1996 	struct spi_controller *ctrl = dev_get_drvdata(dev);
1997 	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
1998 
1999 	clk_disable_unprepare(spi->clk);
2000 
2001 	return pinctrl_pm_select_sleep_state(dev);
2002 }
2003 
2004 static int __maybe_unused stm32_spi_runtime_resume(struct device *dev)
2005 {
2006 	struct spi_controller *ctrl = dev_get_drvdata(dev);
2007 	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
2008 	int ret;
2009 
2010 	ret = pinctrl_pm_select_default_state(dev);
2011 	if (ret)
2012 		return ret;
2013 
2014 	return clk_prepare_enable(spi->clk);
2015 }
2016 
2017 static int __maybe_unused stm32_spi_suspend(struct device *dev)
2018 {
2019 	struct spi_controller *ctrl = dev_get_drvdata(dev);
2020 	int ret;
2021 
2022 	ret = spi_controller_suspend(ctrl);
2023 	if (ret)
2024 		return ret;
2025 
2026 	return pm_runtime_force_suspend(dev);
2027 }
2028 
2029 static int __maybe_unused stm32_spi_resume(struct device *dev)
2030 {
2031 	struct spi_controller *ctrl = dev_get_drvdata(dev);
2032 	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
2033 	int ret;
2034 
2035 	ret = pm_runtime_force_resume(dev);
2036 	if (ret)
2037 		return ret;
2038 
2039 	ret = spi_controller_resume(ctrl);
2040 	if (ret) {
2041 		clk_disable_unprepare(spi->clk);
2042 		return ret;
2043 	}
2044 
2045 	ret = pm_runtime_resume_and_get(dev);
2046 	if (ret < 0) {
2047 		dev_err(dev, "Unable to power device: %d\n", ret);
2048 		return ret;
2049 	}
2050 
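	/*
	 * Reapply the static controller configuration, which may have been
	 * lost while the device was powered down.
	 */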
2051 	spi->cfg->config(spi);
2052 
2053 	pm_runtime_mark_last_busy(dev);
2054 	pm_runtime_put_autosuspend(dev);
2055 
2056 	return 0;
2057 }
2058 
2059 static const struct dev_pm_ops stm32_spi_pm_ops = {
2060 	SET_SYSTEM_SLEEP_PM_OPS(stm32_spi_suspend, stm32_spi_resume)
2061 	SET_RUNTIME_PM_OPS(stm32_spi_runtime_suspend,
2062 			   stm32_spi_runtime_resume, NULL)
2063 };
2064 
2065 static struct platform_driver stm32_spi_driver = {
2066 	.probe = stm32_spi_probe,
2067 	.remove_new = stm32_spi_remove,
2068 	.driver = {
2069 		.name = DRIVER_NAME,
2070 		.pm = &stm32_spi_pm_ops,
2071 		.of_match_table = stm32_spi_of_match,
2072 	},
2073 };
2074 
2075 module_platform_driver(stm32_spi_driver);
2076 
2077 MODULE_ALIAS("platform:" DRIVER_NAME);
2078 MODULE_DESCRIPTION("STMicroelectronics STM32 SPI Controller driver");
2079 MODULE_AUTHOR("Amelie Delaunay <amelie.delaunay@st.com>");
2080 MODULE_LICENSE("GPL v2");
2081