xref: /openbmc/linux/drivers/spi/spi-fsl-dspi.c (revision 260ea95c)
1 /*
2  * drivers/spi/spi-fsl-dspi.c
3  *
4  * Copyright 2013 Freescale Semiconductor, Inc.
5  *
6  * Freescale DSPI driver
7  * This file contains a driver for the Freescale DSPI
8  *
9  * This program is free software; you can redistribute it and/or modify
10  * it under the terms of the GNU General Public License as published by
11  * the Free Software Foundation; either version 2 of the License, or
12  * (at your option) any later version.
13  *
14  */
15 
16 #include <linux/clk.h>
17 #include <linux/delay.h>
18 #include <linux/dmaengine.h>
19 #include <linux/dma-mapping.h>
20 #include <linux/err.h>
21 #include <linux/errno.h>
22 #include <linux/interrupt.h>
23 #include <linux/io.h>
24 #include <linux/kernel.h>
25 #include <linux/math64.h>
26 #include <linux/module.h>
27 #include <linux/of.h>
28 #include <linux/of_device.h>
29 #include <linux/pinctrl/consumer.h>
30 #include <linux/platform_device.h>
31 #include <linux/pm_runtime.h>
32 #include <linux/regmap.h>
33 #include <linux/sched.h>
34 #include <linux/spi/spi.h>
35 #include <linux/spi/spi_bitbang.h>
36 #include <linux/time.h>
37 
38 #define DRIVER_NAME "fsl-dspi"
39 
40 #define TRAN_STATE_RX_VOID		0x01
41 #define TRAN_STATE_TX_VOID		0x02
42 #define TRAN_STATE_WORD_ODD_NUM	0x04
43 
44 #define DSPI_FIFO_SIZE			4
45 #define DSPI_DMA_BUFSIZE		(DSPI_FIFO_SIZE * 1024)
46 
47 #define SPI_MCR		0x00
48 #define SPI_MCR_MASTER		(1 << 31)
49 #define SPI_MCR_PCSIS		(0x3F << 16)
50 #define SPI_MCR_CLR_TXF	(1 << 11)
51 #define SPI_MCR_CLR_RXF	(1 << 10)
52 
53 #define SPI_TCR			0x08
54 #define SPI_TCR_GET_TCNT(x)	(((x) & 0xffff0000) >> 16)
55 
56 #define SPI_CTAR(x)		(0x0c + (((x) & 0x3) * 4))
57 #define SPI_CTAR_FMSZ(x)	(((x) & 0x0000000f) << 27)
58 #define SPI_CTAR_CPOL(x)	((x) << 26)
59 #define SPI_CTAR_CPHA(x)	((x) << 25)
60 #define SPI_CTAR_LSBFE(x)	((x) << 24)
61 #define SPI_CTAR_PCSSCK(x)	(((x) & 0x00000003) << 22)
62 #define SPI_CTAR_PASC(x)	(((x) & 0x00000003) << 20)
63 #define SPI_CTAR_PDT(x)	(((x) & 0x00000003) << 18)
64 #define SPI_CTAR_PBR(x)	(((x) & 0x00000003) << 16)
65 #define SPI_CTAR_CSSCK(x)	(((x) & 0x0000000f) << 12)
66 #define SPI_CTAR_ASC(x)	(((x) & 0x0000000f) << 8)
67 #define SPI_CTAR_DT(x)		(((x) & 0x0000000f) << 4)
68 #define SPI_CTAR_BR(x)		((x) & 0x0000000f)
69 #define SPI_CTAR_SCALE_BITS	0xf
70 
71 #define SPI_CTAR0_SLAVE	0x0c
72 
73 #define SPI_SR			0x2c
74 #define SPI_SR_EOQF		0x10000000
75 #define SPI_SR_TCFQF		0x80000000
76 #define SPI_SR_CLEAR		0xdaad0000
77 
78 #define SPI_RSER_TFFFE		BIT(25)
79 #define SPI_RSER_TFFFD		BIT(24)
80 #define SPI_RSER_RFDFE		BIT(17)
81 #define SPI_RSER_RFDFD		BIT(16)
82 
83 #define SPI_RSER		0x30
84 #define SPI_RSER_EOQFE		0x10000000
85 #define SPI_RSER_TCFQE		0x80000000
86 
87 #define SPI_PUSHR		0x34
88 #define SPI_PUSHR_CONT		(1 << 31)
89 #define SPI_PUSHR_CTAS(x)	(((x) & 0x00000003) << 28)
90 #define SPI_PUSHR_EOQ		(1 << 27)
91 #define SPI_PUSHR_CTCNT	(1 << 26)
92 #define SPI_PUSHR_PCS(x)	(((1 << (x)) & 0x0000003f) << 16)
93 #define SPI_PUSHR_TXDATA(x)	((x) & 0x0000ffff)
94 
95 #define SPI_PUSHR_SLAVE	0x34
96 
97 #define SPI_POPR		0x38
98 #define SPI_POPR_RXDATA(x)	((x) & 0x0000ffff)
99 
100 #define SPI_TXFR0		0x3c
101 #define SPI_TXFR1		0x40
102 #define SPI_TXFR2		0x44
103 #define SPI_TXFR3		0x48
104 #define SPI_RXFR0		0x7c
105 #define SPI_RXFR1		0x80
106 #define SPI_RXFR2		0x84
107 #define SPI_RXFR3		0x88
108 
109 #define SPI_FRAME_BITS(bits)	SPI_CTAR_FMSZ((bits) - 1)
110 #define SPI_FRAME_BITS_MASK	SPI_CTAR_FMSZ(0xf)
111 #define SPI_FRAME_BITS_16	SPI_CTAR_FMSZ(0xf)
112 #define SPI_FRAME_BITS_8	SPI_CTAR_FMSZ(0x7)
113 
114 #define SPI_CS_INIT		0x01
115 #define SPI_CS_ASSERT		0x02
116 #define SPI_CS_DROP		0x04
117 
118 #define SPI_TCR_TCNT_MAX	0x10000
119 
120 #define DMA_COMPLETION_TIMEOUT	msecs_to_jiffies(3000)
121 
122 struct chip_data {
123 	u32 mcr_val;
124 	u32 ctar_val;
125 	u16 void_write_data;
126 };
127 
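/*
 * Transfer completion modes: in EOQ mode the controller raises an
 * interrupt when the frame tagged with the End-Of-Queue bit has been
 * sent, in TCFQ mode it interrupts after every transmitted frame, and
 * in DMA mode the PUSHR/POPR FIFOs are serviced by DMA channels
 * instead of the CPU.
 */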
128 enum dspi_trans_mode {
129 	DSPI_EOQ_MODE = 0,
130 	DSPI_TCFQ_MODE,
131 	DSPI_DMA_MODE,
132 };
133 
134 struct fsl_dspi_devtype_data {
135 	enum dspi_trans_mode trans_mode;
136 	u8 max_clock_factor;
137 };
138 
139 static const struct fsl_dspi_devtype_data vf610_data = {
140 	.trans_mode = DSPI_DMA_MODE,
141 	.max_clock_factor = 2,
142 };
143 
144 static const struct fsl_dspi_devtype_data ls1021a_v1_data = {
145 	.trans_mode = DSPI_TCFQ_MODE,
146 	.max_clock_factor = 8,
147 };
148 
149 static const struct fsl_dspi_devtype_data ls2085a_data = {
150 	.trans_mode = DSPI_TCFQ_MODE,
151 	.max_clock_factor = 8,
152 };
153 
154 struct fsl_dspi_dma {
155 	/* Length of the current DMA chunk, in SPI words (frames) */
156 	u32 curr_xfer_len;
157 
158 	u32 *tx_dma_buf;
159 	struct dma_chan *chan_tx;
160 	dma_addr_t tx_dma_phys;
161 	struct completion cmd_tx_complete;
162 	struct dma_async_tx_descriptor *tx_desc;
163 
164 	u32 *rx_dma_buf;
165 	struct dma_chan *chan_rx;
166 	dma_addr_t rx_dma_phys;
167 	struct completion cmd_rx_complete;
168 	struct dma_async_tx_descriptor *rx_desc;
169 };
170 
171 struct fsl_dspi {
172 	struct spi_master	*master;
173 	struct platform_device	*pdev;
174 
175 	struct regmap		*regmap;
176 	int			irq;
177 	struct clk		*clk;
178 
179 	struct spi_transfer	*cur_transfer;
180 	struct spi_message	*cur_msg;
181 	struct chip_data	*cur_chip;
182 	size_t			len;
183 	void			*tx;
184 	void			*tx_end;
185 	void			*rx;
186 	void			*rx_end;
187 	char			dataflags;
188 	u8			cs;
189 	u16			void_write_data;
190 	u32			cs_change;
191 	const struct fsl_dspi_devtype_data *devtype_data;
192 
193 	wait_queue_head_t	waitq;
194 	u32			waitflags;
195 
196 	u32			spi_tcnt;
197 	struct fsl_dspi_dma	*dma;
198 };
199 
200 static u32 dspi_data_to_pushr(struct fsl_dspi *dspi, int tx_word);
201 
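/*
 * Returns 1 when CTAR0 is programmed for a frame size other than 8 bits
 * (the driver then moves data as 16-bit words), 0 for 8-bit frames.
 */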
202 static inline int is_double_byte_mode(struct fsl_dspi *dspi)
203 {
204 	unsigned int val;
205 
206 	regmap_read(dspi->regmap, SPI_CTAR(0), &val);
207 
208 	return ((val & SPI_FRAME_BITS_MASK) == SPI_FRAME_BITS(8)) ? 0 : 1;
209 }
210 
211 static void dspi_tx_dma_callback(void *arg)
212 {
213 	struct fsl_dspi *dspi = arg;
214 	struct fsl_dspi_dma *dma = dspi->dma;
215 
216 	complete(&dma->cmd_tx_complete);
217 }
218 
219 static void dspi_rx_dma_callback(void *arg)
220 {
221 	struct fsl_dspi *dspi = arg;
222 	struct fsl_dspi_dma *dma = dspi->dma;
223 	int rx_word;
224 	int i;
225 	u16 d;
226 
227 	rx_word = is_double_byte_mode(dspi);
228 
229 	if (!(dspi->dataflags & TRAN_STATE_RX_VOID)) {
230 		for (i = 0; i < dma->curr_xfer_len; i++) {
231 			d = dspi->dma->rx_dma_buf[i];
232 			rx_word ? (*(u16 *)dspi->rx = d) :
233 						(*(u8 *)dspi->rx = d);
234 			dspi->rx += rx_word + 1;
235 		}
236 	}
237 
238 	complete(&dma->cmd_rx_complete);
239 }
240 
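/*
 * Fill the TX bounce buffer with PUSHR command words for the current
 * chunk, submit one TX (memory-to-device) and one RX (device-to-memory)
 * descriptor, and wait for both completions with a 3 second timeout.
 */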
241 static int dspi_next_xfer_dma_submit(struct fsl_dspi *dspi)
242 {
243 	struct fsl_dspi_dma *dma = dspi->dma;
244 	struct device *dev = &dspi->pdev->dev;
245 	int time_left;
246 	int tx_word;
247 	int i;
248 
249 	tx_word = is_double_byte_mode(dspi);
250 
251 	for (i = 0; i < dma->curr_xfer_len; i++) {
252 		dspi->dma->tx_dma_buf[i] = dspi_data_to_pushr(dspi, tx_word);
253 		if ((dspi->cs_change) && (!dspi->len))
254 			dspi->dma->tx_dma_buf[i] &= ~SPI_PUSHR_CONT;
255 	}
256 
257 	dma->tx_desc = dmaengine_prep_slave_single(dma->chan_tx,
258 					dma->tx_dma_phys,
259 					dma->curr_xfer_len *
260 					DMA_SLAVE_BUSWIDTH_4_BYTES,
261 					DMA_MEM_TO_DEV,
262 					DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
263 	if (!dma->tx_desc) {
264 		dev_err(dev, "Not able to get desc for DMA xfer\n");
265 		return -EIO;
266 	}
267 
268 	dma->tx_desc->callback = dspi_tx_dma_callback;
269 	dma->tx_desc->callback_param = dspi;
270 	if (dma_submit_error(dmaengine_submit(dma->tx_desc))) {
271 		dev_err(dev, "DMA submit failed\n");
272 		return -EINVAL;
273 	}
274 
275 	dma->rx_desc = dmaengine_prep_slave_single(dma->chan_rx,
276 					dma->rx_dma_phys,
277 					dma->curr_xfer_len *
278 					DMA_SLAVE_BUSWIDTH_4_BYTES,
279 					DMA_DEV_TO_MEM,
280 					DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
281 	if (!dma->rx_desc) {
282 		dev_err(dev, "Not able to get desc for DMA xfer\n");
283 		return -EIO;
284 	}
285 
286 	dma->rx_desc->callback = dspi_rx_dma_callback;
287 	dma->rx_desc->callback_param = dspi;
288 	if (dma_submit_error(dmaengine_submit(dma->rx_desc))) {
289 		dev_err(dev, "DMA submit failed\n");
290 		return -EINVAL;
291 	}
292 
293 	reinit_completion(&dspi->dma->cmd_rx_complete);
294 	reinit_completion(&dspi->dma->cmd_tx_complete);
295 
296 	dma_async_issue_pending(dma->chan_rx);
297 	dma_async_issue_pending(dma->chan_tx);
298 
299 	time_left = wait_for_completion_timeout(&dspi->dma->cmd_tx_complete,
300 					DMA_COMPLETION_TIMEOUT);
301 	if (time_left == 0) {
302 		dev_err(dev, "DMA tx timeout\n");
303 		dmaengine_terminate_all(dma->chan_tx);
304 		dmaengine_terminate_all(dma->chan_rx);
305 		return -ETIMEDOUT;
306 	}
307 
308 	time_left = wait_for_completion_timeout(&dspi->dma->cmd_rx_complete,
309 					DMA_COMPLETION_TIMEOUT);
310 	if (time_left == 0) {
311 		dev_err(dev, "DMA rx timeout\n");
312 		dmaengine_terminate_all(dma->chan_tx);
313 		dmaengine_terminate_all(dma->chan_rx);
314 		return -ETIMEDOUT;
315 	}
316 
317 	return 0;
318 }
319 
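/*
 * Split the transfer into chunks that fit the DMA bounce buffer and
 * hand each chunk to dspi_next_xfer_dma_submit() in turn.
 */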
320 static int dspi_dma_xfer(struct fsl_dspi *dspi)
321 {
322 	struct fsl_dspi_dma *dma = dspi->dma;
323 	struct device *dev = &dspi->pdev->dev;
324 	int curr_remaining_bytes;
325 	int bytes_per_buffer;
326 	int word = 1;
327 	int ret = 0;
328 
329 	if (is_double_byte_mode(dspi))
330 		word = 2;
331 	curr_remaining_bytes = dspi->len;
332 	bytes_per_buffer = DSPI_DMA_BUFSIZE / DSPI_FIFO_SIZE;
333 	while (curr_remaining_bytes) {
334 		/* Clamp this chunk to what fits in the DMA buffer */
335 		dma->curr_xfer_len = curr_remaining_bytes / word;
336 		if (dma->curr_xfer_len > bytes_per_buffer)
337 			dma->curr_xfer_len = bytes_per_buffer;
338 
339 		ret = dspi_next_xfer_dma_submit(dspi);
340 		if (ret) {
341 			dev_err(dev, "DMA transfer failed\n");
342 			goto exit;
343 
344 		} else {
345 			curr_remaining_bytes -= dma->curr_xfer_len * word;
346 			if (curr_remaining_bytes < 0)
347 				curr_remaining_bytes = 0;
348 		}
349 	}
350 
351 exit:
352 	return ret;
353 }
354 
355 static int dspi_request_dma(struct fsl_dspi *dspi, phys_addr_t phy_addr)
356 {
357 	struct fsl_dspi_dma *dma;
358 	struct dma_slave_config cfg;
359 	struct device *dev = &dspi->pdev->dev;
360 	int ret;
361 
362 	dma = devm_kzalloc(dev, sizeof(*dma), GFP_KERNEL);
363 	if (!dma)
364 		return -ENOMEM;
365 
366 	dma->chan_rx = dma_request_slave_channel(dev, "rx");
367 	if (!dma->chan_rx) {
368 		dev_err(dev, "rx dma channel not available\n");
369 		ret = -ENODEV;
370 		return ret;
371 	}
372 
373 	dma->chan_tx = dma_request_slave_channel(dev, "tx");
374 	if (!dma->chan_tx) {
375 		dev_err(dev, "tx dma channel not available\n");
376 		ret = -ENODEV;
377 		goto err_tx_channel;
378 	}
379 
380 	dma->tx_dma_buf = dma_alloc_coherent(dev, DSPI_DMA_BUFSIZE,
381 					&dma->tx_dma_phys, GFP_KERNEL);
382 	if (!dma->tx_dma_buf) {
383 		ret = -ENOMEM;
384 		goto err_tx_dma_buf;
385 	}
386 
387 	dma->rx_dma_buf = dma_alloc_coherent(dev, DSPI_DMA_BUFSIZE,
388 					&dma->rx_dma_phys, GFP_KERNEL);
389 	if (!dma->rx_dma_buf) {
390 		ret = -ENOMEM;
391 		goto err_rx_dma_buf;
392 	}
393 
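	/*
	 * Both channels move one 32-bit FIFO entry per request: the RX
	 * channel reads from the POPR register, the TX channel writes
	 * PUSHR command words to the PUSHR register.
	 */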
394 	cfg.src_addr = phy_addr + SPI_POPR;
395 	cfg.dst_addr = phy_addr + SPI_PUSHR;
396 	cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
397 	cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
398 	cfg.src_maxburst = 1;
399 	cfg.dst_maxburst = 1;
400 
401 	cfg.direction = DMA_DEV_TO_MEM;
402 	ret = dmaengine_slave_config(dma->chan_rx, &cfg);
403 	if (ret) {
404 		dev_err(dev, "can't configure rx dma channel\n");
405 		ret = -EINVAL;
406 		goto err_slave_config;
407 	}
408 
409 	cfg.direction = DMA_MEM_TO_DEV;
410 	ret = dmaengine_slave_config(dma->chan_tx, &cfg);
411 	if (ret) {
412 		dev_err(dev, "can't configure tx dma channel\n");
413 		ret = -EINVAL;
414 		goto err_slave_config;
415 	}
416 
417 	dspi->dma = dma;
418 	init_completion(&dma->cmd_tx_complete);
419 	init_completion(&dma->cmd_rx_complete);
420 
421 	return 0;
422 
423 err_slave_config:
424 	dma_free_coherent(dev, DSPI_DMA_BUFSIZE,
425 			dma->rx_dma_buf, dma->rx_dma_phys);
426 err_rx_dma_buf:
427 	dma_free_coherent(dev, DSPI_DMA_BUFSIZE,
428 			dma->tx_dma_buf, dma->tx_dma_phys);
429 err_tx_dma_buf:
430 	dma_release_channel(dma->chan_tx);
431 err_tx_channel:
432 	dma_release_channel(dma->chan_rx);
433 
434 	devm_kfree(dev, dma);
435 	dspi->dma = NULL;
436 
437 	return ret;
438 }
439 
440 static void dspi_release_dma(struct fsl_dspi *dspi)
441 {
442 	struct fsl_dspi_dma *dma = dspi->dma;
443 	struct device *dev = &dspi->pdev->dev;
444 
445 	if (dma) {
446 		if (dma->chan_tx) {
447 			dma_free_coherent(dev, DSPI_DMA_BUFSIZE,
448 					dma->tx_dma_buf, dma->tx_dma_phys);
449 			dma_release_channel(dma->chan_tx);
450 		}
451 
452 		if (dma->chan_rx) {
453 			dma_free_coherent(dev, DSPI_DMA_BUFSIZE,
454 					dma->rx_dma_buf, dma->rx_dma_phys);
455 			dma_release_channel(dma->chan_rx);
456 		}
457 	}
458 }
459 
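/*
 * Pick the baud rate prescaler (PBR) and scaler (BR) for a requested
 * SCK rate. The resulting rate is clkrate / (pbr_tbl[*pbr] * brs[*br]);
 * the smallest combined divider >= clkrate / speed_hz is chosen so the
 * bus never runs faster than requested. For example (values assumed for
 * illustration), clkrate = 66 MHz and speed_hz = 10 MHz need a divider
 * of at least 7, and the smallest available product is 2 * 4 = 8,
 * giving SCK = 8.25 MHz.
 */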
460 static void hz_to_spi_baud(char *pbr, char *br, int speed_hz,
461 		unsigned long clkrate)
462 {
463 	/* Valid baud rate pre-scaler values */
464 	int pbr_tbl[4] = {2, 3, 5, 7};
465 	int brs[16] = {	2,	4,	6,	8,
466 		16,	32,	64,	128,
467 		256,	512,	1024,	2048,
468 		4096,	8192,	16384,	32768 };
469 	int scale_needed, scale, minscale = INT_MAX;
470 	int i, j;
471 
472 	scale_needed = clkrate / speed_hz;
473 	if (clkrate % speed_hz)
474 		scale_needed++;
475 
476 	for (i = 0; i < ARRAY_SIZE(brs); i++)
477 		for (j = 0; j < ARRAY_SIZE(pbr_tbl); j++) {
478 			scale = brs[i] * pbr_tbl[j];
479 			if (scale >= scale_needed) {
480 				if (scale < minscale) {
481 					minscale = scale;
482 					*br = i;
483 					*pbr = j;
484 				}
485 				break;
486 			}
487 		}
488 
489 	if (minscale == INT_MAX) {
490 		pr_warn("Can not find valid baud rate,speed_hz is %d,clkrate is %ld, we use the max prescaler value.\n",
491 			speed_hz, clkrate);
492 		*pbr = ARRAY_SIZE(pbr_tbl) - 1;
493 		*br =  ARRAY_SIZE(brs) - 1;
494 	}
495 }
496 
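/*
 * Convert a delay in ns into a prescaler/scaler pair. The programmed
 * delay works out to pscale_tbl[*psc] * 2^(*sc + 1) / clkrate seconds;
 * the smallest combination that is at least delay_ns is chosen.
 */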
497 static void ns_delay_scale(char *psc, char *sc, int delay_ns,
498 		unsigned long clkrate)
499 {
500 	int pscale_tbl[4] = {1, 3, 5, 7};
501 	int scale_needed, scale, minscale = INT_MAX;
502 	int i, j;
503 	u32 remainder;
504 
505 	scale_needed = div_u64_rem((u64)delay_ns * clkrate, NSEC_PER_SEC,
506 			&remainder);
507 	if (remainder)
508 		scale_needed++;
509 
510 	for (i = 0; i < ARRAY_SIZE(pscale_tbl); i++)
511 		for (j = 0; j <= SPI_CTAR_SCALE_BITS; j++) {
512 			scale = pscale_tbl[i] * (2 << j);
513 			if (scale >= scale_needed) {
514 				if (scale < minscale) {
515 					minscale = scale;
516 					*psc = i;
517 					*sc = j;
518 				}
519 				break;
520 			}
521 		}
522 
523 	if (minscale == INT_MAX) {
524 		pr_warn("Cannot find correct scale values for %dns delay at clkrate %ld, using max prescaler value",
525 			delay_ns, clkrate);
526 		*psc = ARRAY_SIZE(pscale_tbl) - 1;
527 		*sc = SPI_CTAR_SCALE_BITS;
528 	}
529 }
530 
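/*
 * Build one 32-bit PUSHR command/data word: TXDATA in bits 15:0, the
 * PCS bit for the current chip select, CTAR0 as the transfer attribute
 * set, and CONT to keep the chip select asserted between frames. Also
 * advances the tx pointer and decrements len by the word size.
 */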
531 static u32 dspi_data_to_pushr(struct fsl_dspi *dspi, int tx_word)
532 {
533 	u16 d16;
534 
535 	if (!(dspi->dataflags & TRAN_STATE_TX_VOID))
536 		d16 = tx_word ? *(u16 *)dspi->tx : *(u8 *)dspi->tx;
537 	else
538 		d16 = dspi->void_write_data;
539 
540 	dspi->tx += tx_word + 1;
541 	dspi->len -= tx_word + 1;
542 
543 	return	SPI_PUSHR_TXDATA(d16) |
544 		SPI_PUSHR_PCS(dspi->cs) |
545 		SPI_PUSHR_CTAS(0) |
546 		SPI_PUSHR_CONT;
547 }
548 
549 static void dspi_data_from_popr(struct fsl_dspi *dspi, int rx_word)
550 {
551 	u16 d;
552 	unsigned int val;
553 
554 	regmap_read(dspi->regmap, SPI_POPR, &val);
555 	d = SPI_POPR_RXDATA(val);
556 
557 	if (!(dspi->dataflags & TRAN_STATE_RX_VOID))
558 		rx_word ? (*(u16 *)dspi->rx = d) : (*(u8 *)dspi->rx = d);
559 
560 	dspi->rx += rx_word + 1;
561 }
562 
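/*
 * Queue up to DSPI_FIFO_SIZE frames into the TX FIFO. The last entry is
 * tagged with EOQ so the EOQF interrupt fires once the queue has been
 * drained. Returns the number of bytes queued.
 */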
563 static int dspi_eoq_write(struct fsl_dspi *dspi)
564 {
565 	int tx_count = 0;
566 	int tx_word;
567 	u32 dspi_pushr = 0;
568 
569 	tx_word = is_double_byte_mode(dspi);
570 
571 	while (dspi->len && (tx_count < DSPI_FIFO_SIZE)) {
572 		/* If we are in word mode but only a single byte is left to
573 		 * transfer, switch to byte mode temporarily; we switch back
574 		 * at the end of the transfer.
575 		 */
576 		if (tx_word && (dspi->len == 1)) {
577 			dspi->dataflags |= TRAN_STATE_WORD_ODD_NUM;
578 			regmap_update_bits(dspi->regmap, SPI_CTAR(0),
579 					SPI_FRAME_BITS_MASK, SPI_FRAME_BITS(8));
580 			tx_word = 0;
581 		}
582 
583 		dspi_pushr = dspi_data_to_pushr(dspi, tx_word);
584 
585 		if (dspi->len == 0 || tx_count == DSPI_FIFO_SIZE - 1) {
586 			/* last frame of the transfer, or the FIFO is full */
587 			dspi_pushr |= SPI_PUSHR_EOQ;
588 			if ((dspi->cs_change) && (!dspi->len))
589 				dspi_pushr &= ~SPI_PUSHR_CONT;
590 		} else if (tx_word && (dspi->len == 1))
591 			dspi_pushr |= SPI_PUSHR_EOQ;
592 
593 		regmap_write(dspi->regmap, SPI_PUSHR, dspi_pushr);
594 
595 		tx_count++;
596 	}
597 
598 	return tx_count * (tx_word + 1);
599 }
600 
601 static int dspi_eoq_read(struct fsl_dspi *dspi)
602 {
603 	int rx_count = 0;
604 	int rx_word = is_double_byte_mode(dspi);
605 
606 	while ((dspi->rx < dspi->rx_end)
607 			&& (rx_count < DSPI_FIFO_SIZE)) {
608 		if (rx_word && (dspi->rx_end - dspi->rx) == 1)
609 			rx_word = 0;
610 
611 		dspi_data_from_popr(dspi, rx_word);
612 		rx_count++;
613 	}
614 
615 	return rx_count;
616 }
617 
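/*
 * Push a single frame; the TCFQF interrupt fires when it has been
 * transferred. Returns the number of bytes pushed (1 or 2).
 */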
618 static int dspi_tcfq_write(struct fsl_dspi *dspi)
619 {
620 	int tx_word;
621 	u32 dspi_pushr = 0;
622 
623 	tx_word = is_double_byte_mode(dspi);
624 
625 	if (tx_word && (dspi->len == 1)) {
626 		dspi->dataflags |= TRAN_STATE_WORD_ODD_NUM;
627 		regmap_update_bits(dspi->regmap, SPI_CTAR(0),
628 				SPI_FRAME_BITS_MASK, SPI_FRAME_BITS(8));
629 		tx_word = 0;
630 	}
631 
632 	dspi_pushr = dspi_data_to_pushr(dspi, tx_word);
633 
634 	if ((dspi->cs_change) && (!dspi->len))
635 		dspi_pushr &= ~SPI_PUSHR_CONT;
636 
637 	regmap_write(dspi->regmap, SPI_PUSHR, dspi_pushr);
638 
639 	return tx_word + 1;
640 }
641 
642 static void dspi_tcfq_read(struct fsl_dspi *dspi)
643 {
644 	int rx_word = is_double_byte_mode(dspi);
645 
646 	if (rx_word && (dspi->rx_end - dspi->rx) == 1)
647 		rx_word = 0;
648 
649 	dspi_data_from_popr(dspi, rx_word);
650 }
651 
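/*
 * Snapshot the transfer counter, then for each transfer in the message
 * program MCR/CTAR0 from the per-chip settings and start it in EOQ,
 * TCFQ or DMA mode. EOQ/TCFQ transfers are driven to completion from
 * the interrupt handler, which wakes us via waitq.
 */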
652 static int dspi_transfer_one_message(struct spi_master *master,
653 		struct spi_message *message)
654 {
655 	struct fsl_dspi *dspi = spi_master_get_devdata(master);
656 	struct spi_device *spi = message->spi;
657 	struct spi_transfer *transfer;
658 	int status = 0;
659 	enum dspi_trans_mode trans_mode;
660 	u32 spi_tcr;
661 
662 	regmap_read(dspi->regmap, SPI_TCR, &spi_tcr);
663 	dspi->spi_tcnt = SPI_TCR_GET_TCNT(spi_tcr);
664 
665 	message->actual_length = 0;
666 
667 	list_for_each_entry(transfer, &message->transfers, transfer_list) {
668 		dspi->cur_transfer = transfer;
669 		dspi->cur_msg = message;
670 		dspi->cur_chip = spi_get_ctldata(spi);
671 		dspi->cs = spi->chip_select;
672 		dspi->cs_change = 0;
673 		if (list_is_last(&dspi->cur_transfer->transfer_list,
674 				 &dspi->cur_msg->transfers) || transfer->cs_change)
675 			dspi->cs_change = 1;
676 		dspi->void_write_data = dspi->cur_chip->void_write_data;
677 
678 		dspi->dataflags = 0;
679 		dspi->tx = (void *)transfer->tx_buf;
680 		dspi->tx_end = dspi->tx + transfer->len;
681 		dspi->rx = transfer->rx_buf;
682 		dspi->rx_end = dspi->rx + transfer->len;
683 		dspi->len = transfer->len;
684 
685 		if (!dspi->rx)
686 			dspi->dataflags |= TRAN_STATE_RX_VOID;
687 
688 		if (!dspi->tx)
689 			dspi->dataflags |= TRAN_STATE_TX_VOID;
690 
691 		regmap_write(dspi->regmap, SPI_MCR, dspi->cur_chip->mcr_val);
692 		regmap_update_bits(dspi->regmap, SPI_MCR,
693 				SPI_MCR_CLR_TXF | SPI_MCR_CLR_RXF,
694 				SPI_MCR_CLR_TXF | SPI_MCR_CLR_RXF);
695 		regmap_write(dspi->regmap, SPI_CTAR(0),
696 				dspi->cur_chip->ctar_val);
697 
698 		trans_mode = dspi->devtype_data->trans_mode;
699 		switch (trans_mode) {
700 		case DSPI_EOQ_MODE:
701 			regmap_write(dspi->regmap, SPI_RSER, SPI_RSER_EOQFE);
702 			dspi_eoq_write(dspi);
703 			break;
704 		case DSPI_TCFQ_MODE:
705 			regmap_write(dspi->regmap, SPI_RSER, SPI_RSER_TCFQE);
706 			dspi_tcfq_write(dspi);
707 			break;
708 		case DSPI_DMA_MODE:
709 			regmap_write(dspi->regmap, SPI_RSER,
710 				SPI_RSER_TFFFE | SPI_RSER_TFFFD |
711 				SPI_RSER_RFDFE | SPI_RSER_RFDFD);
712 			status = dspi_dma_xfer(dspi);
713 			break;
714 		default:
715 			dev_err(&dspi->pdev->dev, "unsupported trans_mode %u\n",
716 				trans_mode);
717 			status = -EINVAL;
718 			goto out;
719 		}
720 
721 		if (trans_mode != DSPI_DMA_MODE) {
722 			if (wait_event_interruptible(dspi->waitq,
723 						dspi->waitflags))
724 				dev_err(&dspi->pdev->dev,
725 					"wait transfer complete fail!\n");
726 			dspi->waitflags = 0;
727 		}
728 
729 		if (transfer->delay_usecs)
730 			udelay(transfer->delay_usecs);
731 	}
732 
733 out:
734 	message->status = status;
735 	spi_finalize_current_message(master);
736 
737 	return status;
738 }
739 
740 static int dspi_setup(struct spi_device *spi)
741 {
742 	struct chip_data *chip;
743 	struct fsl_dspi *dspi = spi_master_get_devdata(spi->master);
744 	u32 cs_sck_delay = 0, sck_cs_delay = 0;
745 	unsigned char br = 0, pbr = 0, pcssck = 0, cssck = 0;
746 	unsigned char pasc = 0, asc = 0, fmsz = 0;
747 	unsigned long clkrate;
748 
749 	if ((spi->bits_per_word >= 4) && (spi->bits_per_word <= 16)) {
750 		fmsz = spi->bits_per_word - 1;
751 	} else {
752 		pr_err("Invalid wordsize\n");
753 		return -ENODEV;
754 	}
755 
756 	/* Only alloc on first setup */
757 	chip = spi_get_ctldata(spi);
758 	if (chip == NULL) {
759 		chip = kzalloc(sizeof(struct chip_data), GFP_KERNEL);
760 		if (!chip)
761 			return -ENOMEM;
762 	}
763 
764 	of_property_read_u32(spi->dev.of_node, "fsl,spi-cs-sck-delay",
765 			&cs_sck_delay);
766 
767 	of_property_read_u32(spi->dev.of_node, "fsl,spi-sck-cs-delay",
768 			&sck_cs_delay);
769 
770 	chip->mcr_val = SPI_MCR_MASTER | SPI_MCR_PCSIS |
771 		SPI_MCR_CLR_TXF | SPI_MCR_CLR_RXF;
772 
773 	chip->void_write_data = 0;
774 
775 	clkrate = clk_get_rate(dspi->clk);
776 	hz_to_spi_baud(&pbr, &br, spi->max_speed_hz, clkrate);
777 
778 	/* Set PCS to SCK delay scale values */
779 	ns_delay_scale(&pcssck, &cssck, cs_sck_delay, clkrate);
780 
781 	/* Set After SCK delay scale values */
782 	ns_delay_scale(&pasc, &asc, sck_cs_delay, clkrate);
783 
784 	chip->ctar_val =  SPI_CTAR_FMSZ(fmsz)
785 		| SPI_CTAR_CPOL(spi->mode & SPI_CPOL ? 1 : 0)
786 		| SPI_CTAR_CPHA(spi->mode & SPI_CPHA ? 1 : 0)
787 		| SPI_CTAR_LSBFE(spi->mode & SPI_LSB_FIRST ? 1 : 0)
788 		| SPI_CTAR_PCSSCK(pcssck)
789 		| SPI_CTAR_CSSCK(cssck)
790 		| SPI_CTAR_PASC(pasc)
791 		| SPI_CTAR_ASC(asc)
792 		| SPI_CTAR_PBR(pbr)
793 		| SPI_CTAR_BR(br);
794 
795 	spi_set_ctldata(spi, chip);
796 
797 	return 0;
798 }
799 
800 static void dspi_cleanup(struct spi_device *spi)
801 {
802 	struct chip_data *chip = spi_get_ctldata((struct spi_device *)spi);
803 
804 	dev_dbg(&spi->dev, "spi_device %u.%u cleanup\n",
805 			spi->master->bus_num, spi->chip_select);
806 
807 	kfree(chip);
808 }
809 
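/*
 * EOQF/TCFQF interrupt handler: acknowledge the status flags, account
 * for the frames transferred since the last TCNT snapshot (handling
 * 16-bit counter wrap-around), drain the RX FIFO, and either queue the
 * next batch of frames or wake the waiting transfer thread.
 */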
810 static irqreturn_t dspi_interrupt(int irq, void *dev_id)
811 {
812 	struct fsl_dspi *dspi = (struct fsl_dspi *)dev_id;
813 	struct spi_message *msg = dspi->cur_msg;
814 	enum dspi_trans_mode trans_mode;
815 	u32 spi_sr, spi_tcr;
816 	u32 spi_tcnt, tcnt_diff;
817 	int tx_word;
818 
819 	regmap_read(dspi->regmap, SPI_SR, &spi_sr);
820 	regmap_write(dspi->regmap, SPI_SR, spi_sr);
821 
822 
823 	if (spi_sr & (SPI_SR_EOQF | SPI_SR_TCFQF)) {
824 		tx_word = is_double_byte_mode(dspi);
825 
826 		regmap_read(dspi->regmap, SPI_TCR, &spi_tcr);
827 		spi_tcnt = SPI_TCR_GET_TCNT(spi_tcr);
828 		/*
829 		 * The SPI Transfer Counter in SPI_TCR is 16 bits wide, so
830 		 * the maximum count is 65535. When the counter reaches
831 		 * 65535 it wraps around and resets to zero.
832 		 * If spi_tcnt is less than dspi->spi_tcnt, the counter has
833 		 * already wrapped around.
834 		 * The SPI Transfer Counter counts transmitted frames; a
835 		 * frame may be one or two bytes long.
836 		 */
837 		tcnt_diff = ((spi_tcnt + SPI_TCR_TCNT_MAX) - dspi->spi_tcnt)
838 			% SPI_TCR_TCNT_MAX;
839 		tcnt_diff *= (tx_word + 1);
840 		if (dspi->dataflags & TRAN_STATE_WORD_ODD_NUM)
841 			tcnt_diff--;
842 
843 		msg->actual_length += tcnt_diff;
844 
845 		dspi->spi_tcnt = spi_tcnt;
846 
847 		trans_mode = dspi->devtype_data->trans_mode;
848 		switch (trans_mode) {
849 		case DSPI_EOQ_MODE:
850 			dspi_eoq_read(dspi);
851 			break;
852 		case DSPI_TCFQ_MODE:
853 			dspi_tcfq_read(dspi);
854 			break;
855 		default:
856 			dev_err(&dspi->pdev->dev, "unsupported trans_mode %u\n",
857 				trans_mode);
858 			return IRQ_HANDLED;
859 		}
860 
861 		if (!dspi->len) {
862 			if (dspi->dataflags & TRAN_STATE_WORD_ODD_NUM) {
863 				regmap_update_bits(dspi->regmap,
864 						   SPI_CTAR(0),
865 						   SPI_FRAME_BITS_MASK,
866 						   SPI_FRAME_BITS(16));
867 				dspi->dataflags &= ~TRAN_STATE_WORD_ODD_NUM;
868 			}
869 
870 			dspi->waitflags = 1;
871 			wake_up_interruptible(&dspi->waitq);
872 		} else {
873 			switch (trans_mode) {
874 			case DSPI_EOQ_MODE:
875 				dspi_eoq_write(dspi);
876 				break;
877 			case DSPI_TCFQ_MODE:
878 				dspi_tcfq_write(dspi);
879 				break;
880 			default:
881 				dev_err(&dspi->pdev->dev,
882 					"unsupported trans_mode %u\n",
883 					trans_mode);
884 			}
885 		}
886 	}
887 
888 	return IRQ_HANDLED;
889 }
890 
891 static const struct of_device_id fsl_dspi_dt_ids[] = {
892 	{ .compatible = "fsl,vf610-dspi", .data = (void *)&vf610_data, },
893 	{ .compatible = "fsl,ls1021a-v1.0-dspi",
894 		.data = (void *)&ls1021a_v1_data, },
895 	{ .compatible = "fsl,ls2085a-dspi", .data = (void *)&ls2085a_data, },
896 	{ /* sentinel */ }
897 };
898 MODULE_DEVICE_TABLE(of, fsl_dspi_dt_ids);
899 
900 #ifdef CONFIG_PM_SLEEP
901 static int dspi_suspend(struct device *dev)
902 {
903 	struct spi_master *master = dev_get_drvdata(dev);
904 	struct fsl_dspi *dspi = spi_master_get_devdata(master);
905 
906 	spi_master_suspend(master);
907 	clk_disable_unprepare(dspi->clk);
908 
909 	pinctrl_pm_select_sleep_state(dev);
910 
911 	return 0;
912 }
913 
914 static int dspi_resume(struct device *dev)
915 {
916 	struct spi_master *master = dev_get_drvdata(dev);
917 	struct fsl_dspi *dspi = spi_master_get_devdata(master);
918 	int ret;
919 
920 	pinctrl_pm_select_default_state(dev);
921 
922 	ret = clk_prepare_enable(dspi->clk);
923 	if (ret)
924 		return ret;
925 	spi_master_resume(master);
926 
927 	return 0;
928 }
929 #endif /* CONFIG_PM_SLEEP */
930 
931 static SIMPLE_DEV_PM_OPS(dspi_pm, dspi_suspend, dspi_resume);
932 
933 static const struct regmap_config dspi_regmap_config = {
934 	.reg_bits = 32,
935 	.val_bits = 32,
936 	.reg_stride = 4,
937 	.max_register = 0x88,
938 };
939 
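/* Status register bits are write-1-to-clear; clear any stale flags. */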
940 static void dspi_init(struct fsl_dspi *dspi)
941 {
942 	regmap_write(dspi->regmap, SPI_SR, SPI_SR_CLEAR);
943 }
944 
945 static int dspi_probe(struct platform_device *pdev)
946 {
947 	struct device_node *np = pdev->dev.of_node;
948 	struct spi_master *master;
949 	struct fsl_dspi *dspi;
950 	struct resource *res;
951 	void __iomem *base;
952 	int ret = 0, cs_num, bus_num;
953 
954 	master = spi_alloc_master(&pdev->dev, sizeof(struct fsl_dspi));
955 	if (!master)
956 		return -ENOMEM;
957 
958 	dspi = spi_master_get_devdata(master);
959 	dspi->pdev = pdev;
960 	dspi->master = master;
961 
962 	master->transfer = NULL;
963 	master->setup = dspi_setup;
964 	master->transfer_one_message = dspi_transfer_one_message;
965 	master->dev.of_node = pdev->dev.of_node;
966 
967 	master->cleanup = dspi_cleanup;
968 	master->mode_bits = SPI_CPOL | SPI_CPHA;
969 	master->bits_per_word_mask = SPI_BPW_MASK(4) | SPI_BPW_MASK(8) |
970 					SPI_BPW_MASK(16);
971 
972 	ret = of_property_read_u32(np, "spi-num-chipselects", &cs_num);
973 	if (ret < 0) {
974 		dev_err(&pdev->dev, "can't get spi-num-chipselects\n");
975 		goto out_master_put;
976 	}
977 	master->num_chipselect = cs_num;
978 
979 	ret = of_property_read_u32(np, "bus-num", &bus_num);
980 	if (ret < 0) {
981 		dev_err(&pdev->dev, "can't get bus-num\n");
982 		goto out_master_put;
983 	}
984 	master->bus_num = bus_num;
985 
986 	dspi->devtype_data = of_device_get_match_data(&pdev->dev);
987 	if (!dspi->devtype_data) {
988 		dev_err(&pdev->dev, "can't get devtype_data\n");
989 		ret = -EFAULT;
990 		goto out_master_put;
991 	}
992 
993 	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
994 	base = devm_ioremap_resource(&pdev->dev, res);
995 	if (IS_ERR(base)) {
996 		ret = PTR_ERR(base);
997 		goto out_master_put;
998 	}
999 
1000 	dspi->regmap = devm_regmap_init_mmio_clk(&pdev->dev, NULL, base,
1001 						&dspi_regmap_config);
1002 	if (IS_ERR(dspi->regmap)) {
1003 		dev_err(&pdev->dev, "failed to init regmap: %ld\n",
1004 				PTR_ERR(dspi->regmap));
1005 		ret = PTR_ERR(dspi->regmap);
1006 		goto out_master_put;
1007 	}
1008 
1009 	dspi_init(dspi);
1010 	dspi->irq = platform_get_irq(pdev, 0);
1011 	if (dspi->irq < 0) {
1012 		dev_err(&pdev->dev, "can't get platform irq\n");
1013 		ret = dspi->irq;
1014 		goto out_master_put;
1015 	}
1016 
1017 	ret = devm_request_irq(&pdev->dev, dspi->irq, dspi_interrupt, 0,
1018 			pdev->name, dspi);
1019 	if (ret < 0) {
1020 		dev_err(&pdev->dev, "Unable to attach DSPI interrupt\n");
1021 		goto out_master_put;
1022 	}
1023 
1024 	dspi->clk = devm_clk_get(&pdev->dev, "dspi");
1025 	if (IS_ERR(dspi->clk)) {
1026 		ret = PTR_ERR(dspi->clk);
1027 		dev_err(&pdev->dev, "unable to get clock\n");
1028 		goto out_master_put;
1029 	}
1030 	ret = clk_prepare_enable(dspi->clk);
1031 	if (ret)
1032 		goto out_master_put;
1033 
1034 	if (dspi->devtype_data->trans_mode == DSPI_DMA_MODE) {
1035 		ret = dspi_request_dma(dspi, res->start);
1036 		if (ret < 0) {
1037 			dev_err(&pdev->dev, "can't get dma channels\n");
1038 			goto out_clk_put;
1039 		}
1040 	}
1041 
1042 	master->max_speed_hz =
1043 		clk_get_rate(dspi->clk) / dspi->devtype_data->max_clock_factor;
1044 
1045 	init_waitqueue_head(&dspi->waitq);
1046 	platform_set_drvdata(pdev, master);
1047 
1048 	ret = spi_register_master(master);
1049 	if (ret != 0) {
1050 		dev_err(&pdev->dev, "Problem registering DSPI master\n");
1051 		goto out_clk_put;
1052 	}
1053 
1054 	return ret;
1055 
1056 out_clk_put:
1057 	clk_disable_unprepare(dspi->clk);
1058 out_master_put:
1059 	spi_master_put(master);
1060 
1061 	return ret;
1062 }
1063 
1064 static int dspi_remove(struct platform_device *pdev)
1065 {
1066 	struct spi_master *master = platform_get_drvdata(pdev);
1067 	struct fsl_dspi *dspi = spi_master_get_devdata(master);
1068 
1069 	/* Disconnect from the SPI framework */
1070 	dspi_release_dma(dspi);
1071 	clk_disable_unprepare(dspi->clk);
1072 	spi_unregister_master(dspi->master);
1073 
1074 	return 0;
1075 }
1076 
1077 static struct platform_driver fsl_dspi_driver = {
1078 	.driver.name    = DRIVER_NAME,
1079 	.driver.of_match_table = fsl_dspi_dt_ids,
1080 	.driver.owner   = THIS_MODULE,
1081 	.driver.pm = &dspi_pm,
1082 	.probe          = dspi_probe,
1083 	.remove		= dspi_remove,
1084 };
1085 module_platform_driver(fsl_dspi_driver);
1086 
1087 MODULE_DESCRIPTION("Freescale DSPI Controller Driver");
1088 MODULE_LICENSE("GPL");
1089 MODULE_ALIAS("platform:" DRIVER_NAME);
1090