xref: /openbmc/linux/drivers/spi/spi-at91-usart.c (revision 60ea3db3)
1 // SPDX-License-Identifier: GPL-2.0
2 //
3 // Driver for AT91 USART Controllers as SPI
4 //
5 // Copyright (C) 2018 Microchip Technology Inc.
6 //
7 // Author: Radu Pirea <radu.pirea@microchip.com>
8 
9 #include <linux/clk.h>
10 #include <linux/delay.h>
11 #include <linux/dmaengine.h>
12 #include <linux/dma-direction.h>
13 #include <linux/interrupt.h>
14 #include <linux/kernel.h>
15 #include <linux/module.h>
16 #include <linux/gpio/consumer.h>
17 #include <linux/pinctrl/consumer.h>
18 #include <linux/platform_device.h>
19 #include <linux/pm_runtime.h>
20 
21 #include <linux/spi/spi.h>
22 
23 #define US_CR			0x00
24 #define US_MR			0x04
25 #define US_IER			0x08
26 #define US_IDR			0x0C
27 #define US_CSR			0x14
28 #define US_RHR			0x18
29 #define US_THR			0x1C
30 #define US_BRGR			0x20
31 #define US_VERSION		0xFC
32 
33 #define US_CR_RSTRX		BIT(2)
34 #define US_CR_RSTTX		BIT(3)
35 #define US_CR_RXEN		BIT(4)
36 #define US_CR_RXDIS		BIT(5)
37 #define US_CR_TXEN		BIT(6)
38 #define US_CR_TXDIS		BIT(7)
39 
40 #define US_MR_SPI_HOST		0x0E
41 #define US_MR_CHRL		GENMASK(7, 6)
42 #define US_MR_CPHA		BIT(8)
43 #define US_MR_CPOL		BIT(16)
44 #define US_MR_CLKO		BIT(18)
45 #define US_MR_WRDBT		BIT(20)
46 #define US_MR_LOOP		BIT(15)
47 
48 #define US_IR_RXRDY		BIT(0)
49 #define US_IR_TXRDY		BIT(1)
50 #define US_IR_OVRE		BIT(5)
51 
52 #define US_BRGR_SIZE		BIT(16)
53 
54 #define US_MIN_CLK_DIV		0x06
55 #define US_MAX_CLK_DIV		BIT(16)
56 
57 #define US_RESET		(US_CR_RSTRX | US_CR_RSTTX)
58 #define US_DISABLE		(US_CR_RXDIS | US_CR_TXDIS)
59 #define US_ENABLE		(US_CR_RXEN | US_CR_TXEN)
60 #define US_OVRE_RXRDY_IRQS	(US_IR_OVRE | US_IR_RXRDY)
61 
62 #define US_INIT \
63 	(US_MR_SPI_HOST | US_MR_CHRL | US_MR_CLKO | US_MR_WRDBT)
64 #define US_DMA_MIN_BYTES       16
65 #define US_DMA_TIMEOUT         (msecs_to_jiffies(1000))
66 
67 /* Register access macros */
68 #define at91_usart_spi_readl(port, reg) \
69 	readl_relaxed((port)->regs + US_##reg)
70 #define at91_usart_spi_writel(port, reg, value) \
71 	writel_relaxed((value), (port)->regs + US_##reg)
72 
73 #define at91_usart_spi_readb(port, reg) \
74 	readb_relaxed((port)->regs + US_##reg)
75 #define at91_usart_spi_writeb(port, reg, value) \
76 	writeb_relaxed((value), (port)->regs + US_##reg)
77 
/* Per-controller driver state, stored as spi_controller devdata. */
struct at91_usart_spi {
	struct platform_device  *mpdev;		/* parent MFD USART platform device */
	struct spi_transfer	*current_transfer;	/* transfer currently in flight (PIO path) */
	void __iomem		*regs;		/* mapped USART register base */
	struct device		*dev;
	struct clk		*clk;		/* "usart" peripheral clock */

	struct completion	xfer_completion;	/* signalled from RX DMA callback */

	/* used in interrupt to protect data reading */
	spinlock_t		lock;

	phys_addr_t		phybase;	/* physical register base, for DMA slave config */

	int			irq;
	unsigned int		current_tx_remaining_bytes;	/* bytes left to write to THR */
	unsigned int		current_rx_remaining_bytes;	/* bytes left to read from RHR */

	u32			spi_clk;	/* cached clk rate for baud-rate divisor */
	u32			status;		/* last CSR snapshot (see at91_usart_spi_read_status) */

	bool			xfer_failed;	/* set on overrun by the IRQ handler */
	bool			use_dma;	/* both DMA channels configured successfully */
};
102 
dma_callback(void * data)103 static void dma_callback(void *data)
104 {
105 	struct spi_controller   *ctlr = data;
106 	struct at91_usart_spi   *aus = spi_controller_get_devdata(ctlr);
107 
108 	at91_usart_spi_writel(aus, IER, US_IR_RXRDY);
109 	aus->current_rx_remaining_bytes = 0;
110 	complete(&aus->xfer_completion);
111 }
112 
at91_usart_spi_can_dma(struct spi_controller * ctrl,struct spi_device * spi,struct spi_transfer * xfer)113 static bool at91_usart_spi_can_dma(struct spi_controller *ctrl,
114 				   struct spi_device *spi,
115 				   struct spi_transfer *xfer)
116 {
117 	struct at91_usart_spi *aus = spi_controller_get_devdata(ctrl);
118 
119 	return aus->use_dma && xfer->len >= US_DMA_MIN_BYTES;
120 }
121 
/*
 * Request and configure the TX and RX DMA channels for the controller.
 *
 * Channels are requested from the parent MFD device (aus->mpdev), since
 * that is the node carrying the "tx"/"rx" dma properties.  On any failure
 * both channels are released and ctlr->dma_tx/dma_rx are reset to NULL so
 * the driver can fall back to PIO.  Returns 0 on success, negative errno
 * otherwise; sets aus->use_dma only when both channels are fully set up.
 */
static int at91_usart_spi_configure_dma(struct spi_controller *ctlr,
					struct at91_usart_spi *aus)
{
	struct dma_slave_config slave_config;
	struct device *dev = &aus->mpdev->dev;
	phys_addr_t phybase = aus->phybase;
	dma_cap_mask_t mask;
	int err = 0;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	ctlr->dma_tx = dma_request_chan(dev, "tx");
	if (IS_ERR_OR_NULL(ctlr->dma_tx)) {
		if (IS_ERR(ctlr->dma_tx)) {
			err = PTR_ERR(ctlr->dma_tx);
			/* nothing acquired yet: skip the release path */
			goto at91_usart_spi_error_clear;
		}

		dev_dbg(dev,
			"DMA TX channel not available, SPI unable to use DMA\n");
		err = -EBUSY;
		goto at91_usart_spi_error_clear;
	}

	ctlr->dma_rx = dma_request_chan(dev, "rx");
	if (IS_ERR_OR_NULL(ctlr->dma_rx)) {
		if (IS_ERR(ctlr->dma_rx)) {
			err = PTR_ERR(ctlr->dma_rx);
			goto at91_usart_spi_error;
		}

		dev_dbg(dev,
			"DMA RX channel not available, SPI unable to use DMA\n");
		err = -EBUSY;
		goto at91_usart_spi_error;
	}

	/* Byte-wide single transfers against the USART holding registers */
	slave_config.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	slave_config.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	slave_config.dst_addr = (dma_addr_t)phybase + US_THR;
	slave_config.src_addr = (dma_addr_t)phybase + US_RHR;
	slave_config.src_maxburst = 1;
	slave_config.dst_maxburst = 1;
	slave_config.device_fc = false;

	slave_config.direction = DMA_DEV_TO_MEM;
	if (dmaengine_slave_config(ctlr->dma_rx, &slave_config)) {
		dev_err(&ctlr->dev,
			"failed to configure rx dma channel\n");
		err = -EINVAL;
		goto at91_usart_spi_error;
	}

	/* same config, opposite direction, for the TX channel */
	slave_config.direction = DMA_MEM_TO_DEV;
	if (dmaengine_slave_config(ctlr->dma_tx, &slave_config)) {
		dev_err(&ctlr->dev,
			"failed to configure tx dma channel\n");
		err = -EINVAL;
		goto at91_usart_spi_error;
	}

	aus->use_dma = true;
	return 0;

at91_usart_spi_error:
	if (!IS_ERR_OR_NULL(ctlr->dma_tx))
		dma_release_channel(ctlr->dma_tx);
	if (!IS_ERR_OR_NULL(ctlr->dma_rx))
		dma_release_channel(ctlr->dma_rx);
	ctlr->dma_tx = NULL;
	ctlr->dma_rx = NULL;

at91_usart_spi_error_clear:
	return err;
}
198 
at91_usart_spi_release_dma(struct spi_controller * ctlr)199 static void at91_usart_spi_release_dma(struct spi_controller *ctlr)
200 {
201 	if (ctlr->dma_rx)
202 		dma_release_channel(ctlr->dma_rx);
203 	if (ctlr->dma_tx)
204 		dma_release_channel(ctlr->dma_tx);
205 }
206 
at91_usart_spi_stop_dma(struct spi_controller * ctlr)207 static void at91_usart_spi_stop_dma(struct spi_controller *ctlr)
208 {
209 	if (ctlr->dma_rx)
210 		dmaengine_terminate_all(ctlr->dma_rx);
211 	if (ctlr->dma_tx)
212 		dmaengine_terminate_all(ctlr->dma_tx);
213 }
214 
/*
 * Kick off one full-duplex DMA transfer.
 *
 * The RXRDY interrupt is masked while DMA owns the receiver; dma_callback()
 * re-enables it on completion.  RX is prepared and submitted before TX so
 * the receiver is armed before the transmitter starts pushing bytes.
 * On any preparation/submission failure, RXRDY is unmasked again and both
 * channels are terminated so the caller can fall back to PIO; returns
 * -ENOMEM in that case, 0 on successful submission.
 */
static int at91_usart_spi_dma_transfer(struct spi_controller *ctlr,
				       struct spi_transfer *xfer)
{
	struct at91_usart_spi *aus = spi_controller_get_devdata(ctlr);
	struct dma_chan	 *rxchan = ctlr->dma_rx;
	struct dma_chan *txchan = ctlr->dma_tx;
	struct dma_async_tx_descriptor *rxdesc;
	struct dma_async_tx_descriptor *txdesc;
	dma_cookie_t cookie;

	/* Disable RX interrupt */
	at91_usart_spi_writel(aus, IDR, US_IR_RXRDY);

	rxdesc = dmaengine_prep_slave_sg(rxchan,
					 xfer->rx_sg.sgl,
					 xfer->rx_sg.nents,
					 DMA_DEV_TO_MEM,
					 DMA_PREP_INTERRUPT |
					 DMA_CTRL_ACK);
	if (!rxdesc)
		goto at91_usart_spi_err_dma;

	txdesc = dmaengine_prep_slave_sg(txchan,
					 xfer->tx_sg.sgl,
					 xfer->tx_sg.nents,
					 DMA_MEM_TO_DEV,
					 DMA_PREP_INTERRUPT |
					 DMA_CTRL_ACK);
	if (!txdesc)
		goto at91_usart_spi_err_dma;

	/* Completion is signalled from the RX side only */
	rxdesc->callback = dma_callback;
	rxdesc->callback_param = ctlr;

	cookie = rxdesc->tx_submit(rxdesc);
	if (dma_submit_error(cookie))
		goto at91_usart_spi_err_dma;

	cookie = txdesc->tx_submit(txdesc);
	if (dma_submit_error(cookie))
		goto at91_usart_spi_err_dma;

	rxchan->device->device_issue_pending(rxchan);
	txchan->device->device_issue_pending(txchan);

	return 0;

at91_usart_spi_err_dma:
	/* Enable RX interrupt if something fails and fallback to PIO */
	at91_usart_spi_writel(aus, IER, US_IR_RXRDY);
	at91_usart_spi_stop_dma(ctlr);

	return -ENOMEM;
}
269 
at91_usart_spi_dma_timeout(struct at91_usart_spi * aus)270 static unsigned long at91_usart_spi_dma_timeout(struct at91_usart_spi *aus)
271 {
272 	return wait_for_completion_timeout(&aus->xfer_completion,
273 					   US_DMA_TIMEOUT);
274 }
275 
at91_usart_spi_tx_ready(struct at91_usart_spi * aus)276 static inline u32 at91_usart_spi_tx_ready(struct at91_usart_spi *aus)
277 {
278 	return aus->status & US_IR_TXRDY;
279 }
280 
at91_usart_spi_rx_ready(struct at91_usart_spi * aus)281 static inline u32 at91_usart_spi_rx_ready(struct at91_usart_spi *aus)
282 {
283 	return aus->status & US_IR_RXRDY;
284 }
285 
at91_usart_spi_check_overrun(struct at91_usart_spi * aus)286 static inline u32 at91_usart_spi_check_overrun(struct at91_usart_spi *aus)
287 {
288 	return aus->status & US_IR_OVRE;
289 }
290 
at91_usart_spi_read_status(struct at91_usart_spi * aus)291 static inline u32 at91_usart_spi_read_status(struct at91_usart_spi *aus)
292 {
293 	aus->status = at91_usart_spi_readl(aus, CSR);
294 	return aus->status;
295 }
296 
at91_usart_spi_tx(struct at91_usart_spi * aus)297 static inline void at91_usart_spi_tx(struct at91_usart_spi *aus)
298 {
299 	unsigned int len = aus->current_transfer->len;
300 	unsigned int remaining = aus->current_tx_remaining_bytes;
301 	const u8  *tx_buf = aus->current_transfer->tx_buf;
302 
303 	if (!remaining)
304 		return;
305 
306 	if (at91_usart_spi_tx_ready(aus)) {
307 		at91_usart_spi_writeb(aus, THR, tx_buf[len - remaining]);
308 		aus->current_tx_remaining_bytes--;
309 	}
310 }
311 
at91_usart_spi_rx(struct at91_usart_spi * aus)312 static inline void at91_usart_spi_rx(struct at91_usart_spi *aus)
313 {
314 	int len = aus->current_transfer->len;
315 	int remaining = aus->current_rx_remaining_bytes;
316 	u8  *rx_buf = aus->current_transfer->rx_buf;
317 
318 	if (!remaining)
319 		return;
320 
321 	rx_buf[len - remaining] = at91_usart_spi_readb(aus, RHR);
322 	aus->current_rx_remaining_bytes--;
323 }
324 
325 static inline void
at91_usart_spi_set_xfer_speed(struct at91_usart_spi * aus,struct spi_transfer * xfer)326 at91_usart_spi_set_xfer_speed(struct at91_usart_spi *aus,
327 			      struct spi_transfer *xfer)
328 {
329 	at91_usart_spi_writel(aus, BRGR,
330 			      DIV_ROUND_UP(aus->spi_clk, xfer->speed_hz));
331 }
332 
/*
 * Interrupt handler for the PIO receive path.
 *
 * Snapshots CSR under aus->lock, then either flags an overrun (masking
 * further OVRE/RXRDY interrupts so transfer_one() can bail out) or drains
 * one byte from RHR.  Returns IRQ_NONE when neither condition is pending,
 * which also covers interrupts raised while DMA owns the receiver.
 */
static irqreturn_t at91_usart_spi_interrupt(int irq, void *dev_id)
{
	struct spi_controller *controller = dev_id;
	struct at91_usart_spi *aus = spi_controller_get_devdata(controller);

	spin_lock(&aus->lock);
	at91_usart_spi_read_status(aus);

	if (at91_usart_spi_check_overrun(aus)) {
		/* Receiver overran: abort the transfer, quiesce interrupts */
		aus->xfer_failed = true;
		at91_usart_spi_writel(aus, IDR, US_IR_OVRE | US_IR_RXRDY);
		spin_unlock(&aus->lock);
		return IRQ_HANDLED;
	}

	if (at91_usart_spi_rx_ready(aus)) {
		at91_usart_spi_rx(aus);
		spin_unlock(&aus->lock);
		return IRQ_HANDLED;
	}

	spin_unlock(&aus->lock);

	return IRQ_NONE;
}
358 
/*
 * Per-device setup: compute the US_MR value matching spi->mode (CPOL,
 * CPHA, loopback) and cache it in spi->controller_state as a bare u32.
 * The cached value is written to the MR register in prepare_message().
 * Returns 0 on success, -ENOMEM if the state allocation fails.
 */
static int at91_usart_spi_setup(struct spi_device *spi)
{
	struct at91_usart_spi *aus = spi_controller_get_devdata(spi->controller);
	u32 *ausd = spi->controller_state;
	/* start from the current MR so unrelated bits are preserved */
	unsigned int mr = at91_usart_spi_readl(aus, MR);

	if (spi->mode & SPI_CPOL)
		mr |= US_MR_CPOL;
	else
		mr &= ~US_MR_CPOL;

	if (spi->mode & SPI_CPHA)
		mr |= US_MR_CPHA;
	else
		mr &= ~US_MR_CPHA;

	if (spi->mode & SPI_LOOP)
		mr |= US_MR_LOOP;
	else
		mr &= ~US_MR_LOOP;

	/* first call for this device: allocate the cached-MR slot */
	if (!ausd) {
		ausd = kzalloc(sizeof(*ausd), GFP_KERNEL);
		if (!ausd)
			return -ENOMEM;

		spi->controller_state = ausd;
	}

	*ausd = mr;

	dev_dbg(&spi->dev,
		"setup: bpw %u mode 0x%x -> mr %d %08x\n",
		spi->bits_per_word, spi->mode, spi_get_chipselect(spi, 0), mr);

	return 0;
}
396 
/*
 * Execute one transfer, preferring DMA for large payloads and falling
 * back to interrupt-driven PIO otherwise (or after a DMA submit failure,
 * tracked via 'ret' so DMA is attempted at most once per transfer).
 * TX is driven from this loop; RX completion happens in the IRQ handler
 * (PIO) or the DMA callback.  Returns 0 on success, -EIO on DMA timeout
 * or receiver overrun.
 */
static int at91_usart_spi_transfer_one(struct spi_controller *ctlr,
				       struct spi_device *spi,
				       struct spi_transfer *xfer)
{
	struct at91_usart_spi *aus = spi_controller_get_devdata(ctlr);
	unsigned long dma_timeout = 0;
	int ret = 0;

	at91_usart_spi_set_xfer_speed(aus, xfer);
	aus->xfer_failed = false;
	aus->current_transfer = xfer;
	aus->current_tx_remaining_bytes = xfer->len;
	aus->current_rx_remaining_bytes = xfer->len;

	while ((aus->current_tx_remaining_bytes ||
		aus->current_rx_remaining_bytes) && !aus->xfer_failed) {
		reinit_completion(&aus->xfer_completion);
		/* !ret: skip DMA once a previous submit attempt failed */
		if (at91_usart_spi_can_dma(ctlr, spi, xfer) &&
		    !ret) {
			ret = at91_usart_spi_dma_transfer(ctlr, xfer);
			if (ret)
				continue;

			dma_timeout = at91_usart_spi_dma_timeout(aus);

			if (WARN_ON(dma_timeout == 0)) {
				dev_err(&spi->dev, "DMA transfer timeout\n");
				return -EIO;
			}
			/* DMA moved the whole buffer; nothing left for PIO */
			aus->current_tx_remaining_bytes = 0;
		} else {
			/* PIO: refresh CSR, push one byte if TXRDY */
			at91_usart_spi_read_status(aus);
			at91_usart_spi_tx(aus);
		}

		cpu_relax();
	}

	if (aus->xfer_failed) {
		dev_err(aus->dev, "Overrun!\n");
		return -EIO;
	}

	return 0;
}
442 
/*
 * Enable the USART and apply the per-device mode register cached by
 * setup() before a message starts.  The SPI core guarantees setup() has
 * run, so controller_state is always a valid u32 pointer here.
 */
static int at91_usart_spi_prepare_message(struct spi_controller *ctlr,
					  struct spi_message *message)
{
	struct at91_usart_spi *aus = spi_controller_get_devdata(ctlr);
	struct spi_device *spi = message->spi;
	u32 *ausd = spi->controller_state;

	at91_usart_spi_writel(aus, CR, US_ENABLE);
	at91_usart_spi_writel(aus, IER, US_OVRE_RXRDY_IRQS);
	at91_usart_spi_writel(aus, MR, *ausd);

	return 0;
}
456 
/*
 * Quiesce the USART after a message: reset and disable both directions
 * and mask the overrun/RX-ready interrupts enabled by prepare_message().
 */
static int at91_usart_spi_unprepare_message(struct spi_controller *ctlr,
					    struct spi_message *message)
{
	struct at91_usart_spi *aus = spi_controller_get_devdata(ctlr);

	at91_usart_spi_writel(aus, CR, US_RESET | US_DISABLE);
	at91_usart_spi_writel(aus, IDR, US_OVRE_RXRDY_IRQS);

	return 0;
}
467 
at91_usart_spi_cleanup(struct spi_device * spi)468 static void at91_usart_spi_cleanup(struct spi_device *spi)
469 {
470 	struct at91_usart_spi_device *ausd = spi->controller_state;
471 
472 	spi->controller_state = NULL;
473 	kfree(ausd);
474 }
475 
/*
 * Put the USART into SPI-host mode with its baseline configuration and
 * leave it reset/disabled; prepare_message() enables it per message.
 */
static void at91_usart_spi_init(struct at91_usart_spi *aus)
{
	at91_usart_spi_writel(aus, MR, US_INIT);
	at91_usart_spi_writel(aus, CR, US_RESET | US_DISABLE);
}
481 
at91_usart_gpio_setup(struct platform_device * pdev)482 static int at91_usart_gpio_setup(struct platform_device *pdev)
483 {
484 	struct gpio_descs *cs_gpios;
485 
486 	cs_gpios = devm_gpiod_get_array_optional(&pdev->dev, "cs", GPIOD_OUT_LOW);
487 
488 	return PTR_ERR_OR_ZERO(cs_gpios);
489 }
490 
/*
 * Probe the USART-as-SPI controller.
 *
 * This driver is an MFD child: the register resource, IRQ and clock all
 * come from the parent USART platform device.  Resources are acquired in
 * order (regs/irq/clk lookups, host allocation, CS GPIOs, ioremap, IRQ,
 * clock enable, DMA channels, registration) and unwound in reverse via
 * the goto chain on failure.
 */
static int at91_usart_spi_probe(struct platform_device *pdev)
{
	struct resource *regs;
	struct spi_controller *controller;
	struct at91_usart_spi *aus;
	struct clk *clk;
	int irq;
	int ret;

	/* MMIO window and IRQ belong to the parent MFD device */
	regs = platform_get_resource(to_platform_device(pdev->dev.parent),
				     IORESOURCE_MEM, 0);
	if (!regs)
		return -EINVAL;

	irq = platform_get_irq(to_platform_device(pdev->dev.parent), 0);
	if (irq < 0)
		return irq;

	clk = devm_clk_get(pdev->dev.parent, "usart");
	if (IS_ERR(clk))
		return PTR_ERR(clk);

	ret = -ENOMEM;
	controller = spi_alloc_host(&pdev->dev, sizeof(*aus));
	if (!controller)
		goto at91_usart_spi_probe_fail;

	ret = at91_usart_gpio_setup(pdev);
	if (ret)
		goto at91_usart_spi_probe_fail;

	controller->mode_bits = SPI_CPOL | SPI_CPHA | SPI_LOOP | SPI_CS_HIGH;
	controller->dev.of_node = pdev->dev.parent->of_node;
	controller->bits_per_word_mask = SPI_BPW_MASK(8);
	controller->setup = at91_usart_spi_setup;
	controller->flags = SPI_CONTROLLER_MUST_RX | SPI_CONTROLLER_MUST_TX;
	controller->transfer_one = at91_usart_spi_transfer_one;
	controller->prepare_message = at91_usart_spi_prepare_message;
	controller->unprepare_message = at91_usart_spi_unprepare_message;
	controller->can_dma = at91_usart_spi_can_dma;
	controller->cleanup = at91_usart_spi_cleanup;
	/* Speed limits derive from the clock and the BRGR divisor range */
	controller->max_speed_hz = DIV_ROUND_UP(clk_get_rate(clk),
						US_MIN_CLK_DIV);
	controller->min_speed_hz = DIV_ROUND_UP(clk_get_rate(clk),
						US_MAX_CLK_DIV);
	platform_set_drvdata(pdev, controller);

	aus = spi_controller_get_devdata(controller);

	aus->dev = &pdev->dev;
	aus->regs = devm_ioremap_resource(&pdev->dev, regs);
	if (IS_ERR(aus->regs)) {
		ret = PTR_ERR(aus->regs);
		goto at91_usart_spi_probe_fail;
	}

	aus->irq = irq;
	aus->clk = clk;

	ret = devm_request_irq(&pdev->dev, irq, at91_usart_spi_interrupt, 0,
			       dev_name(&pdev->dev), controller);
	if (ret)
		goto at91_usart_spi_probe_fail;

	ret = clk_prepare_enable(clk);
	if (ret)
		goto at91_usart_spi_probe_fail;

	aus->spi_clk = clk_get_rate(clk);
	at91_usart_spi_init(aus);

	/* physical base needed for the DMA slave addresses */
	aus->phybase = regs->start;

	aus->mpdev = to_platform_device(pdev->dev.parent);

	ret = at91_usart_spi_configure_dma(controller, aus);
	if (ret)
		goto at91_usart_fail_dma;

	spin_lock_init(&aus->lock);
	init_completion(&aus->xfer_completion);

	ret = devm_spi_register_controller(&pdev->dev, controller);
	if (ret)
		goto at91_usart_fail_register_controller;

	dev_info(&pdev->dev,
		 "AT91 USART SPI Controller version 0x%x at %pa (irq %d)\n",
		 at91_usart_spi_readl(aus, VERSION),
		 &regs->start, irq);

	return 0;

at91_usart_fail_register_controller:
	at91_usart_spi_release_dma(controller);
at91_usart_fail_dma:
	clk_disable_unprepare(clk);
at91_usart_spi_probe_fail:
	spi_controller_put(controller);
	return ret;
}
592 
/* Runtime suspend: gate the USART clock, then park the pins. */
__maybe_unused static int at91_usart_spi_runtime_suspend(struct device *dev)
{
	struct spi_controller *ctlr = dev_get_drvdata(dev);
	struct at91_usart_spi *aus = spi_controller_get_devdata(ctlr);

	clk_disable_unprepare(aus->clk);
	pinctrl_pm_select_sleep_state(dev);

	return 0;
}
603 
/* Runtime resume: restore the pin state, then ungate the USART clock. */
__maybe_unused static int at91_usart_spi_runtime_resume(struct device *dev)
{
	struct spi_controller *ctrl = dev_get_drvdata(dev);
	struct at91_usart_spi *aus = spi_controller_get_devdata(ctrl);

	pinctrl_pm_select_default_state(dev);

	return clk_prepare_enable(aus->clk);
}
613 
/*
 * System suspend: quiesce the SPI queue, then power down unless runtime
 * PM has already suspended the device.
 */
__maybe_unused static int at91_usart_spi_suspend(struct device *dev)
{
	struct spi_controller *ctrl = dev_get_drvdata(dev);
	int ret;

	ret = spi_controller_suspend(ctrl);
	if (ret)
		return ret;

	if (!pm_runtime_suspended(dev))
		at91_usart_spi_runtime_suspend(dev);

	return 0;
}
628 
/*
 * System resume: power the device back up if needed, reprogram the
 * baseline register state lost across suspend, and restart the queue.
 */
__maybe_unused static int at91_usart_spi_resume(struct device *dev)
{
	struct spi_controller *ctrl = dev_get_drvdata(dev);
	struct at91_usart_spi *aus = spi_controller_get_devdata(ctrl);
	int ret;

	if (!pm_runtime_suspended(dev)) {
		ret = at91_usart_spi_runtime_resume(dev);
		if (ret)
			return ret;
	}

	at91_usart_spi_init(aus);

	return spi_controller_resume(ctrl);
}
645 
at91_usart_spi_remove(struct platform_device * pdev)646 static void at91_usart_spi_remove(struct platform_device *pdev)
647 {
648 	struct spi_controller *ctlr = platform_get_drvdata(pdev);
649 	struct at91_usart_spi *aus = spi_controller_get_devdata(ctlr);
650 
651 	at91_usart_spi_release_dma(ctlr);
652 	clk_disable_unprepare(aus->clk);
653 }
654 
/* System-sleep and runtime PM hooks (stubs compile out without CONFIG_PM). */
static const struct dev_pm_ops at91_usart_spi_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(at91_usart_spi_suspend, at91_usart_spi_resume)
	SET_RUNTIME_PM_OPS(at91_usart_spi_runtime_suspend,
			   at91_usart_spi_runtime_resume, NULL)
};
660 
/* Platform driver bound by the MFD core via the "at91_usart_spi" cell name. */
static struct platform_driver at91_usart_spi_driver = {
	.driver = {
		.name = "at91_usart_spi",
		.pm = &at91_usart_spi_pm_ops,
	},
	.probe = at91_usart_spi_probe,
	.remove_new = at91_usart_spi_remove,
};
669 
/* Standard module registration and metadata. */
module_platform_driver(at91_usart_spi_driver);

MODULE_DESCRIPTION("Microchip AT91 USART SPI Controller driver");
MODULE_AUTHOR("Radu Pirea <radu.pirea@microchip.com>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:at91_usart_spi");
676