xref: /openbmc/linux/drivers/dma/stm32-mdma.c (revision 9dd7c463)
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  *
4  * Copyright (C) STMicroelectronics SA 2017
5  * Author(s): M'boumba Cedric Madianga <cedric.madianga@gmail.com>
6  *            Pierre-Yves Mordret <pierre-yves.mordret@st.com>
7  *
8  * Driver for STM32 MDMA controller
9  *
10  * Inspired by stm32-dma.c and dma-jz4780.c
11  */
12 
13 #include <linux/bitfield.h>
14 #include <linux/clk.h>
15 #include <linux/delay.h>
16 #include <linux/dmaengine.h>
17 #include <linux/dma-mapping.h>
18 #include <linux/dmapool.h>
19 #include <linux/err.h>
20 #include <linux/init.h>
21 #include <linux/iopoll.h>
22 #include <linux/jiffies.h>
23 #include <linux/list.h>
24 #include <linux/log2.h>
25 #include <linux/module.h>
26 #include <linux/of.h>
27 #include <linux/of_device.h>
28 #include <linux/of_dma.h>
29 #include <linux/platform_device.h>
30 #include <linux/pm_runtime.h>
31 #include <linux/reset.h>
32 #include <linux/slab.h>
33 
34 #include "virt-dma.h"
35 
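/*
 * Register map: the two global interrupt/status registers come first,
 * then each channel x (x = 0..62) owns a 0x40-byte block starting at
 * offset 0x40, hence the (0x40 + 0x40 * (x)) pattern used below.
 */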
36 #define STM32_MDMA_GISR0		0x0000 /* MDMA Global Int Status Reg 0 */
37 #define STM32_MDMA_GISR1		0x0004 /* MDMA Global Int Status Reg 1 */
38 
39 /* MDMA Channel x interrupt/status register */
40 #define STM32_MDMA_CISR(x)		(0x40 + 0x40 * (x)) /* x = 0..62 */
41 #define STM32_MDMA_CISR_CRQA		BIT(16)
42 #define STM32_MDMA_CISR_TCIF		BIT(4)
43 #define STM32_MDMA_CISR_BTIF		BIT(3)
44 #define STM32_MDMA_CISR_BRTIF		BIT(2)
45 #define STM32_MDMA_CISR_CTCIF		BIT(1)
46 #define STM32_MDMA_CISR_TEIF		BIT(0)
47 
48 /* MDMA Channel x interrupt flag clear register */
49 #define STM32_MDMA_CIFCR(x)		(0x44 + 0x40 * (x))
50 #define STM32_MDMA_CIFCR_CLTCIF		BIT(4)
51 #define STM32_MDMA_CIFCR_CBTIF		BIT(3)
52 #define STM32_MDMA_CIFCR_CBRTIF		BIT(2)
53 #define STM32_MDMA_CIFCR_CCTCIF		BIT(1)
54 #define STM32_MDMA_CIFCR_CTEIF		BIT(0)
55 #define STM32_MDMA_CIFCR_CLEAR_ALL	(STM32_MDMA_CIFCR_CLTCIF \
56 					| STM32_MDMA_CIFCR_CBTIF \
57 					| STM32_MDMA_CIFCR_CBRTIF \
58 					| STM32_MDMA_CIFCR_CCTCIF \
59 					| STM32_MDMA_CIFCR_CTEIF)
60 
61 /* MDMA Channel x error status register */
62 #define STM32_MDMA_CESR(x)		(0x48 + 0x40 * (x))
63 #define STM32_MDMA_CESR_BSE		BIT(11)
64 #define STM32_MDMA_CESR_ASR		BIT(10)
65 #define STM32_MDMA_CESR_TEMD		BIT(9)
66 #define STM32_MDMA_CESR_TELD		BIT(8)
67 #define STM32_MDMA_CESR_TED		BIT(7)
68 #define STM32_MDMA_CESR_TEA_MASK	GENMASK(6, 0)
69 
70 /* MDMA Channel x control register */
71 #define STM32_MDMA_CCR(x)		(0x4C + 0x40 * (x))
72 #define STM32_MDMA_CCR_SWRQ		BIT(16)
73 #define STM32_MDMA_CCR_WEX		BIT(14)
74 #define STM32_MDMA_CCR_HEX		BIT(13)
75 #define STM32_MDMA_CCR_BEX		BIT(12)
76 #define STM32_MDMA_CCR_PL_MASK		GENMASK(7, 6)
77 #define STM32_MDMA_CCR_PL(n)		FIELD_PREP(STM32_MDMA_CCR_PL_MASK, (n))
78 #define STM32_MDMA_CCR_TCIE		BIT(5)
79 #define STM32_MDMA_CCR_BTIE		BIT(4)
80 #define STM32_MDMA_CCR_BRTIE		BIT(3)
81 #define STM32_MDMA_CCR_CTCIE		BIT(2)
82 #define STM32_MDMA_CCR_TEIE		BIT(1)
83 #define STM32_MDMA_CCR_EN		BIT(0)
84 #define STM32_MDMA_CCR_IRQ_MASK		(STM32_MDMA_CCR_TCIE \
85 					| STM32_MDMA_CCR_BTIE \
86 					| STM32_MDMA_CCR_BRTIE \
87 					| STM32_MDMA_CCR_CTCIE \
88 					| STM32_MDMA_CCR_TEIE)
89 
90 /* MDMA Channel x transfer configuration register */
91 #define STM32_MDMA_CTCR(x)		(0x50 + 0x40 * (x))
92 #define STM32_MDMA_CTCR_BWM		BIT(31)
93 #define STM32_MDMA_CTCR_SWRM		BIT(30)
94 #define STM32_MDMA_CTCR_TRGM_MSK	GENMASK(29, 28)
95 #define STM32_MDMA_CTCR_TRGM(n)		FIELD_PREP(STM32_MDMA_CTCR_TRGM_MSK, (n))
96 #define STM32_MDMA_CTCR_TRGM_GET(n)	FIELD_GET(STM32_MDMA_CTCR_TRGM_MSK, (n))
97 #define STM32_MDMA_CTCR_PAM_MASK	GENMASK(27, 26)
98 #define STM32_MDMA_CTCR_PAM(n)		FIELD_PREP(STM32_MDMA_CTCR_PAM_MASK, (n))
99 #define STM32_MDMA_CTCR_PKE		BIT(25)
100 #define STM32_MDMA_CTCR_TLEN_MSK	GENMASK(24, 18)
101 #define STM32_MDMA_CTCR_TLEN(n)		FIELD_PREP(STM32_MDMA_CTCR_TLEN_MSK, (n))
102 #define STM32_MDMA_CTCR_TLEN_GET(n)	FIELD_GET(STM32_MDMA_CTCR_TLEN_MSK, (n))
103 #define STM32_MDMA_CTCR_LEN2_MSK	GENMASK(25, 18)
104 #define STM32_MDMA_CTCR_LEN2(n)		FIELD_PREP(STM32_MDMA_CTCR_LEN2_MSK, (n))
105 #define STM32_MDMA_CTCR_LEN2_GET(n)	FIELD_GET(STM32_MDMA_CTCR_LEN2_MSK, (n))
106 #define STM32_MDMA_CTCR_DBURST_MASK	GENMASK(17, 15)
107 #define STM32_MDMA_CTCR_DBURST(n)	FIELD_PREP(STM32_MDMA_CTCR_DBURST_MASK, (n))
108 #define STM32_MDMA_CTCR_SBURST_MASK	GENMASK(14, 12)
109 #define STM32_MDMA_CTCR_SBURST(n)	FIELD_PREP(STM32_MDMA_CTCR_SBURST_MASK, (n))
110 #define STM32_MDMA_CTCR_DINCOS_MASK	GENMASK(11, 10)
111 #define STM32_MDMA_CTCR_DINCOS(n)	FIELD_PREP(STM32_MDMA_CTCR_DINCOS_MASK, (n))
112 #define STM32_MDMA_CTCR_SINCOS_MASK	GENMASK(9, 8)
113 #define STM32_MDMA_CTCR_SINCOS(n)	FIELD_PREP(STM32_MDMA_CTCR_SINCOS_MASK, (n))
114 #define STM32_MDMA_CTCR_DSIZE_MASK	GENMASK(7, 6)
115 #define STM32_MDMA_CTCR_DSIZE(n)	FIELD_PREP(STM32_MDMA_CTCR_DSIZE_MASK, (n))
116 #define STM32_MDMA_CTCR_SSIZE_MASK	GENMASK(5, 4)
117 #define STM32_MDMA_CTCR_SSIZE(n)	FIELD_PREP(STM32_MDMA_CTCR_SSIZE_MASK, (n))
118 #define STM32_MDMA_CTCR_DINC_MASK	GENMASK(3, 2)
119 #define STM32_MDMA_CTCR_DINC(n)		FIELD_PREP(STM32_MDMA_CTCR_DINC_MASK, (n))
120 #define STM32_MDMA_CTCR_SINC_MASK	GENMASK(1, 0)
121 #define STM32_MDMA_CTCR_SINC(n)		FIELD_PREP(STM32_MDMA_CTCR_SINC_MASK, (n))
122 #define STM32_MDMA_CTCR_CFG_MASK	(STM32_MDMA_CTCR_SINC_MASK \
123 					| STM32_MDMA_CTCR_DINC_MASK \
124 					| STM32_MDMA_CTCR_SINCOS_MASK \
125 					| STM32_MDMA_CTCR_DINCOS_MASK \
126 					| STM32_MDMA_CTCR_LEN2_MSK \
127 					| STM32_MDMA_CTCR_TRGM_MSK)
128 
129 /* MDMA Channel x block number of data register */
130 #define STM32_MDMA_CBNDTR(x)		(0x54 + 0x40 * (x))
131 #define STM32_MDMA_CBNDTR_BRC_MK	GENMASK(31, 20)
132 #define STM32_MDMA_CBNDTR_BRC(n)	FIELD_PREP(STM32_MDMA_CBNDTR_BRC_MK, (n))
133 #define STM32_MDMA_CBNDTR_BRC_GET(n)	FIELD_GET(STM32_MDMA_CBNDTR_BRC_MK, (n))
134 
135 #define STM32_MDMA_CBNDTR_BRDUM		BIT(19)
136 #define STM32_MDMA_CBNDTR_BRSUM		BIT(18)
137 #define STM32_MDMA_CBNDTR_BNDT_MASK	GENMASK(16, 0)
138 #define STM32_MDMA_CBNDTR_BNDT(n)	FIELD_PREP(STM32_MDMA_CBNDTR_BNDT_MASK, (n))
139 
140 /* MDMA Channel x source address register */
141 #define STM32_MDMA_CSAR(x)		(0x58 + 0x40 * (x))
142 
143 /* MDMA Channel x destination address register */
144 #define STM32_MDMA_CDAR(x)		(0x5C + 0x40 * (x))
145 
146 /* MDMA Channel x block repeat address update register */
147 #define STM32_MDMA_CBRUR(x)		(0x60 + 0x40 * (x))
148 #define STM32_MDMA_CBRUR_DUV_MASK	GENMASK(31, 16)
149 #define STM32_MDMA_CBRUR_DUV(n)		FIELD_PREP(STM32_MDMA_CBRUR_DUV_MASK, (n))
150 #define STM32_MDMA_CBRUR_SUV_MASK	GENMASK(15, 0)
151 #define STM32_MDMA_CBRUR_SUV(n)		FIELD_PREP(STM32_MDMA_CBRUR_SUV_MASK, (n))
152 
153 /* MDMA Channel x link address register */
154 #define STM32_MDMA_CLAR(x)		(0x64 + 0x40 * (x))
155 
156 /* MDMA Channel x trigger and bus selection register */
157 #define STM32_MDMA_CTBR(x)		(0x68 + 0x40 * (x))
158 #define STM32_MDMA_CTBR_DBUS		BIT(17)
159 #define STM32_MDMA_CTBR_SBUS		BIT(16)
160 #define STM32_MDMA_CTBR_TSEL_MASK	GENMASK(5, 0)
161 #define STM32_MDMA_CTBR_TSEL(n)		FIELD_PREP(STM32_MDMA_CTBR_TSEL_MASK, (n))
162 
163 /* MDMA Channel x mask address register */
164 #define STM32_MDMA_CMAR(x)		(0x70 + 0x40 * (x))
165 
166 /* MDMA Channel x mask data register */
167 #define STM32_MDMA_CMDR(x)		(0x74 + 0x40 * (x))
168 
169 #define STM32_MDMA_MAX_BUF_LEN		128
170 #define STM32_MDMA_MAX_BLOCK_LEN	65536
171 #define STM32_MDMA_MAX_CHANNELS		63
172 #define STM32_MDMA_MAX_REQUESTS		256
173 #define STM32_MDMA_MAX_BURST		128
174 #define STM32_MDMA_VERY_HIGH_PRIORITY	0x3
175 
176 enum stm32_mdma_trigger_mode {
177 	STM32_MDMA_BUFFER,
178 	STM32_MDMA_BLOCK,
179 	STM32_MDMA_BLOCK_REP,
180 	STM32_MDMA_LINKED_LIST,
181 };
182 
183 enum stm32_mdma_width {
184 	STM32_MDMA_BYTE,
185 	STM32_MDMA_HALF_WORD,
186 	STM32_MDMA_WORD,
187 	STM32_MDMA_DOUBLE_WORD,
188 };
189 
190 enum stm32_mdma_inc_mode {
191 	STM32_MDMA_FIXED = 0,
192 	STM32_MDMA_INC = 2,
193 	STM32_MDMA_DEC = 3,
194 };
195 
196 struct stm32_mdma_chan_config {
197 	u32 request;
198 	u32 priority_level;
199 	u32 transfer_config;
200 	u32 mask_addr;
201 	u32 mask_data;
202 };
203 
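/*
 * Hardware descriptor used in linked-list mode. The layout mirrors the
 * channel register block from CTCR to CMDR (the dummy word covers the
 * unused offset between CTBR and CMAR), so the controller can reload a
 * complete channel configuration by fetching the descriptor pointed to
 * by CLAR.
 */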
204 struct stm32_mdma_hwdesc {
205 	u32 ctcr;
206 	u32 cbndtr;
207 	u32 csar;
208 	u32 cdar;
209 	u32 cbrur;
210 	u32 clar;
211 	u32 ctbr;
212 	u32 dummy;
213 	u32 cmar;
214 	u32 cmdr;
215 } __aligned(64);
216 
217 struct stm32_mdma_desc_node {
218 	struct stm32_mdma_hwdesc *hwdesc;
219 	dma_addr_t hwdesc_phys;
220 };
221 
222 struct stm32_mdma_desc {
223 	struct virt_dma_desc vdesc;
224 	u32 ccr;
225 	bool cyclic;
226 	u32 count;
227 	struct stm32_mdma_desc_node node[];
228 };
229 
230 struct stm32_mdma_chan {
231 	struct virt_dma_chan vchan;
232 	struct dma_pool *desc_pool;
233 	u32 id;
234 	struct stm32_mdma_desc *desc;
235 	u32 curr_hwdesc;
236 	struct dma_slave_config dma_config;
237 	struct stm32_mdma_chan_config chan_config;
238 	bool busy;
239 	u32 mem_burst;
240 	u32 mem_width;
241 };
242 
243 struct stm32_mdma_device {
244 	struct dma_device ddev;
245 	void __iomem *base;
246 	struct clk *clk;
247 	int irq;
248 	u32 nr_channels;
249 	u32 nr_requests;
250 	u32 nr_ahb_addr_masks;
251 	struct stm32_mdma_chan chan[STM32_MDMA_MAX_CHANNELS];
252 	u32 ahb_addr_masks[];
253 };
254 
255 static struct stm32_mdma_device *stm32_mdma_get_dev(
256 	struct stm32_mdma_chan *chan)
257 {
258 	return container_of(chan->vchan.chan.device, struct stm32_mdma_device,
259 			    ddev);
260 }
261 
262 static struct stm32_mdma_chan *to_stm32_mdma_chan(struct dma_chan *c)
263 {
264 	return container_of(c, struct stm32_mdma_chan, vchan.chan);
265 }
266 
267 static struct stm32_mdma_desc *to_stm32_mdma_desc(struct virt_dma_desc *vdesc)
268 {
269 	return container_of(vdesc, struct stm32_mdma_desc, vdesc);
270 }
271 
272 static struct device *chan2dev(struct stm32_mdma_chan *chan)
273 {
274 	return &chan->vchan.chan.dev->device;
275 }
276 
277 static struct device *mdma2dev(struct stm32_mdma_device *mdma_dev)
278 {
279 	return mdma_dev->ddev.dev;
280 }
281 
282 static u32 stm32_mdma_read(struct stm32_mdma_device *dmadev, u32 reg)
283 {
284 	return readl_relaxed(dmadev->base + reg);
285 }
286 
287 static void stm32_mdma_write(struct stm32_mdma_device *dmadev, u32 reg, u32 val)
288 {
289 	writel_relaxed(val, dmadev->base + reg);
290 }
291 
292 static void stm32_mdma_set_bits(struct stm32_mdma_device *dmadev, u32 reg,
293 				u32 mask)
294 {
295 	void __iomem *addr = dmadev->base + reg;
296 
297 	writel_relaxed(readl_relaxed(addr) | mask, addr);
298 }
299 
300 static void stm32_mdma_clr_bits(struct stm32_mdma_device *dmadev, u32 reg,
301 				u32 mask)
302 {
303 	void __iomem *addr = dmadev->base + reg;
304 
305 	writel_relaxed(readl_relaxed(addr) & ~mask, addr);
306 }
307 
308 static struct stm32_mdma_desc *stm32_mdma_alloc_desc(
309 		struct stm32_mdma_chan *chan, u32 count)
310 {
311 	struct stm32_mdma_desc *desc;
312 	int i;
313 
314 	desc = kzalloc(struct_size(desc, node, count), GFP_NOWAIT);
315 	if (!desc)
316 		return NULL;
317 
318 	for (i = 0; i < count; i++) {
319 		desc->node[i].hwdesc =
320 			dma_pool_alloc(chan->desc_pool, GFP_NOWAIT,
321 				       &desc->node[i].hwdesc_phys);
322 		if (!desc->node[i].hwdesc)
323 			goto err;
324 	}
325 
326 	desc->count = count;
327 
328 	return desc;
329 
330 err:
331 	dev_err(chan2dev(chan), "Failed to allocate descriptor\n");
332 	while (--i >= 0)
333 		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
334 			      desc->node[i].hwdesc_phys);
335 	kfree(desc);
336 	return NULL;
337 }
338 
339 static void stm32_mdma_desc_free(struct virt_dma_desc *vdesc)
340 {
341 	struct stm32_mdma_desc *desc = to_stm32_mdma_desc(vdesc);
342 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan);
343 	int i;
344 
345 	for (i = 0; i < desc->count; i++)
346 		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
347 			      desc->node[i].hwdesc_phys);
348 	kfree(desc);
349 }
350 
351 static int stm32_mdma_get_width(struct stm32_mdma_chan *chan,
352 				enum dma_slave_buswidth width)
353 {
354 	switch (width) {
355 	case DMA_SLAVE_BUSWIDTH_1_BYTE:
356 	case DMA_SLAVE_BUSWIDTH_2_BYTES:
357 	case DMA_SLAVE_BUSWIDTH_4_BYTES:
358 	case DMA_SLAVE_BUSWIDTH_8_BYTES:
359 		return ffs(width) - 1;
360 	default:
361 		dev_err(chan2dev(chan), "Dma bus width %i not supported\n",
362 			width);
363 		return -EINVAL;
364 	}
365 }
366 
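/*
 * Pick the widest bus width (8 bytes down to 1 byte) such that both the
 * address and the buffer length are aligned on it and it does not exceed
 * the buffer transfer length (tlen).
 */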
367 static enum dma_slave_buswidth stm32_mdma_get_max_width(dma_addr_t addr,
368 							u32 buf_len, u32 tlen)
369 {
370 	enum dma_slave_buswidth max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
371 
372 	for (max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
373 	     max_width > DMA_SLAVE_BUSWIDTH_1_BYTE;
374 	     max_width >>= 1) {
375 		/*
376 		 * Address and buffer length both have to be aligned on
377 		 * bus width
378 		 */
379 		if ((((buf_len | addr) & (max_width - 1)) == 0) &&
380 		    tlen >= max_width)
381 			break;
382 	}
383 
384 	return max_width;
385 }
386 
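/*
 * Return the best burst size in beats: the largest power of two that
 * divides both tlen and buf_len, capped at max_burst * width bytes and
 * clamped to at least a single beat.
 */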
387 static u32 stm32_mdma_get_best_burst(u32 buf_len, u32 tlen, u32 max_burst,
388 				     enum dma_slave_buswidth width)
389 {
390 	u32 best_burst;
391 
392 	best_burst = min((u32)1 << __ffs(tlen | buf_len),
393 			 max_burst * width) / width;
394 
395 	return (best_burst > 0) ? best_burst : 1;
396 }
397 
398 static int stm32_mdma_disable_chan(struct stm32_mdma_chan *chan)
399 {
400 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
401 	u32 ccr, cisr, id, reg;
402 	int ret;
403 
404 	id = chan->id;
405 	reg = STM32_MDMA_CCR(id);
406 
407 	/* Disable interrupts */
408 	stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_IRQ_MASK);
409 
410 	ccr = stm32_mdma_read(dmadev, reg);
411 	if (ccr & STM32_MDMA_CCR_EN) {
412 		stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_EN);
413 
414 		/* Ensure that any ongoing transfer has been completed */
415 		ret = readl_relaxed_poll_timeout_atomic(
416 				dmadev->base + STM32_MDMA_CISR(id), cisr,
417 				(cisr & STM32_MDMA_CISR_CTCIF), 10, 1000);
418 		if (ret) {
419 			dev_err(chan2dev(chan), "%s: timeout!\n", __func__);
420 			return -EBUSY;
421 		}
422 	}
423 
424 	return 0;
425 }
426 
427 static void stm32_mdma_stop(struct stm32_mdma_chan *chan)
428 {
429 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
430 	u32 status;
431 	int ret;
432 
433 	/* Disable DMA */
434 	ret = stm32_mdma_disable_chan(chan);
435 	if (ret < 0)
436 		return;
437 
438 	/* Clear interrupt status if it is there */
439 	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
440 	if (status) {
441 		dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n",
442 			__func__, status);
443 		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
444 	}
445 
446 	chan->busy = false;
447 }
448 
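/*
 * Addresses whose top nibble matches one of the st,ahb-addr-masks DT
 * entries are reached through the AHB bus, so the corresponding
 * SBUS/DBUS bit is set in CTBR; other addresses use the default bus.
 */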
449 static void stm32_mdma_set_bus(struct stm32_mdma_device *dmadev, u32 *ctbr,
450 			       u32 ctbr_mask, u32 src_addr)
451 {
452 	u32 mask;
453 	int i;
454 
455 	/* Check if memory device is on AHB or AXI */
456 	*ctbr &= ~ctbr_mask;
457 	mask = src_addr & 0xF0000000;
458 	for (i = 0; i < dmadev->nr_ahb_addr_masks; i++) {
459 		if (mask == dmadev->ahb_addr_masks[i]) {
460 			*ctbr |= ctbr_mask;
461 			break;
462 		}
463 	}
464 }
465 
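/*
 * Compute the CCR, CTCR and CTBR values for a device transfer: apply
 * the transfer configuration passed through the DT, check the burst
 * constraints, derive the device side size/burst from dma_slave_config
 * and the memory side size/burst from the buffer address and length,
 * and program the device address register (CDAR or CSAR).
 */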
466 static int stm32_mdma_set_xfer_param(struct stm32_mdma_chan *chan,
467 				     enum dma_transfer_direction direction,
468 				     u32 *mdma_ccr, u32 *mdma_ctcr,
469 				     u32 *mdma_ctbr, dma_addr_t addr,
470 				     u32 buf_len)
471 {
472 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
473 	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
474 	enum dma_slave_buswidth src_addr_width, dst_addr_width;
475 	phys_addr_t src_addr, dst_addr;
476 	int src_bus_width, dst_bus_width;
477 	u32 src_maxburst, dst_maxburst, src_best_burst, dst_best_burst;
478 	u32 ccr, ctcr, ctbr, tlen;
479 
480 	src_addr_width = chan->dma_config.src_addr_width;
481 	dst_addr_width = chan->dma_config.dst_addr_width;
482 	src_maxburst = chan->dma_config.src_maxburst;
483 	dst_maxburst = chan->dma_config.dst_maxburst;
484 
485 	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
486 	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
487 	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
488 
489 	/* Enable HW request mode */
490 	ctcr &= ~STM32_MDMA_CTCR_SWRM;
491 
492 	/* Set DINC, SINC, DINCOS, SINCOS, TRGM and TLEN retrieved from the DT */
493 	ctcr &= ~STM32_MDMA_CTCR_CFG_MASK;
494 	ctcr |= chan_config->transfer_config & STM32_MDMA_CTCR_CFG_MASK;
495 
496 	/*
497 	 * The buffer transfer length (TLEN) field of the CTCR register
498 	 * must be programmed with the number of bytes minus 1.
499 	 */
500 	tlen = STM32_MDMA_CTCR_LEN2_GET(ctcr);
501 	ctcr &= ~STM32_MDMA_CTCR_LEN2_MSK;
502 	ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));
503 
504 	/* Disable packing/unpacking (PKE) */
505 	ctcr &= ~STM32_MDMA_CTCR_PKE;
506 
507 	/* Check burst size constraints */
508 	if (src_maxburst * src_addr_width > STM32_MDMA_MAX_BURST ||
509 	    dst_maxburst * dst_addr_width > STM32_MDMA_MAX_BURST) {
510 		dev_err(chan2dev(chan),
511 			"burst size * bus width higher than %d bytes\n",
512 			STM32_MDMA_MAX_BURST);
513 		return -EINVAL;
514 	}
515 
516 	if ((!is_power_of_2(src_maxburst) && src_maxburst > 0) ||
517 	    (!is_power_of_2(dst_maxburst) && dst_maxburst > 0)) {
518 		dev_err(chan2dev(chan), "burst size must be a power of 2\n");
519 		return -EINVAL;
520 	}
521 
522 	/*
523 	 * Configure channel control:
524 	 * - Clear SW request as in this case this is a HW one
525 	 * - Clear WEX, HEX and BEX bits
526 	 * - Set priority level
527 	 */
528 	ccr &= ~(STM32_MDMA_CCR_SWRQ | STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
529 		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK);
530 	ccr |= STM32_MDMA_CCR_PL(chan_config->priority_level);
531 
532 	/* Configure Trigger selection */
533 	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
534 	ctbr |= STM32_MDMA_CTBR_TSEL(chan_config->request);
535 
536 	switch (direction) {
537 	case DMA_MEM_TO_DEV:
538 		dst_addr = chan->dma_config.dst_addr;
539 
540 		/* Set device data size */
541 		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
542 		if (dst_bus_width < 0)
543 			return dst_bus_width;
544 		ctcr &= ~STM32_MDMA_CTCR_DSIZE_MASK;
545 		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width);
546 
547 		/* Set device burst value */
548 		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
549 							   dst_maxburst,
550 							   dst_addr_width);
551 		chan->mem_burst = dst_best_burst;
552 		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
553 		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));
554 
555 		/* Set memory data size */
556 		src_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
557 		chan->mem_width = src_addr_width;
558 		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
559 		if (src_bus_width < 0)
560 			return src_bus_width;
561 		ctcr &= ~(STM32_MDMA_CTCR_SSIZE_MASK |
562 			STM32_MDMA_CTCR_SINCOS_MASK);
563 		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width) |
564 			STM32_MDMA_CTCR_SINCOS(src_bus_width);
565 
566 		/* Set memory burst value */
567 		src_maxburst = STM32_MDMA_MAX_BUF_LEN / src_addr_width;
568 		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
569 							   src_maxburst,
570 							   src_addr_width);
571 		chan->mem_burst = src_best_burst;
572 		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
573 		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));
574 
575 		/* Select bus */
576 		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
577 				   dst_addr);
578 
579 		if (dst_bus_width != src_bus_width)
580 			ctcr |= STM32_MDMA_CTCR_PKE;
581 
582 		/* Set destination address */
583 		stm32_mdma_write(dmadev, STM32_MDMA_CDAR(chan->id), dst_addr);
584 		break;
585 
586 	case DMA_DEV_TO_MEM:
587 		src_addr = chan->dma_config.src_addr;
588 
589 		/* Set device data size */
590 		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
591 		if (src_bus_width < 0)
592 			return src_bus_width;
593 		ctcr &= ~STM32_MDMA_CTCR_SSIZE_MASK;
594 		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width);
595 
596 		/* Set device burst value */
597 		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
598 							   src_maxburst,
599 							   src_addr_width);
600 		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
601 		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));
602 
603 		/* Set memory data size */
604 		dst_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
605 		chan->mem_width = dst_addr_width;
606 		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
607 		if (dst_bus_width < 0)
608 			return dst_bus_width;
609 		ctcr &= ~(STM32_MDMA_CTCR_DSIZE_MASK |
610 			STM32_MDMA_CTCR_DINCOS_MASK);
611 		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
612 			STM32_MDMA_CTCR_DINCOS(dst_bus_width);
613 
614 		/* Set memory burst value */
615 		dst_maxburst = STM32_MDMA_MAX_BUF_LEN / dst_addr_width;
616 		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
617 							   dst_maxburst,
618 							   dst_addr_width);
619 		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
620 		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));
621 
622 		/* Select bus */
623 		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
624 				   src_addr);
625 
626 		if (dst_bus_width != src_bus_width)
627 			ctcr |= STM32_MDMA_CTCR_PKE;
628 
629 		/* Set source address */
630 		stm32_mdma_write(dmadev, STM32_MDMA_CSAR(chan->id), src_addr);
631 		break;
632 
633 	default:
634 		dev_err(chan2dev(chan), "Dma direction is not supported\n");
635 		return -EINVAL;
636 	}
637 
638 	*mdma_ccr = ccr;
639 	*mdma_ctcr = ctcr;
640 	*mdma_ctbr = ctbr;
641 
642 	return 0;
643 }
644 
645 static void stm32_mdma_dump_hwdesc(struct stm32_mdma_chan *chan,
646 				   struct stm32_mdma_desc_node *node)
647 {
648 	dev_dbg(chan2dev(chan), "hwdesc:  %pad\n", &node->hwdesc_phys);
649 	dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n", node->hwdesc->ctcr);
650 	dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n", node->hwdesc->cbndtr);
651 	dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n", node->hwdesc->csar);
652 	dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n", node->hwdesc->cdar);
653 	dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n", node->hwdesc->cbrur);
654 	dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n", node->hwdesc->clar);
655 	dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n", node->hwdesc->ctbr);
656 	dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n", node->hwdesc->cmar);
657 	dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n\n", node->hwdesc->cmdr);
658 }
659 
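/*
 * Fill the hardware descriptor for node 'count' and chain it: CLAR
 * points to the next node, loops back to node 0 for a cyclic transfer,
 * or is left at 0 to end the linked list.
 */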
660 static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan,
661 				    struct stm32_mdma_desc *desc,
662 				    enum dma_transfer_direction dir, u32 count,
663 				    dma_addr_t src_addr, dma_addr_t dst_addr,
664 				    u32 len, u32 ctcr, u32 ctbr, bool is_last,
665 				    bool is_first, bool is_cyclic)
666 {
667 	struct stm32_mdma_chan_config *config = &chan->chan_config;
668 	struct stm32_mdma_hwdesc *hwdesc;
669 	u32 next = count + 1;
670 
671 	hwdesc = desc->node[count].hwdesc;
672 	hwdesc->ctcr = ctcr;
673 	hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK |
674 			STM32_MDMA_CBNDTR_BRDUM |
675 			STM32_MDMA_CBNDTR_BRSUM |
676 			STM32_MDMA_CBNDTR_BNDT_MASK);
677 	hwdesc->cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
678 	hwdesc->csar = src_addr;
679 	hwdesc->cdar = dst_addr;
680 	hwdesc->cbrur = 0;
681 	hwdesc->ctbr = ctbr;
682 	hwdesc->cmar = config->mask_addr;
683 	hwdesc->cmdr = config->mask_data;
684 
685 	if (is_last) {
686 		if (is_cyclic)
687 			hwdesc->clar = desc->node[0].hwdesc_phys;
688 		else
689 			hwdesc->clar = 0;
690 	} else {
691 		hwdesc->clar = desc->node[next].hwdesc_phys;
692 	}
693 
694 	stm32_mdma_dump_hwdesc(chan, &desc->node[count]);
695 }
696 
697 static int stm32_mdma_setup_xfer(struct stm32_mdma_chan *chan,
698 				 struct stm32_mdma_desc *desc,
699 				 struct scatterlist *sgl, u32 sg_len,
700 				 enum dma_transfer_direction direction)
701 {
702 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
703 	struct dma_slave_config *dma_config = &chan->dma_config;
704 	struct scatterlist *sg;
705 	dma_addr_t src_addr, dst_addr;
706 	u32 ccr, ctcr, ctbr;
707 	int i, ret = 0;
708 
709 	for_each_sg(sgl, sg, sg_len, i) {
710 		if (sg_dma_len(sg) > STM32_MDMA_MAX_BLOCK_LEN) {
711 			dev_err(chan2dev(chan), "Invalid block len\n");
712 			return -EINVAL;
713 		}
714 
715 		if (direction == DMA_MEM_TO_DEV) {
716 			src_addr = sg_dma_address(sg);
717 			dst_addr = dma_config->dst_addr;
718 			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
719 							&ctcr, &ctbr, src_addr,
720 							sg_dma_len(sg));
721 			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
722 					   src_addr);
723 		} else {
724 			src_addr = dma_config->src_addr;
725 			dst_addr = sg_dma_address(sg);
726 			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
727 							&ctcr, &ctbr, dst_addr,
728 							sg_dma_len(sg));
729 			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
730 					   dst_addr);
731 		}
732 
733 		if (ret < 0)
734 			return ret;
735 
736 		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
737 					dst_addr, sg_dma_len(sg), ctcr, ctbr,
738 					i == sg_len - 1, i == 0, false);
739 	}
740 
741 	/* Enable interrupts */
742 	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
743 	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE;
744 	if (sg_len > 1)
745 		ccr |= STM32_MDMA_CCR_BTIE;
746 	desc->ccr = ccr;
747 
748 	return 0;
749 }
750 
751 static struct dma_async_tx_descriptor *
752 stm32_mdma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl,
753 			 u32 sg_len, enum dma_transfer_direction direction,
754 			 unsigned long flags, void *context)
755 {
756 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
757 	struct stm32_mdma_desc *desc;
758 	int i, ret;
759 
760 	/*
761 	 * Once the DMA channel has been set up in cyclic mode, it cannot be
762 	 * reassigned. The channel must be aborted or terminated before
763 	 * another request can be issued.
764 	 */
765 	if (chan->desc && chan->desc->cyclic) {
766 		dev_err(chan2dev(chan),
767 			"Request not allowed when dma in cyclic mode\n");
768 		return NULL;
769 	}
770 
771 	desc = stm32_mdma_alloc_desc(chan, sg_len);
772 	if (!desc)
773 		return NULL;
774 
775 	ret = stm32_mdma_setup_xfer(chan, desc, sgl, sg_len, direction);
776 	if (ret < 0)
777 		goto xfer_setup_err;
778 
779 	desc->cyclic = false;
780 
781 	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
782 
783 xfer_setup_err:
784 	for (i = 0; i < desc->count; i++)
785 		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
786 			      desc->node[i].hwdesc_phys);
787 	kfree(desc);
788 	return NULL;
789 }
790 
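/*
 * Cyclic transfers are built as a ring of buf_len / period_len hardware
 * descriptors, one per period, with the last CLAR pointing back to the
 * first so the transfer wraps without CPU intervention; the block
 * transfer complete interrupt (BTIE) drives the period callback.
 */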
791 static struct dma_async_tx_descriptor *
792 stm32_mdma_prep_dma_cyclic(struct dma_chan *c, dma_addr_t buf_addr,
793 			   size_t buf_len, size_t period_len,
794 			   enum dma_transfer_direction direction,
795 			   unsigned long flags)
796 {
797 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
798 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
799 	struct dma_slave_config *dma_config = &chan->dma_config;
800 	struct stm32_mdma_desc *desc;
801 	dma_addr_t src_addr, dst_addr;
802 	u32 ccr, ctcr, ctbr, count;
803 	int i, ret;
804 
805 	/*
806 	 * Once the DMA channel has been set up in cyclic mode, it cannot be
807 	 * reassigned. The channel must be aborted or terminated before
808 	 * another request can be issued.
809 	 */
810 	if (chan->desc && chan->desc->cyclic) {
811 		dev_err(chan2dev(chan),
812 			"Request not allowed when dma in cyclic mode\n");
813 		return NULL;
814 	}
815 
816 	if (!buf_len || !period_len || period_len > STM32_MDMA_MAX_BLOCK_LEN) {
817 		dev_err(chan2dev(chan), "Invalid buffer/period len\n");
818 		return NULL;
819 	}
820 
821 	if (buf_len % period_len) {
822 		dev_err(chan2dev(chan), "buf_len not multiple of period_len\n");
823 		return NULL;
824 	}
825 
826 	count = buf_len / period_len;
827 
828 	desc = stm32_mdma_alloc_desc(chan, count);
829 	if (!desc)
830 		return NULL;
831 
832 	/* Select bus */
833 	if (direction == DMA_MEM_TO_DEV) {
834 		src_addr = buf_addr;
835 		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
836 						&ctbr, src_addr, period_len);
837 		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
838 				   src_addr);
839 	} else {
840 		dst_addr = buf_addr;
841 		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
842 						&ctbr, dst_addr, period_len);
843 		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
844 				   dst_addr);
845 	}
846 
847 	if (ret < 0)
848 		goto xfer_setup_err;
849 
850 	/* Enable interrupts */
851 	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
852 	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE | STM32_MDMA_CCR_BTIE;
853 	desc->ccr = ccr;
854 
855 	/* Configure hwdesc list */
856 	for (i = 0; i < count; i++) {
857 		if (direction == DMA_MEM_TO_DEV) {
858 			src_addr = buf_addr + i * period_len;
859 			dst_addr = dma_config->dst_addr;
860 		} else {
861 			src_addr = dma_config->src_addr;
862 			dst_addr = buf_addr + i * period_len;
863 		}
864 
865 		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
866 					dst_addr, period_len, ctcr, ctbr,
867 					i == count - 1, i == 0, true);
868 	}
869 
870 	desc->cyclic = true;
871 
872 	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
873 
874 xfer_setup_err:
875 	for (i = 0; i < desc->count; i++)
876 		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
877 			      desc->node[i].hwdesc_phys);
878 	kfree(desc);
879 	return NULL;
880 }
881 
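/*
 * mem-to-mem copies run in software request mode (SWRM). A copy of up
 * to 128 bytes is issued as a single buffer transfer, up to 64 KiB as a
 * single block transfer, and anything larger is split into a linked
 * list of blocks of at most 64 KiB each.
 */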
882 static struct dma_async_tx_descriptor *
883 stm32_mdma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dest, dma_addr_t src,
884 			   size_t len, unsigned long flags)
885 {
886 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
887 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
888 	enum dma_slave_buswidth max_width;
889 	struct stm32_mdma_desc *desc;
890 	struct stm32_mdma_hwdesc *hwdesc;
891 	u32 ccr, ctcr, ctbr, cbndtr, count, max_burst, mdma_burst;
892 	u32 best_burst, tlen;
893 	size_t xfer_count, offset;
894 	int src_bus_width, dst_bus_width;
895 	int i;
896 
897 	/*
898 	 * Once the DMA channel has been set up in cyclic mode, it cannot be
899 	 * reassigned. The channel must be aborted or terminated before
900 	 * another request can be issued.
901 	 */
902 	if (chan->desc && chan->desc->cyclic) {
903 		dev_err(chan2dev(chan),
904 			"Request not allowed when dma in cyclic mode\n");
905 		return NULL;
906 	}
907 
908 	count = DIV_ROUND_UP(len, STM32_MDMA_MAX_BLOCK_LEN);
909 	desc = stm32_mdma_alloc_desc(chan, count);
910 	if (!desc)
911 		return NULL;
912 
913 	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
914 	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
915 	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
916 	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
917 
918 	/* Clear WEX/HEX/BEX, priority and IRQ enables, keep transfer error IRQ only */
919 	ccr &= ~(STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
920 		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK |
921 		 STM32_MDMA_CCR_IRQ_MASK);
922 	ccr |= STM32_MDMA_CCR_TEIE;
923 
924 	/* Enable SW request mode, dest/src inc and clear other bits */
925 	ctcr &= ~(STM32_MDMA_CTCR_BWM | STM32_MDMA_CTCR_TRGM_MSK |
926 		  STM32_MDMA_CTCR_PAM_MASK | STM32_MDMA_CTCR_PKE |
927 		  STM32_MDMA_CTCR_TLEN_MSK | STM32_MDMA_CTCR_DBURST_MASK |
928 		  STM32_MDMA_CTCR_SBURST_MASK | STM32_MDMA_CTCR_DINCOS_MASK |
929 		  STM32_MDMA_CTCR_SINCOS_MASK | STM32_MDMA_CTCR_DSIZE_MASK |
930 		  STM32_MDMA_CTCR_SSIZE_MASK | STM32_MDMA_CTCR_DINC_MASK |
931 		  STM32_MDMA_CTCR_SINC_MASK);
932 	ctcr |= STM32_MDMA_CTCR_SWRM | STM32_MDMA_CTCR_SINC(STM32_MDMA_INC) |
933 		STM32_MDMA_CTCR_DINC(STM32_MDMA_INC);
934 
935 	/* Reset HW request */
936 	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
937 
938 	/* Select bus */
939 	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS, src);
940 	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS, dest);
941 
942 	/* Clear CBNDTR registers */
943 	cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK | STM32_MDMA_CBNDTR_BRDUM |
944 			STM32_MDMA_CBNDTR_BRSUM | STM32_MDMA_CBNDTR_BNDT_MASK);
945 
946 	if (len <= STM32_MDMA_MAX_BLOCK_LEN) {
947 		cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
948 		if (len <= STM32_MDMA_MAX_BUF_LEN) {
949 			/* Setup a buffer transfer */
950 			ccr |= STM32_MDMA_CCR_TCIE | STM32_MDMA_CCR_CTCIE;
951 			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BUFFER);
952 		} else {
953 			/* Setup a block transfer */
954 			ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
955 			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BLOCK);
956 		}
957 
958 		tlen = STM32_MDMA_MAX_BUF_LEN;
959 		ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));
960 
961 		/* Set source best burst size */
962 		max_width = stm32_mdma_get_max_width(src, len, tlen);
963 		src_bus_width = stm32_mdma_get_width(chan, max_width);
964 
965 		max_burst = tlen / max_width;
966 		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
967 						       max_width);
968 		mdma_burst = ilog2(best_burst);
969 
970 		ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
971 			STM32_MDMA_CTCR_SSIZE(src_bus_width) |
972 			STM32_MDMA_CTCR_SINCOS(src_bus_width);
973 
974 		/* Set destination best burst size */
975 		max_width = stm32_mdma_get_max_width(dest, len, tlen);
976 		dst_bus_width = stm32_mdma_get_width(chan, max_width);
977 
978 		max_burst = tlen / max_width;
979 		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
980 						       max_width);
981 		mdma_burst = ilog2(best_burst);
982 
983 		ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
984 			STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
985 			STM32_MDMA_CTCR_DINCOS(dst_bus_width);
986 
987 		if (dst_bus_width != src_bus_width)
988 			ctcr |= STM32_MDMA_CTCR_PKE;
989 
990 		/* Prepare hardware descriptor */
991 		hwdesc = desc->node[0].hwdesc;
992 		hwdesc->ctcr = ctcr;
993 		hwdesc->cbndtr = cbndtr;
994 		hwdesc->csar = src;
995 		hwdesc->cdar = dest;
996 		hwdesc->cbrur = 0;
997 		hwdesc->clar = 0;
998 		hwdesc->ctbr = ctbr;
999 		hwdesc->cmar = 0;
1000 		hwdesc->cmdr = 0;
1001 
1002 		stm32_mdma_dump_hwdesc(chan, &desc->node[0]);
1003 	} else {
1004 		/* Setup a LLI transfer */
1005 		ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_LINKED_LIST) |
1006 			STM32_MDMA_CTCR_TLEN((STM32_MDMA_MAX_BUF_LEN - 1));
1007 		ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
1008 		tlen = STM32_MDMA_MAX_BUF_LEN;
1009 
1010 		for (i = 0, offset = 0; offset < len;
1011 		     i++, offset += xfer_count) {
1012 			xfer_count = min_t(size_t, len - offset,
1013 					   STM32_MDMA_MAX_BLOCK_LEN);
1014 
1015 			/* Set source best burst size */
1016 			max_width = stm32_mdma_get_max_width(src, len, tlen);
1017 			src_bus_width = stm32_mdma_get_width(chan, max_width);
1018 
1019 			max_burst = tlen / max_width;
1020 			best_burst = stm32_mdma_get_best_burst(len, tlen,
1021 							       max_burst,
1022 							       max_width);
1023 			mdma_burst = ilog2(best_burst);
1024 
1025 			ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
1026 				STM32_MDMA_CTCR_SSIZE(src_bus_width) |
1027 				STM32_MDMA_CTCR_SINCOS(src_bus_width);
1028 
1029 			/* Set destination best burst size */
1030 			max_width = stm32_mdma_get_max_width(dest, len, tlen);
1031 			dst_bus_width = stm32_mdma_get_width(chan, max_width);
1032 
1033 			max_burst = tlen / max_width;
1034 			best_burst = stm32_mdma_get_best_burst(len, tlen,
1035 							       max_burst,
1036 							       max_width);
1037 			mdma_burst = ilog2(best_burst);
1038 
1039 			ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
1040 				STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
1041 				STM32_MDMA_CTCR_DINCOS(dst_bus_width);
1042 
1043 			if (dst_bus_width != src_bus_width)
1044 				ctcr |= STM32_MDMA_CTCR_PKE;
1045 
1046 			/* Prepare hardware descriptor */
1047 			stm32_mdma_setup_hwdesc(chan, desc, DMA_MEM_TO_MEM, i,
1048 						src + offset, dest + offset,
1049 						xfer_count, ctcr, ctbr,
1050 						i == count - 1, i == 0, false);
1051 		}
1052 	}
1053 
1054 	desc->ccr = ccr;
1055 
1056 	desc->cyclic = false;
1057 
1058 	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
1059 }
1060 
1061 static void stm32_mdma_dump_reg(struct stm32_mdma_chan *chan)
1062 {
1063 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1064 
1065 	dev_dbg(chan2dev(chan), "CCR:     0x%08x\n",
1066 		stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)));
1067 	dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n",
1068 		stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)));
1069 	dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n",
1070 		stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)));
1071 	dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n",
1072 		stm32_mdma_read(dmadev, STM32_MDMA_CSAR(chan->id)));
1073 	dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n",
1074 		stm32_mdma_read(dmadev, STM32_MDMA_CDAR(chan->id)));
1075 	dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n",
1076 		stm32_mdma_read(dmadev, STM32_MDMA_CBRUR(chan->id)));
1077 	dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n",
1078 		stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id)));
1079 	dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n",
1080 		stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id)));
1081 	dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n",
1082 		stm32_mdma_read(dmadev, STM32_MDMA_CMAR(chan->id)));
1083 	dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n",
1084 		stm32_mdma_read(dmadev, STM32_MDMA_CMDR(chan->id)));
1085 }
1086 
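/*
 * Pop the next virtual descriptor and program the whole channel
 * register set from its first hardware descriptor, clear any stale
 * interrupt flags, enable the channel and, for software-requested
 * (mem-to-mem) transfers, trigger the first request. Must be called
 * with the vchan lock held.
 */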
1087 static void stm32_mdma_start_transfer(struct stm32_mdma_chan *chan)
1088 {
1089 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1090 	struct virt_dma_desc *vdesc;
1091 	struct stm32_mdma_hwdesc *hwdesc;
1092 	u32 id = chan->id;
1093 	u32 status, reg;
1094 
1095 	vdesc = vchan_next_desc(&chan->vchan);
1096 	if (!vdesc) {
1097 		chan->desc = NULL;
1098 		return;
1099 	}
1100 
1101 	list_del(&vdesc->node);
1102 
1103 	chan->desc = to_stm32_mdma_desc(vdesc);
1104 	hwdesc = chan->desc->node[0].hwdesc;
1105 	chan->curr_hwdesc = 0;
1106 
1107 	stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr);
1108 	stm32_mdma_write(dmadev, STM32_MDMA_CTCR(id), hwdesc->ctcr);
1109 	stm32_mdma_write(dmadev, STM32_MDMA_CBNDTR(id), hwdesc->cbndtr);
1110 	stm32_mdma_write(dmadev, STM32_MDMA_CSAR(id), hwdesc->csar);
1111 	stm32_mdma_write(dmadev, STM32_MDMA_CDAR(id), hwdesc->cdar);
1112 	stm32_mdma_write(dmadev, STM32_MDMA_CBRUR(id), hwdesc->cbrur);
1113 	stm32_mdma_write(dmadev, STM32_MDMA_CLAR(id), hwdesc->clar);
1114 	stm32_mdma_write(dmadev, STM32_MDMA_CTBR(id), hwdesc->ctbr);
1115 	stm32_mdma_write(dmadev, STM32_MDMA_CMAR(id), hwdesc->cmar);
1116 	stm32_mdma_write(dmadev, STM32_MDMA_CMDR(id), hwdesc->cmdr);
1117 
1118 	/* Clear interrupt status if it is there */
1119 	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
1120 	if (status)
1121 		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(id), status);
1122 
1123 	stm32_mdma_dump_reg(chan);
1124 
1125 	/* Start DMA */
1126 	stm32_mdma_set_bits(dmadev, STM32_MDMA_CCR(id), STM32_MDMA_CCR_EN);
1127 
1128 	/* Set SW request in case of MEM2MEM transfer */
1129 	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM) {
1130 		reg = STM32_MDMA_CCR(id);
1131 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
1132 	}
1133 
1134 	chan->busy = true;
1135 
1136 	dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan);
1137 }
1138 
1139 static void stm32_mdma_issue_pending(struct dma_chan *c)
1140 {
1141 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1142 	unsigned long flags;
1143 
1144 	spin_lock_irqsave(&chan->vchan.lock, flags);
1145 
1146 	if (!vchan_issue_pending(&chan->vchan))
1147 		goto end;
1148 
1149 	dev_dbg(chan2dev(chan), "vchan %pK: issued\n", &chan->vchan);
1150 
1151 	if (!chan->desc && !chan->busy)
1152 		stm32_mdma_start_transfer(chan);
1153 
1154 end:
1155 	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1156 }
1157 
1158 static int stm32_mdma_pause(struct dma_chan *c)
1159 {
1160 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1161 	unsigned long flags;
1162 	int ret;
1163 
1164 	spin_lock_irqsave(&chan->vchan.lock, flags);
1165 	ret = stm32_mdma_disable_chan(chan);
1166 	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1167 
1168 	if (!ret)
1169 		dev_dbg(chan2dev(chan), "vchan %pK: pause\n", &chan->vchan);
1170 
1171 	return ret;
1172 }
1173 
1174 static int stm32_mdma_resume(struct dma_chan *c)
1175 {
1176 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1177 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1178 	struct stm32_mdma_hwdesc *hwdesc;
1179 	unsigned long flags;
1180 	u32 status, reg;
1181 
1182 	hwdesc = chan->desc->node[chan->curr_hwdesc].hwdesc;
1183 
1184 	spin_lock_irqsave(&chan->vchan.lock, flags);
1185 
1186 	/* Re-configure control register */
1187 	stm32_mdma_write(dmadev, STM32_MDMA_CCR(chan->id), chan->desc->ccr);
1188 
1189 	/* Clear interrupt status if it is there */
1190 	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
1191 	if (status)
1192 		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
1193 
1194 	stm32_mdma_dump_reg(chan);
1195 
1196 	/* Re-start DMA */
1197 	reg = STM32_MDMA_CCR(chan->id);
1198 	stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_EN);
1199 
1200 	/* Set SW request in case of MEM2MEM transfer */
1201 	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM)
1202 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
1203 
1204 	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1205 
1206 	dev_dbg(chan2dev(chan), "vchan %pK: resume\n", &chan->vchan);
1207 
1208 	return 0;
1209 }
1210 
1211 static int stm32_mdma_terminate_all(struct dma_chan *c)
1212 {
1213 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1214 	unsigned long flags;
1215 	LIST_HEAD(head);
1216 
1217 	spin_lock_irqsave(&chan->vchan.lock, flags);
1218 	if (chan->desc) {
1219 		vchan_terminate_vdesc(&chan->desc->vdesc);
1220 		if (chan->busy)
1221 			stm32_mdma_stop(chan);
1222 		chan->desc = NULL;
1223 	}
1224 	vchan_get_all_descriptors(&chan->vchan, &head);
1225 	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1226 
1227 	vchan_dma_desc_free_list(&chan->vchan, &head);
1228 
1229 	return 0;
1230 }
1231 
1232 static void stm32_mdma_synchronize(struct dma_chan *c)
1233 {
1234 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1235 
1236 	vchan_synchronize(&chan->vchan);
1237 }
1238 
1239 static int stm32_mdma_slave_config(struct dma_chan *c,
1240 				   struct dma_slave_config *config)
1241 {
1242 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1243 
1244 	memcpy(&chan->dma_config, config, sizeof(*config));
1245 
1246 	return 0;
1247 }
1248 
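/*
 * The residue is the number of bytes still to be transferred in the
 * current block (CBNDTR.BNDT) plus the full length of every hardware
 * descriptor that has not been started yet, rounded up to the memory
 * burst size to match the burst residue granularity reported to
 * dmaengine.
 */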
1249 static size_t stm32_mdma_desc_residue(struct stm32_mdma_chan *chan,
1250 				      struct stm32_mdma_desc *desc,
1251 				      u32 curr_hwdesc)
1252 {
1253 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1254 	struct stm32_mdma_hwdesc *hwdesc;
1255 	u32 cbndtr, residue, modulo, burst_size;
1256 	int i;
1257 
1258 	residue = 0;
1259 	for (i = curr_hwdesc + 1; i < desc->count; i++) {
1260 		hwdesc = desc->node[i].hwdesc;
1261 		residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr);
1262 	}
1263 	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
1264 	residue += cbndtr & STM32_MDMA_CBNDTR_BNDT_MASK;
1265 
1266 	if (!chan->mem_burst)
1267 		return residue;
1268 
1269 	burst_size = chan->mem_burst * chan->mem_width;
1270 	modulo = residue % burst_size;
1271 	if (modulo)
1272 		residue = residue - modulo + burst_size;
1273 
1274 	return residue;
1275 }
1276 
1277 static enum dma_status stm32_mdma_tx_status(struct dma_chan *c,
1278 					    dma_cookie_t cookie,
1279 					    struct dma_tx_state *state)
1280 {
1281 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1282 	struct virt_dma_desc *vdesc;
1283 	enum dma_status status;
1284 	unsigned long flags;
1285 	u32 residue = 0;
1286 
1287 	status = dma_cookie_status(c, cookie, state);
1288 	if ((status == DMA_COMPLETE) || (!state))
1289 		return status;
1290 
1291 	spin_lock_irqsave(&chan->vchan.lock, flags);
1292 
1293 	vdesc = vchan_find_desc(&chan->vchan, cookie);
1294 	if (chan->desc && cookie == chan->desc->vdesc.tx.cookie)
1295 		residue = stm32_mdma_desc_residue(chan, chan->desc,
1296 						  chan->curr_hwdesc);
1297 	else if (vdesc)
1298 		residue = stm32_mdma_desc_residue(chan,
1299 						  to_stm32_mdma_desc(vdesc), 0);
1300 	dma_set_residue(state, residue);
1301 
1302 	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1303 
1304 	return status;
1305 }
1306 
1307 static void stm32_mdma_xfer_end(struct stm32_mdma_chan *chan)
1308 {
1309 	vchan_cookie_complete(&chan->desc->vdesc);
1310 	chan->desc = NULL;
1311 	chan->busy = false;
1312 
1313 	/* Start the next transfer if another descriptor is pending */
1314 	stm32_mdma_start_transfer(chan);
1315 }
1316 
1317 static irqreturn_t stm32_mdma_irq_handler(int irq, void *devid)
1318 {
1319 	struct stm32_mdma_device *dmadev = devid;
1320 	struct stm32_mdma_chan *chan;
1321 	u32 reg, id, ccr, ien, status;
1322 
1323 	/* Find out which channel generates the interrupt */
1324 	status = readl_relaxed(dmadev->base + STM32_MDMA_GISR0);
1325 	if (status) {
1326 		id = __ffs(status);
1327 	} else {
1328 		status = readl_relaxed(dmadev->base + STM32_MDMA_GISR1);
1329 		if (!status) {
1330 			dev_dbg(mdma2dev(dmadev), "spurious it\n");
1331 			return IRQ_NONE;
1332 		}
1333 		id = __ffs(status);
1334 		/*
1335 		 * GISR0 reports status for channels 0 to 31 and GISR1 for
1336 		 * channels 32 to 62, so offset the channel id accordingly.
1337 		 */
1338 		id += 32;
1339 	}
1340 
1341 	chan = &dmadev->chan[id];
1342 	if (!chan) {
1343 		dev_warn(mdma2dev(dmadev), "MDMA channel not initialized\n");
1344 		return IRQ_NONE;
1345 	}
1346 
1347 	/* Handle interrupt for the channel */
1348 	spin_lock(&chan->vchan.lock);
1349 	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
1350 	/* Mask Channel ReQuest Active bit which can be set in case of MEM2MEM */
1351 	status &= ~STM32_MDMA_CISR_CRQA;
1352 	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
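	/*
	 * The interrupt enable bits sit at positions 5..1 of CCR while the
	 * corresponding status flags sit at positions 4..0 of CISR, hence
	 * the shift by one to compare them directly.
	 */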
1353 	ien = (ccr & STM32_MDMA_CCR_IRQ_MASK) >> 1;
1354 
1355 	if (!(status & ien)) {
1356 		spin_unlock(&chan->vchan.lock);
1357 		dev_warn(chan2dev(chan),
1358 			 "spurious it (status=0x%04x, ien=0x%04x)\n",
1359 			 status, ien);
1360 		return IRQ_NONE;
1361 	}
1362 
1363 	reg = STM32_MDMA_CIFCR(id);
1364 
1365 	if (status & STM32_MDMA_CISR_TEIF) {
1366 		dev_err(chan2dev(chan), "Transfer Err: stat=0x%08x\n",
1367 			readl_relaxed(dmadev->base + STM32_MDMA_CESR(id)));
1368 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CTEIF);
1369 		status &= ~STM32_MDMA_CISR_TEIF;
1370 	}
1371 
1372 	if (status & STM32_MDMA_CISR_CTCIF) {
1373 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CCTCIF);
1374 		status &= ~STM32_MDMA_CISR_CTCIF;
1375 		stm32_mdma_xfer_end(chan);
1376 	}
1377 
1378 	if (status & STM32_MDMA_CISR_BRTIF) {
1379 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBRTIF);
1380 		status &= ~STM32_MDMA_CISR_BRTIF;
1381 	}
1382 
1383 	if (status & STM32_MDMA_CISR_BTIF) {
1384 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBTIF);
1385 		status &= ~STM32_MDMA_CISR_BTIF;
1386 		chan->curr_hwdesc++;
1387 		if (chan->desc && chan->desc->cyclic) {
1388 			if (chan->curr_hwdesc == chan->desc->count)
1389 				chan->curr_hwdesc = 0;
1390 			vchan_cyclic_callback(&chan->desc->vdesc);
1391 		}
1392 	}
1393 
1394 	if (status & STM32_MDMA_CISR_TCIF) {
1395 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CLTCIF);
1396 		status &= ~STM32_MDMA_CISR_TCIF;
1397 	}
1398 
1399 	if (status) {
1400 		stm32_mdma_set_bits(dmadev, reg, status);
1401 		dev_err(chan2dev(chan), "DMA error: status=0x%08x\n", status);
1402 		if (!(ccr & STM32_MDMA_CCR_EN))
1403 			dev_err(chan2dev(chan), "chan disabled by HW\n");
1404 	}
1405 
1406 	spin_unlock(&chan->vchan.lock);
1407 
1408 	return IRQ_HANDLED;
1409 }
1410 
1411 static int stm32_mdma_alloc_chan_resources(struct dma_chan *c)
1412 {
1413 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1414 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1415 	int ret;
1416 
1417 	chan->desc_pool = dmam_pool_create(dev_name(&c->dev->device),
1418 					   c->device->dev,
1419 					   sizeof(struct stm32_mdma_hwdesc),
1420 					  __alignof__(struct stm32_mdma_hwdesc),
1421 					   0);
1422 	if (!chan->desc_pool) {
1423 		dev_err(chan2dev(chan), "failed to allocate descriptor pool\n");
1424 		return -ENOMEM;
1425 	}
1426 
1427 	ret = pm_runtime_resume_and_get(dmadev->ddev.dev);
1428 	if (ret < 0)
1429 		return ret;
1430 
1431 	ret = stm32_mdma_disable_chan(chan);
1432 	if (ret < 0)
1433 		pm_runtime_put(dmadev->ddev.dev);
1434 
1435 	return ret;
1436 }
1437 
1438 static void stm32_mdma_free_chan_resources(struct dma_chan *c)
1439 {
1440 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1441 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1442 	unsigned long flags;
1443 
1444 	dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id);
1445 
1446 	if (chan->busy) {
1447 		spin_lock_irqsave(&chan->vchan.lock, flags);
1448 		stm32_mdma_stop(chan);
1449 		chan->desc = NULL;
1450 		spin_unlock_irqrestore(&chan->vchan.lock, flags);
1451 	}
1452 
1453 	pm_runtime_put(dmadev->ddev.dev);
1454 	vchan_free_chan_resources(to_virt_chan(c));
1455 	dmam_pool_destroy(chan->desc_pool);
1456 	chan->desc_pool = NULL;
1457 }
1458 
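/*
 * DT translation: a consumer passes five cells - request line number,
 * priority level, a raw CTCR transfer configuration, the mask address
 * and the mask data. The last two are stored in the channel config and
 * programmed into CMAR/CMDR for every hardware descriptor.
 */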
1459 static struct dma_chan *stm32_mdma_of_xlate(struct of_phandle_args *dma_spec,
1460 					    struct of_dma *ofdma)
1461 {
1462 	struct stm32_mdma_device *dmadev = ofdma->of_dma_data;
1463 	struct stm32_mdma_chan *chan;
1464 	struct dma_chan *c;
1465 	struct stm32_mdma_chan_config config;
1466 
1467 	if (dma_spec->args_count < 5) {
1468 		dev_err(mdma2dev(dmadev), "Bad number of args\n");
1469 		return NULL;
1470 	}
1471 
1472 	config.request = dma_spec->args[0];
1473 	config.priority_level = dma_spec->args[1];
1474 	config.transfer_config = dma_spec->args[2];
1475 	config.mask_addr = dma_spec->args[3];
1476 	config.mask_data = dma_spec->args[4];
1477 
1478 	if (config.request >= dmadev->nr_requests) {
1479 		dev_err(mdma2dev(dmadev), "Bad request line\n");
1480 		return NULL;
1481 	}
1482 
1483 	if (config.priority_level > STM32_MDMA_VERY_HIGH_PRIORITY) {
1484 		dev_err(mdma2dev(dmadev), "Priority level not supported\n");
1485 		return NULL;
1486 	}
1487 
1488 	c = dma_get_any_slave_channel(&dmadev->ddev);
1489 	if (!c) {
1490 		dev_err(mdma2dev(dmadev), "No more channels available\n");
1491 		return NULL;
1492 	}
1493 
1494 	chan = to_stm32_mdma_chan(c);
1495 	chan->chan_config = config;
1496 
1497 	return c;
1498 }
1499 
1500 static const struct of_device_id stm32_mdma_of_match[] = {
1501 	{ .compatible = "st,stm32h7-mdma", },
1502 	{ /* sentinel */ },
1503 };
1504 MODULE_DEVICE_TABLE(of, stm32_mdma_of_match);
1505 
1506 static int stm32_mdma_probe(struct platform_device *pdev)
1507 {
1508 	struct stm32_mdma_chan *chan;
1509 	struct stm32_mdma_device *dmadev;
1510 	struct dma_device *dd;
1511 	struct device_node *of_node;
1512 	struct resource *res;
1513 	struct reset_control *rst;
1514 	u32 nr_channels, nr_requests;
1515 	int i, count, ret;
1516 
1517 	of_node = pdev->dev.of_node;
1518 	if (!of_node)
1519 		return -ENODEV;
1520 
1521 	ret = device_property_read_u32(&pdev->dev, "dma-channels",
1522 				       &nr_channels);
1523 	if (ret) {
1524 		nr_channels = STM32_MDMA_MAX_CHANNELS;
1525 		dev_warn(&pdev->dev, "MDMA defaulting to %i channels\n",
1526 			 nr_channels);
1527 	}
1528 
1529 	ret = device_property_read_u32(&pdev->dev, "dma-requests",
1530 				       &nr_requests);
1531 	if (ret) {
1532 		nr_requests = STM32_MDMA_MAX_REQUESTS;
1533 		dev_warn(&pdev->dev, "MDMA defaulting to %i request lines\n",
1534 			 nr_requests);
1535 	}
1536 
1537 	count = device_property_count_u32(&pdev->dev, "st,ahb-addr-masks");
1538 	if (count < 0)
1539 		count = 0;
1540 
1541 	dmadev = devm_kzalloc(&pdev->dev,
1542 			      struct_size(dmadev, ahb_addr_masks, count),
1543 			      GFP_KERNEL);
1544 	if (!dmadev)
1545 		return -ENOMEM;
1546 
1547 	dmadev->nr_channels = nr_channels;
1548 	dmadev->nr_requests = nr_requests;
1549 	device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
1550 				       dmadev->ahb_addr_masks,
1551 				       count);
1552 	dmadev->nr_ahb_addr_masks = count;
1553 
1554 	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1555 	dmadev->base = devm_ioremap_resource(&pdev->dev, res);
1556 	if (IS_ERR(dmadev->base))
1557 		return PTR_ERR(dmadev->base);
1558 
1559 	dmadev->clk = devm_clk_get(&pdev->dev, NULL);
1560 	if (IS_ERR(dmadev->clk))
1561 		return dev_err_probe(&pdev->dev, PTR_ERR(dmadev->clk),
1562 				     "Missing clock controller\n");
1563 
1564 	ret = clk_prepare_enable(dmadev->clk);
1565 	if (ret < 0) {
1566 		dev_err(&pdev->dev, "clk_prep_enable error: %d\n", ret);
1567 		return ret;
1568 	}
1569 
1570 	rst = devm_reset_control_get(&pdev->dev, NULL);
1571 	if (IS_ERR(rst)) {
1572 		ret = PTR_ERR(rst);
1573 		if (ret == -EPROBE_DEFER)
1574 			goto err_clk;
1575 	} else {
1576 		reset_control_assert(rst);
1577 		udelay(2);
1578 		reset_control_deassert(rst);
1579 	}
1580 
1581 	dd = &dmadev->ddev;
1582 	dma_cap_set(DMA_SLAVE, dd->cap_mask);
1583 	dma_cap_set(DMA_PRIVATE, dd->cap_mask);
1584 	dma_cap_set(DMA_CYCLIC, dd->cap_mask);
1585 	dma_cap_set(DMA_MEMCPY, dd->cap_mask);
1586 	dd->device_alloc_chan_resources = stm32_mdma_alloc_chan_resources;
1587 	dd->device_free_chan_resources = stm32_mdma_free_chan_resources;
1588 	dd->device_tx_status = stm32_mdma_tx_status;
1589 	dd->device_issue_pending = stm32_mdma_issue_pending;
1590 	dd->device_prep_slave_sg = stm32_mdma_prep_slave_sg;
1591 	dd->device_prep_dma_cyclic = stm32_mdma_prep_dma_cyclic;
1592 	dd->device_prep_dma_memcpy = stm32_mdma_prep_dma_memcpy;
1593 	dd->device_config = stm32_mdma_slave_config;
1594 	dd->device_pause = stm32_mdma_pause;
1595 	dd->device_resume = stm32_mdma_resume;
1596 	dd->device_terminate_all = stm32_mdma_terminate_all;
1597 	dd->device_synchronize = stm32_mdma_synchronize;
1598 	dd->descriptor_reuse = true;
1599 
1600 	dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
1601 		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
1602 		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
1603 		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
1604 	dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
1605 		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
1606 		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
1607 		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
1608 	dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
1609 		BIT(DMA_MEM_TO_MEM);
1610 	dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
1611 	dd->max_burst = STM32_MDMA_MAX_BURST;
1612 	dd->dev = &pdev->dev;
1613 	INIT_LIST_HEAD(&dd->channels);
1614 
1615 	for (i = 0; i < dmadev->nr_channels; i++) {
1616 		chan = &dmadev->chan[i];
1617 		chan->id = i;
1618 		chan->vchan.desc_free = stm32_mdma_desc_free;
1619 		vchan_init(&chan->vchan, dd);
1620 	}
1621 
1622 	dmadev->irq = platform_get_irq(pdev, 0);
1623 	if (dmadev->irq < 0) {
1624 		ret = dmadev->irq;
1625 		goto err_clk;
1626 	}
1627 
1628 	ret = devm_request_irq(&pdev->dev, dmadev->irq, stm32_mdma_irq_handler,
1629 			       0, dev_name(&pdev->dev), dmadev);
1630 	if (ret) {
1631 		dev_err(&pdev->dev, "failed to request IRQ\n");
1632 		goto err_clk;
1633 	}
1634 
1635 	ret = dmaenginem_async_device_register(dd);
1636 	if (ret)
1637 		goto err_clk;
1638 
1639 	ret = of_dma_controller_register(of_node, stm32_mdma_of_xlate, dmadev);
1640 	if (ret < 0) {
1641 		dev_err(&pdev->dev,
1642 			"STM32 MDMA DMA OF registration failed %d\n", ret);
1643 		goto err_clk;
1644 	}
1645 
1646 	platform_set_drvdata(pdev, dmadev);
1647 	pm_runtime_set_active(&pdev->dev);
1648 	pm_runtime_enable(&pdev->dev);
1649 	pm_runtime_get_noresume(&pdev->dev);
1650 	pm_runtime_put(&pdev->dev);
1651 
1652 	dev_info(&pdev->dev, "STM32 MDMA driver registered\n");
1653 
1654 	return 0;
1655 
1656 err_clk:
1657 	clk_disable_unprepare(dmadev->clk);
1658 
1659 	return ret;
1660 }
1661 
1662 #ifdef CONFIG_PM
1663 static int stm32_mdma_runtime_suspend(struct device *dev)
1664 {
1665 	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1666 
1667 	clk_disable_unprepare(dmadev->clk);
1668 
1669 	return 0;
1670 }
1671 
1672 static int stm32_mdma_runtime_resume(struct device *dev)
1673 {
1674 	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1675 	int ret;
1676 
1677 	ret = clk_prepare_enable(dmadev->clk);
1678 	if (ret) {
1679 		dev_err(dev, "failed to prepare_enable clock\n");
1680 		return ret;
1681 	}
1682 
1683 	return 0;
1684 }
1685 #endif
1686 
1687 #ifdef CONFIG_PM_SLEEP
1688 static int stm32_mdma_pm_suspend(struct device *dev)
1689 {
1690 	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1691 	u32 ccr, id;
1692 	int ret;
1693 
1694 	ret = pm_runtime_resume_and_get(dev);
1695 	if (ret < 0)
1696 		return ret;
1697 
1698 	for (id = 0; id < dmadev->nr_channels; id++) {
1699 		ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
1700 		if (ccr & STM32_MDMA_CCR_EN) {
1701 			dev_warn(dev, "Suspend is prevented by Chan %i\n", id);
1702 			return -EBUSY;
1703 		}
1704 	}
1705 
1706 	pm_runtime_put_sync(dev);
1707 
1708 	pm_runtime_force_suspend(dev);
1709 
1710 	return 0;
1711 }
1712 
1713 static int stm32_mdma_pm_resume(struct device *dev)
1714 {
1715 	return pm_runtime_force_resume(dev);
1716 }
1717 #endif
1718 
1719 static const struct dev_pm_ops stm32_mdma_pm_ops = {
1720 	SET_SYSTEM_SLEEP_PM_OPS(stm32_mdma_pm_suspend, stm32_mdma_pm_resume)
1721 	SET_RUNTIME_PM_OPS(stm32_mdma_runtime_suspend,
1722 			   stm32_mdma_runtime_resume, NULL)
1723 };
1724 
1725 static struct platform_driver stm32_mdma_driver = {
1726 	.probe = stm32_mdma_probe,
1727 	.driver = {
1728 		.name = "stm32-mdma",
1729 		.of_match_table = stm32_mdma_of_match,
1730 		.pm = &stm32_mdma_pm_ops,
1731 	},
1732 };
1733 
1734 static int __init stm32_mdma_init(void)
1735 {
1736 	return platform_driver_register(&stm32_mdma_driver);
1737 }
1738 
1739 subsys_initcall(stm32_mdma_init);
1740 
1741 MODULE_DESCRIPTION("Driver for STM32 MDMA controller");
1742 MODULE_AUTHOR("M'boumba Cedric Madianga <cedric.madianga@gmail.com>");
1743 MODULE_AUTHOR("Pierre-Yves Mordret <pierre-yves.mordret@st.com>");
1744 MODULE_LICENSE("GPL v2");
1745