xref: /openbmc/linux/drivers/dma/stm32-mdma.c (revision b82621ac)
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  *
4  * Copyright (C) STMicroelectronics SA 2017
5  * Author(s): M'boumba Cedric Madianga <cedric.madianga@gmail.com>
6  *            Pierre-Yves Mordret <pierre-yves.mordret@st.com>
7  *
8  * Driver for STM32 MDMA controller
9  *
10  * Inspired by stm32-dma.c and dma-jz4780.c
11  */
12 
13 #include <linux/bitfield.h>
14 #include <linux/clk.h>
15 #include <linux/delay.h>
16 #include <linux/dmaengine.h>
17 #include <linux/dma-mapping.h>
18 #include <linux/dmapool.h>
19 #include <linux/err.h>
20 #include <linux/init.h>
21 #include <linux/iopoll.h>
22 #include <linux/jiffies.h>
23 #include <linux/list.h>
24 #include <linux/log2.h>
25 #include <linux/module.h>
26 #include <linux/of.h>
27 #include <linux/of_device.h>
28 #include <linux/of_dma.h>
29 #include <linux/platform_device.h>
30 #include <linux/pm_runtime.h>
31 #include <linux/reset.h>
32 #include <linux/slab.h>
33 
34 #include "virt-dma.h"
35 
36 #define STM32_MDMA_GISR0		0x0000 /* MDMA Global Int Status Reg 0 */
37 
38 /* MDMA Channel x interrupt/status register */
39 #define STM32_MDMA_CISR(x)		(0x40 + 0x40 * (x)) /* x = 0..62 */
40 #define STM32_MDMA_CISR_CRQA		BIT(16)
41 #define STM32_MDMA_CISR_TCIF		BIT(4)
42 #define STM32_MDMA_CISR_BTIF		BIT(3)
43 #define STM32_MDMA_CISR_BRTIF		BIT(2)
44 #define STM32_MDMA_CISR_CTCIF		BIT(1)
45 #define STM32_MDMA_CISR_TEIF		BIT(0)
46 
47 /* MDMA Channel x interrupt flag clear register */
48 #define STM32_MDMA_CIFCR(x)		(0x44 + 0x40 * (x))
49 #define STM32_MDMA_CIFCR_CLTCIF		BIT(4)
50 #define STM32_MDMA_CIFCR_CBTIF		BIT(3)
51 #define STM32_MDMA_CIFCR_CBRTIF		BIT(2)
52 #define STM32_MDMA_CIFCR_CCTCIF		BIT(1)
53 #define STM32_MDMA_CIFCR_CTEIF		BIT(0)
54 #define STM32_MDMA_CIFCR_CLEAR_ALL	(STM32_MDMA_CIFCR_CLTCIF \
55 					| STM32_MDMA_CIFCR_CBTIF \
56 					| STM32_MDMA_CIFCR_CBRTIF \
57 					| STM32_MDMA_CIFCR_CCTCIF \
58 					| STM32_MDMA_CIFCR_CTEIF)
59 
60 /* MDMA Channel x error status register */
61 #define STM32_MDMA_CESR(x)		(0x48 + 0x40 * (x))
62 #define STM32_MDMA_CESR_BSE		BIT(11)
63 #define STM32_MDMA_CESR_ASR		BIT(10)
64 #define STM32_MDMA_CESR_TEMD		BIT(9)
65 #define STM32_MDMA_CESR_TELD		BIT(8)
66 #define STM32_MDMA_CESR_TED		BIT(7)
67 #define STM32_MDMA_CESR_TEA_MASK	GENMASK(6, 0)
68 
69 /* MDMA Channel x control register */
70 #define STM32_MDMA_CCR(x)		(0x4C + 0x40 * (x))
71 #define STM32_MDMA_CCR_SWRQ		BIT(16)
72 #define STM32_MDMA_CCR_WEX		BIT(14)
73 #define STM32_MDMA_CCR_HEX		BIT(13)
74 #define STM32_MDMA_CCR_BEX		BIT(12)
75 #define STM32_MDMA_CCR_SM		BIT(8)
76 #define STM32_MDMA_CCR_PL_MASK		GENMASK(7, 6)
77 #define STM32_MDMA_CCR_PL(n)		FIELD_PREP(STM32_MDMA_CCR_PL_MASK, (n))
78 #define STM32_MDMA_CCR_TCIE		BIT(5)
79 #define STM32_MDMA_CCR_BTIE		BIT(4)
80 #define STM32_MDMA_CCR_BRTIE		BIT(3)
81 #define STM32_MDMA_CCR_CTCIE		BIT(2)
82 #define STM32_MDMA_CCR_TEIE		BIT(1)
83 #define STM32_MDMA_CCR_EN		BIT(0)
84 #define STM32_MDMA_CCR_IRQ_MASK		(STM32_MDMA_CCR_TCIE \
85 					| STM32_MDMA_CCR_BTIE \
86 					| STM32_MDMA_CCR_BRTIE \
87 					| STM32_MDMA_CCR_CTCIE \
88 					| STM32_MDMA_CCR_TEIE)
89 
90 /* MDMA Channel x transfer configuration register */
91 #define STM32_MDMA_CTCR(x)		(0x50 + 0x40 * (x))
92 #define STM32_MDMA_CTCR_BWM		BIT(31)
93 #define STM32_MDMA_CTCR_SWRM		BIT(30)
94 #define STM32_MDMA_CTCR_TRGM_MSK	GENMASK(29, 28)
95 #define STM32_MDMA_CTCR_TRGM(n)		FIELD_PREP(STM32_MDMA_CTCR_TRGM_MSK, (n))
96 #define STM32_MDMA_CTCR_TRGM_GET(n)	FIELD_GET(STM32_MDMA_CTCR_TRGM_MSK, (n))
97 #define STM32_MDMA_CTCR_PAM_MASK	GENMASK(27, 26)
98 #define STM32_MDMA_CTCR_PAM(n)		FIELD_PREP(STM32_MDMA_CTCR_PAM_MASK, (n))
99 #define STM32_MDMA_CTCR_PKE		BIT(25)
100 #define STM32_MDMA_CTCR_TLEN_MSK	GENMASK(24, 18)
101 #define STM32_MDMA_CTCR_TLEN(n)		FIELD_PREP(STM32_MDMA_CTCR_TLEN_MSK, (n))
102 #define STM32_MDMA_CTCR_TLEN_GET(n)	FIELD_GET(STM32_MDMA_CTCR_TLEN_MSK, (n))
103 #define STM32_MDMA_CTCR_LEN2_MSK	GENMASK(25, 18)
104 #define STM32_MDMA_CTCR_LEN2(n)		FIELD_PREP(STM32_MDMA_CTCR_LEN2_MSK, (n))
105 #define STM32_MDMA_CTCR_LEN2_GET(n)	FIELD_GET(STM32_MDMA_CTCR_LEN2_MSK, (n))
106 #define STM32_MDMA_CTCR_DBURST_MASK	GENMASK(17, 15)
107 #define STM32_MDMA_CTCR_DBURST(n)	FIELD_PREP(STM32_MDMA_CTCR_DBURST_MASK, (n))
108 #define STM32_MDMA_CTCR_SBURST_MASK	GENMASK(14, 12)
109 #define STM32_MDMA_CTCR_SBURST(n)	FIELD_PREP(STM32_MDMA_CTCR_SBURST_MASK, (n))
110 #define STM32_MDMA_CTCR_DINCOS_MASK	GENMASK(11, 10)
111 #define STM32_MDMA_CTCR_DINCOS(n)	FIELD_PREP(STM32_MDMA_CTCR_DINCOS_MASK, (n))
112 #define STM32_MDMA_CTCR_SINCOS_MASK	GENMASK(9, 8)
113 #define STM32_MDMA_CTCR_SINCOS(n)	FIELD_PREP(STM32_MDMA_CTCR_SINCOS_MASK, (n))
114 #define STM32_MDMA_CTCR_DSIZE_MASK	GENMASK(7, 6)
115 #define STM32_MDMA_CTCR_DSIZE(n)	FIELD_PREP(STM32_MDMA_CTCR_DSIZE_MASK, (n))
116 #define STM32_MDMA_CTCR_SSIZE_MASK	GENMASK(5, 4)
117 #define STM32_MDMA_CTCR_SSIZE(n)	FIELD_PREP(STM32_MDMA_CTCR_SSIZE_MASK, (n))
118 #define STM32_MDMA_CTCR_DINC_MASK	GENMASK(3, 2)
119 #define STM32_MDMA_CTCR_DINC(n)		FIELD_PREP(STM32_MDMA_CTCR_DINC_MASK, (n))
120 #define STM32_MDMA_CTCR_SINC_MASK	GENMASK(1, 0)
121 #define STM32_MDMA_CTCR_SINC(n)		FIELD_PREP(STM32_MDMA_CTCR_SINC_MASK, (n))
122 #define STM32_MDMA_CTCR_CFG_MASK	(STM32_MDMA_CTCR_SINC_MASK \
123 					| STM32_MDMA_CTCR_DINC_MASK \
124 					| STM32_MDMA_CTCR_SINCOS_MASK \
125 					| STM32_MDMA_CTCR_DINCOS_MASK \
126 					| STM32_MDMA_CTCR_LEN2_MSK \
127 					| STM32_MDMA_CTCR_TRGM_MSK)
128 
129 /* MDMA Channel x block number of data register */
130 #define STM32_MDMA_CBNDTR(x)		(0x54 + 0x40 * (x))
131 #define STM32_MDMA_CBNDTR_BRC_MK	GENMASK(31, 20)
132 #define STM32_MDMA_CBNDTR_BRC(n)	FIELD_PREP(STM32_MDMA_CBNDTR_BRC_MK, (n))
133 #define STM32_MDMA_CBNDTR_BRC_GET(n)	FIELD_GET(STM32_MDMA_CBNDTR_BRC_MK, (n))
134 
135 #define STM32_MDMA_CBNDTR_BRDUM		BIT(19)
136 #define STM32_MDMA_CBNDTR_BRSUM		BIT(18)
137 #define STM32_MDMA_CBNDTR_BNDT_MASK	GENMASK(16, 0)
138 #define STM32_MDMA_CBNDTR_BNDT(n)	FIELD_PREP(STM32_MDMA_CBNDTR_BNDT_MASK, (n))
139 
140 /* MDMA Channel x source address register */
141 #define STM32_MDMA_CSAR(x)		(0x58 + 0x40 * (x))
142 
143 /* MDMA Channel x destination address register */
144 #define STM32_MDMA_CDAR(x)		(0x5C + 0x40 * (x))
145 
146 /* MDMA Channel x block repeat address update register */
147 #define STM32_MDMA_CBRUR(x)		(0x60 + 0x40 * (x))
148 #define STM32_MDMA_CBRUR_DUV_MASK	GENMASK(31, 16)
149 #define STM32_MDMA_CBRUR_DUV(n)		FIELD_PREP(STM32_MDMA_CBRUR_DUV_MASK, (n))
150 #define STM32_MDMA_CBRUR_SUV_MASK	GENMASK(15, 0)
151 #define STM32_MDMA_CBRUR_SUV(n)		FIELD_PREP(STM32_MDMA_CBRUR_SUV_MASK, (n))
152 
153 /* MDMA Channel x link address register */
154 #define STM32_MDMA_CLAR(x)		(0x64 + 0x40 * (x))
155 
156 /* MDMA Channel x trigger and bus selection register */
157 #define STM32_MDMA_CTBR(x)		(0x68 + 0x40 * (x))
158 #define STM32_MDMA_CTBR_DBUS		BIT(17)
159 #define STM32_MDMA_CTBR_SBUS		BIT(16)
160 #define STM32_MDMA_CTBR_TSEL_MASK	GENMASK(5, 0)
161 #define STM32_MDMA_CTBR_TSEL(n)		FIELD_PREP(STM32_MDMA_CTBR_TSEL_MASK, (n))
162 
163 /* MDMA Channel x mask address register */
164 #define STM32_MDMA_CMAR(x)		(0x70 + 0x40 * (x))
165 
166 /* MDMA Channel x mask data register */
167 #define STM32_MDMA_CMDR(x)		(0x74 + 0x40 * (x))
168 
169 #define STM32_MDMA_MAX_BUF_LEN		128
170 #define STM32_MDMA_MAX_BLOCK_LEN	65536
171 #define STM32_MDMA_MAX_CHANNELS		32
172 #define STM32_MDMA_MAX_REQUESTS		256
173 #define STM32_MDMA_MAX_BURST		128
174 #define STM32_MDMA_VERY_HIGH_PRIORITY	0x3
175 
176 enum stm32_mdma_trigger_mode {
177 	STM32_MDMA_BUFFER,
178 	STM32_MDMA_BLOCK,
179 	STM32_MDMA_BLOCK_REP,
180 	STM32_MDMA_LINKED_LIST,
181 };
182 
183 enum stm32_mdma_width {
184 	STM32_MDMA_BYTE,
185 	STM32_MDMA_HALF_WORD,
186 	STM32_MDMA_WORD,
187 	STM32_MDMA_DOUBLE_WORD,
188 };
189 
190 enum stm32_mdma_inc_mode {
191 	STM32_MDMA_FIXED = 0,
192 	STM32_MDMA_INC = 2,
193 	STM32_MDMA_DEC = 3,
194 };
195 
196 struct stm32_mdma_chan_config {
197 	u32 request;
198 	u32 priority_level;
199 	u32 transfer_config;
200 	u32 mask_addr;
201 	u32 mask_data;
202 	bool m2m_hw; /* True when MDMA is triggered by STM32 DMA */
203 };
204 
205 struct stm32_mdma_hwdesc {
206 	u32 ctcr;
207 	u32 cbndtr;
208 	u32 csar;
209 	u32 cdar;
210 	u32 cbrur;
211 	u32 clar;
212 	u32 ctbr;
213 	u32 dummy;
214 	u32 cmar;
215 	u32 cmdr;
216 } __aligned(64);
217 
218 struct stm32_mdma_desc_node {
219 	struct stm32_mdma_hwdesc *hwdesc;
220 	dma_addr_t hwdesc_phys;
221 };
222 
223 struct stm32_mdma_desc {
224 	struct virt_dma_desc vdesc;
225 	u32 ccr;
226 	bool cyclic;
227 	u32 count;
228 	struct stm32_mdma_desc_node node[];
229 };
230 
231 struct stm32_mdma_dma_config {
232 	u32 request;	/* STM32 DMA channel stream id, triggering MDMA */
233 	u32 cmar;	/* STM32 DMA interrupt flag clear register address */
234 	u32 cmdr;	/* STM32 DMA Transfer Complete flag */
235 };
236 
237 struct stm32_mdma_chan {
238 	struct virt_dma_chan vchan;
239 	struct dma_pool *desc_pool;
240 	u32 id;
241 	struct stm32_mdma_desc *desc;
242 	u32 curr_hwdesc;
243 	struct dma_slave_config dma_config;
244 	struct stm32_mdma_chan_config chan_config;
245 	bool busy;
246 	u32 mem_burst;
247 	u32 mem_width;
248 };
249 
250 struct stm32_mdma_device {
251 	struct dma_device ddev;
252 	void __iomem *base;
253 	struct clk *clk;
254 	int irq;
255 	u32 nr_channels;
256 	u32 nr_requests;
257 	u32 nr_ahb_addr_masks;
258 	u32 chan_reserved;
259 	struct stm32_mdma_chan chan[STM32_MDMA_MAX_CHANNELS];
260 	u32 ahb_addr_masks[];
261 };
262 
263 static struct stm32_mdma_device *stm32_mdma_get_dev(
264 	struct stm32_mdma_chan *chan)
265 {
266 	return container_of(chan->vchan.chan.device, struct stm32_mdma_device,
267 			    ddev);
268 }
269 
270 static struct stm32_mdma_chan *to_stm32_mdma_chan(struct dma_chan *c)
271 {
272 	return container_of(c, struct stm32_mdma_chan, vchan.chan);
273 }
274 
275 static struct stm32_mdma_desc *to_stm32_mdma_desc(struct virt_dma_desc *vdesc)
276 {
277 	return container_of(vdesc, struct stm32_mdma_desc, vdesc);
278 }
279 
280 static struct device *chan2dev(struct stm32_mdma_chan *chan)
281 {
282 	return &chan->vchan.chan.dev->device;
283 }
284 
285 static struct device *mdma2dev(struct stm32_mdma_device *mdma_dev)
286 {
287 	return mdma_dev->ddev.dev;
288 }
289 
290 static u32 stm32_mdma_read(struct stm32_mdma_device *dmadev, u32 reg)
291 {
292 	return readl_relaxed(dmadev->base + reg);
293 }
294 
295 static void stm32_mdma_write(struct stm32_mdma_device *dmadev, u32 reg, u32 val)
296 {
297 	writel_relaxed(val, dmadev->base + reg);
298 }
299 
300 static void stm32_mdma_set_bits(struct stm32_mdma_device *dmadev, u32 reg,
301 				u32 mask)
302 {
303 	void __iomem *addr = dmadev->base + reg;
304 
305 	writel_relaxed(readl_relaxed(addr) | mask, addr);
306 }
307 
308 static void stm32_mdma_clr_bits(struct stm32_mdma_device *dmadev, u32 reg,
309 				u32 mask)
310 {
311 	void __iomem *addr = dmadev->base + reg;
312 
313 	writel_relaxed(readl_relaxed(addr) & ~mask, addr);
314 }
315 
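/*
 * Allocate a software descriptor plus @count hardware descriptors from the
 * channel's DMA pool. On any pool allocation failure, the hardware
 * descriptors already allocated are released and NULL is returned.
 */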
316 static struct stm32_mdma_desc *stm32_mdma_alloc_desc(
317 		struct stm32_mdma_chan *chan, u32 count)
318 {
319 	struct stm32_mdma_desc *desc;
320 	int i;
321 
322 	desc = kzalloc(struct_size(desc, node, count), GFP_NOWAIT);
323 	if (!desc)
324 		return NULL;
325 
326 	for (i = 0; i < count; i++) {
327 		desc->node[i].hwdesc =
328 			dma_pool_alloc(chan->desc_pool, GFP_NOWAIT,
329 				       &desc->node[i].hwdesc_phys);
330 		if (!desc->node[i].hwdesc)
331 			goto err;
332 	}
333 
334 	desc->count = count;
335 
336 	return desc;
337 
338 err:
339 	dev_err(chan2dev(chan), "Failed to allocate descriptor\n");
340 	while (--i >= 0)
341 		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
342 			      desc->node[i].hwdesc_phys);
343 	kfree(desc);
344 	return NULL;
345 }
346 
347 static void stm32_mdma_desc_free(struct virt_dma_desc *vdesc)
348 {
349 	struct stm32_mdma_desc *desc = to_stm32_mdma_desc(vdesc);
350 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan);
351 	int i;
352 
353 	for (i = 0; i < desc->count; i++)
354 		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
355 			      desc->node[i].hwdesc_phys);
356 	kfree(desc);
357 }
358 
359 static int stm32_mdma_get_width(struct stm32_mdma_chan *chan,
360 				enum dma_slave_buswidth width)
361 {
362 	switch (width) {
363 	case DMA_SLAVE_BUSWIDTH_1_BYTE:
364 	case DMA_SLAVE_BUSWIDTH_2_BYTES:
365 	case DMA_SLAVE_BUSWIDTH_4_BYTES:
366 	case DMA_SLAVE_BUSWIDTH_8_BYTES:
367 		return ffs(width) - 1;
368 	default:
369 		dev_err(chan2dev(chan), "Dma bus width %i not supported\n",
370 			width);
371 		return -EINVAL;
372 	}
373 }
374 
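/*
 * Pick the widest bus width (8 bytes down to 1 byte) for which both the
 * address and the buffer length are aligned and which still fits within the
 * buffer transfer length @tlen.
 */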
375 static enum dma_slave_buswidth stm32_mdma_get_max_width(dma_addr_t addr,
376 							u32 buf_len, u32 tlen)
377 {
378 	enum dma_slave_buswidth max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
379 
380 	for (max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
381 	     max_width > DMA_SLAVE_BUSWIDTH_1_BYTE;
382 	     max_width >>= 1) {
383 		/*
384 		 * Address and buffer length both have to be aligned on
385 		 * bus width
386 		 */
387 		if ((((buf_len | addr) & (max_width - 1)) == 0) &&
388 		    tlen >= max_width)
389 			break;
390 	}
391 
392 	return max_width;
393 }
394 
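/*
 * Return the largest power-of-two burst, in beats, allowed by the alignment
 * of @tlen and @buf_len without exceeding @max_burst, for the given bus
 * @width. Never returns less than one beat.
 */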
395 static u32 stm32_mdma_get_best_burst(u32 buf_len, u32 tlen, u32 max_burst,
396 				     enum dma_slave_buswidth width)
397 {
398 	u32 best_burst;
399 
400 	best_burst = min((u32)1 << __ffs(tlen | buf_len),
401 			 max_burst * width) / width;
402 
403 	return (best_burst > 0) ? best_burst : 1;
404 }
405 
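/*
 * Disable the channel: mask its interrupts, clear the EN bit and, if a
 * transfer was in progress, poll for the Channel Transfer Complete flag
 * before returning.
 */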
406 static int stm32_mdma_disable_chan(struct stm32_mdma_chan *chan)
407 {
408 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
409 	u32 ccr, cisr, id, reg;
410 	int ret;
411 
412 	id = chan->id;
413 	reg = STM32_MDMA_CCR(id);
414 
415 	/* Disable interrupts */
416 	stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_IRQ_MASK);
417 
418 	ccr = stm32_mdma_read(dmadev, reg);
419 	if (ccr & STM32_MDMA_CCR_EN) {
420 		stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_EN);
421 
422 		/* Ensure that any ongoing transfer has been completed */
423 		ret = readl_relaxed_poll_timeout_atomic(
424 				dmadev->base + STM32_MDMA_CISR(id), cisr,
425 				(cisr & STM32_MDMA_CISR_CTCIF), 10, 1000);
426 		if (ret) {
427 			dev_err(chan2dev(chan), "%s: timeout!\n", __func__);
428 			return -EBUSY;
429 		}
430 	}
431 
432 	return 0;
433 }
434 
435 static void stm32_mdma_stop(struct stm32_mdma_chan *chan)
436 {
437 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
438 	u32 status;
439 	int ret;
440 
441 	/* Disable DMA */
442 	ret = stm32_mdma_disable_chan(chan);
443 	if (ret < 0)
444 		return;
445 
446 	/* Clear interrupt status if it is there */
447 	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
448 	if (status) {
449 		dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n",
450 			__func__, status);
451 		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
452 	}
453 
454 	chan->busy = false;
455 }
456 
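/*
 * Select the bus for one side of the transfer: if the top nibble of the
 * address matches one of the DT-provided AHB address masks, set the
 * corresponding SBUS/DBUS bit in CTBR (AHB/TCM bus), otherwise leave it
 * cleared (AXI bus).
 */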
457 static void stm32_mdma_set_bus(struct stm32_mdma_device *dmadev, u32 *ctbr,
458 			       u32 ctbr_mask, u32 src_addr)
459 {
460 	u32 mask;
461 	int i;
462 
463 	/* Check if memory device is on AHB or AXI */
464 	*ctbr &= ~ctbr_mask;
465 	mask = src_addr & 0xF0000000;
466 	for (i = 0; i < dmadev->nr_ahb_addr_masks; i++) {
467 		if (mask == dmadev->ahb_addr_masks[i]) {
468 			*ctbr |= ctbr_mask;
469 			break;
470 		}
471 	}
472 }
473 
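/*
 * Compute the CCR, CTCR and CTBR values for a slave transfer: apply the
 * DT-provided transfer configuration, derive device and memory bus widths
 * and burst sizes from dma_slave_config and the buffer address/length, and
 * program the fixed device address (CDAR or CSAR) for the given direction.
 */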
474 static int stm32_mdma_set_xfer_param(struct stm32_mdma_chan *chan,
475 				     enum dma_transfer_direction direction,
476 				     u32 *mdma_ccr, u32 *mdma_ctcr,
477 				     u32 *mdma_ctbr, dma_addr_t addr,
478 				     u32 buf_len)
479 {
480 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
481 	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
482 	enum dma_slave_buswidth src_addr_width, dst_addr_width;
483 	phys_addr_t src_addr, dst_addr;
484 	int src_bus_width, dst_bus_width;
485 	u32 src_maxburst, dst_maxburst, src_best_burst, dst_best_burst;
486 	u32 ccr, ctcr, ctbr, tlen;
487 
488 	src_addr_width = chan->dma_config.src_addr_width;
489 	dst_addr_width = chan->dma_config.dst_addr_width;
490 	src_maxburst = chan->dma_config.src_maxburst;
491 	dst_maxburst = chan->dma_config.dst_maxburst;
492 
493 	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
494 	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
495 	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
496 
497 	/* Enable HW request mode */
498 	ctcr &= ~STM32_MDMA_CTCR_SWRM;
499 
500 	/* Set DINC, SINC, DINCOS, SINCOS, TRGM and TLEN as retrieved from DT */
501 	ctcr &= ~STM32_MDMA_CTCR_CFG_MASK;
502 	ctcr |= chan_config->transfer_config & STM32_MDMA_CTCR_CFG_MASK;
503 
504 	/*
505 	 * For the buffer transfer length (TLEN), the CTCR register has to be
506 	 * programmed with the number of bytes minus 1
507 	 */
508 	tlen = STM32_MDMA_CTCR_LEN2_GET(ctcr);
509 	ctcr &= ~STM32_MDMA_CTCR_LEN2_MSK;
510 	ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));
511 
512 	/* Disable Pack Enable */
513 	ctcr &= ~STM32_MDMA_CTCR_PKE;
514 
515 	/* Check burst size constraints */
516 	if (src_maxburst * src_addr_width > STM32_MDMA_MAX_BURST ||
517 	    dst_maxburst * dst_addr_width > STM32_MDMA_MAX_BURST) {
518 		dev_err(chan2dev(chan),
519 			"burst size * bus width higher than %d bytes\n",
520 			STM32_MDMA_MAX_BURST);
521 		return -EINVAL;
522 	}
523 
524 	if ((!is_power_of_2(src_maxburst) && src_maxburst > 0) ||
525 	    (!is_power_of_2(dst_maxburst) && dst_maxburst > 0)) {
526 		dev_err(chan2dev(chan), "burst size must be a power of 2\n");
527 		return -EINVAL;
528 	}
529 
530 	/*
531 	 * Configure channel control:
532 	 * - Clear SW request as in this case this is a HW one
533 	 * - Clear WEX, HEX and BEX bits
534 	 * - Set priority level
535 	 */
536 	ccr &= ~(STM32_MDMA_CCR_SWRQ | STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
537 		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK);
538 	ccr |= STM32_MDMA_CCR_PL(chan_config->priority_level);
539 
540 	/* Configure Trigger selection */
541 	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
542 	ctbr |= STM32_MDMA_CTBR_TSEL(chan_config->request);
543 
544 	switch (direction) {
545 	case DMA_MEM_TO_DEV:
546 		dst_addr = chan->dma_config.dst_addr;
547 
548 		/* Set device data size */
549 		if (chan_config->m2m_hw)
550 			dst_addr_width = stm32_mdma_get_max_width(dst_addr, buf_len,
551 								  STM32_MDMA_MAX_BUF_LEN);
552 		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
553 		if (dst_bus_width < 0)
554 			return dst_bus_width;
555 		ctcr &= ~STM32_MDMA_CTCR_DSIZE_MASK;
556 		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width);
557 		if (chan_config->m2m_hw) {
558 			ctcr &= ~STM32_MDMA_CTCR_DINCOS_MASK;
559 			ctcr |= STM32_MDMA_CTCR_DINCOS(dst_bus_width);
560 		}
561 
562 		/* Set device burst value */
563 		if (chan_config->m2m_hw)
564 			dst_maxburst = STM32_MDMA_MAX_BUF_LEN / dst_addr_width;
565 
566 		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
567 							   dst_maxburst,
568 							   dst_addr_width);
569 		chan->mem_burst = dst_best_burst;
570 		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
571 		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));
572 
573 		/* Set memory data size */
574 		src_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
575 		chan->mem_width = src_addr_width;
576 		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
577 		if (src_bus_width < 0)
578 			return src_bus_width;
579 		ctcr &= ~(STM32_MDMA_CTCR_SSIZE_MASK |
580 			STM32_MDMA_CTCR_SINCOS_MASK);
581 		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width) |
582 			STM32_MDMA_CTCR_SINCOS(src_bus_width);
583 
584 		/* Set memory burst value */
585 		src_maxburst = STM32_MDMA_MAX_BUF_LEN / src_addr_width;
586 		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
587 							   src_maxburst,
588 							   src_addr_width);
589 		chan->mem_burst = src_best_burst;
590 		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
591 		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));
592 
593 		/* Select bus */
594 		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
595 				   dst_addr);
596 
597 		if (dst_bus_width != src_bus_width)
598 			ctcr |= STM32_MDMA_CTCR_PKE;
599 
600 		/* Set destination address */
601 		stm32_mdma_write(dmadev, STM32_MDMA_CDAR(chan->id), dst_addr);
602 		break;
603 
604 	case DMA_DEV_TO_MEM:
605 		src_addr = chan->dma_config.src_addr;
606 
607 		/* Set device data size */
608 		if (chan_config->m2m_hw)
609 			src_addr_width = stm32_mdma_get_max_width(src_addr, buf_len,
610 								  STM32_MDMA_MAX_BUF_LEN);
611 
612 		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
613 		if (src_bus_width < 0)
614 			return src_bus_width;
615 		ctcr &= ~STM32_MDMA_CTCR_SSIZE_MASK;
616 		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width);
617 		if (chan_config->m2m_hw) {
618 			ctcr &= ~STM32_MDMA_CTCR_SINCOS_MASK;
619 			ctcr |= STM32_MDMA_CTCR_SINCOS(src_bus_width);
620 		}
621 
622 		/* Set device burst value */
623 		if (chan_config->m2m_hw)
624 			src_maxburst = STM32_MDMA_MAX_BUF_LEN / src_addr_width;
625 
626 		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
627 							   src_maxburst,
628 							   src_addr_width);
629 		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
630 		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));
631 
632 		/* Set memory data size */
633 		dst_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
634 		chan->mem_width = dst_addr_width;
635 		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
636 		if (dst_bus_width < 0)
637 			return dst_bus_width;
638 		ctcr &= ~(STM32_MDMA_CTCR_DSIZE_MASK |
639 			STM32_MDMA_CTCR_DINCOS_MASK);
640 		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
641 			STM32_MDMA_CTCR_DINCOS(dst_bus_width);
642 
643 		/* Set memory burst value */
644 		dst_maxburst = STM32_MDMA_MAX_BUF_LEN / dst_addr_width;
645 		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
646 							   dst_maxburst,
647 							   dst_addr_width);
648 		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
649 		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));
650 
651 		/* Select bus */
652 		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
653 				   src_addr);
654 
655 		if (dst_bus_width != src_bus_width)
656 			ctcr |= STM32_MDMA_CTCR_PKE;
657 
658 		/* Set source address */
659 		stm32_mdma_write(dmadev, STM32_MDMA_CSAR(chan->id), src_addr);
660 		break;
661 
662 	default:
663 		dev_err(chan2dev(chan), "Dma direction is not supported\n");
664 		return -EINVAL;
665 	}
666 
667 	*mdma_ccr = ccr;
668 	*mdma_ctcr = ctcr;
669 	*mdma_ctbr = ctbr;
670 
671 	return 0;
672 }
673 
674 static void stm32_mdma_dump_hwdesc(struct stm32_mdma_chan *chan,
675 				   struct stm32_mdma_desc_node *node)
676 {
677 	dev_dbg(chan2dev(chan), "hwdesc:  %pad\n", &node->hwdesc_phys);
678 	dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n", node->hwdesc->ctcr);
679 	dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n", node->hwdesc->cbndtr);
680 	dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n", node->hwdesc->csar);
681 	dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n", node->hwdesc->cdar);
682 	dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n", node->hwdesc->cbrur);
683 	dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n", node->hwdesc->clar);
684 	dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n", node->hwdesc->ctbr);
685 	dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n", node->hwdesc->cmar);
686 	dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n\n", node->hwdesc->cmdr);
687 }
688 
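/*
 * Fill hardware descriptor @count of the linked list: block length, source
 * and destination addresses, trigger/bus selection and mask address/data.
 * The last descriptor links back to the first one for cyclic transfers,
 * otherwise it terminates the list (CLAR = 0).
 */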
689 static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan,
690 				    struct stm32_mdma_desc *desc,
691 				    enum dma_transfer_direction dir, u32 count,
692 				    dma_addr_t src_addr, dma_addr_t dst_addr,
693 				    u32 len, u32 ctcr, u32 ctbr, bool is_last,
694 				    bool is_first, bool is_cyclic)
695 {
696 	struct stm32_mdma_chan_config *config = &chan->chan_config;
697 	struct stm32_mdma_hwdesc *hwdesc;
698 	u32 next = count + 1;
699 
700 	hwdesc = desc->node[count].hwdesc;
701 	hwdesc->ctcr = ctcr;
702 	hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK |
703 			STM32_MDMA_CBNDTR_BRDUM |
704 			STM32_MDMA_CBNDTR_BRSUM |
705 			STM32_MDMA_CBNDTR_BNDT_MASK);
706 	hwdesc->cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
707 	hwdesc->csar = src_addr;
708 	hwdesc->cdar = dst_addr;
709 	hwdesc->cbrur = 0;
710 	hwdesc->ctbr = ctbr;
711 	hwdesc->cmar = config->mask_addr;
712 	hwdesc->cmdr = config->mask_data;
713 
714 	if (is_last) {
715 		if (is_cyclic)
716 			hwdesc->clar = desc->node[0].hwdesc_phys;
717 		else
718 			hwdesc->clar = 0;
719 	} else {
720 		hwdesc->clar = desc->node[next].hwdesc_phys;
721 	}
722 
723 	stm32_mdma_dump_hwdesc(chan, &desc->node[count]);
724 }
725 
726 static int stm32_mdma_setup_xfer(struct stm32_mdma_chan *chan,
727 				 struct stm32_mdma_desc *desc,
728 				 struct scatterlist *sgl, u32 sg_len,
729 				 enum dma_transfer_direction direction)
730 {
731 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
732 	struct dma_slave_config *dma_config = &chan->dma_config;
733 	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
734 	struct scatterlist *sg;
735 	dma_addr_t src_addr, dst_addr;
736 	u32 m2m_hw_period, ccr, ctcr, ctbr;
737 	int i, ret = 0;
738 
739 	if (chan_config->m2m_hw)
740 		m2m_hw_period = sg_dma_len(sgl);
741 
742 	for_each_sg(sgl, sg, sg_len, i) {
743 		if (sg_dma_len(sg) > STM32_MDMA_MAX_BLOCK_LEN) {
744 			dev_err(chan2dev(chan), "Invalid block len\n");
745 			return -EINVAL;
746 		}
747 
748 		if (direction == DMA_MEM_TO_DEV) {
749 			src_addr = sg_dma_address(sg);
750 			dst_addr = dma_config->dst_addr;
751 			if (chan_config->m2m_hw && (i & 1))
752 				dst_addr += m2m_hw_period;
753 			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
754 							&ctcr, &ctbr, src_addr,
755 							sg_dma_len(sg));
756 			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
757 					   src_addr);
758 		} else {
759 			src_addr = dma_config->src_addr;
760 			if (chan_config->m2m_hw && (i & 1))
761 				src_addr += m2m_hw_period;
762 			dst_addr = sg_dma_address(sg);
763 			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
764 							&ctcr, &ctbr, dst_addr,
765 							sg_dma_len(sg));
766 			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
767 					   dst_addr);
768 		}
769 
770 		if (ret < 0)
771 			return ret;
772 
773 		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
774 					dst_addr, sg_dma_len(sg), ctcr, ctbr,
775 					i == sg_len - 1, i == 0, false);
776 	}
777 
778 	/* Enable interrupts */
779 	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
780 	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE;
781 	if (sg_len > 1)
782 		ccr |= STM32_MDMA_CCR_BTIE;
783 	desc->ccr = ccr;
784 
785 	return 0;
786 }
787 
788 static struct dma_async_tx_descriptor *
789 stm32_mdma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl,
790 			 u32 sg_len, enum dma_transfer_direction direction,
791 			 unsigned long flags, void *context)
792 {
793 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
794 	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
795 	struct stm32_mdma_desc *desc;
796 	int i, ret;
797 
798 	/*
799 	 * Once the channel has been set up in cyclic mode, it cannot be
800 	 * assigned to another request. The DMA channel must be aborted or
801 	 * terminated before a new request can be issued.
802 	 */
803 	if (chan->desc && chan->desc->cyclic) {
804 		dev_err(chan2dev(chan),
805 			"Request not allowed when dma in cyclic mode\n");
806 		return NULL;
807 	}
808 
809 	desc = stm32_mdma_alloc_desc(chan, sg_len);
810 	if (!desc)
811 		return NULL;
812 
813 	ret = stm32_mdma_setup_xfer(chan, desc, sgl, sg_len, direction);
814 	if (ret < 0)
815 		goto xfer_setup_err;
816 
817 	/*
818 	 * In case of an M2M HW transfer triggered by STM32 DMA, the transfer complete
819 	 * flag is not cleared by hardware, so that the CPU can rearm the STM32 DMA with
820 	 * the next sg element and update the dmaengine framework state.
821 	 */
822 	if (chan_config->m2m_hw && direction == DMA_MEM_TO_DEV) {
823 		struct stm32_mdma_hwdesc *hwdesc;
824 
825 		for (i = 0; i < sg_len; i++) {
826 			hwdesc = desc->node[i].hwdesc;
827 			hwdesc->cmar = 0;
828 			hwdesc->cmdr = 0;
829 		}
830 	}
831 
832 	desc->cyclic = false;
833 
834 	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
835 
836 xfer_setup_err:
837 	for (i = 0; i < desc->count; i++)
838 		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
839 			      desc->node[i].hwdesc_phys);
840 	kfree(desc);
841 	return NULL;
842 }
843 
844 static struct dma_async_tx_descriptor *
845 stm32_mdma_prep_dma_cyclic(struct dma_chan *c, dma_addr_t buf_addr,
846 			   size_t buf_len, size_t period_len,
847 			   enum dma_transfer_direction direction,
848 			   unsigned long flags)
849 {
850 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
851 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
852 	struct dma_slave_config *dma_config = &chan->dma_config;
853 	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
854 	struct stm32_mdma_desc *desc;
855 	dma_addr_t src_addr, dst_addr;
856 	u32 ccr, ctcr, ctbr, count;
857 	int i, ret;
858 
859 	/*
860 	 * Once the channel has been set up in cyclic mode, it cannot be
861 	 * assigned to another request. The DMA channel must be aborted or
862 	 * terminated before a new request can be issued.
863 	 */
864 	if (chan->desc && chan->desc->cyclic) {
865 		dev_err(chan2dev(chan),
866 			"Request not allowed when dma in cyclic mode\n");
867 		return NULL;
868 	}
869 
870 	if (!buf_len || !period_len || period_len > STM32_MDMA_MAX_BLOCK_LEN) {
871 		dev_err(chan2dev(chan), "Invalid buffer/period len\n");
872 		return NULL;
873 	}
874 
875 	if (buf_len % period_len) {
876 		dev_err(chan2dev(chan), "buf_len not multiple of period_len\n");
877 		return NULL;
878 	}
879 
880 	count = buf_len / period_len;
881 
882 	desc = stm32_mdma_alloc_desc(chan, count);
883 	if (!desc)
884 		return NULL;
885 
886 	/* Select bus */
887 	if (direction == DMA_MEM_TO_DEV) {
888 		src_addr = buf_addr;
889 		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
890 						&ctbr, src_addr, period_len);
891 		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
892 				   src_addr);
893 	} else {
894 		dst_addr = buf_addr;
895 		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
896 						&ctbr, dst_addr, period_len);
897 		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
898 				   dst_addr);
899 	}
900 
901 	if (ret < 0)
902 		goto xfer_setup_err;
903 
904 	/* Enable interrupts */
905 	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
906 	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE | STM32_MDMA_CCR_BTIE;
907 	desc->ccr = ccr;
908 
909 	/* Configure hwdesc list */
910 	for (i = 0; i < count; i++) {
911 		if (direction == DMA_MEM_TO_DEV) {
912 			src_addr = buf_addr + i * period_len;
913 			dst_addr = dma_config->dst_addr;
914 			if (chan_config->m2m_hw && (i & 1))
915 				dst_addr += period_len;
916 		} else {
917 			src_addr = dma_config->src_addr;
918 			if (chan_config->m2m_hw && (i & 1))
919 				src_addr += period_len;
920 			dst_addr = buf_addr + i * period_len;
921 		}
922 
923 		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
924 					dst_addr, period_len, ctcr, ctbr,
925 					i == count - 1, i == 0, true);
926 	}
927 
928 	desc->cyclic = true;
929 
930 	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
931 
932 xfer_setup_err:
933 	for (i = 0; i < desc->count; i++)
934 		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
935 			      desc->node[i].hwdesc_phys);
936 	kfree(desc);
937 	return NULL;
938 }
939 
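/*
 * Memory-to-memory transfers use software request mode. Depending on the
 * length, either a single buffer transfer (up to 128 bytes), a single block
 * transfer (up to 64K) or a linked list of block descriptors is programmed.
 */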
940 static struct dma_async_tx_descriptor *
941 stm32_mdma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dest, dma_addr_t src,
942 			   size_t len, unsigned long flags)
943 {
944 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
945 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
946 	enum dma_slave_buswidth max_width;
947 	struct stm32_mdma_desc *desc;
948 	struct stm32_mdma_hwdesc *hwdesc;
949 	u32 ccr, ctcr, ctbr, cbndtr, count, max_burst, mdma_burst;
950 	u32 best_burst, tlen;
951 	size_t xfer_count, offset;
952 	int src_bus_width, dst_bus_width;
953 	int i;
954 
955 	/*
956 	 * Once the channel has been set up in cyclic mode, it cannot be
957 	 * assigned to another request. The DMA channel must be aborted or
958 	 * terminated before a new request can be issued.
959 	 */
960 	if (chan->desc && chan->desc->cyclic) {
961 		dev_err(chan2dev(chan),
962 			"Request not allowed when dma in cyclic mode\n");
963 		return NULL;
964 	}
965 
966 	count = DIV_ROUND_UP(len, STM32_MDMA_MAX_BLOCK_LEN);
967 	desc = stm32_mdma_alloc_desc(chan, count);
968 	if (!desc)
969 		return NULL;
970 
971 	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
972 	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
973 	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
974 	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
975 
976 	/* Enable sw req, some interrupts and clear other bits */
977 	ccr &= ~(STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
978 		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK |
979 		 STM32_MDMA_CCR_IRQ_MASK);
980 	ccr |= STM32_MDMA_CCR_TEIE;
981 
982 	/* Enable SW request mode, dest/src inc and clear other bits */
983 	ctcr &= ~(STM32_MDMA_CTCR_BWM | STM32_MDMA_CTCR_TRGM_MSK |
984 		  STM32_MDMA_CTCR_PAM_MASK | STM32_MDMA_CTCR_PKE |
985 		  STM32_MDMA_CTCR_TLEN_MSK | STM32_MDMA_CTCR_DBURST_MASK |
986 		  STM32_MDMA_CTCR_SBURST_MASK | STM32_MDMA_CTCR_DINCOS_MASK |
987 		  STM32_MDMA_CTCR_SINCOS_MASK | STM32_MDMA_CTCR_DSIZE_MASK |
988 		  STM32_MDMA_CTCR_SSIZE_MASK | STM32_MDMA_CTCR_DINC_MASK |
989 		  STM32_MDMA_CTCR_SINC_MASK);
990 	ctcr |= STM32_MDMA_CTCR_SWRM | STM32_MDMA_CTCR_SINC(STM32_MDMA_INC) |
991 		STM32_MDMA_CTCR_DINC(STM32_MDMA_INC);
992 
993 	/* Reset HW request */
994 	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
995 
996 	/* Select bus */
997 	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS, src);
998 	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS, dest);
999 
1000 	/* Clear CBNDTR registers */
1001 	cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK | STM32_MDMA_CBNDTR_BRDUM |
1002 			STM32_MDMA_CBNDTR_BRSUM | STM32_MDMA_CBNDTR_BNDT_MASK);
1003 
1004 	if (len <= STM32_MDMA_MAX_BLOCK_LEN) {
1005 		cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
1006 		if (len <= STM32_MDMA_MAX_BUF_LEN) {
1007 			/* Setup a buffer transfer */
1008 			ccr |= STM32_MDMA_CCR_TCIE | STM32_MDMA_CCR_CTCIE;
1009 			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BUFFER);
1010 		} else {
1011 			/* Setup a block transfer */
1012 			ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
1013 			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BLOCK);
1014 		}
1015 
1016 		tlen = STM32_MDMA_MAX_BUF_LEN;
1017 		ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));
1018 
1019 		/* Set source best burst size */
1020 		max_width = stm32_mdma_get_max_width(src, len, tlen);
1021 		src_bus_width = stm32_mdma_get_width(chan, max_width);
1022 
1023 		max_burst = tlen / max_width;
1024 		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
1025 						       max_width);
1026 		mdma_burst = ilog2(best_burst);
1027 
1028 		ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
1029 			STM32_MDMA_CTCR_SSIZE(src_bus_width) |
1030 			STM32_MDMA_CTCR_SINCOS(src_bus_width);
1031 
1032 		/* Set destination best burst size */
1033 		max_width = stm32_mdma_get_max_width(dest, len, tlen);
1034 		dst_bus_width = stm32_mdma_get_width(chan, max_width);
1035 
1036 		max_burst = tlen / max_width;
1037 		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
1038 						       max_width);
1039 		mdma_burst = ilog2(best_burst);
1040 
1041 		ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
1042 			STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
1043 			STM32_MDMA_CTCR_DINCOS(dst_bus_width);
1044 
1045 		if (dst_bus_width != src_bus_width)
1046 			ctcr |= STM32_MDMA_CTCR_PKE;
1047 
1048 		/* Prepare hardware descriptor */
1049 		hwdesc = desc->node[0].hwdesc;
1050 		hwdesc->ctcr = ctcr;
1051 		hwdesc->cbndtr = cbndtr;
1052 		hwdesc->csar = src;
1053 		hwdesc->cdar = dest;
1054 		hwdesc->cbrur = 0;
1055 		hwdesc->clar = 0;
1056 		hwdesc->ctbr = ctbr;
1057 		hwdesc->cmar = 0;
1058 		hwdesc->cmdr = 0;
1059 
1060 		stm32_mdma_dump_hwdesc(chan, &desc->node[0]);
1061 	} else {
1062 		/* Setup a LLI transfer */
1063 		ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_LINKED_LIST) |
1064 			STM32_MDMA_CTCR_TLEN((STM32_MDMA_MAX_BUF_LEN - 1));
1065 		ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
1066 		tlen = STM32_MDMA_MAX_BUF_LEN;
1067 
1068 		for (i = 0, offset = 0; offset < len;
1069 		     i++, offset += xfer_count) {
1070 			xfer_count = min_t(size_t, len - offset,
1071 					   STM32_MDMA_MAX_BLOCK_LEN);
1072 
1073 			/* Set source best burst size */
1074 			max_width = stm32_mdma_get_max_width(src, len, tlen);
1075 			src_bus_width = stm32_mdma_get_width(chan, max_width);
1076 
1077 			max_burst = tlen / max_width;
1078 			best_burst = stm32_mdma_get_best_burst(len, tlen,
1079 							       max_burst,
1080 							       max_width);
1081 			mdma_burst = ilog2(best_burst);
1082 
1083 			ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
1084 				STM32_MDMA_CTCR_SSIZE(src_bus_width) |
1085 				STM32_MDMA_CTCR_SINCOS(src_bus_width);
1086 
1087 			/* Set destination best burst size */
1088 			max_width = stm32_mdma_get_max_width(dest, len, tlen);
1089 			dst_bus_width = stm32_mdma_get_width(chan, max_width);
1090 
1091 			max_burst = tlen / max_width;
1092 			best_burst = stm32_mdma_get_best_burst(len, tlen,
1093 							       max_burst,
1094 							       max_width);
1095 			mdma_burst = ilog2(best_burst);
1096 
1097 			ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
1098 				STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
1099 				STM32_MDMA_CTCR_DINCOS(dst_bus_width);
1100 
1101 			if (dst_bus_width != src_bus_width)
1102 				ctcr |= STM32_MDMA_CTCR_PKE;
1103 
1104 			/* Prepare hardware descriptor */
1105 			stm32_mdma_setup_hwdesc(chan, desc, DMA_MEM_TO_MEM, i,
1106 						src + offset, dest + offset,
1107 						xfer_count, ctcr, ctbr,
1108 						i == count - 1, i == 0, false);
1109 		}
1110 	}
1111 
1112 	desc->ccr = ccr;
1113 
1114 	desc->cyclic = false;
1115 
1116 	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
1117 }
1118 
1119 static void stm32_mdma_dump_reg(struct stm32_mdma_chan *chan)
1120 {
1121 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1122 
1123 	dev_dbg(chan2dev(chan), "CCR:     0x%08x\n",
1124 		stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)));
1125 	dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n",
1126 		stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)));
1127 	dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n",
1128 		stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)));
1129 	dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n",
1130 		stm32_mdma_read(dmadev, STM32_MDMA_CSAR(chan->id)));
1131 	dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n",
1132 		stm32_mdma_read(dmadev, STM32_MDMA_CDAR(chan->id)));
1133 	dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n",
1134 		stm32_mdma_read(dmadev, STM32_MDMA_CBRUR(chan->id)));
1135 	dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n",
1136 		stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id)));
1137 	dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n",
1138 		stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id)));
1139 	dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n",
1140 		stm32_mdma_read(dmadev, STM32_MDMA_CMAR(chan->id)));
1141 	dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n",
1142 		stm32_mdma_read(dmadev, STM32_MDMA_CMDR(chan->id)));
1143 }
1144 
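/*
 * Start the next queued descriptor on the channel: program all channel
 * registers from its first hardware descriptor, clear any stale interrupt
 * flags, enable the channel and, for software-requested (memory-to-memory)
 * transfers, kick the transfer with a software request.
 */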
1145 static void stm32_mdma_start_transfer(struct stm32_mdma_chan *chan)
1146 {
1147 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1148 	struct virt_dma_desc *vdesc;
1149 	struct stm32_mdma_hwdesc *hwdesc;
1150 	u32 id = chan->id;
1151 	u32 status, reg;
1152 
1153 	vdesc = vchan_next_desc(&chan->vchan);
1154 	if (!vdesc) {
1155 		chan->desc = NULL;
1156 		return;
1157 	}
1158 
1159 	list_del(&vdesc->node);
1160 
1161 	chan->desc = to_stm32_mdma_desc(vdesc);
1162 	hwdesc = chan->desc->node[0].hwdesc;
1163 	chan->curr_hwdesc = 0;
1164 
1165 	stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr);
1166 	stm32_mdma_write(dmadev, STM32_MDMA_CTCR(id), hwdesc->ctcr);
1167 	stm32_mdma_write(dmadev, STM32_MDMA_CBNDTR(id), hwdesc->cbndtr);
1168 	stm32_mdma_write(dmadev, STM32_MDMA_CSAR(id), hwdesc->csar);
1169 	stm32_mdma_write(dmadev, STM32_MDMA_CDAR(id), hwdesc->cdar);
1170 	stm32_mdma_write(dmadev, STM32_MDMA_CBRUR(id), hwdesc->cbrur);
1171 	stm32_mdma_write(dmadev, STM32_MDMA_CLAR(id), hwdesc->clar);
1172 	stm32_mdma_write(dmadev, STM32_MDMA_CTBR(id), hwdesc->ctbr);
1173 	stm32_mdma_write(dmadev, STM32_MDMA_CMAR(id), hwdesc->cmar);
1174 	stm32_mdma_write(dmadev, STM32_MDMA_CMDR(id), hwdesc->cmdr);
1175 
1176 	/* Clear interrupt status if it is there */
1177 	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
1178 	if (status)
1179 		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(id), status);
1180 
1181 	stm32_mdma_dump_reg(chan);
1182 
1183 	/* Start DMA */
1184 	stm32_mdma_set_bits(dmadev, STM32_MDMA_CCR(id), STM32_MDMA_CCR_EN);
1185 
1186 	/* Set SW request in case of MEM2MEM transfer */
1187 	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM) {
1188 		reg = STM32_MDMA_CCR(id);
1189 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
1190 	}
1191 
1192 	chan->busy = true;
1193 
1194 	dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan);
1195 }
1196 
1197 static void stm32_mdma_issue_pending(struct dma_chan *c)
1198 {
1199 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1200 	unsigned long flags;
1201 
1202 	spin_lock_irqsave(&chan->vchan.lock, flags);
1203 
1204 	if (!vchan_issue_pending(&chan->vchan))
1205 		goto end;
1206 
1207 	dev_dbg(chan2dev(chan), "vchan %pK: issued\n", &chan->vchan);
1208 
1209 	if (!chan->desc && !chan->busy)
1210 		stm32_mdma_start_transfer(chan);
1211 
1212 end:
1213 	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1214 }
1215 
1216 static int stm32_mdma_pause(struct dma_chan *c)
1217 {
1218 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1219 	unsigned long flags;
1220 	int ret;
1221 
1222 	spin_lock_irqsave(&chan->vchan.lock, flags);
1223 	ret = stm32_mdma_disable_chan(chan);
1224 	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1225 
1226 	if (!ret)
1227 		dev_dbg(chan2dev(chan), "vchan %pK: pause\n", &chan->vchan);
1228 
1229 	return ret;
1230 }
1231 
1232 static int stm32_mdma_resume(struct dma_chan *c)
1233 {
1234 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1235 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1236 	struct stm32_mdma_hwdesc *hwdesc;
1237 	unsigned long flags;
1238 	u32 status, reg;
1239 
1240 	hwdesc = chan->desc->node[chan->curr_hwdesc].hwdesc;
1241 
1242 	spin_lock_irqsave(&chan->vchan.lock, flags);
1243 
1244 	/* Re-configure control register */
1245 	stm32_mdma_write(dmadev, STM32_MDMA_CCR(chan->id), chan->desc->ccr);
1246 
1247 	/* Clear interrupt status if it is there */
1248 	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
1249 	if (status)
1250 		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
1251 
1252 	stm32_mdma_dump_reg(chan);
1253 
1254 	/* Re-start DMA */
1255 	reg = STM32_MDMA_CCR(chan->id);
1256 	stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_EN);
1257 
1258 	/* Set SW request in case of MEM2MEM transfer */
1259 	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM)
1260 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
1261 
1262 	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1263 
1264 	dev_dbg(chan2dev(chan), "vchan %pK: resume\n", &chan->vchan);
1265 
1266 	return 0;
1267 }
1268 
1269 static int stm32_mdma_terminate_all(struct dma_chan *c)
1270 {
1271 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1272 	unsigned long flags;
1273 	LIST_HEAD(head);
1274 
1275 	spin_lock_irqsave(&chan->vchan.lock, flags);
1276 	if (chan->desc) {
1277 		vchan_terminate_vdesc(&chan->desc->vdesc);
1278 		if (chan->busy)
1279 			stm32_mdma_stop(chan);
1280 		chan->desc = NULL;
1281 	}
1282 	vchan_get_all_descriptors(&chan->vchan, &head);
1283 	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1284 
1285 	vchan_dma_desc_free_list(&chan->vchan, &head);
1286 
1287 	return 0;
1288 }
1289 
1290 static void stm32_mdma_synchronize(struct dma_chan *c)
1291 {
1292 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1293 
1294 	vchan_synchronize(&chan->vchan);
1295 }
1296 
1297 static int stm32_mdma_slave_config(struct dma_chan *c,
1298 				   struct dma_slave_config *config)
1299 {
1300 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1301 
1302 	memcpy(&chan->dma_config, config, sizeof(*config));
1303 
1304 	/* Check if user is requesting STM32 DMA to trigger MDMA */
1305 	if (config->peripheral_size) {
1306 		struct stm32_mdma_dma_config *mdma_config;
1307 
1308 		mdma_config = (struct stm32_mdma_dma_config *)chan->dma_config.peripheral_config;
1309 		chan->chan_config.request = mdma_config->request;
1310 		chan->chan_config.mask_addr = mdma_config->cmar;
1311 		chan->chan_config.mask_data = mdma_config->cmdr;
1312 		chan->chan_config.m2m_hw = true;
1313 	}
1314 
1315 	return 0;
1316 }
1317 
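/*
 * Residue is the sum of the block lengths of the not-yet-executed hardware
 * descriptors plus the remaining byte count of the current block (CBNDTR),
 * rounded up to the memory burst size when bursts are used.
 */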
1318 static size_t stm32_mdma_desc_residue(struct stm32_mdma_chan *chan,
1319 				      struct stm32_mdma_desc *desc,
1320 				      u32 curr_hwdesc)
1321 {
1322 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1323 	struct stm32_mdma_hwdesc *hwdesc;
1324 	u32 cbndtr, residue, modulo, burst_size;
1325 	int i;
1326 
1327 	residue = 0;
1328 	for (i = curr_hwdesc + 1; i < desc->count; i++) {
1329 		hwdesc = desc->node[i].hwdesc;
1330 		residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr);
1331 	}
1332 	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
1333 	residue += cbndtr & STM32_MDMA_CBNDTR_BNDT_MASK;
1334 
1335 	if (!chan->mem_burst)
1336 		return residue;
1337 
1338 	burst_size = chan->mem_burst * chan->mem_width;
1339 	modulo = residue % burst_size;
1340 	if (modulo)
1341 		residue = residue - modulo + burst_size;
1342 
1343 	return residue;
1344 }
1345 
1346 static enum dma_status stm32_mdma_tx_status(struct dma_chan *c,
1347 					    dma_cookie_t cookie,
1348 					    struct dma_tx_state *state)
1349 {
1350 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1351 	struct virt_dma_desc *vdesc;
1352 	enum dma_status status;
1353 	unsigned long flags;
1354 	u32 residue = 0;
1355 
1356 	status = dma_cookie_status(c, cookie, state);
1357 	if ((status == DMA_COMPLETE) || (!state))
1358 		return status;
1359 
1360 	spin_lock_irqsave(&chan->vchan.lock, flags);
1361 
1362 	vdesc = vchan_find_desc(&chan->vchan, cookie);
1363 	if (chan->desc && cookie == chan->desc->vdesc.tx.cookie)
1364 		residue = stm32_mdma_desc_residue(chan, chan->desc,
1365 						  chan->curr_hwdesc);
1366 	else if (vdesc)
1367 		residue = stm32_mdma_desc_residue(chan,
1368 						  to_stm32_mdma_desc(vdesc), 0);
1369 	dma_set_residue(state, residue);
1370 
1371 	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1372 
1373 	return status;
1374 }
1375 
1376 static void stm32_mdma_xfer_end(struct stm32_mdma_chan *chan)
1377 {
1378 	vchan_cookie_complete(&chan->desc->vdesc);
1379 	chan->desc = NULL;
1380 	chan->busy = false;
1381 
1382 	/* Start the next transfer if the channel has a pending descriptor */
1383 	stm32_mdma_start_transfer(chan);
1384 }
1385 
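/*
 * Single interrupt handler shared by all channels: GISR0 gives the lowest
 * pending channel, whose status flags are then acknowledged in CIFCR. A
 * Channel Transfer Complete ends the current descriptor, a Block Transfer
 * complete advances the current hardware descriptor (and fires the cyclic
 * callback for cyclic transfers).
 */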
1386 static irqreturn_t stm32_mdma_irq_handler(int irq, void *devid)
1387 {
1388 	struct stm32_mdma_device *dmadev = devid;
1389 	struct stm32_mdma_chan *chan;
1390 	u32 reg, id, ccr, ien, status;
1391 
1392 	/* Find out which channel generates the interrupt */
1393 	status = readl_relaxed(dmadev->base + STM32_MDMA_GISR0);
1394 	if (!status) {
1395 		dev_dbg(mdma2dev(dmadev), "spurious it\n");
1396 		return IRQ_NONE;
1397 	}
1398 	id = __ffs(status);
1399 	chan = &dmadev->chan[id];
1400 
1401 	/* Handle interrupt for the channel */
1402 	spin_lock(&chan->vchan.lock);
1403 	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
1404 	/* Mask Channel ReQuest Active bit which can be set in case of MEM2MEM */
1405 	status &= ~STM32_MDMA_CISR_CRQA;
1406 	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
1407 	ien = (ccr & STM32_MDMA_CCR_IRQ_MASK) >> 1;
1408 
1409 	if (!(status & ien)) {
1410 		spin_unlock(&chan->vchan.lock);
1411 		if (chan->busy)
1412 			dev_warn(chan2dev(chan),
1413 				 "spurious it (status=0x%04x, ien=0x%04x)\n", status, ien);
1414 		else
1415 			dev_dbg(chan2dev(chan),
1416 				"spurious it (status=0x%04x, ien=0x%04x)\n", status, ien);
1417 		return IRQ_NONE;
1418 	}
1419 
1420 	reg = STM32_MDMA_CIFCR(id);
1421 
1422 	if (status & STM32_MDMA_CISR_TEIF) {
1423 		dev_err(chan2dev(chan), "Transfer Err: stat=0x%08x\n",
1424 			readl_relaxed(dmadev->base + STM32_MDMA_CESR(id)));
1425 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CTEIF);
1426 		status &= ~STM32_MDMA_CISR_TEIF;
1427 	}
1428 
1429 	if (status & STM32_MDMA_CISR_CTCIF) {
1430 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CCTCIF);
1431 		status &= ~STM32_MDMA_CISR_CTCIF;
1432 		stm32_mdma_xfer_end(chan);
1433 	}
1434 
1435 	if (status & STM32_MDMA_CISR_BRTIF) {
1436 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBRTIF);
1437 		status &= ~STM32_MDMA_CISR_BRTIF;
1438 	}
1439 
1440 	if (status & STM32_MDMA_CISR_BTIF) {
1441 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBTIF);
1442 		status &= ~STM32_MDMA_CISR_BTIF;
1443 		chan->curr_hwdesc++;
1444 		if (chan->desc && chan->desc->cyclic) {
1445 			if (chan->curr_hwdesc == chan->desc->count)
1446 				chan->curr_hwdesc = 0;
1447 			vchan_cyclic_callback(&chan->desc->vdesc);
1448 		}
1449 	}
1450 
1451 	if (status & STM32_MDMA_CISR_TCIF) {
1452 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CLTCIF);
1453 		status &= ~STM32_MDMA_CISR_TCIF;
1454 	}
1455 
1456 	if (status) {
1457 		stm32_mdma_set_bits(dmadev, reg, status);
1458 		dev_err(chan2dev(chan), "DMA error: status=0x%08x\n", status);
1459 		if (!(ccr & STM32_MDMA_CCR_EN))
1460 			dev_err(chan2dev(chan), "chan disabled by HW\n");
1461 	}
1462 
1463 	spin_unlock(&chan->vchan.lock);
1464 
1465 	return IRQ_HANDLED;
1466 }
1467 
1468 static int stm32_mdma_alloc_chan_resources(struct dma_chan *c)
1469 {
1470 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1471 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1472 	int ret;
1473 
1474 	chan->desc_pool = dmam_pool_create(dev_name(&c->dev->device),
1475 					   c->device->dev,
1476 					   sizeof(struct stm32_mdma_hwdesc),
1477 					  __alignof__(struct stm32_mdma_hwdesc),
1478 					   0);
1479 	if (!chan->desc_pool) {
1480 		dev_err(chan2dev(chan), "failed to allocate descriptor pool\n");
1481 		return -ENOMEM;
1482 	}
1483 
1484 	ret = pm_runtime_resume_and_get(dmadev->ddev.dev);
1485 	if (ret < 0)
1486 		return ret;
1487 
1488 	ret = stm32_mdma_disable_chan(chan);
1489 	if (ret < 0)
1490 		pm_runtime_put(dmadev->ddev.dev);
1491 
1492 	return ret;
1493 }
1494 
1495 static void stm32_mdma_free_chan_resources(struct dma_chan *c)
1496 {
1497 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1498 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1499 	unsigned long flags;
1500 
1501 	dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id);
1502 
1503 	if (chan->busy) {
1504 		spin_lock_irqsave(&chan->vchan.lock, flags);
1505 		stm32_mdma_stop(chan);
1506 		chan->desc = NULL;
1507 		spin_unlock_irqrestore(&chan->vchan.lock, flags);
1508 	}
1509 
1510 	pm_runtime_put(dmadev->ddev.dev);
1511 	vchan_free_chan_resources(to_virt_chan(c));
1512 	dmam_pool_destroy(chan->desc_pool);
1513 	chan->desc_pool = NULL;
1514 }
1515 
1516 static bool stm32_mdma_filter_fn(struct dma_chan *c, void *fn_param)
1517 {
1518 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1519 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1520 
1521 	/* Check if chan is marked Secure */
1522 	if (dmadev->chan_reserved & BIT(chan->id))
1523 		return false;
1524 
1525 	return true;
1526 }
1527 
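/*
 * Translate a five-cell DT specifier into a channel configuration:
 * <request, priority, transfer-config (CTCR), mask-addr, mask-data>.
 * A purely illustrative consumer node (labels and values are hypothetical,
 * not taken from a real board file) could look like:
 *
 *	dmas = <&mdma1 0x0 0x3 0x1000 0x0 0x0>;
 *	dma-names = "rx";
 */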
1528 static struct dma_chan *stm32_mdma_of_xlate(struct of_phandle_args *dma_spec,
1529 					    struct of_dma *ofdma)
1530 {
1531 	struct stm32_mdma_device *dmadev = ofdma->of_dma_data;
1532 	dma_cap_mask_t mask = dmadev->ddev.cap_mask;
1533 	struct stm32_mdma_chan *chan;
1534 	struct dma_chan *c;
1535 	struct stm32_mdma_chan_config config;
1536 
1537 	if (dma_spec->args_count < 5) {
1538 		dev_err(mdma2dev(dmadev), "Bad number of args\n");
1539 		return NULL;
1540 	}
1541 
1542 	config.request = dma_spec->args[0];
1543 	config.priority_level = dma_spec->args[1];
1544 	config.transfer_config = dma_spec->args[2];
1545 	config.mask_addr = dma_spec->args[3];
1546 	config.mask_data = dma_spec->args[4];
1547 
1548 	if (config.request >= dmadev->nr_requests) {
1549 		dev_err(mdma2dev(dmadev), "Bad request line\n");
1550 		return NULL;
1551 	}
1552 
1553 	if (config.priority_level > STM32_MDMA_VERY_HIGH_PRIORITY) {
1554 		dev_err(mdma2dev(dmadev), "Priority level not supported\n");
1555 		return NULL;
1556 	}
1557 
1558 	c = __dma_request_channel(&mask, stm32_mdma_filter_fn, &config, ofdma->of_node);
1559 	if (!c) {
1560 		dev_err(mdma2dev(dmadev), "No more channels available\n");
1561 		return NULL;
1562 	}
1563 
1564 	chan = to_stm32_mdma_chan(c);
1565 	chan->chan_config = config;
1566 
1567 	return c;
1568 }
1569 
1570 static const struct of_device_id stm32_mdma_of_match[] = {
1571 	{ .compatible = "st,stm32h7-mdma", },
1572 	{ /* sentinel */ },
1573 };
1574 MODULE_DEVICE_TABLE(of, stm32_mdma_of_match);
1575 
1576 static int stm32_mdma_probe(struct platform_device *pdev)
1577 {
1578 	struct stm32_mdma_chan *chan;
1579 	struct stm32_mdma_device *dmadev;
1580 	struct dma_device *dd;
1581 	struct device_node *of_node;
1582 	struct resource *res;
1583 	struct reset_control *rst;
1584 	u32 nr_channels, nr_requests;
1585 	int i, count, ret;
1586 
1587 	of_node = pdev->dev.of_node;
1588 	if (!of_node)
1589 		return -ENODEV;
1590 
1591 	ret = device_property_read_u32(&pdev->dev, "dma-channels",
1592 				       &nr_channels);
1593 	if (ret) {
1594 		nr_channels = STM32_MDMA_MAX_CHANNELS;
1595 		dev_warn(&pdev->dev, "MDMA defaulting on %i channels\n",
1596 			 nr_channels);
1597 	}
1598 
1599 	ret = device_property_read_u32(&pdev->dev, "dma-requests",
1600 				       &nr_requests);
1601 	if (ret) {
1602 		nr_requests = STM32_MDMA_MAX_REQUESTS;
1603 		dev_warn(&pdev->dev, "MDMA defaulting on %i request lines\n",
1604 			 nr_requests);
1605 	}
1606 
1607 	count = device_property_count_u32(&pdev->dev, "st,ahb-addr-masks");
1608 	if (count < 0)
1609 		count = 0;
1610 
1611 	dmadev = devm_kzalloc(&pdev->dev,
1612 			      struct_size(dmadev, ahb_addr_masks, count),
1613 			      GFP_KERNEL);
1614 	if (!dmadev)
1615 		return -ENOMEM;
1616 
1617 	dmadev->nr_channels = nr_channels;
1618 	dmadev->nr_requests = nr_requests;
1619 	device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
1620 				       dmadev->ahb_addr_masks,
1621 				       count);
1622 	dmadev->nr_ahb_addr_masks = count;
1623 
1624 	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1625 	dmadev->base = devm_ioremap_resource(&pdev->dev, res);
1626 	if (IS_ERR(dmadev->base))
1627 		return PTR_ERR(dmadev->base);
1628 
1629 	dmadev->clk = devm_clk_get(&pdev->dev, NULL);
1630 	if (IS_ERR(dmadev->clk))
1631 		return dev_err_probe(&pdev->dev, PTR_ERR(dmadev->clk),
1632 				     "Missing clock controller\n");
1633 
1634 	ret = clk_prepare_enable(dmadev->clk);
1635 	if (ret < 0) {
1636 		dev_err(&pdev->dev, "clk_prep_enable error: %d\n", ret);
1637 		return ret;
1638 	}
1639 
1640 	rst = devm_reset_control_get(&pdev->dev, NULL);
1641 	if (IS_ERR(rst)) {
1642 		ret = PTR_ERR(rst);
1643 		if (ret == -EPROBE_DEFER)
1644 			goto err_clk;
1645 	} else {
1646 		reset_control_assert(rst);
1647 		udelay(2);
1648 		reset_control_deassert(rst);
1649 	}
1650 
1651 	dd = &dmadev->ddev;
1652 	dma_cap_set(DMA_SLAVE, dd->cap_mask);
1653 	dma_cap_set(DMA_PRIVATE, dd->cap_mask);
1654 	dma_cap_set(DMA_CYCLIC, dd->cap_mask);
1655 	dma_cap_set(DMA_MEMCPY, dd->cap_mask);
1656 	dd->device_alloc_chan_resources = stm32_mdma_alloc_chan_resources;
1657 	dd->device_free_chan_resources = stm32_mdma_free_chan_resources;
1658 	dd->device_tx_status = stm32_mdma_tx_status;
1659 	dd->device_issue_pending = stm32_mdma_issue_pending;
1660 	dd->device_prep_slave_sg = stm32_mdma_prep_slave_sg;
1661 	dd->device_prep_dma_cyclic = stm32_mdma_prep_dma_cyclic;
1662 	dd->device_prep_dma_memcpy = stm32_mdma_prep_dma_memcpy;
1663 	dd->device_config = stm32_mdma_slave_config;
1664 	dd->device_pause = stm32_mdma_pause;
1665 	dd->device_resume = stm32_mdma_resume;
1666 	dd->device_terminate_all = stm32_mdma_terminate_all;
1667 	dd->device_synchronize = stm32_mdma_synchronize;
1668 	dd->descriptor_reuse = true;
1669 
1670 	dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
1671 		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
1672 		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
1673 		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
1674 	dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
1675 		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
1676 		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
1677 		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
1678 	dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
1679 		BIT(DMA_MEM_TO_MEM);
1680 	dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
1681 	dd->max_burst = STM32_MDMA_MAX_BURST;
1682 	dd->dev = &pdev->dev;
1683 	INIT_LIST_HEAD(&dd->channels);
1684 
1685 	for (i = 0; i < dmadev->nr_channels; i++) {
1686 		chan = &dmadev->chan[i];
1687 		chan->id = i;
1688 
1689 		if (stm32_mdma_read(dmadev, STM32_MDMA_CCR(i)) & STM32_MDMA_CCR_SM)
1690 			dmadev->chan_reserved |= BIT(i);
1691 
1692 		chan->vchan.desc_free = stm32_mdma_desc_free;
1693 		vchan_init(&chan->vchan, dd);
1694 	}
1695 
1696 	dmadev->irq = platform_get_irq(pdev, 0);
1697 	if (dmadev->irq < 0) {
1698 		ret = dmadev->irq;
1699 		goto err_clk;
1700 	}
1701 
1702 	ret = devm_request_irq(&pdev->dev, dmadev->irq, stm32_mdma_irq_handler,
1703 			       0, dev_name(&pdev->dev), dmadev);
1704 	if (ret) {
1705 		dev_err(&pdev->dev, "failed to request IRQ\n");
1706 		goto err_clk;
1707 	}
1708 
1709 	ret = dmaenginem_async_device_register(dd);
1710 	if (ret)
1711 		goto err_clk;
1712 
1713 	ret = of_dma_controller_register(of_node, stm32_mdma_of_xlate, dmadev);
1714 	if (ret < 0) {
1715 		dev_err(&pdev->dev,
1716 			"STM32 MDMA DMA OF registration failed %d\n", ret);
1717 		goto err_clk;
1718 	}
1719 
1720 	platform_set_drvdata(pdev, dmadev);
1721 	pm_runtime_set_active(&pdev->dev);
1722 	pm_runtime_enable(&pdev->dev);
1723 	pm_runtime_get_noresume(&pdev->dev);
1724 	pm_runtime_put(&pdev->dev);
1725 
1726 	dev_info(&pdev->dev, "STM32 MDMA driver registered\n");
1727 
1728 	return 0;
1729 
1730 err_clk:
1731 	clk_disable_unprepare(dmadev->clk);
1732 
1733 	return ret;
1734 }
1735 
1736 #ifdef CONFIG_PM
1737 static int stm32_mdma_runtime_suspend(struct device *dev)
1738 {
1739 	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1740 
1741 	clk_disable_unprepare(dmadev->clk);
1742 
1743 	return 0;
1744 }
1745 
1746 static int stm32_mdma_runtime_resume(struct device *dev)
1747 {
1748 	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1749 	int ret;
1750 
1751 	ret = clk_prepare_enable(dmadev->clk);
1752 	if (ret) {
1753 		dev_err(dev, "failed to prepare_enable clock\n");
1754 		return ret;
1755 	}
1756 
1757 	return 0;
1758 }
1759 #endif
1760 
1761 #ifdef CONFIG_PM_SLEEP
1762 static int stm32_mdma_pm_suspend(struct device *dev)
1763 {
1764 	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1765 	u32 ccr, id;
1766 	int ret;
1767 
1768 	ret = pm_runtime_resume_and_get(dev);
1769 	if (ret < 0)
1770 		return ret;
1771 
1772 	for (id = 0; id < dmadev->nr_channels; id++) {
1773 		ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
1774 		if (ccr & STM32_MDMA_CCR_EN) {
1775 			dev_warn(dev, "Suspend is prevented by Chan %i\n", id);
1776 			return -EBUSY;
1777 		}
1778 	}
1779 
1780 	pm_runtime_put_sync(dev);
1781 
1782 	pm_runtime_force_suspend(dev);
1783 
1784 	return 0;
1785 }
1786 
1787 static int stm32_mdma_pm_resume(struct device *dev)
1788 {
1789 	return pm_runtime_force_resume(dev);
1790 }
1791 #endif
1792 
1793 static const struct dev_pm_ops stm32_mdma_pm_ops = {
1794 	SET_SYSTEM_SLEEP_PM_OPS(stm32_mdma_pm_suspend, stm32_mdma_pm_resume)
1795 	SET_RUNTIME_PM_OPS(stm32_mdma_runtime_suspend,
1796 			   stm32_mdma_runtime_resume, NULL)
1797 };
1798 
1799 static struct platform_driver stm32_mdma_driver = {
1800 	.probe = stm32_mdma_probe,
1801 	.driver = {
1802 		.name = "stm32-mdma",
1803 		.of_match_table = stm32_mdma_of_match,
1804 		.pm = &stm32_mdma_pm_ops,
1805 	},
1806 };
1807 
1808 static int __init stm32_mdma_init(void)
1809 {
1810 	return platform_driver_register(&stm32_mdma_driver);
1811 }
1812 
1813 subsys_initcall(stm32_mdma_init);
1814 
1815 MODULE_DESCRIPTION("Driver for STM32 MDMA controller");
1816 MODULE_AUTHOR("M'boumba Cedric Madianga <cedric.madianga@gmail.com>");
1817 MODULE_AUTHOR("Pierre-Yves Mordret <pierre-yves.mordret@st.com>");
1818 MODULE_LICENSE("GPL v2");
1819