/*
 * Driver for the Synopsys DesignWare AHB DMA Controller
 *
 * Copyright (C) 2005-2007 Atmel Corporation
 * Copyright (C) 2010-2011 ST Microelectronics
 * Copyright (C) 2016 Intel Corporation
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <linux/bitops.h>
#include <linux/interrupt.h>
#include <linux/dmaengine.h>

#include <linux/io-64-nonatomic-hi-lo.h>

#include "internal.h"

#define DW_DMA_MAX_NR_REQUESTS	16

/* flow controller */
enum dw_dma_fc {
	DW_DMA_FC_D_M2M,
	DW_DMA_FC_D_M2P,
	DW_DMA_FC_D_P2M,
	DW_DMA_FC_D_P2P,
	DW_DMA_FC_P_P2M,
	DW_DMA_FC_SP_P2P,
	DW_DMA_FC_P_M2P,
	DW_DMA_FC_DP_P2P,
};

/*
 * Redefine this macro to handle differences between 32- and 64-bit
 * addressing, big vs. little endian, etc.
 */
#define DW_REG(name)		u32 name; u32 __pad_##name
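
/*
 * For example, with the default definition above, DW_REG(SAR) expands to
 *
 *	u32 SAR; u32 __pad_SAR;
 *
 * so each register occupies an 8-byte slot, matching the controller's
 * 64-bit register spacing.
 */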

/* Hardware register definitions. */
struct dw_dma_chan_regs {
	DW_REG(SAR);		/* Source Address Register */
	DW_REG(DAR);		/* Destination Address Register */
	DW_REG(LLP);		/* Linked List Pointer */
	u32	CTL_LO;		/* Control Register Low */
	u32	CTL_HI;		/* Control Register High */
	DW_REG(SSTAT);		/* Source Status Register */
	DW_REG(DSTAT);		/* Destination Status Register */
	DW_REG(SSTATAR);	/* Source Status Address Register */
	DW_REG(DSTATAR);	/* Destination Status Address Register */
	u32	CFG_LO;		/* Configuration Register Low */
	u32	CFG_HI;		/* Configuration Register High */
	DW_REG(SGR);		/* Source Gather Register */
	DW_REG(DSR);		/* Destination Scatter Register */
};

struct dw_dma_irq_regs {
	DW_REG(XFER);
	DW_REG(BLOCK);
	DW_REG(SRC_TRAN);
	DW_REG(DST_TRAN);
	DW_REG(ERROR);
};

struct dw_dma_regs {
	/* per-channel registers */
	struct dw_dma_chan_regs	CHAN[DW_DMA_MAX_NR_CHANNELS];

	/* irq handling */
	struct dw_dma_irq_regs	RAW;		/* r */
	struct dw_dma_irq_regs	STATUS;		/* r (raw & mask) */
	struct dw_dma_irq_regs	MASK;		/* rw (set = irq enabled) */
	struct dw_dma_irq_regs	CLEAR;		/* w (ack, affects "raw") */

	DW_REG(STATUS_INT);			/* r */

	/* software handshaking */
	DW_REG(REQ_SRC);
	DW_REG(REQ_DST);
	DW_REG(SGL_REQ_SRC);
	DW_REG(SGL_REQ_DST);
	DW_REG(LAST_SRC);
	DW_REG(LAST_DST);

	/* miscellaneous */
	DW_REG(CFG);
	DW_REG(CH_EN);
	DW_REG(ID);
	DW_REG(TEST);

	/* iDMA 32-bit support */
	DW_REG(CLASS_PRIORITY0);
	DW_REG(CLASS_PRIORITY1);

	/* optional encoded params, 0x3c8..0x3f7 */
	u32	__reserved;

	/* per-channel configuration registers */
	u32	DWC_PARAMS[DW_DMA_MAX_NR_CHANNELS];
	u32	MULTI_BLK_TYPE;
	u32	MAX_BLK_SIZE;

	/* top-level parameters */
	u32	DW_PARAMS;

	/* component ID */
	u32	COMP_TYPE;
	u32	COMP_VERSION;

	/* iDMA 32-bit support */
	DW_REG(FIFO_PARTITION0);
	DW_REG(FIFO_PARTITION1);

	DW_REG(SAI_ERR);
	DW_REG(GLOBAL_CFG);
};

/*
 * Big endian I/O access when reading and writing to the DMA controller
 * registers.  This is needed on some platforms, like the Atmel AVR32
 * architecture.
 */

#ifdef CONFIG_DW_DMAC_BIG_ENDIAN_IO
#define dma_readl_native ioread32be
#define dma_writel_native iowrite32be
#else
#define dma_readl_native readl
#define dma_writel_native writel
#endif

/* Bitfields in DW_PARAMS */
#define DW_PARAMS_NR_CHAN	8		/* number of channels */
#define DW_PARAMS_NR_MASTER	11		/* number of AHB masters */
#define DW_PARAMS_DATA_WIDTH(n)	(15 + 2 * (n))
#define DW_PARAMS_DATA_WIDTH1	15		/* master 1 data width */
#define DW_PARAMS_DATA_WIDTH2	17		/* master 2 data width */
#define DW_PARAMS_DATA_WIDTH3	19		/* master 3 data width */
#define DW_PARAMS_DATA_WIDTH4	21		/* master 4 data width */
#define DW_PARAMS_EN		28		/* encoded parameters */
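
/*
 * These values are bit positions within DW_PARAMS, so callers shift and
 * mask, roughly along these lines (a sketch, not a quote of the probe
 * code; the field widths shown are assumptions):
 *
 *	u32 dw_params = dma_readl(dw, DW_PARAMS);
 *	bool autocfg  = (dw_params >> DW_PARAMS_EN) & 1;
 *	unsigned int nr_channels = ((dw_params >> DW_PARAMS_NR_CHAN) & 7) + 1;
 *	unsigned int nr_masters  = ((dw_params >> DW_PARAMS_NR_MASTER) & 3) + 1;
 */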

/* Bitfields in DWC_PARAMS */
#define DWC_PARAMS_MBLK_EN	11		/* multi block transfer */

/* burst sizes */
enum dw_dma_msize {
	DW_DMA_MSIZE_1,
	DW_DMA_MSIZE_4,
	DW_DMA_MSIZE_8,
	DW_DMA_MSIZE_16,
	DW_DMA_MSIZE_32,
	DW_DMA_MSIZE_64,
	DW_DMA_MSIZE_128,
	DW_DMA_MSIZE_256,
};

/* Bitfields in LLP */
#define DWC_LLP_LMS(x)		((x) & 3)	/* list master select */
#define DWC_LLP_LOC(x)		((x) & ~3)	/* next lli */

/* Bitfields in CTL_LO */
#define DWC_CTLL_INT_EN		(1 << 0)	/* irqs enabled? */
#define DWC_CTLL_DST_WIDTH(n)	((n)<<1)	/* bytes per element */
#define DWC_CTLL_SRC_WIDTH(n)	((n)<<4)
#define DWC_CTLL_DST_INC	(0<<7)		/* DAR update/not */
#define DWC_CTLL_DST_DEC	(1<<7)
#define DWC_CTLL_DST_FIX	(2<<7)
#define DWC_CTLL_SRC_INC	(0<<9)		/* SAR update/not */
#define DWC_CTLL_SRC_DEC	(1<<9)
#define DWC_CTLL_SRC_FIX	(2<<9)
#define DWC_CTLL_DST_MSIZE(n)	((n)<<11)	/* burst, #elements */
#define DWC_CTLL_SRC_MSIZE(n)	((n)<<14)
#define DWC_CTLL_S_GATH_EN	(1 << 17)	/* src gather, !FIX */
#define DWC_CTLL_D_SCAT_EN	(1 << 18)	/* dst scatter, !FIX */
#define DWC_CTLL_FC(n)		((n) << 20)
#define DWC_CTLL_FC_M2M		(0 << 20)	/* mem-to-mem */
#define DWC_CTLL_FC_M2P		(1 << 20)	/* mem-to-periph */
#define DWC_CTLL_FC_P2M		(2 << 20)	/* periph-to-mem */
#define DWC_CTLL_FC_P2P		(3 << 20)	/* periph-to-periph */
/* plus 4 transfer types for peripheral-as-flow-controller */
#define DWC_CTLL_DMS(n)		((n)<<23)	/* dst master select */
#define DWC_CTLL_SMS(n)		((n)<<25)	/* src master select */
#define DWC_CTLL_LLP_D_EN	(1 << 27)	/* dest block chain */
#define DWC_CTLL_LLP_S_EN	(1 << 28)	/* src block chain */
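
/*
 * A CTL_LO value is built by OR-ing the fields above.  The sketch below is
 * illustrative only (widths and msizes chosen arbitrarily) and shows a
 * mem-to-periph block with the DMA controller as flow controller:
 *
 *	u32 ctllo = DWC_CTLL_INT_EN
 *		  | DWC_CTLL_DST_WIDTH(2) | DWC_CTLL_SRC_WIDTH(2)
 *		  | DWC_CTLL_DST_FIX | DWC_CTLL_SRC_INC
 *		  | DWC_CTLL_DST_MSIZE(DW_DMA_MSIZE_16)
 *		  | DWC_CTLL_SRC_MSIZE(DW_DMA_MSIZE_16)
 *		  | DWC_CTLL_FC(DW_DMA_FC_D_M2P);
 */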

/* Bitfields in CTL_HI */
#define DWC_CTLH_BLOCK_TS_MASK	GENMASK(11, 0)
#define DWC_CTLH_BLOCK_TS(x)	((x) & DWC_CTLH_BLOCK_TS_MASK)
#define DWC_CTLH_DONE		(1 << 12)

/* Bitfields in CFG_LO */
#define DWC_CFGL_CH_PRIOR_MASK	(0x7 << 5)	/* priority mask */
#define DWC_CFGL_CH_PRIOR(x)	((x) << 5)	/* priority */
#define DWC_CFGL_CH_SUSP	(1 << 8)	/* pause xfer */
#define DWC_CFGL_FIFO_EMPTY	(1 << 9)	/* FIFO empty (pause complete) */
#define DWC_CFGL_HS_DST		(1 << 10)	/* handshake w/dst */
#define DWC_CFGL_HS_SRC		(1 << 11)	/* handshake w/src */
#define DWC_CFGL_LOCK_CH_XFER	(0 << 12)	/* scope of LOCK_CH */
#define DWC_CFGL_LOCK_CH_BLOCK	(1 << 12)
#define DWC_CFGL_LOCK_CH_XACT	(2 << 12)
#define DWC_CFGL_LOCK_BUS_XFER	(0 << 14)	/* scope of LOCK_BUS */
#define DWC_CFGL_LOCK_BUS_BLOCK	(1 << 14)
#define DWC_CFGL_LOCK_BUS_XACT	(2 << 14)
#define DWC_CFGL_LOCK_CH	(1 << 15)	/* channel lockout */
#define DWC_CFGL_LOCK_BUS	(1 << 16)	/* busmaster lockout */
#define DWC_CFGL_HS_DST_POL	(1 << 18)	/* dst handshake active low */
#define DWC_CFGL_HS_SRC_POL	(1 << 19)	/* src handshake active low */
#define DWC_CFGL_MAX_BURST(x)	((x) << 20)
#define DWC_CFGL_RELOAD_SAR	(1 << 30)
#define DWC_CFGL_RELOAD_DAR	(1 << 31)

/* Bitfields in CFG_HI */
#define DWC_CFGH_FCMODE		(1 << 0)
#define DWC_CFGH_FIFO_MODE	(1 << 1)
#define DWC_CFGH_PROTCTL(x)	((x) << 2)
#define DWC_CFGH_DS_UPD_EN	(1 << 5)
#define DWC_CFGH_SS_UPD_EN	(1 << 6)
#define DWC_CFGH_SRC_PER(x)	((x) << 7)
#define DWC_CFGH_DST_PER(x)	((x) << 11)
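
/*
 * For a slave transfer the channel is typically configured with its
 * priority in CFG_LO and its hardware handshake interface number in
 * CFG_HI.  A rough sketch (the interface number 3 and the mem-to-periph
 * direction are made up for illustration):
 *
 *	u32 cfglo = DWC_CFGL_CH_PRIOR(dwc->priority);
 *	u32 cfghi = DWC_CFGH_DST_PER(3);
 *
 *	channel_writel(dwc, CFG_LO, cfglo);
 *	channel_writel(dwc, CFG_HI, cfghi);
 */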

/* Bitfields in SGR */
#define DWC_SGR_SGI(x)		((x) << 0)
#define DWC_SGR_SGC(x)		((x) << 20)

/* Bitfields in DSR */
#define DWC_DSR_DSI(x)		((x) << 0)
#define DWC_DSR_DSC(x)		((x) << 20)

/* Bitfields in CFG */
#define DW_CFG_DMA_EN		(1 << 0)

/* iDMA 32-bit support */

/* Bitfields in CTL_HI */
#define IDMA32C_CTLH_BLOCK_TS_MASK	GENMASK(16, 0)
#define IDMA32C_CTLH_BLOCK_TS(x)	((x) & IDMA32C_CTLH_BLOCK_TS_MASK)
#define IDMA32C_CTLH_DONE		(1 << 17)

/* Bitfields in CFG_LO */
#define IDMA32C_CFGL_DST_BURST_ALIGN	(1 << 0)	/* dst burst align */
#define IDMA32C_CFGL_SRC_BURST_ALIGN	(1 << 1)	/* src burst align */
#define IDMA32C_CFGL_CH_DRAIN		(1 << 10)	/* drain FIFO */
#define IDMA32C_CFGL_DST_OPT_BL		(1 << 20)	/* optimize dst burst length */
#define IDMA32C_CFGL_SRC_OPT_BL		(1 << 21)	/* optimize src burst length */

/* Bitfields in CFG_HI */
#define IDMA32C_CFGH_SRC_PER(x)		((x) << 0)
#define IDMA32C_CFGH_DST_PER(x)		((x) << 4)
#define IDMA32C_CFGH_RD_ISSUE_THD(x)	((x) << 8)
#define IDMA32C_CFGH_RW_ISSUE_THD(x)	((x) << 18)
#define IDMA32C_CFGH_SRC_PER_EXT(x)	((x) << 28)	/* src peripheral extension */
#define IDMA32C_CFGH_DST_PER_EXT(x)	((x) << 30)	/* dst peripheral extension */

/* Bitfields in FIFO_PARTITION */
#define IDMA32C_FP_PSIZE_CH0(x)		((x) << 0)
#define IDMA32C_FP_PSIZE_CH1(x)		((x) << 13)
#define IDMA32C_FP_UPDATE		(1 << 26)

enum dw_dmac_flags {
	DW_DMA_IS_CYCLIC = 0,
	DW_DMA_IS_SOFT_LLP = 1,
	DW_DMA_IS_PAUSED = 2,
	DW_DMA_IS_INITIALIZED = 3,
};

struct dw_dma_chan {
	struct dma_chan			chan;
	void __iomem			*ch_regs;
	u8				mask;
	u8				priority;
	enum dma_transfer_direction	direction;

	/* software emulation of the LLP transfers */
	struct list_head	*tx_node_active;

	spinlock_t		lock;

	/* these other elements are all protected by lock */
	unsigned long		flags;
	struct list_head	active_list;
	struct list_head	queue;
	struct dw_cyclic_desc	*cdesc;

	unsigned int		descs_allocated;

	/* hardware configuration */
	unsigned int		block_size;
	bool			nollp;

	/* custom slave configuration */
	struct dw_dma_slave	dws;

	/* configuration passed via .device_config */
	struct dma_slave_config dma_sconfig;
};

static inline struct dw_dma_chan_regs __iomem *
__dwc_regs(struct dw_dma_chan *dwc)
{
	return dwc->ch_regs;
}

#define channel_readl(dwc, name) \
	dma_readl_native(&(__dwc_regs(dwc)->name))
#define channel_writel(dwc, name, val) \
	dma_writel_native((val), &(__dwc_regs(dwc)->name))
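
/*
 * The register name is pasted into the struct access, so callers use the
 * field names from struct dw_dma_chan_regs directly, e.g. (illustrative
 * usage only; cfglo is a made-up local):
 *
 *	u32 ctlhi = channel_readl(dwc, CTL_HI);
 *	channel_writel(dwc, CFG_LO, cfglo);
 */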

static inline struct dw_dma_chan *to_dw_dma_chan(struct dma_chan *chan)
{
	return container_of(chan, struct dw_dma_chan, chan);
}

struct dw_dma {
	struct dma_device	dma;
	char			name[20];
	void __iomem		*regs;
	struct dma_pool		*desc_pool;
	struct tasklet_struct	tasklet;

	/* channels */
	struct dw_dma_chan	*chan;
	u8			all_chan_mask;
	u8			in_use;

	/* platform data */
	struct dw_dma_platform_data	*pdata;
};

static inline struct dw_dma_regs __iomem *__dw_regs(struct dw_dma *dw)
{
	return dw->regs;
}

#define dma_readl(dw, name) \
	dma_readl_native(&(__dw_regs(dw)->name))
#define dma_writel(dw, name, val) \
	dma_writel_native((val), &(__dw_regs(dw)->name))

#define idma32_readq(dw, name)				\
	hi_lo_readq(&(__dw_regs(dw)->name))
#define idma32_writeq(dw, name, val)			\
	hi_lo_writeq((val), &(__dw_regs(dw)->name))

#define channel_set_bit(dw, reg, mask) \
	dma_writel(dw, reg, ((mask) << 8) | (mask))
#define channel_clear_bit(dw, reg, mask) \
	dma_writel(dw, reg, ((mask) << 8) | 0)
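
/*
 * Registers such as CH_EN and the interrupt MASK set carry a write-enable
 * byte in bits 15:8: only channel bits whose write-enable bit is also set
 * are affected, so other channels can be updated without a read-modify-
 * write cycle.  For instance (illustrative use):
 *
 *	channel_set_bit(dw, CH_EN, dw->all_chan_mask);
 *
 * writes (mask << 8) | mask, i.e. "write-enable these bits, set them",
 * while channel_clear_bit() writes (mask << 8) | 0 to clear them.
 */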

static inline struct dw_dma *to_dw_dma(struct dma_device *ddev)
{
	return container_of(ddev, struct dw_dma, dma);
}

#ifdef CONFIG_DW_DMAC_BIG_ENDIAN_IO
typedef __be32 __dw32;
#else
typedef __le32 __dw32;
#endif

/* LLI == Linked List Item; a.k.a. DMA block descriptor */
struct dw_lli {
	/* values that are not changed by hardware */
	__dw32		sar;
	__dw32		dar;
	__dw32		llp;		/* chain to next lli */
	__dw32		ctllo;
	/* values that may get written back: */
	__dw32		ctlhi;
	/* sstat and dstat can snapshot peripheral register state.
	 * silicon config may discard either or both...
	 */
	__dw32		sstat;
	__dw32		dstat;
};

struct dw_desc {
	/* FIRST values the hardware uses */
	struct dw_lli			lli;

#ifdef CONFIG_DW_DMAC_BIG_ENDIAN_IO
#define lli_set(d, reg, v)		((d)->lli.reg |= cpu_to_be32(v))
#define lli_clear(d, reg, v)		((d)->lli.reg &= ~cpu_to_be32(v))
#define lli_read(d, reg)		be32_to_cpu((d)->lli.reg)
#define lli_write(d, reg, v)		((d)->lli.reg = cpu_to_be32(v))
#else
#define lli_set(d, reg, v)		((d)->lli.reg |= cpu_to_le32(v))
#define lli_clear(d, reg, v)		((d)->lli.reg &= ~cpu_to_le32(v))
#define lli_read(d, reg)		le32_to_cpu((d)->lli.reg)
#define lli_write(d, reg, v)		((d)->lli.reg = cpu_to_le32(v))
#endif
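
/*
 * The lli_*() helpers keep struct dw_lli in the byte order the controller
 * expects while the driver works in CPU order.  Illustrative usage only
 * (the variables are made up):
 *
 *	lli_write(desc, sar, src_addr);
 *	lli_set(desc, ctllo, DWC_CTLL_LLP_S_EN);
 *	ctlhi = lli_read(desc, ctlhi);
 */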

	/* THEN values for driver housekeeping */
	struct list_head		desc_node;
	struct list_head		tx_list;
	struct dma_async_tx_descriptor	txd;
	size_t				len;
	size_t				total_len;
	u32				residue;
};

#define to_dw_desc(h)	list_entry(h, struct dw_desc, desc_node)

static inline struct dw_desc *
txd_to_dw_desc(struct dma_async_tx_descriptor *txd)
{
	return container_of(txd, struct dw_desc, txd);
}
409