/*
 * Driver for the Synopsys DesignWare AHB DMA Controller
 *
 * Copyright (C) 2005-2007 Atmel Corporation
 * Copyright (C) 2010-2011 ST Microelectronics
 * Copyright (C) 2016 Intel Corporation
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <linux/bitops.h>
#include <linux/interrupt.h>
#include <linux/dmaengine.h>

#include <linux/io-64-nonatomic-hi-lo.h>

#include "internal.h"

#define DW_DMA_MAX_NR_REQUESTS	16

/* flow controller */
enum dw_dma_fc {
	DW_DMA_FC_D_M2M,
	DW_DMA_FC_D_M2P,
	DW_DMA_FC_D_P2M,
	DW_DMA_FC_D_P2P,
	DW_DMA_FC_P_P2M,
	DW_DMA_FC_SP_P2P,
	DW_DMA_FC_P_M2P,
	DW_DMA_FC_DP_P2P,
};

/*
 * Redefine this macro to handle differences between 32- and 64-bit
 * addressing, big vs. little endian, etc.
 */
#define DW_REG(name)		u32 name; u32 __pad_##name
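/*
 * For reference, DW_REG(SAR) expands to
 *
 *	u32 SAR; u32 __pad_SAR;
 *
 * so every register declared this way occupies a full 64-bit slot in the
 * register map even though only the low 32 bits are used here.
 */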

/* Hardware register definitions. */
struct dw_dma_chan_regs {
	DW_REG(SAR);		/* Source Address Register */
	DW_REG(DAR);		/* Destination Address Register */
	DW_REG(LLP);		/* Linked List Pointer */
	u32	CTL_LO;		/* Control Register Low */
	u32	CTL_HI;		/* Control Register High */
	DW_REG(SSTAT);
	DW_REG(DSTAT);
	DW_REG(SSTATAR);
	DW_REG(DSTATAR);
	u32	CFG_LO;		/* Configuration Register Low */
	u32	CFG_HI;		/* Configuration Register High */
	DW_REG(SGR);
	DW_REG(DSR);
};
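/*
 * With the DW_REG() padding, sizeof(struct dw_dma_chan_regs) comes to
 * 0x58 bytes, which matches the per-channel register stride of the
 * controller; CHAN[n] in struct dw_dma_regs below therefore starts at
 * offset n * 0x58.
 */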

struct dw_dma_irq_regs {
	DW_REG(XFER);
	DW_REG(BLOCK);
	DW_REG(SRC_TRAN);
	DW_REG(DST_TRAN);
	DW_REG(ERROR);
};

struct dw_dma_regs {
	/* per-channel registers */
	struct dw_dma_chan_regs	CHAN[DW_DMA_MAX_NR_CHANNELS];

	/* irq handling */
	struct dw_dma_irq_regs	RAW;		/* r */
	struct dw_dma_irq_regs	STATUS;		/* r (raw & mask) */
	struct dw_dma_irq_regs	MASK;		/* rw (set = irq enabled) */
	struct dw_dma_irq_regs	CLEAR;		/* w (ack, affects "raw") */

	DW_REG(STATUS_INT);			/* r */

	/* software handshaking */
	DW_REG(REQ_SRC);
	DW_REG(REQ_DST);
	DW_REG(SGL_REQ_SRC);
	DW_REG(SGL_REQ_DST);
	DW_REG(LAST_SRC);
	DW_REG(LAST_DST);

	/* miscellaneous */
	DW_REG(CFG);
	DW_REG(CH_EN);
	DW_REG(ID);
	DW_REG(TEST);

	/* iDMA 32-bit support */
	DW_REG(CLASS_PRIORITY0);
	DW_REG(CLASS_PRIORITY1);

	/* optional encoded params, 0x3c8..0x3f7 */
	u32	__reserved;

	/* per-channel configuration registers */
	u32	DWC_PARAMS[DW_DMA_MAX_NR_CHANNELS];
	u32	MULTI_BLK_TYPE;
	u32	MAX_BLK_SIZE;

	/* top-level parameters */
	u32	DW_PARAMS;

	/* component ID */
	u32	COMP_TYPE;
	u32	COMP_VERSION;

	/* iDMA 32-bit support */
	DW_REG(FIFO_PARTITION0);
	DW_REG(FIFO_PARTITION1);

	DW_REG(SAI_ERR);
	DW_REG(GLOBAL_CFG);
};
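/*
 * Illustrative offsets, assuming DW_DMA_MAX_NR_CHANNELS is 8 (its value
 * in <linux/platform_data/dma-dw.h> at the time of writing): CHAN[]
 * spans 0x000..0x2bf, the interrupt register blocks (RAW, STATUS, MASK,
 * CLEAR) start at 0x2c0, and STATUS_INT sits at 0x360.  Check the
 * platform databook before relying on these numbers.
 */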

/* Bitfields in DW_PARAMS */
#define DW_PARAMS_NR_CHAN	8		/* number of channels */
#define DW_PARAMS_NR_MASTER	11		/* number of AHB masters */
#define DW_PARAMS_DATA_WIDTH(n)	(15 + 2 * (n))
#define DW_PARAMS_DATA_WIDTH1	15		/* master 1 data width */
#define DW_PARAMS_DATA_WIDTH2	17		/* master 2 data width */
#define DW_PARAMS_DATA_WIDTH3	19		/* master 3 data width */
#define DW_PARAMS_DATA_WIDTH4	21		/* master 4 data width */
#define DW_PARAMS_EN		28		/* encoded parameters */
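/*
 * These DW_PARAMS values are bit positions, not masks.  As a rough
 * illustration, a probe path could decode the channel count as
 *
 *	u32 dw_params = dma_readl(dw, DW_PARAMS);
 *	unsigned int nr_chan = ((dw_params >> DW_PARAMS_NR_CHAN) & 7) + 1;
 *
 * provided bit DW_PARAMS_EN reports that encoded parameters are present.
 */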

/* Bitfields in DWC_PARAMS */
#define DWC_PARAMS_MBLK_EN	11		/* multi block transfer */

/* burst size */
enum dw_dma_msize {
	DW_DMA_MSIZE_1,
	DW_DMA_MSIZE_4,
	DW_DMA_MSIZE_8,
	DW_DMA_MSIZE_16,
	DW_DMA_MSIZE_32,
	DW_DMA_MSIZE_64,
	DW_DMA_MSIZE_128,
	DW_DMA_MSIZE_256,
};
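/*
 * The MSIZE encoding is not the burst length itself: DW_DMA_MSIZE_1
 * (value 0) means single transfers, DW_DMA_MSIZE_4 (value 1) means
 * bursts of 4 items, and each further step doubles the burst length up
 * to 256.  Illustrative use:
 *
 *	ctllo |= DWC_CTLL_SRC_MSIZE(DW_DMA_MSIZE_4)
 *	       | DWC_CTLL_DST_MSIZE(DW_DMA_MSIZE_4);
 */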

/* Bitfields in LLP */
#define DWC_LLP_LMS(x)		((x) & 3)	/* list master select */
#define DWC_LLP_LOC(x)		((x) & ~3)	/* next lli */

/* Bitfields in CTL_LO */
#define DWC_CTLL_INT_EN		(1 << 0)	/* irqs enabled? */
#define DWC_CTLL_DST_WIDTH(n)	((n)<<1)	/* bytes per element */
#define DWC_CTLL_SRC_WIDTH(n)	((n)<<4)
#define DWC_CTLL_DST_INC	(0<<7)		/* DAR update/not */
#define DWC_CTLL_DST_DEC	(1<<7)
#define DWC_CTLL_DST_FIX	(2<<7)
#define DWC_CTLL_SRC_INC	(0<<9)		/* SAR update/not */
#define DWC_CTLL_SRC_DEC	(1<<9)
#define DWC_CTLL_SRC_FIX	(2<<9)
#define DWC_CTLL_DST_MSIZE(n)	((n)<<11)	/* burst, #elements */
#define DWC_CTLL_SRC_MSIZE(n)	((n)<<14)
#define DWC_CTLL_S_GATH_EN	(1 << 17)	/* src gather, !FIX */
#define DWC_CTLL_D_SCAT_EN	(1 << 18)	/* dst scatter, !FIX */
#define DWC_CTLL_FC(n)		((n) << 20)
#define DWC_CTLL_FC_M2M		(0 << 20)	/* mem-to-mem */
#define DWC_CTLL_FC_M2P		(1 << 20)	/* mem-to-periph */
#define DWC_CTLL_FC_P2M		(2 << 20)	/* periph-to-mem */
#define DWC_CTLL_FC_P2P		(3 << 20)	/* periph-to-periph */
/* plus 4 transfer types for peripheral-as-flow-controller */
#define DWC_CTLL_DMS(n)		((n)<<23)	/* dst master select */
#define DWC_CTLL_SMS(n)		((n)<<25)	/* src master select */
#define DWC_CTLL_LLP_D_EN	(1 << 27)	/* dest block chain */
#define DWC_CTLL_LLP_S_EN	(1 << 28)	/* src block chain */
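/*
 * Illustrative only (width and direction are example choices): a
 * memory-to-peripheral CTL_LO word could be assembled as
 *
 *	u32 ctllo = DWC_CTLL_INT_EN
 *		  | DWC_CTLL_DST_WIDTH(2) | DWC_CTLL_SRC_WIDTH(2)
 *		  | DWC_CTLL_DST_FIX | DWC_CTLL_SRC_INC
 *		  | DWC_CTLL_FC_M2P;
 *
 * The widths are log2 of the bytes per element (2 = 32-bit); the fixed
 * destination keeps the peripheral FIFO address constant while the
 * memory-side source address increments.
 */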

/* Bitfields in CTL_HI */
#define DWC_CTLH_BLOCK_TS_MASK	GENMASK(11, 0)
#define DWC_CTLH_BLOCK_TS(x)	((x) & DWC_CTLH_BLOCK_TS_MASK)
#define DWC_CTLH_DONE		(1 << 12)
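/*
 * BLOCK_TS holds the block length in data items and is read back to
 * estimate how far a block has progressed, e.g. for residue reporting
 * (illustrative; channel_readl() is defined later in this header):
 *
 *	u32 items = DWC_CTLH_BLOCK_TS(channel_readl(dwc, CTL_HI));
 */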

/* Bitfields in CFG_LO */
#define DWC_CFGL_CH_PRIOR_MASK	(0x7 << 5)	/* priority mask */
#define DWC_CFGL_CH_PRIOR(x)	((x) << 5)	/* priority */
#define DWC_CFGL_CH_SUSP	(1 << 8)	/* pause xfer */
#define DWC_CFGL_FIFO_EMPTY	(1 << 9)	/* pause xfer */
#define DWC_CFGL_HS_DST		(1 << 10)	/* handshake w/dst */
#define DWC_CFGL_HS_SRC		(1 << 11)	/* handshake w/src */
#define DWC_CFGL_LOCK_CH_XFER	(0 << 12)	/* scope of LOCK_CH */
#define DWC_CFGL_LOCK_CH_BLOCK	(1 << 12)
#define DWC_CFGL_LOCK_CH_XACT	(2 << 12)
#define DWC_CFGL_LOCK_BUS_XFER	(0 << 14)	/* scope of LOCK_BUS */
#define DWC_CFGL_LOCK_BUS_BLOCK	(1 << 14)
#define DWC_CFGL_LOCK_BUS_XACT	(2 << 14)
#define DWC_CFGL_LOCK_CH	(1 << 15)	/* channel lockout */
#define DWC_CFGL_LOCK_BUS	(1 << 16)	/* busmaster lockout */
#define DWC_CFGL_HS_DST_POL	(1 << 18)	/* dst handshake active low */
#define DWC_CFGL_HS_SRC_POL	(1 << 19)	/* src handshake active low */
#define DWC_CFGL_MAX_BURST(x)	((x) << 20)
#define DWC_CFGL_RELOAD_SAR	(1 << 30)
#define DWC_CFGL_RELOAD_DAR	(1 << 31)

/* Bitfields in CFG_HI */
#define DWC_CFGH_FCMODE		(1 << 0)
#define DWC_CFGH_FIFO_MODE	(1 << 1)
#define DWC_CFGH_PROTCTL(x)	((x) << 2)
#define DWC_CFGH_PROTCTL_DATA	(0 << 2)	/* data access - always set */
#define DWC_CFGH_PROTCTL_PRIV	(1 << 2)	/* privileged -> AHB HPROT[1] */
#define DWC_CFGH_PROTCTL_BUFFER	(2 << 2)	/* bufferable -> AHB HPROT[2] */
#define DWC_CFGH_PROTCTL_CACHE	(4 << 2)	/* cacheable  -> AHB HPROT[3] */
#define DWC_CFGH_DS_UPD_EN	(1 << 5)
#define DWC_CFGH_SS_UPD_EN	(1 << 6)
#define DWC_CFGH_SRC_PER(x)	((x) << 7)
#define DWC_CFGH_DST_PER(x)	((x) << 11)

/* Bitfields in SGR */
#define DWC_SGR_SGI(x)		((x) << 0)
#define DWC_SGR_SGC(x)		((x) << 20)

/* Bitfields in DSR */
#define DWC_DSR_DSI(x)		((x) << 0)
#define DWC_DSR_DSC(x)		((x) << 20)

/* Bitfields in CFG */
#define DW_CFG_DMA_EN		(1 << 0)
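/*
 * DW_CFG_DMA_EN is the global controller enable; switching the DMAC on
 * typically looks like (illustrative, dma_writel() is defined further
 * down in this header):
 *
 *	dma_writel(dw, CFG, DW_CFG_DMA_EN);
 */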

/* iDMA 32-bit support */

/* Bitfields in CTL_HI */
#define IDMA32C_CTLH_BLOCK_TS_MASK	GENMASK(16, 0)
#define IDMA32C_CTLH_BLOCK_TS(x)	((x) & IDMA32C_CTLH_BLOCK_TS_MASK)
#define IDMA32C_CTLH_DONE		(1 << 17)

/* Bitfields in CFG_LO */
#define IDMA32C_CFGL_DST_BURST_ALIGN	(1 << 0)	/* dst burst align */
#define IDMA32C_CFGL_SRC_BURST_ALIGN	(1 << 1)	/* src burst align */
#define IDMA32C_CFGL_CH_DRAIN		(1 << 10)	/* drain FIFO */
#define IDMA32C_CFGL_DST_OPT_BL		(1 << 20)	/* optimize dst burst length */
#define IDMA32C_CFGL_SRC_OPT_BL		(1 << 21)	/* optimize src burst length */

/* Bitfields in CFG_HI */
#define IDMA32C_CFGH_SRC_PER(x)		((x) << 0)
#define IDMA32C_CFGH_DST_PER(x)		((x) << 4)
#define IDMA32C_CFGH_RD_ISSUE_THD(x)	((x) << 8)
#define IDMA32C_CFGH_RW_ISSUE_THD(x)	((x) << 18)
#define IDMA32C_CFGH_SRC_PER_EXT(x)	((x) << 28)	/* src peripheral extension */
#define IDMA32C_CFGH_DST_PER_EXT(x)	((x) << 30)	/* dst peripheral extension */

/* Bitfields in FIFO_PARTITION */
#define IDMA32C_FP_PSIZE_CH0(x)		((x) << 0)
#define IDMA32C_FP_PSIZE_CH1(x)		((x) << 13)
#define IDMA32C_FP_UPDATE		(1 << 26)

enum dw_dmac_flags {
	DW_DMA_IS_CYCLIC = 0,
	DW_DMA_IS_SOFT_LLP = 1,
	DW_DMA_IS_PAUSED = 2,
	DW_DMA_IS_INITIALIZED = 3,
};
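/*
 * These are bit numbers for the atomic bitops on dw_dma_chan::flags
 * below, e.g. (illustrative)
 *
 *	if (test_bit(DW_DMA_IS_CYCLIC, &dwc->flags))
 *		...
 */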

struct dw_dma_chan {
	struct dma_chan			chan;
	void __iomem			*ch_regs;
	u8				mask;
	u8				priority;
	enum dma_transfer_direction	direction;

	/* software emulation of the LLP transfers */
	struct list_head	*tx_node_active;

	spinlock_t		lock;

	/* these other elements are all protected by lock */
	unsigned long		flags;
	struct list_head	active_list;
	struct list_head	queue;

	unsigned int		descs_allocated;

	/* hardware configuration */
	unsigned int		block_size;
	bool			nollp;

	/* custom slave configuration */
	struct dw_dma_slave	dws;

	/* configuration passed via .device_config */
	struct dma_slave_config dma_sconfig;
};

static inline struct dw_dma_chan_regs __iomem *
__dwc_regs(struct dw_dma_chan *dwc)
{
	return dwc->ch_regs;
}

#define channel_readl(dwc, name) \
	readl(&(__dwc_regs(dwc)->name))
#define channel_writel(dwc, name, val) \
	writel((val), &(__dwc_regs(dwc)->name))
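/*
 * The "name" argument is a register name from struct dw_dma_chan_regs,
 * used verbatim, e.g. (illustrative; desc_phys, lms and cfghi are
 * hypothetical locals)
 *
 *	channel_writel(dwc, LLP, desc_phys | lms);
 *	cfghi = channel_readl(dwc, CFG_HI);
 */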

static inline struct dw_dma_chan *to_dw_dma_chan(struct dma_chan *chan)
{
	return container_of(chan, struct dw_dma_chan, chan);
}

struct dw_dma {
	struct dma_device	dma;
	char			name[20];
	void __iomem		*regs;
	struct dma_pool		*desc_pool;
	struct tasklet_struct	tasklet;

	/* channels */
	struct dw_dma_chan	*chan;
	u8			all_chan_mask;
	u8			in_use;

	/* platform data */
	struct dw_dma_platform_data	*pdata;
};

static inline struct dw_dma_regs __iomem *__dw_regs(struct dw_dma *dw)
{
	return dw->regs;
}

#define dma_readl(dw, name) \
	readl(&(__dw_regs(dw)->name))
#define dma_writel(dw, name, val) \
	writel((val), &(__dw_regs(dw)->name))

#define idma32_readq(dw, name)				\
	hi_lo_readq(&(__dw_regs(dw)->name))
#define idma32_writeq(dw, name, val)			\
	hi_lo_writeq((val), &(__dw_regs(dw)->name))
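/*
 * The iDMA 32-bit FIFO_PARTITION registers are meant to be written as
 * 64-bit quantities through these hi-lo helpers; a sketch (the partition
 * size 64 is an arbitrary example value):
 *
 *	idma32_writeq(dw, FIFO_PARTITION0,
 *		      IDMA32C_FP_PSIZE_CH0(64) | IDMA32C_FP_PSIZE_CH1(64) |
 *		      IDMA32C_FP_UPDATE);
 */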

#define channel_set_bit(dw, reg, mask) \
	dma_writel(dw, reg, ((mask) << 8) | (mask))
#define channel_clear_bit(dw, reg, mask) \
	dma_writel(dw, reg, ((mask) << 8) | 0)
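/*
 * CH_EN and the interrupt MASK registers carry a write-enable mask in
 * bits [15:8] mirroring the value bits in [7:0], so only the channels
 * named in the upper byte are touched by a write.  channel_set_bit()
 * sets the selected bits, channel_clear_bit() clears them, e.g.
 * (illustrative)
 *
 *	channel_set_bit(dw, CH_EN, dwc->mask);
 */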

static inline struct dw_dma *to_dw_dma(struct dma_device *ddev)
{
	return container_of(ddev, struct dw_dma, dma);
}

/* LLI == Linked List Item; a.k.a. DMA block descriptor */
struct dw_lli {
	/* values that are not changed by hardware */
	__le32		sar;
	__le32		dar;
	__le32		llp;		/* chain to next lli */
	__le32		ctllo;
	/* values that may get written back: */
	__le32		ctlhi;
	/* sstat and dstat can snapshot peripheral register state.
	 * silicon config may discard either or both...
	 */
	__le32		sstat;
	__le32		dstat;
};

struct dw_desc {
	/* FIRST values the hardware uses */
	struct dw_lli			lli;

#define lli_set(d, reg, v)		((d)->lli.reg |= cpu_to_le32(v))
#define lli_clear(d, reg, v)		((d)->lli.reg &= ~cpu_to_le32(v))
#define lli_read(d, reg)		le32_to_cpu((d)->lli.reg)
#define lli_write(d, reg, v)		((d)->lli.reg = cpu_to_le32(v))

	/* THEN values for driver housekeeping */
	struct list_head		desc_node;
	struct list_head		tx_list;
	struct dma_async_tx_descriptor	txd;
	size_t				len;
	size_t				total_len;
	u32				residue;
};
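/*
 * Multi-block transfers chain descriptors through the hardware LLP field
 * with the lli_*() accessors above, roughly (prev, desc and lms stand in
 * for the previous descriptor, the next descriptor and the master-select
 * bits; names are illustrative)
 *
 *	lli_write(prev, llp, desc->txd.phys | lms);
 *
 * txd.phys being the DMA address of the next descriptor's dw_lli.
 */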

#define to_dw_desc(h)	list_entry(h, struct dw_desc, desc_node)

static inline struct dw_desc *
txd_to_dw_desc(struct dma_async_tx_descriptor *txd)
{
	return container_of(txd, struct dw_desc, txd);
}
385