xref: /openbmc/qemu/hw/dma/xlnx_dpdma.c (revision 990d2c18)
/*
 * xlnx_dpdma.c
 *
 *  Copyright (C) 2015 : GreenSocs Ltd
 *      http://www.greensocs.com/ , email: info@greensocs.com
 *
 *  Developed by :
 *  Frederic Konrad   <fred.konrad@greensocs.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, see <http://www.gnu.org/licenses/>.
 *
 */

#include "qemu/osdep.h"
#include "qemu/cutils.h"
#include "qemu/log.h"
#include "qemu/module.h"
#include "hw/dma/xlnx_dpdma.h"
#include "hw/irq.h"
#include "migration/vmstate.h"

#ifndef DEBUG_DPDMA
#define DEBUG_DPDMA 0
#endif

#define DPRINTF(fmt, ...) do {                                                 \
    if (DEBUG_DPDMA) {                                                         \
        qemu_log("xlnx_dpdma: " fmt , ## __VA_ARGS__);                         \
    }                                                                          \
} while (0)

/*
 * Register offsets for the DPDMA, expressed as word indices into the
 * registers[] array (byte offset >> 2).
 */
#define DPDMA_ERR_CTRL                        (0x0000)
#define DPDMA_ISR                             (0x0004 >> 2)
#define DPDMA_IMR                             (0x0008 >> 2)
#define DPDMA_IEN                             (0x000C >> 2)
#define DPDMA_IDS                             (0x0010 >> 2)
#define DPDMA_EISR                            (0x0014 >> 2)
#define DPDMA_EIMR                            (0x0018 >> 2)
#define DPDMA_EIEN                            (0x001C >> 2)
#define DPDMA_EIDS                            (0x0020 >> 2)
#define DPDMA_CNTL                            (0x0100 >> 2)

#define DPDMA_GBL                             (0x0104 >> 2)
#define DPDMA_GBL_TRG_CH(n)                   (1 << (n))
#define DPDMA_GBL_RTRG_CH(n)                  (1 << 6 << (n))

#define DPDMA_ALC0_CNTL                       (0x0108 >> 2)
#define DPDMA_ALC0_STATUS                     (0x010C >> 2)
#define DPDMA_ALC0_MAX                        (0x0110 >> 2)
#define DPDMA_ALC0_MIN                        (0x0114 >> 2)
#define DPDMA_ALC0_ACC                        (0x0118 >> 2)
#define DPDMA_ALC0_ACC_TRAN                   (0x011C >> 2)
#define DPDMA_ALC1_CNTL                       (0x0120 >> 2)
#define DPDMA_ALC1_STATUS                     (0x0124 >> 2)
#define DPDMA_ALC1_MAX                        (0x0128 >> 2)
#define DPDMA_ALC1_MIN                        (0x012C >> 2)
#define DPDMA_ALC1_ACC                        (0x0130 >> 2)
#define DPDMA_ALC1_ACC_TRAN                   (0x0134 >> 2)

#define DPDMA_DSCR_STRT_ADDRE_CH(n)           ((0x0200 + (n) * 0x100) >> 2)
#define DPDMA_DSCR_STRT_ADDR_CH(n)            ((0x0204 + (n) * 0x100) >> 2)
#define DPDMA_DSCR_NEXT_ADDRE_CH(n)           ((0x0208 + (n) * 0x100) >> 2)
#define DPDMA_DSCR_NEXT_ADDR_CH(n)            ((0x020C + (n) * 0x100) >> 2)
#define DPDMA_PYLD_CUR_ADDRE_CH(n)            ((0x0210 + (n) * 0x100) >> 2)
#define DPDMA_PYLD_CUR_ADDR_CH(n)             ((0x0214 + (n) * 0x100) >> 2)

#define DPDMA_CNTL_CH(n)                      ((0x0218 + (n) * 0x100) >> 2)
#define DPDMA_CNTL_CH_EN                      (1)
#define DPDMA_CNTL_CH_PAUSED                  (1 << 1)

#define DPDMA_STATUS_CH(n)                    ((0x021C + (n) * 0x100) >> 2)
#define DPDMA_STATUS_BURST_TYPE               (1 << 4)
#define DPDMA_STATUS_MODE                     (1 << 5)
#define DPDMA_STATUS_EN_CRC                   (1 << 6)
#define DPDMA_STATUS_LAST_DSCR                (1 << 7)
#define DPDMA_STATUS_LDSCR_FRAME              (1 << 8)
#define DPDMA_STATUS_IGNR_DONE                (1 << 9)
#define DPDMA_STATUS_DSCR_DONE                (1 << 10)
#define DPDMA_STATUS_EN_DSCR_UP               (1 << 11)
#define DPDMA_STATUS_EN_DSCR_INTR             (1 << 12)
#define DPDMA_STATUS_PREAMBLE_OFF             (13)

#define DPDMA_VDO_CH(n)                       ((0x0220 + (n) * 0x100) >> 2)
#define DPDMA_PYLD_SZ_CH(n)                   ((0x0224 + (n) * 0x100) >> 2)
#define DPDMA_DSCR_ID_CH(n)                   ((0x0228 + (n) * 0x100) >> 2)

/*
 * Descriptor control field.
 */
#define CONTROL_PREAMBLE_VALUE                0xA5

#define DSCR_CTRL_PREAMBLE                    0xFF
#define DSCR_CTRL_EN_DSCR_DONE_INTR           (1 << 8)
#define DSCR_CTRL_EN_DSCR_UPDATE              (1 << 9)
#define DSCR_CTRL_IGNORE_DONE                 (1 << 10)
#define DSCR_CTRL_AXI_BURST_TYPE              (1 << 11)
#define DSCR_CTRL_AXCACHE                     (0x0F << 12)
#define DSCR_CTRL_AXPROT                      (0x2 << 16)
#define DSCR_CTRL_DESCRIPTOR_MODE             (1 << 18)
#define DSCR_CTRL_LAST_DESCRIPTOR             (1 << 19)
#define DSCR_CTRL_ENABLE_CRC                  (1 << 20)
#define DSCR_CTRL_LAST_DESCRIPTOR_OF_FRAME    (1 << 21)

/*
 * Descriptor timestamp field.
 */
#define STATUS_DONE                           (1 << 31)

#define DPDMA_FRAG_MAX_SZ                     (4096)

enum DPDMABurstType {
    DPDMA_INCR = 0,
    DPDMA_FIXED = 1
};

enum DPDMAMode {
    DPDMA_CONTIGOUS = 0,
    DPDMA_FRAGMENTED = 1
};

struct DPDMADescriptor {
    uint32_t control;
    uint32_t descriptor_id;
    /* transfer size in bytes. */
    uint32_t xfer_size;
    uint32_t line_size_stride;
    uint32_t timestamp_lsb;
    uint32_t timestamp_msb;
    /* address extensions for the next descriptor (bits 15:0) and the source
     * address (bits 31:16). */
    uint32_t address_extension;
    uint32_t next_descriptor;
    uint32_t source_address;
    uint32_t address_extension_23;
    uint32_t address_extension_45;
    uint32_t source_address2;
    uint32_t source_address3;
    uint32_t source_address4;
    uint32_t source_address5;
    uint32_t crc;
};

typedef enum DPDMABurstType DPDMABurstType;
typedef enum DPDMAMode DPDMAMode;
typedef struct DPDMADescriptor DPDMADescriptor;

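/*
 * Helpers decoding the fields of an in-memory DPDMA descriptor.
 */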
static bool xlnx_dpdma_desc_is_last(DPDMADescriptor *desc)
{
    return ((desc->control & DSCR_CTRL_LAST_DESCRIPTOR) != 0);
}

static bool xlnx_dpdma_desc_is_last_of_frame(DPDMADescriptor *desc)
{
    return ((desc->control & DSCR_CTRL_LAST_DESCRIPTOR_OF_FRAME) != 0);
}

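/*
 * Return the 48-bit source address of fragment 'frag' (0 to 4), built from
 * the 32-bit source address word and its 16-bit address extension.
 */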
static uint64_t xlnx_dpdma_desc_get_source_address(DPDMADescriptor *desc,
                                                     uint8_t frag)
{
    uint64_t addr = 0;
    assert(frag < 5);

    switch (frag) {
    case 0:
        addr = (uint64_t)desc->source_address
            + (extract64(desc->address_extension, 16, 16) << 32);
        break;
    case 1:
        addr = (uint64_t)desc->source_address2
            + (extract64(desc->address_extension_23, 0, 16) << 32);
        break;
    case 2:
        addr = (uint64_t)desc->source_address3
            + (extract64(desc->address_extension_23, 16, 16) << 32);
        break;
    case 3:
        addr = (uint64_t)desc->source_address4
            + (extract64(desc->address_extension_45, 0, 16) << 32);
        break;
    case 4:
        addr = (uint64_t)desc->source_address5
            + (extract64(desc->address_extension_45, 16, 16) << 32);
        break;
    default:
        addr = 0;
        break;
    }

    return addr;
}

static uint32_t xlnx_dpdma_desc_get_transfer_size(DPDMADescriptor *desc)
{
    return desc->xfer_size;
}

static uint32_t xlnx_dpdma_desc_get_line_size(DPDMADescriptor *desc)
{
    return extract32(desc->line_size_stride, 0, 18);
}

static uint32_t xlnx_dpdma_desc_get_line_stride(DPDMADescriptor *desc)
{
    return extract32(desc->line_size_stride, 18, 14) * 16;
}

static inline bool xlnx_dpdma_desc_crc_enabled(DPDMADescriptor *desc)
{
    return (desc->control & DSCR_CTRL_ENABLE_CRC) != 0;
}

static inline bool xlnx_dpdma_desc_check_crc(DPDMADescriptor *desc)
{
    uint32_t *p = (uint32_t *)desc;
    uint32_t crc = 0;
    uint8_t i;

    /*
     * The CRC is computed over the whole descriptor except its last 32-bit
     * word (the crc field itself), using 32-bit addition.
     */
    for (i = 0; i < 15; i++) {
        crc += p[i];
    }

    return crc == desc->crc;
}

static inline bool xlnx_dpdma_desc_completion_interrupt(DPDMADescriptor *desc)
{
    return (desc->control & DSCR_CTRL_EN_DSCR_DONE_INTR) != 0;
}

static inline bool xlnx_dpdma_desc_is_valid(DPDMADescriptor *desc)
{
    return (desc->control & DSCR_CTRL_PREAMBLE) == CONTROL_PREAMBLE_VALUE;
}

static inline bool xlnx_dpdma_desc_is_contiguous(DPDMADescriptor *desc)
{
    return (desc->control & DSCR_CTRL_DESCRIPTOR_MODE) == 0;
}

static inline bool xlnx_dpdma_desc_update_enabled(DPDMADescriptor *desc)
{
    return (desc->control & DSCR_CTRL_EN_DSCR_UPDATE) != 0;
}

static inline void xlnx_dpdma_desc_set_done(DPDMADescriptor *desc)
{
    desc->timestamp_msb |= STATUS_DONE;
}

static inline bool xlnx_dpdma_desc_is_already_done(DPDMADescriptor *desc)
{
    return (desc->timestamp_msb & STATUS_DONE) != 0;
}

static inline bool xlnx_dpdma_desc_ignore_done_bit(DPDMADescriptor *desc)
{
    return (desc->control & DSCR_CTRL_IGNORE_DONE) != 0;
}

static const VMStateDescription vmstate_xlnx_dpdma = {
    .name = TYPE_XLNX_DPDMA,
    .version_id = 1,
    .fields = (const VMStateField[]) {
        VMSTATE_UINT32_ARRAY(registers, XlnxDPDMAState,
                             XLNX_DPDMA_REG_ARRAY_SIZE),
        VMSTATE_BOOL_ARRAY(operation_finished, XlnxDPDMAState, 6),
        VMSTATE_END_OF_LIST()
    }
};

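/*
 * Raise or lower the device IRQ depending on whether any unmasked bit is
 * pending in ISR or EISR.
 */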
static void xlnx_dpdma_update_irq(XlnxDPDMAState *s)
{
    bool flags;

    flags = ((s->registers[DPDMA_ISR] & (~s->registers[DPDMA_IMR]))
          || (s->registers[DPDMA_EISR] & (~s->registers[DPDMA_EIMR])));
    qemu_set_irq(s->irq, flags);
}

static uint64_t xlnx_dpdma_descriptor_start_address(XlnxDPDMAState *s,
                                                      uint8_t channel)
{
    return (s->registers[DPDMA_DSCR_STRT_ADDRE_CH(channel)] << 16)
          + s->registers[DPDMA_DSCR_STRT_ADDR_CH(channel)];
}

static uint64_t xlnx_dpdma_descriptor_next_address(XlnxDPDMAState *s,
                                                     uint8_t channel)
{
    return ((uint64_t)s->registers[DPDMA_DSCR_NEXT_ADDRE_CH(channel)] << 32)
           + s->registers[DPDMA_DSCR_NEXT_ADDR_CH(channel)];
}

static bool xlnx_dpdma_is_channel_enabled(XlnxDPDMAState *s,
                                            uint8_t channel)
{
    return (s->registers[DPDMA_CNTL_CH(channel)] & DPDMA_CNTL_CH_EN) != 0;
}

static bool xlnx_dpdma_is_channel_paused(XlnxDPDMAState *s,
                                           uint8_t channel)
{
    return (s->registers[DPDMA_CNTL_CH(channel)] & DPDMA_CNTL_CH_PAUSED) != 0;
}

static inline bool xlnx_dpdma_is_channel_retriggered(XlnxDPDMAState *s,
                                                       uint8_t channel)
{
    /* Clear the retriggered bit after reading it. */
    bool channel_is_retriggered = s->registers[DPDMA_GBL]
                                & DPDMA_GBL_RTRG_CH(channel);
    s->registers[DPDMA_GBL] &= ~DPDMA_GBL_RTRG_CH(channel);
    return channel_is_retriggered;
}

static inline bool xlnx_dpdma_is_channel_triggered(XlnxDPDMAState *s,
                                                     uint8_t channel)
{
    return s->registers[DPDMA_GBL] & DPDMA_GBL_TRG_CH(channel);
}

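/*
 * Mirror the fields of the descriptor being processed into the channel's
 * read-only registers (next descriptor and payload addresses, video
 * attributes, payload size, descriptor ID and status).
 */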
static void xlnx_dpdma_update_desc_info(XlnxDPDMAState *s, uint8_t channel,
                                          DPDMADescriptor *desc)
{
    s->registers[DPDMA_DSCR_NEXT_ADDRE_CH(channel)] =
                                extract32(desc->address_extension, 0, 16);
    s->registers[DPDMA_DSCR_NEXT_ADDR_CH(channel)] = desc->next_descriptor;
    s->registers[DPDMA_PYLD_CUR_ADDRE_CH(channel)] =
                                extract32(desc->address_extension, 16, 16);
    s->registers[DPDMA_PYLD_CUR_ADDR_CH(channel)] = desc->source_address;
    s->registers[DPDMA_VDO_CH(channel)] =
                                extract32(desc->line_size_stride, 18, 14)
                                + (extract32(desc->line_size_stride, 0, 18)
                                  << 14);
    s->registers[DPDMA_PYLD_SZ_CH(channel)] = desc->xfer_size;
    s->registers[DPDMA_DSCR_ID_CH(channel)] = desc->descriptor_id;

    /* Compute the status register with the descriptor information. */
    s->registers[DPDMA_STATUS_CH(channel)] =
                                extract32(desc->control, 0, 8) << 13;
    if ((desc->control & DSCR_CTRL_EN_DSCR_DONE_INTR) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_EN_DSCR_INTR;
    }
    if ((desc->control & DSCR_CTRL_EN_DSCR_UPDATE) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_EN_DSCR_UP;
    }
    if ((desc->timestamp_msb & STATUS_DONE) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_DSCR_DONE;
    }
    if ((desc->control & DSCR_CTRL_IGNORE_DONE) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_IGNR_DONE;
    }
    if ((desc->control & DSCR_CTRL_LAST_DESCRIPTOR_OF_FRAME) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_LDSCR_FRAME;
    }
    if ((desc->control & DSCR_CTRL_LAST_DESCRIPTOR) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_LAST_DSCR;
    }
    if ((desc->control & DSCR_CTRL_ENABLE_CRC) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_EN_CRC;
    }
    if ((desc->control & DSCR_CTRL_DESCRIPTOR_MODE) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_MODE;
    }
    if ((desc->control & DSCR_CTRL_AXI_BURST_TYPE) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_BURST_TYPE;
    }
}

static void xlnx_dpdma_dump_descriptor(DPDMADescriptor *desc)
{
    if (DEBUG_DPDMA) {
        qemu_log("DUMP DESCRIPTOR:\n");
        qemu_hexdump(stdout, "", desc, sizeof(DPDMADescriptor));
    }
}

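/*
 * MMIO read handler: DPDMA_GBL is write-only, all other registers simply
 * read back their stored value.
 */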
static uint64_t xlnx_dpdma_read(void *opaque, hwaddr offset,
                                unsigned size)
{
    XlnxDPDMAState *s = XLNX_DPDMA(opaque);

    DPRINTF("read @%" HWADDR_PRIx "\n", offset);
    offset = offset >> 2;

    switch (offset) {
    /*
     * Trying to read a write-only register.
     */
    case DPDMA_GBL:
        return 0;
    default:
        assert(offset <= (0xFFC >> 2));
        return s->registers[offset];
    }
    return 0;
}

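/*
 * MMIO write handler: ISR and EISR are write-one-to-clear, IEN/IDS and
 * EIEN/EIDS update the interrupt mask registers, and writes to read-only
 * channel registers are discarded.
 */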
static void xlnx_dpdma_write(void *opaque, hwaddr offset,
                               uint64_t value, unsigned size)
{
    XlnxDPDMAState *s = XLNX_DPDMA(opaque);

    DPRINTF("write @%" HWADDR_PRIx " = %" PRIx64 "\n", offset, value);
    offset = offset >> 2;

    switch (offset) {
    case DPDMA_ISR:
        s->registers[DPDMA_ISR] &= ~value;
        xlnx_dpdma_update_irq(s);
        break;
    case DPDMA_IEN:
        s->registers[DPDMA_IMR] &= ~value;
        break;
    case DPDMA_IDS:
        s->registers[DPDMA_IMR] |= value;
        break;
    case DPDMA_EISR:
        s->registers[DPDMA_EISR] &= ~value;
        xlnx_dpdma_update_irq(s);
        break;
    case DPDMA_EIEN:
        s->registers[DPDMA_EIMR] &= ~value;
        break;
    case DPDMA_EIDS:
        s->registers[DPDMA_EIMR] |= value;
        break;
    case DPDMA_IMR:
    case DPDMA_EIMR:
    case DPDMA_DSCR_NEXT_ADDRE_CH(0):
    case DPDMA_DSCR_NEXT_ADDRE_CH(1):
    case DPDMA_DSCR_NEXT_ADDRE_CH(2):
    case DPDMA_DSCR_NEXT_ADDRE_CH(3):
    case DPDMA_DSCR_NEXT_ADDRE_CH(4):
    case DPDMA_DSCR_NEXT_ADDRE_CH(5):
    case DPDMA_DSCR_NEXT_ADDR_CH(0):
    case DPDMA_DSCR_NEXT_ADDR_CH(1):
    case DPDMA_DSCR_NEXT_ADDR_CH(2):
    case DPDMA_DSCR_NEXT_ADDR_CH(3):
    case DPDMA_DSCR_NEXT_ADDR_CH(4):
    case DPDMA_DSCR_NEXT_ADDR_CH(5):
    case DPDMA_PYLD_CUR_ADDRE_CH(0):
    case DPDMA_PYLD_CUR_ADDRE_CH(1):
    case DPDMA_PYLD_CUR_ADDRE_CH(2):
    case DPDMA_PYLD_CUR_ADDRE_CH(3):
    case DPDMA_PYLD_CUR_ADDRE_CH(4):
    case DPDMA_PYLD_CUR_ADDRE_CH(5):
    case DPDMA_PYLD_CUR_ADDR_CH(0):
    case DPDMA_PYLD_CUR_ADDR_CH(1):
    case DPDMA_PYLD_CUR_ADDR_CH(2):
    case DPDMA_PYLD_CUR_ADDR_CH(3):
    case DPDMA_PYLD_CUR_ADDR_CH(4):
    case DPDMA_PYLD_CUR_ADDR_CH(5):
    case DPDMA_STATUS_CH(0):
    case DPDMA_STATUS_CH(1):
    case DPDMA_STATUS_CH(2):
    case DPDMA_STATUS_CH(3):
    case DPDMA_STATUS_CH(4):
    case DPDMA_STATUS_CH(5):
    case DPDMA_VDO_CH(0):
    case DPDMA_VDO_CH(1):
    case DPDMA_VDO_CH(2):
    case DPDMA_VDO_CH(3):
    case DPDMA_VDO_CH(4):
    case DPDMA_VDO_CH(5):
    case DPDMA_PYLD_SZ_CH(0):
    case DPDMA_PYLD_SZ_CH(1):
    case DPDMA_PYLD_SZ_CH(2):
    case DPDMA_PYLD_SZ_CH(3):
    case DPDMA_PYLD_SZ_CH(4):
    case DPDMA_PYLD_SZ_CH(5):
    case DPDMA_DSCR_ID_CH(0):
    case DPDMA_DSCR_ID_CH(1):
    case DPDMA_DSCR_ID_CH(2):
    case DPDMA_DSCR_ID_CH(3):
    case DPDMA_DSCR_ID_CH(4):
    case DPDMA_DSCR_ID_CH(5):
        /*
         * Trying to write to a read-only register.
         */
        break;
    case DPDMA_GBL:
        /*
         * This is a write-only register, so it reads back as zero in the
         * read callback.
         * We store the trigger bits anyway so we can tell whether a channel
         * has been triggered or retriggered.
         */
        s->registers[offset] |= value & 0x00000FFF;
        break;
    case DPDMA_DSCR_STRT_ADDRE_CH(0):
    case DPDMA_DSCR_STRT_ADDRE_CH(1):
    case DPDMA_DSCR_STRT_ADDRE_CH(2):
    case DPDMA_DSCR_STRT_ADDRE_CH(3):
    case DPDMA_DSCR_STRT_ADDRE_CH(4):
    case DPDMA_DSCR_STRT_ADDRE_CH(5):
        value &= 0x0000FFFF;
        s->registers[offset] = value;
        break;
    case DPDMA_CNTL_CH(0):
        s->registers[DPDMA_GBL] &= ~DPDMA_GBL_TRG_CH(0);
        value &= 0x3FFFFFFF;
        s->registers[offset] = value;
        break;
    case DPDMA_CNTL_CH(1):
        s->registers[DPDMA_GBL] &= ~DPDMA_GBL_TRG_CH(1);
        value &= 0x3FFFFFFF;
        s->registers[offset] = value;
        break;
    case DPDMA_CNTL_CH(2):
        s->registers[DPDMA_GBL] &= ~DPDMA_GBL_TRG_CH(2);
        value &= 0x3FFFFFFF;
        s->registers[offset] = value;
        break;
    case DPDMA_CNTL_CH(3):
        s->registers[DPDMA_GBL] &= ~DPDMA_GBL_TRG_CH(3);
        value &= 0x3FFFFFFF;
        s->registers[offset] = value;
        break;
    case DPDMA_CNTL_CH(4):
        s->registers[DPDMA_GBL] &= ~DPDMA_GBL_TRG_CH(4);
        value &= 0x3FFFFFFF;
        s->registers[offset] = value;
        break;
    case DPDMA_CNTL_CH(5):
        s->registers[DPDMA_GBL] &= ~DPDMA_GBL_TRG_CH(5);
        value &= 0x3FFFFFFF;
        s->registers[offset] = value;
        break;
    default:
        assert(offset <= (0xFFC >> 2));
        s->registers[offset] = value;
        break;
    }
}

static const MemoryRegionOps dma_ops = {
    .read = xlnx_dpdma_read,
    .write = xlnx_dpdma_write,
    .endianness = DEVICE_NATIVE_ENDIAN,
    .valid = {
        .min_access_size = 4,
        .max_access_size = 4,
    },
    .impl = {
        .min_access_size = 4,
        .max_access_size = 4,
    },
};

static void xlnx_dpdma_init(Object *obj)
{
    SysBusDevice *sbd = SYS_BUS_DEVICE(obj);
    XlnxDPDMAState *s = XLNX_DPDMA(obj);

    memory_region_init_io(&s->iomem, obj, &dma_ops, s,
                          TYPE_XLNX_DPDMA, 0x1000);
    sysbus_init_mmio(sbd, &s->iomem);
    sysbus_init_irq(sbd, &s->irq);
}

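/* Reset registers to their default values and detach any host data buffer. */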
static void xlnx_dpdma_reset(DeviceState *dev)
{
    XlnxDPDMAState *s = XLNX_DPDMA(dev);
    size_t i;

    memset(s->registers, 0, sizeof(s->registers));
    s->registers[DPDMA_IMR] =  0x07FFFFFF;
    s->registers[DPDMA_EIMR] = 0xFFFFFFFF;
    s->registers[DPDMA_ALC0_MIN] = 0x0000FFFF;
    s->registers[DPDMA_ALC1_MIN] = 0x0000FFFF;

    for (i = 0; i < 6; i++) {
        s->data[i] = NULL;
        s->operation_finished[i] = true;
    }
}

static void xlnx_dpdma_class_init(ObjectClass *oc, void *data)
{
    DeviceClass *dc = DEVICE_CLASS(oc);

    dc->vmsd = &vmstate_xlnx_dpdma;
    device_class_set_legacy_reset(dc, xlnx_dpdma_reset);
}

static const TypeInfo xlnx_dpdma_info = {
    .name          = TYPE_XLNX_DPDMA,
    .parent        = TYPE_SYS_BUS_DEVICE,
    .instance_size = sizeof(XlnxDPDMAState),
    .instance_init = xlnx_dpdma_init,
    .class_init    = xlnx_dpdma_class_init,
};

static void xlnx_dpdma_register_types(void)
{
    type_register_static(&xlnx_dpdma_info);
}

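/*
 * Fetch a descriptor from guest memory and convert it from little-endian to
 * host endianness.
 */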
static MemTxResult xlnx_dpdma_read_descriptor(XlnxDPDMAState *s,
                                              uint64_t desc_addr,
                                              DPDMADescriptor *desc)
{
    MemTxResult res = dma_memory_read(&address_space_memory, desc_addr,
                                      desc, sizeof(DPDMADescriptor),
                                      MEMTXATTRS_UNSPECIFIED);
    if (res) {
        return res;
    }

    /* Convert from LE into host endianness.  */
    desc->control = le32_to_cpu(desc->control);
    desc->descriptor_id = le32_to_cpu(desc->descriptor_id);
    desc->xfer_size = le32_to_cpu(desc->xfer_size);
    desc->line_size_stride = le32_to_cpu(desc->line_size_stride);
    desc->timestamp_lsb = le32_to_cpu(desc->timestamp_lsb);
    desc->timestamp_msb = le32_to_cpu(desc->timestamp_msb);
    desc->address_extension = le32_to_cpu(desc->address_extension);
    desc->next_descriptor = le32_to_cpu(desc->next_descriptor);
    desc->source_address = le32_to_cpu(desc->source_address);
    desc->address_extension_23 = le32_to_cpu(desc->address_extension_23);
    desc->address_extension_45 = le32_to_cpu(desc->address_extension_45);
    desc->source_address2 = le32_to_cpu(desc->source_address2);
    desc->source_address3 = le32_to_cpu(desc->source_address3);
    desc->source_address4 = le32_to_cpu(desc->source_address4);
    desc->source_address5 = le32_to_cpu(desc->source_address5);
    desc->crc = le32_to_cpu(desc->crc);

    return res;
}

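/* Write a descriptor back to guest memory in little-endian layout. */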
static MemTxResult xlnx_dpdma_write_descriptor(uint64_t desc_addr,
                                               DPDMADescriptor *desc)
{
    DPDMADescriptor tmp_desc = *desc;

    /* Convert from host endianness into LE.  */
    tmp_desc.control = cpu_to_le32(tmp_desc.control);
    tmp_desc.descriptor_id = cpu_to_le32(tmp_desc.descriptor_id);
    tmp_desc.xfer_size = cpu_to_le32(tmp_desc.xfer_size);
    tmp_desc.line_size_stride = cpu_to_le32(tmp_desc.line_size_stride);
    tmp_desc.timestamp_lsb = cpu_to_le32(tmp_desc.timestamp_lsb);
    tmp_desc.timestamp_msb = cpu_to_le32(tmp_desc.timestamp_msb);
    tmp_desc.address_extension = cpu_to_le32(tmp_desc.address_extension);
    tmp_desc.next_descriptor = cpu_to_le32(tmp_desc.next_descriptor);
    tmp_desc.source_address = cpu_to_le32(tmp_desc.source_address);
    tmp_desc.address_extension_23 = cpu_to_le32(tmp_desc.address_extension_23);
    tmp_desc.address_extension_45 = cpu_to_le32(tmp_desc.address_extension_45);
    tmp_desc.source_address2 = cpu_to_le32(tmp_desc.source_address2);
    tmp_desc.source_address3 = cpu_to_le32(tmp_desc.source_address3);
    tmp_desc.source_address4 = cpu_to_le32(tmp_desc.source_address4);
    tmp_desc.source_address5 = cpu_to_le32(tmp_desc.source_address5);
    tmp_desc.crc = cpu_to_le32(tmp_desc.crc);

    return dma_memory_write(&address_space_memory, desc_addr, &tmp_desc,
                            sizeof(DPDMADescriptor), MEMTXATTRS_UNSPECIFIED);
}

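/*
 * Walk the descriptor chain of 'channel' and copy the payload it describes
 * into the host buffer attached with xlnx_dpdma_set_host_data_location().
 * Stop after a single descriptor if 'one_desc' is true, otherwise at the
 * last descriptor of the chain or of the frame. Returns the number of
 * bytes copied.
 */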
size_t xlnx_dpdma_start_operation(XlnxDPDMAState *s, uint8_t channel,
                                    bool one_desc)
{
    uint64_t desc_addr;
    uint64_t source_addr[6];
    DPDMADescriptor desc;
    bool done = false;
    size_t ptr = 0;

    assert(channel <= 5);

    DPRINTF("start dpdma channel 0x%" PRIX8 "\n", channel);

    if (!xlnx_dpdma_is_channel_triggered(s, channel)) {
        DPRINTF("Channel isn't triggered..\n");
        return 0;
    }

    if (!xlnx_dpdma_is_channel_enabled(s, channel)) {
        DPRINTF("Channel isn't enabled..\n");
        return 0;
    }

    if (xlnx_dpdma_is_channel_paused(s, channel)) {
        DPRINTF("Channel is paused..\n");
        return 0;
    }

    do {
        if ((s->operation_finished[channel])
          || xlnx_dpdma_is_channel_retriggered(s, channel)) {
            desc_addr = xlnx_dpdma_descriptor_start_address(s, channel);
            s->operation_finished[channel] = false;
        } else {
            desc_addr = xlnx_dpdma_descriptor_next_address(s, channel);
        }

        if (xlnx_dpdma_read_descriptor(s, desc_addr, &desc)) {
            s->registers[DPDMA_EISR] |= ((1 << 1) << channel);
            xlnx_dpdma_update_irq(s);
            s->operation_finished[channel] = true;
            DPRINTF("Can't get the descriptor.\n");
            break;
        }

        xlnx_dpdma_update_desc_info(s, channel, &desc);

        xlnx_dpdma_dump_descriptor(&desc);

        DPRINTF("location of the descriptor: %" PRIx64 "\n", desc_addr);
        if (!xlnx_dpdma_desc_is_valid(&desc)) {
            s->registers[DPDMA_EISR] |= ((1 << 7) << channel);
            xlnx_dpdma_update_irq(s);
            s->operation_finished[channel] = true;
            DPRINTF("Invalid descriptor..\n");
            break;
        }

        if (xlnx_dpdma_desc_crc_enabled(&desc)
            && !xlnx_dpdma_desc_check_crc(&desc)) {
            s->registers[DPDMA_EISR] |= ((1 << 13) << channel);
            xlnx_dpdma_update_irq(s);
            s->operation_finished[channel] = true;
            DPRINTF("Bad CRC for descriptor..\n");
            break;
        }

        if (xlnx_dpdma_desc_is_already_done(&desc)
            && !xlnx_dpdma_desc_ignore_done_bit(&desc)) {
            /* We are trying to process an already processed descriptor. */
            s->registers[DPDMA_EISR] |= ((1 << 25) << channel);
            xlnx_dpdma_update_irq(s);
            s->operation_finished[channel] = true;
            DPRINTF("Already processed descriptor..\n");
            break;
        }

        done = xlnx_dpdma_desc_is_last(&desc)
             || xlnx_dpdma_desc_is_last_of_frame(&desc);

        s->operation_finished[channel] = done;
        if (s->data[channel]) {
            int64_t transfer_len = xlnx_dpdma_desc_get_transfer_size(&desc);
            uint32_t line_size = xlnx_dpdma_desc_get_line_size(&desc);
            uint32_t line_stride = xlnx_dpdma_desc_get_line_stride(&desc);
            if (xlnx_dpdma_desc_is_contiguous(&desc)) {
                source_addr[0] = xlnx_dpdma_desc_get_source_address(&desc, 0);
                while (transfer_len != 0) {
                    if (dma_memory_read(&address_space_memory,
                                        source_addr[0],
                                        &s->data[channel][ptr],
                                        line_size,
                                        MEMTXATTRS_UNSPECIFIED)) {
                        s->registers[DPDMA_ISR] |= ((1 << 12) << channel);
                        xlnx_dpdma_update_irq(s);
                        DPRINTF("Can't get data.\n");
                        break;
                    }
                    ptr += line_size;
                    transfer_len -= line_size;
                    source_addr[0] += line_stride;
                }
            } else {
                int frag;

                DPRINTF("Source address:\n");
                for (frag = 0; frag < 5; frag++) {
                    source_addr[frag] =
                          xlnx_dpdma_desc_get_source_address(&desc, frag);
                    DPRINTF("Fragment %u: %" PRIx64 "\n", frag + 1,
                            source_addr[frag]);
                }

                /* Copy each fragment until the whole payload is transferred,
                 * a fragment ending at the next 4K boundary. */
                frag = 0;
                while ((transfer_len > 0) && (frag < 5)) {
                    size_t fragment_len = DPDMA_FRAG_MAX_SZ
                                    - (source_addr[frag] % DPDMA_FRAG_MAX_SZ);

                    if (dma_memory_read(&address_space_memory,
                                        source_addr[frag],
                                        &(s->data[channel][ptr]),
                                        fragment_len,
                                        MEMTXATTRS_UNSPECIFIED)) {
                        s->registers[DPDMA_ISR] |= ((1 << 12) << channel);
                        xlnx_dpdma_update_irq(s);
                        DPRINTF("Can't get data.\n");
                        break;
                    }
                    ptr += fragment_len;
                    transfer_len -= fragment_len;
                    frag += 1;
                }
            }
        }

        if (xlnx_dpdma_desc_update_enabled(&desc)) {
            /* The descriptor needs to be updated when it's completed. */
            DPRINTF("update the descriptor with the done flag set.\n");
            xlnx_dpdma_desc_set_done(&desc);
            if (xlnx_dpdma_write_descriptor(desc_addr, &desc)) {
                DPRINTF("Can't write the descriptor.\n");
                /* TODO: check hardware behaviour for memory write failure */
            }
        }

        if (xlnx_dpdma_desc_completion_interrupt(&desc)) {
            DPRINTF("completion interrupt enabled!\n");
            s->registers[DPDMA_ISR] |= (1 << channel);
            xlnx_dpdma_update_irq(s);
        }

    } while (!done && !one_desc);

    return ptr;
}

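/*
 * Attach (or detach, with p == NULL) the host buffer that
 * xlnx_dpdma_start_operation() fills for 'channel'.
 */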
void xlnx_dpdma_set_host_data_location(XlnxDPDMAState *s, uint8_t channel,
                                         void *p)
{
    if (!s) {
        qemu_log_mask(LOG_UNIMP, "DPDMA client not attached to valid DPDMA"
                      " instance\n");
        return;
    }

    assert(channel <= 5);
    s->data[channel] = p;
}

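/* Set the VSYNC interrupt (ISR bit 27) and update the IRQ line. */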
void xlnx_dpdma_trigger_vsync_irq(XlnxDPDMAState *s)
{
    s->registers[DPDMA_ISR] |= (1 << 27);
    xlnx_dpdma_update_irq(s);
}

type_init(xlnx_dpdma_register_types)