/*
 * xlnx_dpdma.c
 *
 * Copyright (C) 2015 : GreenSocs Ltd
 *     http://www.greensocs.com/ , email: info@greensocs.com
 *
 * Developed by :
 * Frederic Konrad <fred.konrad@greensocs.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, see <http://www.gnu.org/licenses/>.
 *
 */

#include "qemu/osdep.h"
#include "qemu-common.h"
#include "qemu/log.h"
#include "qemu/module.h"
#include "hw/dma/xlnx_dpdma.h"
#include "hw/irq.h"

#ifndef DEBUG_DPDMA
#define DEBUG_DPDMA 0
#endif

#define DPRINTF(fmt, ...) do {                                                 \
    if (DEBUG_DPDMA) {                                                         \
        qemu_log("xlnx_dpdma: " fmt , ## __VA_ARGS__);                         \
    }                                                                          \
} while (0)

/*
 * Registers offset for DPDMA.
 */
#define DPDMA_ERR_CTRL                    (0x0000)
#define DPDMA_ISR                         (0x0004 >> 2)
#define DPDMA_IMR                         (0x0008 >> 2)
#define DPDMA_IEN                         (0x000C >> 2)
#define DPDMA_IDS                         (0x0010 >> 2)
#define DPDMA_EISR                        (0x0014 >> 2)
#define DPDMA_EIMR                        (0x0018 >> 2)
#define DPDMA_EIEN                        (0x001C >> 2)
#define DPDMA_EIDS                        (0x0020 >> 2)
#define DPDMA_CNTL                        (0x0100 >> 2)

#define DPDMA_GBL                         (0x0104 >> 2)
#define DPDMA_GBL_TRG_CH(n)               (1 << n)
#define DPDMA_GBL_RTRG_CH(n)              (1 << 6 << n)

#define DPDMA_ALC0_CNTL                   (0x0108 >> 2)
#define DPDMA_ALC0_STATUS                 (0x010C >> 2)
#define DPDMA_ALC0_MAX                    (0x0110 >> 2)
#define DPDMA_ALC0_MIN                    (0x0114 >> 2)
#define DPDMA_ALC0_ACC                    (0x0118 >> 2)
#define DPDMA_ALC0_ACC_TRAN               (0x011C >> 2)
#define DPDMA_ALC1_CNTL                   (0x0120 >> 2)
#define DPDMA_ALC1_STATUS                 (0x0124 >> 2)
#define DPDMA_ALC1_MAX                    (0x0128 >> 2)
#define DPDMA_ALC1_MIN                    (0x012C >> 2)
#define DPDMA_ALC1_ACC                    (0x0130 >> 2)
#define DPDMA_ALC1_ACC_TRAN               (0x0134 >> 2)

#define DPDMA_DSCR_STRT_ADDRE_CH(n)       ((0x0200 + n * 0x100) >> 2)
#define DPDMA_DSCR_STRT_ADDR_CH(n)        ((0x0204 + n * 0x100) >> 2)
#define DPDMA_DSCR_NEXT_ADDRE_CH(n)       ((0x0208 + n * 0x100) >> 2)
#define DPDMA_DSCR_NEXT_ADDR_CH(n)        ((0x020C + n * 0x100) >> 2)
#define DPDMA_PYLD_CUR_ADDRE_CH(n)        ((0x0210 + n * 0x100) >> 2)
#define DPDMA_PYLD_CUR_ADDR_CH(n)         ((0x0214 + n * 0x100) >> 2)

#define DPDMA_CNTL_CH(n)                  ((0x0218 + n * 0x100) >> 2)
#define DPDMA_CNTL_CH_EN                  (1)
#define DPDMA_CNTL_CH_PAUSED              (1 << 1)

#define DPDMA_STATUS_CH(n)                ((0x021C + n * 0x100) >> 2)
#define DPDMA_STATUS_BURST_TYPE           (1 << 4)
#define DPDMA_STATUS_MODE                 (1 << 5)
#define DPDMA_STATUS_EN_CRC               (1 << 6)
#define DPDMA_STATUS_LAST_DSCR            (1 << 7)
#define DPDMA_STATUS_LDSCR_FRAME          (1 << 8)
#define DPDMA_STATUS_IGNR_DONE            (1 << 9)
#define DPDMA_STATUS_DSCR_DONE            (1 << 10)
#define DPDMA_STATUS_EN_DSCR_UP           (1 << 11)
#define DPDMA_STATUS_EN_DSCR_INTR         (1 << 12)
#define DPDMA_STATUS_PREAMBLE_OFF         (13)

#define DPDMA_VDO_CH(n)                   ((0x0220 + n * 0x100) >> 2)
#define DPDMA_PYLD_SZ_CH(n)               ((0x0224 + n * 0x100) >> 2)
#define DPDMA_DSCR_ID_CH(n)               ((0x0228 + n * 0x100) >> 2)
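
/*
 * Note on the offsets above: the model keeps its register file as an array of
 * 32-bit words (s->registers[]), so every byte offset is shifted right by two
 * to form an array index, and each channel's register block is 0x100 bytes
 * (0x40 words) apart.  For example, (0x0204 + 0 * 0x100) >> 2 gives index
 * 0x81 for DPDMA_DSCR_STRT_ADDR_CH(0).
 */
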
/*
 * Descriptor control field.
 */
#define CONTROL_PREAMBLE_VALUE            0xA5

#define DSCR_CTRL_PREAMBLE                0xFF
#define DSCR_CTRL_EN_DSCR_DONE_INTR       (1 << 8)
#define DSCR_CTRL_EN_DSCR_UPDATE          (1 << 9)
#define DSCR_CTRL_IGNORE_DONE             (1 << 10)
#define DSCR_CTRL_AXI_BURST_TYPE          (1 << 11)
#define DSCR_CTRL_AXCACHE                 (0x0F << 12)
#define DSCR_CTRL_AXPROT                  (0x2 << 16)
#define DSCR_CTRL_DESCRIPTOR_MODE         (1 << 18)
#define DSCR_CTRL_LAST_DESCRIPTOR         (1 << 19)
#define DSCR_CTRL_ENABLE_CRC              (1 << 20)
#define DSCR_CTRL_LAST_DESCRIPTOR_OF_FRAME (1 << 21)

/*
 * Descriptor timestamp field.
 */
#define STATUS_DONE                       (1 << 31)

#define DPDMA_FRAG_MAX_SZ                 (4096)

enum DPDMABurstType {
    DPDMA_INCR = 0,
    DPDMA_FIXED = 1
};

enum DPDMAMode {
    DPDMA_CONTIGOUS = 0,
    DPDMA_FRAGMENTED = 1
};

struct DPDMADescriptor {
    uint32_t control;
    uint32_t descriptor_id;
    /* transfer size in bytes. */
    uint32_t xfer_size;
    uint32_t line_size_stride;
    uint32_t timestamp_lsb;
    uint32_t timestamp_msb;
    /* contains extension for both descriptor and source. */
    uint32_t address_extension;
    uint32_t next_descriptor;
    uint32_t source_address;
    uint32_t address_extension_23;
    uint32_t address_extension_45;
    uint32_t source_address2;
    uint32_t source_address3;
    uint32_t source_address4;
    uint32_t source_address5;
    uint32_t crc;
};

typedef enum DPDMABurstType DPDMABurstType;
typedef enum DPDMAMode DPDMAMode;
typedef struct DPDMADescriptor DPDMADescriptor;
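
/*
 * DPDMADescriptor mirrors the 64-byte descriptor that the hardware fetches
 * from memory: sixteen 32-bit words, read and written below with
 * dma_memory_read()/dma_memory_write().  Source and next-descriptor addresses
 * are split between a low 32-bit word and a field of the address_extension*
 * words; the helper functions that follow decode those fields.
 */
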
static bool xlnx_dpdma_desc_is_last(DPDMADescriptor *desc)
{
    return ((desc->control & DSCR_CTRL_LAST_DESCRIPTOR) != 0);
}

static bool xlnx_dpdma_desc_is_last_of_frame(DPDMADescriptor *desc)
{
    return ((desc->control & DSCR_CTRL_LAST_DESCRIPTOR_OF_FRAME) != 0);
}

static uint64_t xlnx_dpdma_desc_get_source_address(DPDMADescriptor *desc,
                                                   uint8_t frag)
{
    uint64_t addr = 0;
    assert(frag < 5);

    switch (frag) {
    case 0:
        addr = desc->source_address
            + (extract32(desc->address_extension, 16, 12) << 20);
        break;
    case 1:
        addr = desc->source_address2
            + (extract32(desc->address_extension_23, 0, 12) << 8);
        break;
    case 2:
        addr = desc->source_address3
            + (extract32(desc->address_extension_23, 16, 12) << 20);
        break;
    case 3:
        addr = desc->source_address4
            + (extract32(desc->address_extension_45, 0, 12) << 8);
        break;
    case 4:
        addr = desc->source_address5
            + (extract32(desc->address_extension_45, 16, 12) << 20);
        break;
    default:
        addr = 0;
        break;
    }

    return addr;
}

static uint32_t xlnx_dpdma_desc_get_transfer_size(DPDMADescriptor *desc)
{
    return desc->xfer_size;
}

static uint32_t xlnx_dpdma_desc_get_line_size(DPDMADescriptor *desc)
{
    return extract32(desc->line_size_stride, 0, 18);
}

static uint32_t xlnx_dpdma_desc_get_line_stride(DPDMADescriptor *desc)
{
    return extract32(desc->line_size_stride, 18, 14) * 16;
}

static inline bool xlnx_dpdma_desc_crc_enabled(DPDMADescriptor *desc)
{
    return (desc->control & DSCR_CTRL_ENABLE_CRC) != 0;
}

static inline bool xlnx_dpdma_desc_check_crc(DPDMADescriptor *desc)
{
    uint32_t *p = (uint32_t *)desc;
    uint32_t crc = 0;
    uint8_t i;

    /*
     * The CRC is calculated on the whole descriptor except the last 32-bit
     * word, using 32-bit addition.
     */
    for (i = 0; i < 15; i++) {
        crc += p[i];
    }

    return crc == desc->crc;
}

static inline bool xlnx_dpdma_desc_completion_interrupt(DPDMADescriptor *desc)
{
    return (desc->control & DSCR_CTRL_EN_DSCR_DONE_INTR) != 0;
}

static inline bool xlnx_dpdma_desc_is_valid(DPDMADescriptor *desc)
{
    return (desc->control & DSCR_CTRL_PREAMBLE) == CONTROL_PREAMBLE_VALUE;
}

static inline bool xlnx_dpdma_desc_is_contiguous(DPDMADescriptor *desc)
{
    return (desc->control & DSCR_CTRL_DESCRIPTOR_MODE) == 0;
}

static inline bool xlnx_dpdma_desc_update_enabled(DPDMADescriptor *desc)
{
    return (desc->control & DSCR_CTRL_EN_DSCR_UPDATE) != 0;
}

static inline void xlnx_dpdma_desc_set_done(DPDMADescriptor *desc)
{
    desc->timestamp_msb |= STATUS_DONE;
}

static inline bool xlnx_dpdma_desc_is_already_done(DPDMADescriptor *desc)
{
    return (desc->timestamp_msb & STATUS_DONE) != 0;
}

static inline bool xlnx_dpdma_desc_ignore_done_bit(DPDMADescriptor *desc)
{
    return (desc->control & DSCR_CTRL_IGNORE_DONE) != 0;
}
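
/*
 * Migration state: only the MMIO register file and the per-channel
 * "operation finished" flags are saved.  The s->data[] host pointers are not
 * migrated; they are set by the DPDMA client through
 * xlnx_dpdma_set_host_data_location().
 */
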
static const VMStateDescription vmstate_xlnx_dpdma = {
    .name = TYPE_XLNX_DPDMA,
    .version_id = 1,
    .fields = (VMStateField[]) {
        VMSTATE_UINT32_ARRAY(registers, XlnxDPDMAState,
                             XLNX_DPDMA_REG_ARRAY_SIZE),
        VMSTATE_BOOL_ARRAY(operation_finished, XlnxDPDMAState, 6),
        VMSTATE_END_OF_LIST()
    }
};

static void xlnx_dpdma_update_irq(XlnxDPDMAState *s)
{
    bool flags;

    flags = ((s->registers[DPDMA_ISR] & (~s->registers[DPDMA_IMR]))
          || (s->registers[DPDMA_EISR] & (~s->registers[DPDMA_EIMR])));
    qemu_set_irq(s->irq, flags);
}

static uint64_t xlnx_dpdma_descriptor_start_address(XlnxDPDMAState *s,
                                                    uint8_t channel)
{
    return (s->registers[DPDMA_DSCR_STRT_ADDRE_CH(channel)] << 16)
          + s->registers[DPDMA_DSCR_STRT_ADDR_CH(channel)];
}

static uint64_t xlnx_dpdma_descriptor_next_address(XlnxDPDMAState *s,
                                                   uint8_t channel)
{
    return ((uint64_t)s->registers[DPDMA_DSCR_NEXT_ADDRE_CH(channel)] << 32)
           + s->registers[DPDMA_DSCR_NEXT_ADDR_CH(channel)];
}

static bool xlnx_dpdma_is_channel_enabled(XlnxDPDMAState *s,
                                          uint8_t channel)
{
    return (s->registers[DPDMA_CNTL_CH(channel)] & DPDMA_CNTL_CH_EN) != 0;
}

static bool xlnx_dpdma_is_channel_paused(XlnxDPDMAState *s,
                                         uint8_t channel)
{
    return (s->registers[DPDMA_CNTL_CH(channel)] & DPDMA_CNTL_CH_PAUSED) != 0;
}

static inline bool xlnx_dpdma_is_channel_retriggered(XlnxDPDMAState *s,
                                                     uint8_t channel)
{
    /* Clear the retriggered bit after reading it. */
    bool channel_is_retriggered = s->registers[DPDMA_GBL]
                                & DPDMA_GBL_RTRG_CH(channel);
    s->registers[DPDMA_GBL] &= ~DPDMA_GBL_RTRG_CH(channel);
    return channel_is_retriggered;
}

static inline bool xlnx_dpdma_is_channel_triggered(XlnxDPDMAState *s,
                                                   uint8_t channel)
{
    return s->registers[DPDMA_GBL] & DPDMA_GBL_TRG_CH(channel);
}

static void xlnx_dpdma_update_desc_info(XlnxDPDMAState *s, uint8_t channel,
                                        DPDMADescriptor *desc)
{
    s->registers[DPDMA_DSCR_NEXT_ADDRE_CH(channel)] =
                                extract32(desc->address_extension, 0, 16);
    s->registers[DPDMA_DSCR_NEXT_ADDR_CH(channel)] = desc->next_descriptor;
    s->registers[DPDMA_PYLD_CUR_ADDRE_CH(channel)] =
                                extract32(desc->address_extension, 16, 16);
    s->registers[DPDMA_PYLD_CUR_ADDR_CH(channel)] = desc->source_address;
    s->registers[DPDMA_VDO_CH(channel)] =
                                extract32(desc->line_size_stride, 18, 14)
                                + (extract32(desc->line_size_stride, 0, 18)
                                   << 14);
    s->registers[DPDMA_PYLD_SZ_CH(channel)] = desc->xfer_size;
    s->registers[DPDMA_DSCR_ID_CH(channel)] = desc->descriptor_id;

    /* Compute the status register with the descriptor information. */
    s->registers[DPDMA_STATUS_CH(channel)] =
                                extract32(desc->control, 0, 8) << 13;
    if ((desc->control & DSCR_CTRL_EN_DSCR_DONE_INTR) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_EN_DSCR_INTR;
    }
    if ((desc->control & DSCR_CTRL_EN_DSCR_UPDATE) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_EN_DSCR_UP;
    }
    if ((desc->timestamp_msb & STATUS_DONE) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_DSCR_DONE;
    }
    if ((desc->control & DSCR_CTRL_IGNORE_DONE) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_IGNR_DONE;
    }
    if ((desc->control & DSCR_CTRL_LAST_DESCRIPTOR_OF_FRAME) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_LDSCR_FRAME;
    }
    if ((desc->control & DSCR_CTRL_LAST_DESCRIPTOR) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_LAST_DSCR;
    }
    if ((desc->control & DSCR_CTRL_ENABLE_CRC) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_EN_CRC;
    }
    if ((desc->control & DSCR_CTRL_DESCRIPTOR_MODE) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_MODE;
    }
    if ((desc->control & DSCR_CTRL_AXI_BURST_TYPE) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_BURST_TYPE;
    }
}

static void xlnx_dpdma_dump_descriptor(DPDMADescriptor *desc)
{
    if (DEBUG_DPDMA) {
        qemu_log("DUMP DESCRIPTOR:\n");
        qemu_hexdump((char *)desc, stdout, "", sizeof(DPDMADescriptor));
    }
}
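
/*
 * MMIO handlers.  Accesses are 32-bit only (see dma_ops below), so the byte
 * offset is shifted right by two to index s->registers[].  DPDMA_GBL is
 * write-only: reads return zero, but the written value is kept internally so
 * the trigger/retrigger bits can be checked by xlnx_dpdma_start_operation().
 */
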
static uint64_t xlnx_dpdma_read(void *opaque, hwaddr offset,
                                unsigned size)
{
    XlnxDPDMAState *s = XLNX_DPDMA(opaque);

    DPRINTF("read @%" HWADDR_PRIx "\n", offset);
    offset = offset >> 2;

    switch (offset) {
    /*
     * Trying to read a write-only register.
     */
    case DPDMA_GBL:
        return 0;
    default:
        assert(offset <= (0xFFC >> 2));
        return s->registers[offset];
    }
    return 0;
}
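
/*
 * The interrupt registers follow a status/mask/enable/disable scheme:
 * ISR/EISR are write-one-to-clear status registers, IEN/EIEN clear bits in
 * IMR/EIMR (unmasking the interrupt) and IDS/EIDS set them (masking it);
 * IMR/EIMR themselves are read-only.  The IRQ line is raised whenever
 * (ISR & ~IMR) or (EISR & ~EIMR) is non-zero, see xlnx_dpdma_update_irq().
 * Writing DPDMA_CNTL_CH(n) also clears that channel's trigger bit in
 * DPDMA_GBL.
 */
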
static void xlnx_dpdma_write(void *opaque, hwaddr offset,
                             uint64_t value, unsigned size)
{
    XlnxDPDMAState *s = XLNX_DPDMA(opaque);

    DPRINTF("write @%" HWADDR_PRIx " = %" PRIx64 "\n", offset, value);
    offset = offset >> 2;

    switch (offset) {
    case DPDMA_ISR:
        s->registers[DPDMA_ISR] &= ~value;
        xlnx_dpdma_update_irq(s);
        break;
    case DPDMA_IEN:
        s->registers[DPDMA_IMR] &= ~value;
        break;
    case DPDMA_IDS:
        s->registers[DPDMA_IMR] |= value;
        break;
    case DPDMA_EISR:
        s->registers[DPDMA_EISR] &= ~value;
        xlnx_dpdma_update_irq(s);
        break;
    case DPDMA_EIEN:
        s->registers[DPDMA_EIMR] &= ~value;
        break;
    case DPDMA_EIDS:
        s->registers[DPDMA_EIMR] |= value;
        break;
    case DPDMA_IMR:
    case DPDMA_EIMR:
    case DPDMA_DSCR_NEXT_ADDRE_CH(0):
    case DPDMA_DSCR_NEXT_ADDRE_CH(1):
    case DPDMA_DSCR_NEXT_ADDRE_CH(2):
    case DPDMA_DSCR_NEXT_ADDRE_CH(3):
    case DPDMA_DSCR_NEXT_ADDRE_CH(4):
    case DPDMA_DSCR_NEXT_ADDRE_CH(5):
    case DPDMA_DSCR_NEXT_ADDR_CH(0):
    case DPDMA_DSCR_NEXT_ADDR_CH(1):
    case DPDMA_DSCR_NEXT_ADDR_CH(2):
    case DPDMA_DSCR_NEXT_ADDR_CH(3):
    case DPDMA_DSCR_NEXT_ADDR_CH(4):
    case DPDMA_DSCR_NEXT_ADDR_CH(5):
    case DPDMA_PYLD_CUR_ADDRE_CH(0):
    case DPDMA_PYLD_CUR_ADDRE_CH(1):
    case DPDMA_PYLD_CUR_ADDRE_CH(2):
    case DPDMA_PYLD_CUR_ADDRE_CH(3):
    case DPDMA_PYLD_CUR_ADDRE_CH(4):
    case DPDMA_PYLD_CUR_ADDRE_CH(5):
    case DPDMA_PYLD_CUR_ADDR_CH(0):
    case DPDMA_PYLD_CUR_ADDR_CH(1):
    case DPDMA_PYLD_CUR_ADDR_CH(2):
    case DPDMA_PYLD_CUR_ADDR_CH(3):
    case DPDMA_PYLD_CUR_ADDR_CH(4):
    case DPDMA_PYLD_CUR_ADDR_CH(5):
    case DPDMA_STATUS_CH(0):
    case DPDMA_STATUS_CH(1):
    case DPDMA_STATUS_CH(2):
    case DPDMA_STATUS_CH(3):
    case DPDMA_STATUS_CH(4):
    case DPDMA_STATUS_CH(5):
    case DPDMA_VDO_CH(0):
    case DPDMA_VDO_CH(1):
    case DPDMA_VDO_CH(2):
    case DPDMA_VDO_CH(3):
    case DPDMA_VDO_CH(4):
    case DPDMA_VDO_CH(5):
    case DPDMA_PYLD_SZ_CH(0):
    case DPDMA_PYLD_SZ_CH(1):
    case DPDMA_PYLD_SZ_CH(2):
    case DPDMA_PYLD_SZ_CH(3):
    case DPDMA_PYLD_SZ_CH(4):
    case DPDMA_PYLD_SZ_CH(5):
    case DPDMA_DSCR_ID_CH(0):
    case DPDMA_DSCR_ID_CH(1):
    case DPDMA_DSCR_ID_CH(2):
    case DPDMA_DSCR_ID_CH(3):
    case DPDMA_DSCR_ID_CH(4):
    case DPDMA_DSCR_ID_CH(5):
        /*
         * Trying to write to a read-only register.
         */
        break;
    case DPDMA_GBL:
        /*
         * This is a write-only register, so it reads as zero in the read
         * callback.  We store the value anyway so we can know if the channel
         * is enabled.
         */
        s->registers[offset] |= value & 0x00000FFF;
        break;
    case DPDMA_DSCR_STRT_ADDRE_CH(0):
    case DPDMA_DSCR_STRT_ADDRE_CH(1):
    case DPDMA_DSCR_STRT_ADDRE_CH(2):
    case DPDMA_DSCR_STRT_ADDRE_CH(3):
    case DPDMA_DSCR_STRT_ADDRE_CH(4):
    case DPDMA_DSCR_STRT_ADDRE_CH(5):
        value &= 0x0000FFFF;
        s->registers[offset] = value;
        break;
    case DPDMA_CNTL_CH(0):
        s->registers[DPDMA_GBL] &= ~DPDMA_GBL_TRG_CH(0);
        value &= 0x3FFFFFFF;
        s->registers[offset] = value;
        break;
    case DPDMA_CNTL_CH(1):
        s->registers[DPDMA_GBL] &= ~DPDMA_GBL_TRG_CH(1);
        value &= 0x3FFFFFFF;
        s->registers[offset] = value;
        break;
    case DPDMA_CNTL_CH(2):
        s->registers[DPDMA_GBL] &= ~DPDMA_GBL_TRG_CH(2);
        value &= 0x3FFFFFFF;
        s->registers[offset] = value;
        break;
    case DPDMA_CNTL_CH(3):
        s->registers[DPDMA_GBL] &= ~DPDMA_GBL_TRG_CH(3);
        value &= 0x3FFFFFFF;
        s->registers[offset] = value;
        break;
    case DPDMA_CNTL_CH(4):
        s->registers[DPDMA_GBL] &= ~DPDMA_GBL_TRG_CH(4);
        value &= 0x3FFFFFFF;
        s->registers[offset] = value;
        break;
    case DPDMA_CNTL_CH(5):
        s->registers[DPDMA_GBL] &= ~DPDMA_GBL_TRG_CH(5);
        value &= 0x3FFFFFFF;
        s->registers[offset] = value;
        break;
    default:
        assert(offset <= (0xFFC >> 2));
        s->registers[offset] = value;
        break;
    }
}

static const MemoryRegionOps dma_ops = {
    .read = xlnx_dpdma_read,
    .write = xlnx_dpdma_write,
    .endianness = DEVICE_NATIVE_ENDIAN,
    .valid = {
        .min_access_size = 4,
        .max_access_size = 4,
    },
    .impl = {
        .min_access_size = 4,
        .max_access_size = 4,
    },
};

static void xlnx_dpdma_init(Object *obj)
{
    SysBusDevice *sbd = SYS_BUS_DEVICE(obj);
    XlnxDPDMAState *s = XLNX_DPDMA(obj);

    memory_region_init_io(&s->iomem, obj, &dma_ops, s,
                          TYPE_XLNX_DPDMA, 0x1000);
    sysbus_init_mmio(sbd, &s->iomem);
    sysbus_init_irq(sbd, &s->irq);
}

static void xlnx_dpdma_reset(DeviceState *dev)
{
    XlnxDPDMAState *s = XLNX_DPDMA(dev);
    size_t i;

    memset(s->registers, 0, sizeof(s->registers));
    s->registers[DPDMA_IMR] = 0x07FFFFFF;
    s->registers[DPDMA_EIMR] = 0xFFFFFFFF;
    s->registers[DPDMA_ALC0_MIN] = 0x0000FFFF;
    s->registers[DPDMA_ALC1_MIN] = 0x0000FFFF;

    for (i = 0; i < 6; i++) {
        s->data[i] = NULL;
        s->operation_finished[i] = true;
    }
}

static void xlnx_dpdma_class_init(ObjectClass *oc, void *data)
{
    DeviceClass *dc = DEVICE_CLASS(oc);

    dc->vmsd = &vmstate_xlnx_dpdma;
    dc->reset = xlnx_dpdma_reset;
}

static const TypeInfo xlnx_dpdma_info = {
    .name          = TYPE_XLNX_DPDMA,
    .parent        = TYPE_SYS_BUS_DEVICE,
    .instance_size = sizeof(XlnxDPDMAState),
    .instance_init = xlnx_dpdma_init,
    .class_init    = xlnx_dpdma_class_init,
};

static void xlnx_dpdma_register_types(void)
{
    type_register_static(&xlnx_dpdma_info);
}
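
/*
 * Walk the descriptor chain of one channel.  For every valid descriptor the
 * payload is copied into the host buffer registered for that channel (see
 * xlnx_dpdma_set_host_data_location() below), either as one contiguous block
 * or as up to five fragments depending on the descriptor mode.  The walk
 * stops on the last descriptor of the chain or of the frame, on any error,
 * or after a single descriptor when one_desc is true.  Returns the number of
 * bytes written to the host buffer.
 */
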
size_t xlnx_dpdma_start_operation(XlnxDPDMAState *s, uint8_t channel,
                                  bool one_desc)
{
    uint64_t desc_addr;
    uint64_t source_addr[6];
    DPDMADescriptor desc;
    bool done = false;
    size_t ptr = 0;

    assert(channel <= 5);

    DPRINTF("start dpdma channel 0x%" PRIX8 "\n", channel);

    if (!xlnx_dpdma_is_channel_triggered(s, channel)) {
        DPRINTF("Channel isn't triggered..\n");
        return 0;
    }

    if (!xlnx_dpdma_is_channel_enabled(s, channel)) {
        DPRINTF("Channel isn't enabled..\n");
        return 0;
    }

    if (xlnx_dpdma_is_channel_paused(s, channel)) {
        DPRINTF("Channel is paused..\n");
        return 0;
    }

    do {
        if ((s->operation_finished[channel])
          || xlnx_dpdma_is_channel_retriggered(s, channel)) {
            desc_addr = xlnx_dpdma_descriptor_start_address(s, channel);
            s->operation_finished[channel] = false;
        } else {
            desc_addr = xlnx_dpdma_descriptor_next_address(s, channel);
        }

        if (dma_memory_read(&address_space_memory, desc_addr, &desc,
                            sizeof(DPDMADescriptor))) {
            s->registers[DPDMA_EISR] |= ((1 << 1) << channel);
            xlnx_dpdma_update_irq(s);
            s->operation_finished[channel] = true;
            DPRINTF("Can't get the descriptor.\n");
            break;
        }

        xlnx_dpdma_update_desc_info(s, channel, &desc);

#ifdef DEBUG_DPDMA
        xlnx_dpdma_dump_descriptor(&desc);
#endif

        DPRINTF("location of the descriptor: %" PRIx64 "\n", desc_addr);
        if (!xlnx_dpdma_desc_is_valid(&desc)) {
            s->registers[DPDMA_EISR] |= ((1 << 7) << channel);
            xlnx_dpdma_update_irq(s);
            s->operation_finished[channel] = true;
            DPRINTF("Invalid descriptor..\n");
            break;
        }

        if (xlnx_dpdma_desc_crc_enabled(&desc)
          && !xlnx_dpdma_desc_check_crc(&desc)) {
            s->registers[DPDMA_EISR] |= ((1 << 13) << channel);
            xlnx_dpdma_update_irq(s);
            s->operation_finished[channel] = true;
            DPRINTF("Bad CRC for descriptor..\n");
            break;
        }

        if (xlnx_dpdma_desc_is_already_done(&desc)
          && !xlnx_dpdma_desc_ignore_done_bit(&desc)) {
            /* We are trying to process an already processed descriptor. */
            s->registers[DPDMA_EISR] |= ((1 << 25) << channel);
            xlnx_dpdma_update_irq(s);
            s->operation_finished[channel] = true;
            DPRINTF("Already processed descriptor..\n");
            break;
        }

        done = xlnx_dpdma_desc_is_last(&desc)
             || xlnx_dpdma_desc_is_last_of_frame(&desc);

        s->operation_finished[channel] = done;
        if (s->data[channel]) {
            int64_t transfer_len = xlnx_dpdma_desc_get_transfer_size(&desc);
            uint32_t line_size = xlnx_dpdma_desc_get_line_size(&desc);
            uint32_t line_stride = xlnx_dpdma_desc_get_line_stride(&desc);
            if (xlnx_dpdma_desc_is_contiguous(&desc)) {
                source_addr[0] = xlnx_dpdma_desc_get_source_address(&desc, 0);
                while (transfer_len != 0) {
                    if (dma_memory_read(&address_space_memory,
                                        source_addr[0],
                                        &s->data[channel][ptr],
                                        line_size)) {
                        s->registers[DPDMA_ISR] |= ((1 << 12) << channel);
                        xlnx_dpdma_update_irq(s);
                        DPRINTF("Can't get data.\n");
                        break;
                    }
                    ptr += line_size;
                    transfer_len -= line_size;
                    source_addr[0] += line_stride;
                }
            } else {
                DPRINTF("Source address:\n");
                int frag;
                for (frag = 0; frag < 5; frag++) {
                    source_addr[frag] =
                        xlnx_dpdma_desc_get_source_address(&desc, frag);
                    DPRINTF("Fragment %u: %" PRIx64 "\n", frag + 1,
                            source_addr[frag]);
                }

                frag = 0;
                while ((transfer_len != 0) && (frag < 5)) {
                    size_t fragment_len = DPDMA_FRAG_MAX_SZ
                                    - (source_addr[frag] % DPDMA_FRAG_MAX_SZ);

                    if (dma_memory_read(&address_space_memory,
                                        source_addr[frag],
                                        &(s->data[channel][ptr]),
                                        fragment_len)) {
                        s->registers[DPDMA_ISR] |= ((1 << 12) << channel);
                        xlnx_dpdma_update_irq(s);
                        DPRINTF("Can't get data.\n");
                        break;
                    }
                    ptr += fragment_len;
                    transfer_len -= fragment_len;
                    frag += 1;
                }
            }
        }

        if (xlnx_dpdma_desc_update_enabled(&desc)) {
            /* The descriptor needs to be updated when it is completed. */
            DPRINTF("update the descriptor with the done flag set.\n");
            xlnx_dpdma_desc_set_done(&desc);
            dma_memory_write(&address_space_memory, desc_addr, &desc,
                             sizeof(DPDMADescriptor));
        }

        if (xlnx_dpdma_desc_completion_interrupt(&desc)) {
            DPRINTF("completion interrupt enabled!\n");
            s->registers[DPDMA_ISR] |= (1 << channel);
            xlnx_dpdma_update_irq(s);
        }

    } while (!done && !one_desc);

    return ptr;
}
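
/*
 * Entry points for the DPDMA client: register the destination buffer that
 * xlnx_dpdma_start_operation() fills for a given channel, and raise the
 * VSYNC interrupt (bit 27 of the ISR).
 */
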
void xlnx_dpdma_set_host_data_location(XlnxDPDMAState *s, uint8_t channel,
                                       void *p)
{
    if (!s) {
        qemu_log_mask(LOG_UNIMP, "DPDMA client not attached to valid DPDMA"
                      " instance\n");
        return;
    }

    assert(channel <= 5);
    s->data[channel] = p;
}

void xlnx_dpdma_trigger_vsync_irq(XlnxDPDMAState *s)
{
    s->registers[DPDMA_ISR] |= (1 << 27);
    xlnx_dpdma_update_irq(s);
}

type_init(xlnx_dpdma_register_types)