/*
 * xlnx_dpdma.c
 *
 * Copyright (C) 2015 : GreenSocs Ltd
 *      http://www.greensocs.com/ , email: info@greensocs.com
 *
 *  Developed by :
 *  Frederic Konrad   <fred.konrad@greensocs.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, see <http://www.gnu.org/licenses/>.
 *
 */

#include "qemu/osdep.h"
#include "qemu/log.h"
#include "hw/dma/xlnx_dpdma.h"

#ifndef DEBUG_DPDMA
#define DEBUG_DPDMA 0
#endif

#define DPRINTF(fmt, ...) do {                                                 \
    if (DEBUG_DPDMA) {                                                         \
        qemu_log("xlnx_dpdma: " fmt , ## __VA_ARGS__);                         \
    }                                                                          \
} while (0)

/*
 * Registers offset for DPDMA.
 */
#define DPDMA_ERR_CTRL                    (0x0000)
#define DPDMA_ISR                         (0x0004 >> 2)
#define DPDMA_IMR                         (0x0008 >> 2)
#define DPDMA_IEN                         (0x000C >> 2)
#define DPDMA_IDS                         (0x0010 >> 2)
#define DPDMA_EISR                        (0x0014 >> 2)
#define DPDMA_EIMR                        (0x0018 >> 2)
#define DPDMA_EIEN                        (0x001C >> 2)
#define DPDMA_EIDS                        (0x0020 >> 2)
#define DPDMA_CNTL                        (0x0100 >> 2)

#define DPDMA_GBL                         (0x0104 >> 2)
#define DPDMA_GBL_TRG_CH(n)               (1 << n)
#define DPDMA_GBL_RTRG_CH(n)              (1 << 6 << n)

#define DPDMA_ALC0_CNTL                   (0x0108 >> 2)
#define DPDMA_ALC0_STATUS                 (0x010C >> 2)
#define DPDMA_ALC0_MAX                    (0x0110 >> 2)
#define DPDMA_ALC0_MIN                    (0x0114 >> 2)
#define DPDMA_ALC0_ACC                    (0x0118 >> 2)
#define DPDMA_ALC0_ACC_TRAN               (0x011C >> 2)
#define DPDMA_ALC1_CNTL                   (0x0120 >> 2)
#define DPDMA_ALC1_STATUS                 (0x0124 >> 2)
#define DPDMA_ALC1_MAX                    (0x0128 >> 2)
#define DPDMA_ALC1_MIN                    (0x012C >> 2)
#define DPDMA_ALC1_ACC                    (0x0130 >> 2)
#define DPDMA_ALC1_ACC_TRAN               (0x0134 >> 2)

#define DPDMA_DSCR_STRT_ADDRE_CH(n)       ((0x0200 + n * 0x100) >> 2)
#define DPDMA_DSCR_STRT_ADDR_CH(n)        ((0x0204 + n * 0x100) >> 2)
#define DPDMA_DSCR_NEXT_ADDRE_CH(n)       ((0x0208 + n * 0x100) >> 2)
#define DPDMA_DSCR_NEXT_ADDR_CH(n)        ((0x020C + n * 0x100) >> 2)
#define DPDMA_PYLD_CUR_ADDRE_CH(n)        ((0x0210 + n * 0x100) >> 2)
#define DPDMA_PYLD_CUR_ADDR_CH(n)         ((0x0214 + n * 0x100) >> 2)

#define DPDMA_CNTL_CH(n)                  ((0x0218 + n * 0x100) >> 2)
#define DPDMA_CNTL_CH_EN                  (1)
#define DPDMA_CNTL_CH_PAUSED              (1 << 1)

#define DPDMA_STATUS_CH(n)                ((0x021C + n * 0x100) >> 2)
#define DPDMA_STATUS_BURST_TYPE           (1 << 4)
#define DPDMA_STATUS_MODE                 (1 << 5)
#define DPDMA_STATUS_EN_CRC               (1 << 6)
#define DPDMA_STATUS_LAST_DSCR            (1 << 7)
#define DPDMA_STATUS_LDSCR_FRAME          (1 << 8)
#define DPDMA_STATUS_IGNR_DONE            (1 << 9)
#define DPDMA_STATUS_DSCR_DONE            (1 << 10)
#define DPDMA_STATUS_EN_DSCR_UP           (1 << 11)
#define DPDMA_STATUS_EN_DSCR_INTR         (1 << 12)
#define DPDMA_STATUS_PREAMBLE_OFF         (13)

#define DPDMA_VDO_CH(n)                   ((0x0220 + n * 0x100) >> 2)
#define DPDMA_PYLD_SZ_CH(n)               ((0x0224 + n * 0x100) >> 2)
#define DPDMA_DSCR_ID_CH(n)               ((0x0228 + n * 0x100) >> 2)
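
/*
 * Note: the offsets above are byte offsets into the DPDMA register space,
 * pre-shifted by two so they can be used directly as indices into the
 * 32-bit s->registers[] array that backs the 4K MMIO region.
 */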

/*
 * Descriptor control field.
 */
#define CONTROL_PREAMBLE_VALUE              0xA5

#define DSCR_CTRL_PREAMBLE                  0xFF
#define DSCR_CTRL_EN_DSCR_DONE_INTR         (1 << 8)
#define DSCR_CTRL_EN_DSCR_UPDATE            (1 << 9)
#define DSCR_CTRL_IGNORE_DONE               (1 << 10)
#define DSCR_CTRL_AXI_BURST_TYPE            (1 << 11)
#define DSCR_CTRL_AXCACHE                   (0x0F << 12)
#define DSCR_CTRL_AXPROT                    (0x2 << 16)
#define DSCR_CTRL_DESCRIPTOR_MODE           (1 << 18)
#define DSCR_CTRL_LAST_DESCRIPTOR           (1 << 19)
#define DSCR_CTRL_ENABLE_CRC                (1 << 20)
#define DSCR_CTRL_LAST_DESCRIPTOR_OF_FRAME  (1 << 21)

/*
 * Descriptor timestamp field.
 */
#define STATUS_DONE                         (1 << 31)

#define DPDMA_FRAG_MAX_SZ                   (4096)

enum DPDMABurstType {
    DPDMA_INCR = 0,
    DPDMA_FIXED = 1
};

enum DPDMAMode {
    DPDMA_CONTIGOUS = 0,
    DPDMA_FRAGMENTED = 1
};

struct DPDMADescriptor {
    uint32_t control;
    uint32_t descriptor_id;
    /* transfer size in bytes. */
    uint32_t xfer_size;
    uint32_t line_size_stride;
    uint32_t timestamp_lsb;
    uint32_t timestamp_msb;
    /* contains extension for both descriptor and source. */
    uint32_t address_extension;
    uint32_t next_descriptor;
    uint32_t source_address;
    uint32_t address_extension_23;
    uint32_t address_extension_45;
    uint32_t source_address2;
    uint32_t source_address3;
    uint32_t source_address4;
    uint32_t source_address5;
    uint32_t crc;
};

typedef enum DPDMABurstType DPDMABurstType;
typedef enum DPDMAMode DPDMAMode;
typedef struct DPDMADescriptor DPDMADescriptor;
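
/*
 * The helpers below decode a DPDMADescriptor image that
 * xlnx_dpdma_start_operation() fetches from guest memory with
 * dma_memory_read().
 */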
static bool xlnx_dpdma_desc_is_last(DPDMADescriptor *desc)
{
    return ((desc->control & DSCR_CTRL_LAST_DESCRIPTOR) != 0);
}

static bool xlnx_dpdma_desc_is_last_of_frame(DPDMADescriptor *desc)
{
    return ((desc->control & DSCR_CTRL_LAST_DESCRIPTOR_OF_FRAME) != 0);
}

static uint64_t xlnx_dpdma_desc_get_source_address(DPDMADescriptor *desc,
                                                   uint8_t frag)
{
    uint64_t addr = 0;
    assert(frag < 5);

    switch (frag) {
    case 0:
        addr = desc->source_address
            + (extract32(desc->address_extension, 16, 12) << 20);
        break;
    case 1:
        addr = desc->source_address2
            + (extract32(desc->address_extension_23, 0, 12) << 8);
        break;
    case 2:
        addr = desc->source_address3
            + (extract32(desc->address_extension_23, 16, 12) << 20);
        break;
    case 3:
        addr = desc->source_address4
            + (extract32(desc->address_extension_45, 0, 12) << 8);
        break;
    case 4:
        addr = desc->source_address5
            + (extract32(desc->address_extension_45, 16, 12) << 20);
        break;
    default:
        addr = 0;
        break;
    }

    return addr;
}

static uint32_t xlnx_dpdma_desc_get_transfer_size(DPDMADescriptor *desc)
{
    return desc->xfer_size;
}

static uint32_t xlnx_dpdma_desc_get_line_size(DPDMADescriptor *desc)
{
    return extract32(desc->line_size_stride, 0, 18);
}

static uint32_t xlnx_dpdma_desc_get_line_stride(DPDMADescriptor *desc)
{
    return extract32(desc->line_size_stride, 18, 14) * 16;
}

static inline bool xlnx_dpdma_desc_crc_enabled(DPDMADescriptor *desc)
{
    return (desc->control & DSCR_CTRL_ENABLE_CRC) != 0;
}

static inline bool xlnx_dpdma_desc_check_crc(DPDMADescriptor *desc)
{
    uint32_t *p = (uint32_t *)desc;
    uint32_t crc = 0;
    uint8_t i;

    /*
     * CRC is calculated on the whole descriptor except the last 32-bit word,
     * using 32-bit addition.
     */
    for (i = 0; i < 15; i++) {
        crc += p[i];
    }

    return crc == desc->crc;
}

static inline bool xlnx_dpdma_desc_completion_interrupt(DPDMADescriptor *desc)
{
    return (desc->control & DSCR_CTRL_EN_DSCR_DONE_INTR) != 0;
}

static inline bool xlnx_dpdma_desc_is_valid(DPDMADescriptor *desc)
{
    return (desc->control & DSCR_CTRL_PREAMBLE) == CONTROL_PREAMBLE_VALUE;
}

static inline bool xlnx_dpdma_desc_is_contiguous(DPDMADescriptor *desc)
{
    return (desc->control & DSCR_CTRL_DESCRIPTOR_MODE) == 0;
}

static inline bool xlnx_dpdma_desc_update_enabled(DPDMADescriptor *desc)
{
    return (desc->control & DSCR_CTRL_EN_DSCR_UPDATE) != 0;
}

static inline void xlnx_dpdma_desc_set_done(DPDMADescriptor *desc)
{
    desc->timestamp_msb |= STATUS_DONE;
}

static inline bool xlnx_dpdma_desc_is_already_done(DPDMADescriptor *desc)
{
    return (desc->timestamp_msb & STATUS_DONE) != 0;
}

static inline bool xlnx_dpdma_desc_ignore_done_bit(DPDMADescriptor *desc)
{
    return (desc->control & DSCR_CTRL_IGNORE_DONE) != 0;
}

static const VMStateDescription vmstate_xlnx_dpdma = {
    .name = TYPE_XLNX_DPDMA,
    .version_id = 1,
    .fields = (VMStateField[]) {
        VMSTATE_UINT32_ARRAY(registers, XlnxDPDMAState,
                             XLNX_DPDMA_REG_ARRAY_SIZE),
        VMSTATE_BOOL_ARRAY(operation_finished, XlnxDPDMAState, 6),
        VMSTATE_END_OF_LIST()
    }
};

static void xlnx_dpdma_update_irq(XlnxDPDMAState *s)
{
    bool flags;

    flags = ((s->registers[DPDMA_ISR] & (~s->registers[DPDMA_IMR]))
          || (s->registers[DPDMA_EISR] & (~s->registers[DPDMA_EIMR])));
    qemu_set_irq(s->irq, flags);
}

static uint64_t xlnx_dpdma_descriptor_start_address(XlnxDPDMAState *s,
                                                    uint8_t channel)
{
    return (s->registers[DPDMA_DSCR_STRT_ADDRE_CH(channel)] << 16)
          + s->registers[DPDMA_DSCR_STRT_ADDR_CH(channel)];
}

static uint64_t xlnx_dpdma_descriptor_next_address(XlnxDPDMAState *s,
                                                   uint8_t channel)
{
    return ((uint64_t)s->registers[DPDMA_DSCR_NEXT_ADDRE_CH(channel)] << 32)
          + s->registers[DPDMA_DSCR_NEXT_ADDR_CH(channel)];
}

static bool xlnx_dpdma_is_channel_enabled(XlnxDPDMAState *s,
                                          uint8_t channel)
{
    return (s->registers[DPDMA_CNTL_CH(channel)] & DPDMA_CNTL_CH_EN) != 0;
}

static bool xlnx_dpdma_is_channel_paused(XlnxDPDMAState *s,
                                         uint8_t channel)
{
    return (s->registers[DPDMA_CNTL_CH(channel)] & DPDMA_CNTL_CH_PAUSED) != 0;
}
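
/*
 * DPDMA_GBL latches software trigger requests: bits [5:0] trigger a channel
 * and bits [11:6] retrigger it, which makes the channel restart from its
 * descriptor start address (see xlnx_dpdma_start_operation() below).
 */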
static inline bool xlnx_dpdma_is_channel_retriggered(XlnxDPDMAState *s,
                                                     uint8_t channel)
{
    /* Clear the retriggered bit after reading it. */
    bool channel_is_retriggered = s->registers[DPDMA_GBL]
                                & DPDMA_GBL_RTRG_CH(channel);
    s->registers[DPDMA_GBL] &= ~DPDMA_GBL_RTRG_CH(channel);
    return channel_is_retriggered;
}

static inline bool xlnx_dpdma_is_channel_triggered(XlnxDPDMAState *s,
                                                   uint8_t channel)
{
    return s->registers[DPDMA_GBL] & DPDMA_GBL_TRG_CH(channel);
}

static void xlnx_dpdma_update_desc_info(XlnxDPDMAState *s, uint8_t channel,
                                        DPDMADescriptor *desc)
{
    s->registers[DPDMA_DSCR_NEXT_ADDRE_CH(channel)] =
                                extract32(desc->address_extension, 0, 16);
    s->registers[DPDMA_DSCR_NEXT_ADDR_CH(channel)] = desc->next_descriptor;
    s->registers[DPDMA_PYLD_CUR_ADDRE_CH(channel)] =
                                extract32(desc->address_extension, 16, 16);
    s->registers[DPDMA_PYLD_CUR_ADDR_CH(channel)] = desc->source_address;
    s->registers[DPDMA_VDO_CH(channel)] =
                                extract32(desc->line_size_stride, 18, 14)
                                + (extract32(desc->line_size_stride, 0, 18)
                                   << 14);
    s->registers[DPDMA_PYLD_SZ_CH(channel)] = desc->xfer_size;
    s->registers[DPDMA_DSCR_ID_CH(channel)] = desc->descriptor_id;

    /* Compute the status register with the descriptor information. */
    s->registers[DPDMA_STATUS_CH(channel)] =
                                extract32(desc->control, 0, 8) << 13;
    if ((desc->control & DSCR_CTRL_EN_DSCR_DONE_INTR) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_EN_DSCR_INTR;
    }
    if ((desc->control & DSCR_CTRL_EN_DSCR_UPDATE) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_EN_DSCR_UP;
    }
    if ((desc->timestamp_msb & STATUS_DONE) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_DSCR_DONE;
    }
    if ((desc->control & DSCR_CTRL_IGNORE_DONE) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_IGNR_DONE;
    }
    if ((desc->control & DSCR_CTRL_LAST_DESCRIPTOR_OF_FRAME) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_LDSCR_FRAME;
    }
    if ((desc->control & DSCR_CTRL_LAST_DESCRIPTOR) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_LAST_DSCR;
    }
    if ((desc->control & DSCR_CTRL_ENABLE_CRC) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_EN_CRC;
    }
    if ((desc->control & DSCR_CTRL_DESCRIPTOR_MODE) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_MODE;
    }
    if ((desc->control & DSCR_CTRL_AXI_BURST_TYPE) != 0) {
        s->registers[DPDMA_STATUS_CH(channel)] |= DPDMA_STATUS_BURST_TYPE;
    }
}

static void xlnx_dpdma_dump_descriptor(DPDMADescriptor *desc)
{
    if (DEBUG_DPDMA) {
        qemu_log("DUMP DESCRIPTOR:\n");
        qemu_hexdump((char *)desc, stdout, "", sizeof(DPDMADescriptor));
    }
}
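
/*
 * MMIO accessors for the DPDMA register block.  dma_ops below restricts
 * accesses to 32-bit words, so both handlers shift the byte offset down to
 * a word index into s->registers[].
 */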
static uint64_t xlnx_dpdma_read(void *opaque, hwaddr offset,
                                unsigned size)
{
    XlnxDPDMAState *s = XLNX_DPDMA(opaque);

    DPRINTF("read @%" HWADDR_PRIx "\n", offset);
    offset = offset >> 2;

    switch (offset) {
    /*
     * Trying to read a write-only register.
     */
    case DPDMA_GBL:
        return 0;
    default:
        assert(offset <= (0xFFC >> 2));
        return s->registers[offset];
    }
    return 0;
}

static void xlnx_dpdma_write(void *opaque, hwaddr offset,
                             uint64_t value, unsigned size)
{
    XlnxDPDMAState *s = XLNX_DPDMA(opaque);

    DPRINTF("write @%" HWADDR_PRIx " = %" PRIx64 "\n", offset, value);
    offset = offset >> 2;

    switch (offset) {
    case DPDMA_ISR:
        s->registers[DPDMA_ISR] &= ~value;
        xlnx_dpdma_update_irq(s);
        break;
    case DPDMA_IEN:
        s->registers[DPDMA_IMR] &= ~value;
        break;
    case DPDMA_IDS:
        s->registers[DPDMA_IMR] |= value;
        break;
    case DPDMA_EISR:
        s->registers[DPDMA_EISR] &= ~value;
        xlnx_dpdma_update_irq(s);
        break;
    case DPDMA_EIEN:
        s->registers[DPDMA_EIMR] &= ~value;
        break;
    case DPDMA_EIDS:
        s->registers[DPDMA_EIMR] |= value;
        break;
    case DPDMA_IMR:
    case DPDMA_EIMR:
    case DPDMA_DSCR_NEXT_ADDRE_CH(0):
    case DPDMA_DSCR_NEXT_ADDRE_CH(1):
    case DPDMA_DSCR_NEXT_ADDRE_CH(2):
    case DPDMA_DSCR_NEXT_ADDRE_CH(3):
    case DPDMA_DSCR_NEXT_ADDRE_CH(4):
    case DPDMA_DSCR_NEXT_ADDRE_CH(5):
    case DPDMA_DSCR_NEXT_ADDR_CH(0):
    case DPDMA_DSCR_NEXT_ADDR_CH(1):
    case DPDMA_DSCR_NEXT_ADDR_CH(2):
    case DPDMA_DSCR_NEXT_ADDR_CH(3):
    case DPDMA_DSCR_NEXT_ADDR_CH(4):
    case DPDMA_DSCR_NEXT_ADDR_CH(5):
    case DPDMA_PYLD_CUR_ADDRE_CH(0):
    case DPDMA_PYLD_CUR_ADDRE_CH(1):
    case DPDMA_PYLD_CUR_ADDRE_CH(2):
    case DPDMA_PYLD_CUR_ADDRE_CH(3):
    case DPDMA_PYLD_CUR_ADDRE_CH(4):
    case DPDMA_PYLD_CUR_ADDRE_CH(5):
    case DPDMA_PYLD_CUR_ADDR_CH(0):
    case DPDMA_PYLD_CUR_ADDR_CH(1):
    case DPDMA_PYLD_CUR_ADDR_CH(2):
    case DPDMA_PYLD_CUR_ADDR_CH(3):
    case DPDMA_PYLD_CUR_ADDR_CH(4):
    case DPDMA_PYLD_CUR_ADDR_CH(5):
    case DPDMA_STATUS_CH(0):
    case DPDMA_STATUS_CH(1):
    case DPDMA_STATUS_CH(2):
    case DPDMA_STATUS_CH(3):
    case DPDMA_STATUS_CH(4):
    case DPDMA_STATUS_CH(5):
    case DPDMA_VDO_CH(0):
    case DPDMA_VDO_CH(1):
    case DPDMA_VDO_CH(2):
    case DPDMA_VDO_CH(3):
    case DPDMA_VDO_CH(4):
    case DPDMA_VDO_CH(5):
    case DPDMA_PYLD_SZ_CH(0):
    case DPDMA_PYLD_SZ_CH(1):
    case DPDMA_PYLD_SZ_CH(2):
    case DPDMA_PYLD_SZ_CH(3):
    case DPDMA_PYLD_SZ_CH(4):
    case DPDMA_PYLD_SZ_CH(5):
    case DPDMA_DSCR_ID_CH(0):
    case DPDMA_DSCR_ID_CH(1):
    case DPDMA_DSCR_ID_CH(2):
    case DPDMA_DSCR_ID_CH(3):
    case DPDMA_DSCR_ID_CH(4):
    case DPDMA_DSCR_ID_CH(5):
        /*
         * Trying to write to a read-only register.
         */
        break;
    case DPDMA_GBL:
        /*
         * This is a write-only register, so it reads as zero in the read
         * callback.
         * We store the value anyway so we can know whether the channel has
         * been triggered.
         */
        s->registers[offset] |= value & 0x00000FFF;
        break;
    case DPDMA_DSCR_STRT_ADDRE_CH(0):
    case DPDMA_DSCR_STRT_ADDRE_CH(1):
    case DPDMA_DSCR_STRT_ADDRE_CH(2):
    case DPDMA_DSCR_STRT_ADDRE_CH(3):
    case DPDMA_DSCR_STRT_ADDRE_CH(4):
    case DPDMA_DSCR_STRT_ADDRE_CH(5):
        value &= 0x0000FFFF;
        s->registers[offset] = value;
        break;
    case DPDMA_CNTL_CH(0):
        s->registers[DPDMA_GBL] &= ~DPDMA_GBL_TRG_CH(0);
        value &= 0x3FFFFFFF;
        s->registers[offset] = value;
        break;
    case DPDMA_CNTL_CH(1):
        s->registers[DPDMA_GBL] &= ~DPDMA_GBL_TRG_CH(1);
        value &= 0x3FFFFFFF;
        s->registers[offset] = value;
        break;
    case DPDMA_CNTL_CH(2):
        s->registers[DPDMA_GBL] &= ~DPDMA_GBL_TRG_CH(2);
        value &= 0x3FFFFFFF;
        s->registers[offset] = value;
        break;
    case DPDMA_CNTL_CH(3):
        s->registers[DPDMA_GBL] &= ~DPDMA_GBL_TRG_CH(3);
        value &= 0x3FFFFFFF;
        s->registers[offset] = value;
        break;
    case DPDMA_CNTL_CH(4):
        s->registers[DPDMA_GBL] &= ~DPDMA_GBL_TRG_CH(4);
        value &= 0x3FFFFFFF;
        s->registers[offset] = value;
        break;
    case DPDMA_CNTL_CH(5):
        s->registers[DPDMA_GBL] &= ~DPDMA_GBL_TRG_CH(5);
        value &= 0x3FFFFFFF;
        s->registers[offset] = value;
        break;
    default:
        assert(offset <= (0xFFC >> 2));
        s->registers[offset] = value;
        break;
    }
}

static const MemoryRegionOps dma_ops = {
    .read = xlnx_dpdma_read,
    .write = xlnx_dpdma_write,
    .endianness = DEVICE_NATIVE_ENDIAN,
    .valid = {
        .min_access_size = 4,
        .max_access_size = 4,
    },
    .impl = {
        .min_access_size = 4,
        .max_access_size = 4,
    },
};

static void xlnx_dpdma_init(Object *obj)
{
    SysBusDevice *sbd = SYS_BUS_DEVICE(obj);
    XlnxDPDMAState *s = XLNX_DPDMA(obj);

    memory_region_init_io(&s->iomem, obj, &dma_ops, s,
                          TYPE_XLNX_DPDMA, 0x1000);
    sysbus_init_mmio(sbd, &s->iomem);
    sysbus_init_irq(sbd, &s->irq);
}

static void xlnx_dpdma_reset(DeviceState *dev)
{
    XlnxDPDMAState *s = XLNX_DPDMA(dev);
    size_t i;

    memset(s->registers, 0, sizeof(s->registers));
    s->registers[DPDMA_IMR] = 0x07FFFFFF;
    s->registers[DPDMA_EIMR] = 0xFFFFFFFF;
    s->registers[DPDMA_ALC0_MIN] = 0x0000FFFF;
    s->registers[DPDMA_ALC1_MIN] = 0x0000FFFF;

    for (i = 0; i < 6; i++) {
        s->data[i] = NULL;
        s->operation_finished[i] = true;
    }
}

static void xlnx_dpdma_class_init(ObjectClass *oc, void *data)
{
    DeviceClass *dc = DEVICE_CLASS(oc);

    dc->vmsd = &vmstate_xlnx_dpdma;
    dc->reset = xlnx_dpdma_reset;
}

static const TypeInfo xlnx_dpdma_info = {
    .name          = TYPE_XLNX_DPDMA,
    .parent        = TYPE_SYS_BUS_DEVICE,
    .instance_size = sizeof(XlnxDPDMAState),
    .instance_init = xlnx_dpdma_init,
    .class_init    = xlnx_dpdma_class_init,
};

static void xlnx_dpdma_register_types(void)
{
    type_register_static(&xlnx_dpdma_info);
}
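
/*
 * Walk the descriptor chain of @channel and copy each payload into the host
 * buffer previously registered with xlnx_dpdma_set_host_data_location().
 * Error conditions (unreadable, invalid or already-done descriptors, CRC
 * mismatch, failed data reads) raise the matching EISR/ISR bits.  Returns
 * the number of bytes written into the host buffer.
 *
 * A client (typically the DisplayPort device) is expected to use it roughly
 * as follows; this is only a sketch and 'framebuffer' is a placeholder:
 *
 *   xlnx_dpdma_set_host_data_location(s, channel, framebuffer);
 *   size_t bytes = xlnx_dpdma_start_operation(s, channel, false);
 */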
size_t xlnx_dpdma_start_operation(XlnxDPDMAState *s, uint8_t channel,
                                  bool one_desc)
{
    uint64_t desc_addr;
    uint64_t source_addr[6];
    DPDMADescriptor desc;
    bool done = false;
    size_t ptr = 0;

    assert(channel <= 5);

    DPRINTF("start dpdma channel 0x%" PRIX8 "\n", channel);

    if (!xlnx_dpdma_is_channel_triggered(s, channel)) {
        DPRINTF("Channel isn't triggered..\n");
        return 0;
    }

    if (!xlnx_dpdma_is_channel_enabled(s, channel)) {
        DPRINTF("Channel isn't enabled..\n");
        return 0;
    }

    if (xlnx_dpdma_is_channel_paused(s, channel)) {
        DPRINTF("Channel is paused..\n");
        return 0;
    }

    do {
        if ((s->operation_finished[channel])
            || xlnx_dpdma_is_channel_retriggered(s, channel)) {
            desc_addr = xlnx_dpdma_descriptor_start_address(s, channel);
            s->operation_finished[channel] = false;
        } else {
            desc_addr = xlnx_dpdma_descriptor_next_address(s, channel);
        }

        if (dma_memory_read(&address_space_memory, desc_addr, &desc,
                            sizeof(DPDMADescriptor))) {
            s->registers[DPDMA_EISR] |= ((1 << 1) << channel);
            xlnx_dpdma_update_irq(s);
            s->operation_finished[channel] = true;
            DPRINTF("Can't get the descriptor.\n");
            break;
        }

        xlnx_dpdma_update_desc_info(s, channel, &desc);

#ifdef DEBUG_DPDMA
        xlnx_dpdma_dump_descriptor(&desc);
#endif

        DPRINTF("location of the descriptor: %" PRIx64 "\n", desc_addr);
        if (!xlnx_dpdma_desc_is_valid(&desc)) {
            s->registers[DPDMA_EISR] |= ((1 << 7) << channel);
            xlnx_dpdma_update_irq(s);
            s->operation_finished[channel] = true;
            DPRINTF("Invalid descriptor..\n");
            break;
        }

        if (xlnx_dpdma_desc_crc_enabled(&desc)
            && !xlnx_dpdma_desc_check_crc(&desc)) {
            s->registers[DPDMA_EISR] |= ((1 << 13) << channel);
            xlnx_dpdma_update_irq(s);
            s->operation_finished[channel] = true;
            DPRINTF("Bad CRC for descriptor..\n");
            break;
        }

        if (xlnx_dpdma_desc_is_already_done(&desc)
            && !xlnx_dpdma_desc_ignore_done_bit(&desc)) {
            /* We are trying to process an already processed descriptor. */
            s->registers[DPDMA_EISR] |= ((1 << 25) << channel);
            xlnx_dpdma_update_irq(s);
            s->operation_finished[channel] = true;
            DPRINTF("Already processed descriptor..\n");
            break;
        }

        done = xlnx_dpdma_desc_is_last(&desc)
               || xlnx_dpdma_desc_is_last_of_frame(&desc);

        s->operation_finished[channel] = done;
        if (s->data[channel]) {
            int64_t transfer_len = xlnx_dpdma_desc_get_transfer_size(&desc);
            uint32_t line_size = xlnx_dpdma_desc_get_line_size(&desc);
            uint32_t line_stride = xlnx_dpdma_desc_get_line_stride(&desc);
            if (xlnx_dpdma_desc_is_contiguous(&desc)) {
                source_addr[0] = xlnx_dpdma_desc_get_source_address(&desc, 0);
                while (transfer_len != 0) {
                    if (dma_memory_read(&address_space_memory,
                                        source_addr[0],
                                        &s->data[channel][ptr],
                                        line_size)) {
                        s->registers[DPDMA_ISR] |= ((1 << 12) << channel);
                        xlnx_dpdma_update_irq(s);
                        DPRINTF("Can't get data.\n");
                        break;
                    }
                    ptr += line_size;
                    transfer_len -= line_size;
                    source_addr[0] += line_stride;
                }
            } else {
                DPRINTF("Source address:\n");
                int frag;
                for (frag = 0; frag < 5; frag++) {
                    source_addr[frag] =
                        xlnx_dpdma_desc_get_source_address(&desc, frag);
                    DPRINTF("Fragment %u: %" PRIx64 "\n", frag + 1,
                            source_addr[frag]);
                }

                frag = 0;
                while ((transfer_len != 0) && (frag < 5)) {
                    size_t fragment_len = DPDMA_FRAG_MAX_SZ
                                    - (source_addr[frag] % DPDMA_FRAG_MAX_SZ);

                    if (dma_memory_read(&address_space_memory,
                                        source_addr[frag],
                                        &(s->data[channel][ptr]),
                                        fragment_len)) {
                        s->registers[DPDMA_ISR] |= ((1 << 12) << channel);
                        xlnx_dpdma_update_irq(s);
                        DPRINTF("Can't get data.\n");
                        break;
                    }
                    ptr += fragment_len;
                    transfer_len -= fragment_len;
                    frag += 1;
                }
            }
        }
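
        /*
         * Post-process the descriptor: write back the DONE bit if the
         * descriptor asks for an update, and raise the per-channel done
         * interrupt when completion interrupts are enabled.
         */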
        if (xlnx_dpdma_desc_update_enabled(&desc)) {
            /* The descriptor needs to be updated when it's completed. */
            DPRINTF("update the descriptor with the done flag set.\n");
            xlnx_dpdma_desc_set_done(&desc);
            dma_memory_write(&address_space_memory, desc_addr, &desc,
                             sizeof(DPDMADescriptor));
        }

        if (xlnx_dpdma_desc_completion_interrupt(&desc)) {
            DPRINTF("completion interrupt enabled!\n");
            s->registers[DPDMA_ISR] |= (1 << channel);
            xlnx_dpdma_update_irq(s);
        }

    } while (!done && !one_desc);

    return ptr;
}

void xlnx_dpdma_set_host_data_location(XlnxDPDMAState *s, uint8_t channel,
                                       void *p)
{
    if (!s) {
        qemu_log_mask(LOG_UNIMP, "DPDMA client not attached to valid DPDMA"
                      " instance\n");
        return;
    }

    assert(channel <= 5);
    s->data[channel] = p;
}

void xlnx_dpdma_trigger_vsync_irq(XlnxDPDMAState *s)
{
    s->registers[DPDMA_ISR] |= (1 << 27);
    xlnx_dpdma_update_irq(s);
}

type_init(xlnx_dpdma_register_types)