Lines Matching full:bm

In bmdma_start_dma() (bm is a local variable):
    201  BMDMAState *bm = DO_UPCAST(BMDMAState, dma, dma);
    203  bm->dma_cb = dma_cb;
    204  bm->cur_prd_last = 0;
    205  bm->cur_prd_addr = 0;
    206  bm->cur_prd_len = 0;
    208  if (bm->status & BM_STATUS_DMAING) {
    209  bm->dma_cb(bmdma_active_if(bm), 0);
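
The fields cleared above are the cached PRD cursor that the rest of these matches keep updating. A minimal sketch of that state, limited to the members visible in this listing; the real BMDMAState in QEMU has more fields, and the types below are assumptions based on how the values are used:

    #include <stdint.h>

    /* Sketch only: the BMDMA register/cursor state referenced by the
     * matches in this listing, not the full QEMU BMDMAState. */
    typedef struct {
        uint8_t  cmd;           /* bus master command register              */
        uint8_t  status;        /* bus master status register               */
        uint32_t addr;          /* PRD table base programmed by the guest   */
        uint32_t cur_addr;      /* address of the next PRD entry to fetch   */
        uint32_t cur_prd_addr;  /* data address of the current PRD          */
        uint32_t cur_prd_len;   /* bytes left in the current PRD            */
        uint32_t cur_prd_last;  /* non-zero once the end-of-table PRD seen  */
    } BMDMACursorSketch;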

In bmdma_prepare_buf() (bm is a local variable):
    223  BMDMAState *bm = DO_UPCAST(BMDMAState, dma, dma);
    224  IDEState *s = bmdma_active_if(bm);
    225  PCIDevice *pci_dev = PCI_DEVICE(bm->pci_dev);
    236  if (bm->cur_prd_len == 0) {
    238  if (bm->cur_prd_last ||
    239      (bm->cur_addr - bm->addr) >= BMDMA_PAGE_SIZE) {
    242  pci_dma_read(pci_dev, bm->cur_addr, &prd, 8);
    243  bm->cur_addr += 8;
    249  bm->cur_prd_len = len;
    250  bm->cur_prd_addr = prd.addr;
    251  bm->cur_prd_last = (prd.size & 0x80000000);
    253  l = bm->cur_prd_len;
    259  sg_len = MIN(limit - s->sg.size, bm->cur_prd_len);
    261  qemu_sglist_add(&s->sg, bm->cur_prd_addr, sg_len);
    264  bm->cur_prd_addr += l;
    265  bm->cur_prd_len -= l;
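
Lines 242-251 fetch and cache one Physical Region Descriptor, the 8-byte scatter/gather entry defined by the Bus Master IDE (SFF-8038i) programming interface: a 32-bit buffer address, a 16-bit byte count where zero means 64 KiB, and an end-of-table flag in bit 31. The check on lines 238-239 stops the walk at the end-of-table flag or, as a fail safe, after one page of PRD entries. The byte-count handling is not among the matched lines, so the helper below is a hedged, standalone reconstruction of the decode that produces cur_prd_addr, cur_prd_len and cur_prd_last:

    #include <stdint.h>

    /* One PRD entry as it sits in guest memory (stored little-endian). */
    struct prd_entry {
        uint32_t addr;   /* physical base address of the data buffer     */
        uint32_t size;   /* bits 0-15: byte count, bit 31: end of table  */
    };

    /* Sketch: turn a fetched PRD into the cached cursor values. */
    static void prd_decode(const struct prd_entry *prd,
                           uint32_t *cur_prd_addr,
                           uint32_t *cur_prd_len,
                           uint32_t *cur_prd_last)
    {
        uint32_t len = prd->size & 0xfffe;      /* even byte count        */
        if (len == 0) {
            len = 0x10000;                      /* zero encodes 64 KiB    */
        }
        *cur_prd_addr = prd->addr;
        *cur_prd_len  = len;
        *cur_prd_last = prd->size & 0x80000000; /* end-of-table flag      */
    }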

In bmdma_rw_buf() (bm is a local variable):
    275  BMDMAState *bm = DO_UPCAST(BMDMAState, dma, dma);
    276  IDEState *s = bmdma_active_if(bm);
    277  PCIDevice *pci_dev = PCI_DEVICE(bm->pci_dev);
    288  if (bm->cur_prd_len == 0) {
    290  if (bm->cur_prd_last ||
    291      (bm->cur_addr - bm->addr) >= BMDMA_PAGE_SIZE)
    293  pci_dma_read(pci_dev, bm->cur_addr, &prd, 8);
    294  bm->cur_addr += 8;
    300  bm->cur_prd_len = len;
    301  bm->cur_prd_addr = prd.addr;
    302  bm->cur_prd_last = (prd.size & 0x80000000);
    304  if (l > bm->cur_prd_len)
    305  l = bm->cur_prd_len;
    308  pci_dma_write(pci_dev, bm->cur_prd_addr,
    311  pci_dma_read(pci_dev, bm->cur_prd_addr,
    314  bm->cur_prd_addr += l;
    315  bm->cur_prd_len -= l;
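
bmdma_rw_buf() walks the same PRD cursor but copies data itself instead of building a scatter/gather list: lines 308 and 311 are the direction switch, pci_dma_write() storing the staged io_buffer into the guest buffer for device-to-memory transfers and pci_dma_read() loading it for memory-to-device transfers. A self-contained model of that inner copy, with guest memory reduced to a flat array and all names hypothetical:

    #include <stdint.h>
    #include <string.h>

    /* Sketch: one bounded copy between a staging buffer and the guest
     * buffer described by the current PRD. 'guest_ram' stands in for the
     * pci_dma_read()/pci_dma_write() accessors used by the real code. */
    static void bmdma_copy_chunk(uint8_t *guest_ram, uint32_t cur_prd_addr,
                                 uint8_t *io_buffer, uint32_t offset,
                                 uint32_t len, int to_memory)
    {
        if (to_memory) {
            /* device -> memory (the guest is reading from the drive) */
            memcpy(guest_ram + cur_prd_addr, io_buffer + offset, len);
        } else {
            /* memory -> device (the guest is writing to the drive) */
            memcpy(io_buffer + offset, guest_ram + cur_prd_addr, len);
        }
    }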

In bmdma_set_inactive() (bm is a local variable):
    324  BMDMAState *bm = DO_UPCAST(BMDMAState, dma, dma);
    326  bm->dma_cb = NULL;
    328  bm->status |= BM_STATUS_DMAING;
    330  bm->status &= ~BM_STATUS_DMAING;

In bmdma_restart_dma() (bm is a local variable):
    336  BMDMAState *bm = DO_UPCAST(BMDMAState, dma, dma);
    338  bm->cur_addr = bm->addr;

In bmdma_cancel() (bm is an argument):
    341  static void bmdma_cancel(BMDMAState *bm)
    343  if (bm->status & BM_STATUS_DMAING) {
    345  bmdma_set_inactive(&bm->dma, false);

In bmdma_reset() (bm is a local variable):
    351  BMDMAState *bm = DO_UPCAST(BMDMAState, dma, dma);
    354  bmdma_cancel(bm);
    355  bm->cmd = 0;
    356  bm->status = 0;
    357  bm->addr = 0;
    358  bm->cur_addr = 0;
    359  bm->cur_prd_last = 0;
    360  bm->cur_prd_addr = 0;
    361  bm->cur_prd_len = 0;

In bmdma_irq() (bm is a local variable):
    366  BMDMAState *bm = opaque;
    370  qemu_set_irq(bm->irq, level);
    374  bm->status |= BM_STATUS_INT;
    377  qemu_set_irq(bm->irq, level);
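
bmdma_irq() is interposed on the IDE bus IRQ line by bmdma_init() at the end of this listing: the original line is kept in bm->irq, and on a rising edge the handler first latches BM_STATUS_INT in the status register (line 374) before forwarding the level with qemu_set_irq(). A standalone model of that latch-and-forward step, with the forwarding reduced to a callback:

    #include <stdint.h>

    #define BM_STATUS_INT 0x04   /* interrupt bit of the BMDMA status register */

    /* Sketch: latch the interrupt bit on a rising edge, then pass the level
     * on to the saved original IRQ line ('forward' stands in for
     * qemu_set_irq(bm->irq, level)). */
    static void bmdma_irq_sketch(uint8_t *status, int level,
                                 void (*forward)(int level))
    {
        if (level) {
            *status |= BM_STATUS_INT;  /* guest clears it by writing 1 back */
        }
        forward(level);
    }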

In bmdma_cmd_writeb() (bm is an argument):
    380  void bmdma_cmd_writeb(BMDMAState *bm, uint32_t val)
    385  if ((val & BM_CMD_START) != (bm->cmd & BM_CMD_START)) {
    387  ide_cancel_dma_sync(ide_bus_active_if(bm->bus));
    388  bm->status &= ~BM_STATUS_DMAING;
    390  bm->cur_addr = bm->addr;
    391  if (!(bm->status & BM_STATUS_DMAING)) {
    392  bm->status |= BM_STATUS_DMAING;
    394  if (bm->dma_cb)
    395  bm->dma_cb(bmdma_active_if(bm), 0);
    400  bm->cmd = val & 0x09;
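
The command register has only two architected bits, which is why line 400 masks the stored value with 0x09: bit 0 starts or stops the engine and bit 3 selects the transfer direction. The lines above handle the interesting edge, a change of the start bit: clearing it cancels any transfer in flight and drops BM_STATUS_DMAING, setting it reloads cur_addr from the PRD table base and, if the engine was not already active, kicks any pending dma_cb. A small sketch of the register layout and the edge test:

    #include <stdint.h>

    #define BM_CMD_START 0x01   /* bit 0: start/stop the DMA engine            */
    #define BM_CMD_READ  0x08   /* bit 3: direction, 1 = write to guest memory */

    /* Sketch: report whether the start bit toggled and store only the two
     * defined bits, mirroring the 'val & 0x09' above. */
    static uint8_t cmd_store(uint8_t old_cmd, uint8_t val, int *start_toggled)
    {
        *start_toggled = (val ^ old_cmd) & BM_CMD_START;
        return val & (BM_CMD_START | BM_CMD_READ);
    }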

In bmdma_status_writeb() (bm is an argument):
    403  void bmdma_status_writeb(BMDMAState *bm, uint32_t val)
    405  bm->status = (val & 0x60) | (bm->status & BM_STATUS_DMAING)
    406      | (bm->status & ~val & (BM_STATUS_ERROR | BM_STATUS_INT));
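
The status register mixes three behaviours, which the single expression on lines 405-406 implements: bits 5 and 6 (mask 0x60) are the software-settable "drive 0/1 DMA capable" bits and simply take the written value, bit 0 (BM_STATUS_DMAING) is preserved because the guest cannot change it here, and bits 1 and 2 (BM_STATUS_ERROR, BM_STATUS_INT) are write-one-to-clear, so they survive only where the guest wrote a zero. The same expression, unpacked into a standalone helper:

    #include <stdint.h>

    #define BM_STATUS_DMAING 0x01   /* bit 0: engine active                */
    #define BM_STATUS_ERROR  0x02   /* bit 1: error, write 1 to clear      */
    #define BM_STATUS_INT    0x04   /* bit 2: interrupt, write 1 to clear  */

    /* Sketch of the status register write path shown above. */
    static uint8_t status_store(uint8_t status, uint8_t val)
    {
        return (val & 0x60)                                         /* scratch  */
             | (status & BM_STATUS_DMAING)                          /* kept     */
             | (status & ~val & (BM_STATUS_ERROR | BM_STATUS_INT)); /* W1C      */
    }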

In bmdma_addr_read() (bm is a local variable):
    412  BMDMAState *bm = opaque;
    416  data = (bm->addr >> (addr * 8)) & mask;

In bmdma_addr_write() (bm is a local variable):
    424  BMDMAState *bm = opaque;
    429  bm->addr &= ~(mask << shift);
    430  bm->addr |= ((data & mask) << shift) & ~3;
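
The PRD table base is one 32-bit register, but the guest may access it with 1-, 2- or 4-byte I/O at any byte offset, so bmdma_addr_read()/bmdma_addr_write() shift and mask by the access offset and width; the trailing "& ~3" on line 430 keeps the table address dword-aligned, as the programming interface requires. A standalone version of the same arithmetic (parameter names are assumptions, not the QEMU signatures):

    #include <stdint.h>

    /* Sketch: byte-granular access to a 32-bit register. 'offset' is the
     * byte offset of the access within the register, 'width' is 1, 2 or 4. */
    static uint32_t reg32_read(uint32_t reg, unsigned offset, unsigned width)
    {
        uint32_t mask = (width == 4) ? 0xffffffffu : ((1u << (width * 8)) - 1);
        return (reg >> (offset * 8)) & mask;
    }

    static uint32_t reg32_write(uint32_t reg, unsigned offset, unsigned width,
                                uint32_t data)
    {
        uint32_t mask  = (width == 4) ? 0xffffffffu : ((1u << (width * 8)) - 1);
        unsigned shift = offset * 8;

        reg &= ~(mask << shift);
        reg |= ((data & mask) << shift) & ~3u;   /* low two bits stay zero */
        return reg;
    }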

In ide_bmdma_current_needed() (bm is a local variable):
    441  BMDMAState *bm = opaque;
    443  return (bm->cur_prd_len != 0);

In ide_bmdma_status_needed() (bm is a local variable):
    448  BMDMAState *bm = opaque;
    455  return ((bm->status & abused_bits) != 0);
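
ide_bmdma_current_needed() and ide_bmdma_status_needed() are .needed predicates for optional VMState subsections: the PRD cursor is only worth migrating while a PRD is partially consumed, and the extra status byte only while one of the reused bits is set. A sketch of how such a predicate is typically hooked up; the field list below is an assumption based on the cursor members seen earlier, not copied from the source:

    /* Sketch: an optional migration subsection guarded by a .needed
     * predicate (ide_bmdma_current_needed() above). Field list assumed. */
    static const VMStateDescription vmstate_bmdma_current_sketch = {
        .name = "ide bmdma/current (sketch)",
        .version_id = 1,
        .minimum_version_id = 1,
        .needed = ide_bmdma_current_needed,
        .fields = (VMStateField[]) {
            VMSTATE_UINT32(cur_addr, BMDMAState),
            VMSTATE_UINT32(cur_prd_addr, BMDMAState),
            VMSTATE_UINT32(cur_prd_len, BMDMAState),
            VMSTATE_UINT32(cur_prd_last, BMDMAState),
            VMSTATE_END_OF_LIST()
        }
    };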

In ide_bmdma_pre_save() (bm is a local variable):
    460  BMDMAState *bm = opaque;
    463  if (!(bm->status & BM_STATUS_DMAING) && bm->dma_cb) {
    464  bm->bus->error_status =
    465      ide_dma_cmd_to_retry(bmdma_active_if(bm)->dma_cmd);
    467  bm->migration_retry_unit = bm->bus->retry_unit;
    468  bm->migration_retry_sector_num = bm->bus->retry_sector_num;
    469  bm->migration_retry_nsector = bm->bus->retry_nsector;
    470  bm->migration_compat_status =
    471      (bm->status & ~abused_bits) | (bm->bus->error_status & abused_bits);

In ide_bmdma_post_load() (bm is a local variable; line 476 is the comment above the function):
    476  /* This function accesses bm->bus->error_status which is loaded only after
    481  BMDMAState *bm = opaque;
    484  if (bm->status == 0) {
    485  bm->status = bm->migration_compat_status & ~abused_bits;
    486  bm->bus->error_status |= bm->migration_compat_status & abused_bits;
    488  if (bm->bus->error_status) {
    489  bm->bus->retry_sector_num = bm->migration_retry_sector_num;
    490  bm->bus->retry_nsector = bm->migration_retry_nsector;
    491  bm->bus->retry_unit = bm->migration_retry_unit;
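
ide_bmdma_pre_save() and ide_bmdma_post_load() keep migration to and from older QEMU versions working: on save, bits of bus->error_status are folded into otherwise-unused bits of a compat status byte (the 'abused_bits' mask, defined as BM_MIGRATION_COMPAT_STATUS_BITS in the source); on load, when the newer status subsection was absent (bm->status still 0), the compat byte is split back into a clean status value and the error_status bits. The pack and unpack steps, reduced to standalone helpers:

    #include <stdint.h>

    /* Sketch of the compat-status packing used across migration.
     * 'abused_bits' marks the status bits reused to carry error_status. */
    static uint8_t compat_pack(uint8_t status, uint8_t error_status,
                               uint8_t abused_bits)
    {
        return (status & ~abused_bits) | (error_status & abused_bits);
    }

    static void compat_unpack(uint8_t compat, uint8_t abused_bits,
                              uint8_t *status, uint8_t *error_status)
    {
        *status        = compat & ~abused_bits;
        *error_status |= compat & abused_bits;
    }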

In bmdma_init() (bm is an argument):
    600  void bmdma_init(IDEBus *bus, BMDMAState *bm, PCIIDEState *d)
    602  if (bus->dma == &bm->dma) {
    606  bm->dma.ops = &bmdma_ops;
    607  bus->dma = &bm->dma;
    608  bm->irq = bus->irq;
    609  bus->irq = qemu_allocate_irq(bmdma_irq, bm, 0);
    610  bm->bus = bus;
    611  bm->pci_dev = d;
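
bmdma_init() attaches one BMDMA engine to one IDE bus: it installs the bmdma_ops callback table as the bus's DMA backend, saves the original IRQ line in bm->irq and replaces it with a freshly allocated one that runs bmdma_irq() (the interposition behind the latch shown earlier), and the early return on line 602 makes repeated calls harmless. A hypothetical caller wiring both channels of a PCI IDE controller; PCIIDEState's bus[]/bmdma[] arrays and bmdma_init() are real QEMU symbols, the helper itself is illustrative only:

    #include "hw/ide/pci.h"

    /* Illustrative only: how a PCI IDE controller model might attach a
     * BMDMA engine to each of its two IDE channels once the buses exist. */
    static void wire_bmdma_engines(PCIIDEState *d)
    {
        for (int i = 0; i < 2; i++) {
            bmdma_init(&d->bus[i], &d->bmdma[i], d);
        }
    }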