Lines matching refs: qiov

286  void qemu_iovec_init(QEMUIOVector *qiov, int alloc_hint)
288      qiov->iov = g_new(struct iovec, alloc_hint);
289      qiov->niov = 0;
290      qiov->nalloc = alloc_hint;
291      qiov->size = 0;
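
qemu_iovec_init() only allocates the element array (alloc_hint entries) and zeroes the counters; the data buffers always belong to the caller. A minimal usage sketch, assuming the QEMU tree headers "qemu/osdep.h" and "qemu/iov.h"; the helper name and buffer sizes are made up for illustration:

    /* Sketch only (not from the listing); assumes the QEMU tree headers. */
    #include "qemu/osdep.h"
    #include "qemu/iov.h"

    static void build_two_part_vector(void)     /* hypothetical helper */
    {
        QEMUIOVector qiov;
        g_autofree char *hdr = g_malloc0(512);
        g_autofree char *payload = g_malloc0(4096);

        qemu_iovec_init(&qiov, 2);              /* pre-size for two elements */
        qemu_iovec_add(&qiov, hdr, 512);
        qemu_iovec_add(&qiov, payload, 4096);

        assert(qiov.niov == 2 && qiov.size == 512 + 4096);

        qemu_iovec_destroy(&qiov);              /* frees qiov.iov, not hdr/payload */
    }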

294  void qemu_iovec_init_external(QEMUIOVector *qiov, struct iovec *iov, int niov)
298      qiov->iov = iov;
299      qiov->niov = niov;
300      qiov->nalloc = -1;
301      qiov->size = 0;
303          qiov->size += iov[i].iov_len;
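
Here the vector only wraps an iovec array the caller already owns, e.g. the scatter/gather list of a guest request; nalloc == -1 marks it as external. A sketch under the same header assumptions, with a hypothetical helper name:

    /* Sketch only; same includes as the first example. */
    static void wrap_existing_array(struct iovec *iov, int niov)   /* hypothetical */
    {
        QEMUIOVector qiov;

        /*
         * No copy and no allocation: qiov just points at the caller's array.
         * nalloc == -1 marks it external, so qemu_iovec_add() would assert
         * (line 308) and qemu_iovec_destroy() will not g_free() it (line 480).
         */
        qemu_iovec_init_external(&qiov, iov, niov);

        /* qiov.size is now the sum of iov[i].iov_len (line 303). */
        assert(qiov.size == iov_size(iov, niov));
    }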

306  void qemu_iovec_add(QEMUIOVector *qiov, void *base, size_t len)
308      assert(qiov->nalloc != -1);
310      if (qiov->niov == qiov->nalloc) {
311          qiov->nalloc = 2 * qiov->nalloc + 1;
312          qiov->iov = g_renew(struct iovec, qiov->iov, qiov->nalloc);
314      qiov->iov[qiov->niov].iov_base = base;
315      qiov->iov[qiov->niov].iov_len = len;
316      qiov->size += len;
317      ++qiov->niov;
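
The alloc_hint is only a starting size: once the array is full, qemu_iovec_add() grows it to 2 * nalloc + 1 entries with g_renew() (lines 311-312). A sketch, same header assumptions, hypothetical helper name:

    /* Sketch only; same includes as the first example. */
    static void add_many(char **bufs, size_t *lens, int n)   /* hypothetical */
    {
        QEMUIOVector qiov;
        int i;

        qemu_iovec_init(&qiov, 1);                 /* hint: one element */
        for (i = 0; i < n; i++) {
            /* May g_renew() the array: nalloc grows 1 -> 3 -> 7 -> ... */
            qemu_iovec_add(&qiov, bufs[i], lens[i]);
        }
        assert(qiov.niov == n && qiov.nalloc >= n);

        qemu_iovec_destroy(&qiov);
    }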

398  struct iovec *qemu_iovec_slice(QEMUIOVector *qiov,
404      assert(offset + len <= qiov->size);
406      iov = iov_skip_offset(qiov->iov, offset, head);

420  int qemu_iovec_subvec_niov(QEMUIOVector *qiov, size_t offset, size_t len)
425      qemu_iovec_slice(qiov, offset, len, &head, &tail, &niov);
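
qemu_iovec_slice() locates a byte range inside an existing vector without copying; my reading of its outputs: the return value points at the first element covering the range, *head is the offset of the range inside that element, *tail the count of extra bytes past the range in the last element, and *niov the number of elements spanned. qemu_iovec_subvec_niov() (line 420) is the element-count-only wrapper around it. A sketch of reading those outputs, same header assumptions, hypothetical helper name:

    /* Sketch only; same includes as the first example. */
    static void inspect_range(QEMUIOVector *qiov, size_t offset, size_t len)
    {
        size_t head, tail;
        int niov;
        struct iovec *first;

        first = qemu_iovec_slice(qiov, offset, len, &head, &tail, &niov);

        /*
         * `first` points into qiov->iov; the range starts `head` bytes into
         * first[0], spans `niov` elements, and the last element has `tail`
         * bytes beyond the end of the range.
         */
        assert(niov == qemu_iovec_subvec_niov(qiov, offset, len));
        (void)first; (void)head; (void)tail;
    }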

433  bool qemu_iovec_is_zero(QEMUIOVector *qiov, size_t offset, size_t bytes)
438      assert(offset + bytes <= qiov->size);
440      iov = iov_skip_offset(qiov->iov, offset, &current_offset);
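
qemu_iovec_is_zero() scans the elements covering [offset, offset + bytes) and reports whether every byte is zero, the kind of check a detect-zeroes style write path makes before deciding how to submit a request. A sketch, same header assumptions, made-up helper name:

    /* Sketch only; same includes as the first example. */
    static bool range_needs_write(QEMUIOVector *qiov, size_t offset, size_t bytes)
    {
        /* True unless every byte in [offset, offset + bytes) is zero. */
        return !qemu_iovec_is_zero(qiov, offset, bytes);
    }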

458  void qemu_iovec_init_slice(QEMUIOVector *qiov, QEMUIOVector *source,
471          qemu_iovec_init_buf(qiov, slice_iov[0].iov_base + slice_head, len);
473          qemu_iovec_init(qiov, slice_niov);
474          qemu_iovec_concat_iov(qiov, slice_iov, slice_niov, slice_head, len);
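
qemu_iovec_init_slice() builds a new vector covering bytes [offset, offset + len) of an existing one without copying data: a single-element result goes through qemu_iovec_init_buf() (line 471), anything larger gets a freshly allocated element array filled by qemu_iovec_concat_iov() (lines 473-474). A sketch, same header assumptions, hypothetical helper name:

    /* Sketch only; same includes as the first example. */
    static void make_view(QEMUIOVector *view, QEMUIOVector *parent,
                          size_t offset, size_t len)           /* hypothetical */
    {
        /* `view` borrows `parent`'s data buffers; no bytes are copied. */
        qemu_iovec_init_slice(view, parent, offset, len);

        /* ... use `view` while parent's buffers are still alive ..., then: */
        qemu_iovec_destroy(view);   /* frees only view's element array, if any */
    }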

478  void qemu_iovec_destroy(QEMUIOVector *qiov)
480      if (qiov->nalloc != -1) {
481          g_free(qiov->iov);
484      memset(qiov, 0, sizeof(*qiov));

487  void qemu_iovec_reset(QEMUIOVector *qiov)
489      assert(qiov->nalloc != -1);
491      qiov->niov = 0;
492      qiov->size = 0;
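
qemu_iovec_destroy() frees the element array (only when it owns it, i.e. nalloc != -1) and zeroes the struct, while qemu_iovec_reset() keeps the allocation and merely empties the vector, which is the cheap way to reuse it across requests. A sketch, same header assumptions, with made-up buffers:

    /* Sketch only; same includes as the first example. */
    static void reuse_vector(char *buf_a, size_t len_a, char *buf_b, size_t len_b)
    {
        QEMUIOVector qiov;

        qemu_iovec_init(&qiov, 1);

        qemu_iovec_add(&qiov, buf_a, len_a);
        /* ... submit the first request ... */

        qemu_iovec_reset(&qiov);        /* niov = 0, size = 0, array kept */
        qemu_iovec_add(&qiov, buf_b, len_b);
        /* ... submit the second request ... */

        qemu_iovec_destroy(&qiov);      /* g_free()s the array, zeroes qiov */
    }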

495  size_t qemu_iovec_to_buf(QEMUIOVector *qiov, size_t offset,
498      return iov_to_buf(qiov->iov, qiov->niov, offset, buf, bytes);

501  size_t qemu_iovec_from_buf(QEMUIOVector *qiov, size_t offset,
504      return iov_from_buf(qiov->iov, qiov->niov, offset, buf, bytes);

507  size_t qemu_iovec_memset(QEMUIOVector *qiov, size_t offset,
510      return iov_memset(qiov->iov, qiov->niov, offset, fillc, bytes);
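
These three are thin wrappers that pass the vector's own iov/niov pair to the generic iov_to_buf()/iov_from_buf()/iov_memset() routines; each returns the number of bytes actually processed, bounded by the bytes available from offset. A combined sketch, same header assumptions, hypothetical helper name:

    /* Sketch only; same includes as the first example. */
    static void flatten_and_fill(QEMUIOVector *qiov)      /* hypothetical */
    {
        uint8_t header[16];
        size_t n;

        /* Gather the first bytes of the (possibly scattered) vector. */
        n = qemu_iovec_to_buf(qiov, 0, header, sizeof(header));

        /* Scatter the same bytes back to where they came from. */
        qemu_iovec_from_buf(qiov, 0, header, n);

        /* Zero-fill everything after them. */
        qemu_iovec_memset(qiov, n, 0, qiov->size - n);
    }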

712  void qemu_iovec_discard_back(QEMUIOVector *qiov, size_t bytes)
715      unsigned int niov = qiov->niov;
717      assert(qiov->size >= bytes);
718      total = iov_discard_back(qiov->iov, &niov, bytes);
721      qiov->niov = niov;
722      qiov->size -= bytes;
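
qemu_iovec_discard_back() trims bytes from the tail of the vector without reallocating: iov_discard_back() shortens or drops the final elements, and the niov/size fields are updated to match (lines 721-722). A sketch of clamping a request to a smaller size, same header assumptions, hypothetical helper name:

    /* Sketch only; same includes as the first example. */
    static void clamp_request(QEMUIOVector *qiov, size_t new_size)   /* hypothetical */
    {
        assert(new_size <= qiov->size);

        /* Drop the excess from the tail; no reallocation, only niov/size
         * and the last element's iov_len change. */
        qemu_iovec_discard_back(qiov, qiov->size - new_size);

        assert(qiov->size == new_size);
    }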