Lines Matching refs:iov
25 size_t iov_from_buf_full(const struct iovec *iov, unsigned int iov_cnt, in iov_from_buf_full() argument
31 if (offset < iov[i].iov_len) { in iov_from_buf_full()
32 size_t len = MIN(iov[i].iov_len - offset, bytes - done); in iov_from_buf_full()
33 memcpy(iov[i].iov_base + offset, buf + done, len); in iov_from_buf_full()
37 offset -= iov[i].iov_len; in iov_from_buf_full()
44 size_t iov_to_buf_full(const struct iovec *iov, const unsigned int iov_cnt, in iov_to_buf_full() argument
50 if (offset < iov[i].iov_len) { in iov_to_buf_full()
51 size_t len = MIN(iov[i].iov_len - offset, bytes - done); in iov_to_buf_full()
52 memcpy(buf + done, iov[i].iov_base + offset, len); in iov_to_buf_full()
56 offset -= iov[i].iov_len; in iov_to_buf_full()
63 size_t iov_memset(const struct iovec *iov, const unsigned int iov_cnt, in iov_memset() argument
69 if (offset < iov[i].iov_len) { in iov_memset()
70 size_t len = MIN(iov[i].iov_len - offset, bytes - done); in iov_memset()
71 memset(iov[i].iov_base + offset, fillc, len); in iov_memset()
75 offset -= iov[i].iov_len; in iov_memset()
82 size_t iov_size(const struct iovec *iov, const unsigned int iov_cnt) in iov_size() argument
89 len += iov[i].iov_len; in iov_size()
96 do_send_recv(int sockfd, int flags, struct iovec *iov, unsigned iov_cnt, in do_send_recv() argument
103 msg.msg_iov = iov; in do_send_recv()
119 ? send(sockfd, iov[i].iov_base + off, iov[i].iov_len - off, flags) in do_send_recv()
120 : recv(sockfd, iov[i].iov_base + off, iov[i].iov_len - off, flags); in do_send_recv()
124 if (off < iov[i].iov_len) { in do_send_recv()
163 struct iovec *local_iov, *iov; in iov_send_recv_with_flags() local
172 iov = local_iov; in iov_send_recv_with_flags()
177 for (niov = 0; niov < iov_cnt && offset >= iov[niov].iov_len; ++niov) { in iov_send_recv_with_flags()
178 offset -= iov[niov].iov_len; in iov_send_recv_with_flags()
184 iov += niov; in iov_send_recv_with_flags()
190 iov[0].iov_base += offset; in iov_send_recv_with_flags()
191 iov[0].iov_len -= offset; in iov_send_recv_with_flags()
196 for (niov = 0; niov < iov_cnt && iov[niov].iov_len <= tail; ++niov) { in iov_send_recv_with_flags()
197 tail -= iov[niov].iov_len; in iov_send_recv_with_flags()
203 assert(iov[niov].iov_len > tail); in iov_send_recv_with_flags()
204 orig_len = iov[niov].iov_len; in iov_send_recv_with_flags()
205 iov[niov++].iov_len = tail; in iov_send_recv_with_flags()
206 ret = do_send_recv(sockfd, sockflags, iov, niov, do_send); in iov_send_recv_with_flags()
208 iov[niov-1].iov_len = orig_len; in iov_send_recv_with_flags()
210 ret = do_send_recv(sockfd, sockflags, iov, niov, do_send); in iov_send_recv_with_flags()
213 iov[0].iov_base -= offset; in iov_send_recv_with_flags()
214 iov[0].iov_len += offset; in iov_send_recv_with_flags()
243 void iov_hexdump(const struct iovec *iov, const unsigned int iov_cnt, in iov_hexdump() argument
251 size += iov[v].iov_len; in iov_hexdump()
255 iov_to_buf(iov, iov_cnt, 0, buf, size); in iov_hexdump()
261 const struct iovec *iov, unsigned int iov_cnt, in iov_copy() argument
268 if (offset >= iov[i].iov_len) { in iov_copy()
269 offset -= iov[i].iov_len; in iov_copy()
272 len = MIN(bytes, iov[i].iov_len - offset); in iov_copy()
274 dst_iov[j].iov_base = iov[i].iov_base + offset; in iov_copy()
288 qiov->iov = g_new(struct iovec, alloc_hint); in qemu_iovec_init()
294 void qemu_iovec_init_external(QEMUIOVector *qiov, struct iovec *iov, int niov) in qemu_iovec_init_external() argument
298 qiov->iov = iov; in qemu_iovec_init_external()
303 qiov->size += iov[i].iov_len; in qemu_iovec_init_external()
312 qiov->iov = g_renew(struct iovec, qiov->iov, qiov->nalloc); in qemu_iovec_add()
314 qiov->iov[qiov->niov].iov_base = base; in qemu_iovec_add()
315 qiov->iov[qiov->niov].iov_len = len; in qemu_iovec_add()
369 qemu_iovec_concat_iov(dst, src->iov, src->niov, soffset, sbytes); in qemu_iovec_concat()
379 static struct iovec *iov_skip_offset(struct iovec *iov, size_t offset, in iov_skip_offset() argument
382 while (offset > 0 && offset >= iov->iov_len) { in iov_skip_offset()
383 offset -= iov->iov_len; in iov_skip_offset()
384 iov++; in iov_skip_offset()
388 return iov; in iov_skip_offset()
402 struct iovec *iov, *end_iov; in qemu_iovec_slice() local
406 iov = iov_skip_offset(qiov->iov, offset, head); in qemu_iovec_slice()
407 end_iov = iov_skip_offset(iov, *head + len, tail); in qemu_iovec_slice()
415 *niov = end_iov - iov; in qemu_iovec_slice()
417 return iov; in qemu_iovec_slice()
435 struct iovec *iov; in qemu_iovec_is_zero() local
440 iov = iov_skip_offset(qiov->iov, offset, &current_offset); in qemu_iovec_is_zero()
443 uint8_t *base = (uint8_t *)iov->iov_base + current_offset; in qemu_iovec_is_zero()
444 size_t len = MIN(iov->iov_len - current_offset, bytes); in qemu_iovec_is_zero()
452 iov++; in qemu_iovec_is_zero()
481 g_free(qiov->iov); in qemu_iovec_destroy()
498 return iov_to_buf(qiov->iov, qiov->niov, offset, buf, bytes); in qemu_iovec_to_buf()
504 return iov_from_buf(qiov->iov, qiov->niov, offset, buf, bytes); in qemu_iovec_from_buf()
510 return iov_memset(qiov->iov, qiov->niov, offset, fillc, bytes); in qemu_iovec_memset()
531 uint8_t *p = (uint8_t *)a->iov[i].iov_base; in qemu_iovec_compare()
532 uint8_t *q = (uint8_t *)b->iov[i].iov_base; in qemu_iovec_compare()
534 assert(a->iov[i].iov_len == b->iov[i].iov_len); in qemu_iovec_compare()
535 while (len < a->iov[i].iov_len && *p++ == *q++) { in qemu_iovec_compare()
541 if (len != a->iov[i].iov_len) { in qemu_iovec_compare()
592 sortelems[i].src_iov = &src->iov[i]; in qemu_iovec_clone()
615 qemu_iovec_add(dest, sortelems[i].dest_base, src->iov[i].iov_len); in qemu_iovec_clone()
627 size_t iov_discard_front_undoable(struct iovec **iov, in iov_discard_front_undoable() argument
639 for (cur = *iov; *iov_cnt > 0; cur++) { in iov_discard_front_undoable()
657 *iov = cur; in iov_discard_front_undoable()
661 size_t iov_discard_front(struct iovec **iov, unsigned int *iov_cnt, in iov_discard_front() argument
664 return iov_discard_front_undoable(iov, iov_cnt, bytes, NULL); in iov_discard_front()
667 size_t iov_discard_back_undoable(struct iovec *iov, in iov_discard_back_undoable() argument
683 cur = iov + (*iov_cnt - 1); in iov_discard_back_undoable()
706 size_t iov_discard_back(struct iovec *iov, unsigned int *iov_cnt, in iov_discard_back() argument
709 return iov_discard_back_undoable(iov, iov_cnt, bytes, NULL); in iov_discard_back()
718 total = iov_discard_back(qiov->iov, &niov, bytes); in qemu_iovec_discard_back()