Lines Matching refs:vq

287 map_ring(VuDev *dev, VuVirtq *vq) in map_ring() argument
289 vq->vring.desc = qva_to_va(dev, vq->vra.desc_user_addr); in map_ring()
290 vq->vring.used = qva_to_va(dev, vq->vra.used_user_addr); in map_ring()
291 vq->vring.avail = qva_to_va(dev, vq->vra.avail_user_addr); in map_ring()
294 DPRINT(" vring_desc at %p\n", vq->vring.desc); in map_ring()
295 DPRINT(" vring_used at %p\n", vq->vring.used); in map_ring()
296 DPRINT(" vring_avail at %p\n", vq->vring.avail); in map_ring()
298 return !(vq->vring.desc && vq->vring.used && vq->vring.avail); in map_ring()
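Note: the map_ring() fragments above translate the QEMU-virtual addresses delivered with VHOST_USER_SET_VRING_ADDR (vq->vra.*_user_addr) into pointers valid in the backend process; the non-zero return at line 298 signals that any of the three translations failed. A minimal sketch of such a qva-to-local translation, assuming a hypothetical region table of the kind VHOST_USER_SET_MEM_TABLE populates:

    #include <stddef.h>
    #include <stdint.h>

    /* Hypothetical region record, mirroring what SET_MEM_TABLE supplies. */
    typedef struct {
        uint64_t qva;        /* region start in QEMU's virtual address space */
        uint64_t size;       /* region length in bytes */
        uint8_t *mmap_addr;  /* where this backend mmap()ed the region */
    } RegionSketch;

    /* Translate a QEMU virtual address to a local pointer, or NULL. */
    static void *qva_to_va_sketch(RegionSketch *regions, unsigned nregions,
                                  uint64_t qva)
    {
        for (unsigned i = 0; i < nregions; i++) {
            RegionSketch *r = &regions[i];
            if (qva >= r->qva && qva - r->qva < r->size) {
                return r->mmap_addr + (qva - r->qva);
            }
        }
        return NULL;  /* untranslatable: map_ring() then returns non-zero */
    }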
302 vu_is_vq_usable(VuDev *dev, VuVirtq *vq) in vu_is_vq_usable() argument
308 if (likely(vq->vring.avail)) { in vu_is_vq_usable()
318 if (!vq->vra.desc_user_addr || !vq->vra.used_user_addr || in vu_is_vq_usable()
319 !vq->vra.avail_user_addr) { in vu_is_vq_usable()
322 if (map_ring(dev, vq)) { in vu_is_vq_usable()
335 VuVirtq *vq = &dev->vq[i]; in unmap_rings() local
336 const uintptr_t desc = (uintptr_t)vq->vring.desc; in unmap_rings()
337 const uintptr_t used = (uintptr_t)vq->vring.used; in unmap_rings()
338 const uintptr_t avail = (uintptr_t)vq->vring.avail; in unmap_rings()
351 vq->vring.desc = NULL; in unmap_rings()
352 vq->vring.used = NULL; in unmap_rings()
353 vq->vring.avail = NULL; in unmap_rings()
753 VuVirtq *vq = &dev->vq[index]; in vu_kick_cb() local
754 int sock = vq->kick_fd; in vu_kick_cb()
761 dev->remove_watch(dev, dev->vq[index].kick_fd); in vu_kick_cb()
764 kick_data, vq->handler, index); in vu_kick_cb()
765 if (vq->handler) { in vu_kick_cb()
766 vq->handler(dev, index); in vu_kick_cb()
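Note: vu_kick_cb() reads the kick eventfd and, if a handler is registered, dispatches it with the queue index. A sketch of what such a handler commonly looks like (my_queue_handler and the processing step are illustrative assumptions, not part of the listing):

    /* Illustrative handler: drain the queue, then signal the driver. */
    static void my_queue_handler(VuDev *dev, int qidx)
    {
        VuVirtq *vq = vu_get_queue(dev, qidx);
        VuVirtqElement *elem;

        while ((elem = vu_queue_pop(dev, vq, sizeof(*elem)))) {
            /* ... read elem->out_sg, write results into elem->in_sg ... */
            vu_queue_push(dev, vq, elem, 0 /* bytes written to in_sg */);
            free(elem);  /* vu_queue_pop() allocates; the caller frees */
        }
        vu_queue_notify(dev, vq);
    }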
806 dev->vq[i].enable = enabled; in vu_set_enable_all_rings()
1130 if (dev->vq[i].vring.desc) { in vu_set_mem_table_exec()
1131 if (map_ring(dev, &dev->vq[i])) { in vu_set_mem_table_exec()
1203 dev->vq[index].vring.num = num; in vu_set_vring_num_exec()
1213 VuVirtq *vq = &dev->vq[index]; in vu_set_vring_addr_exec() local
1223 vq->vra = *vra; in vu_set_vring_addr_exec()
1224 vq->vring.flags = vra->flags; in vu_set_vring_addr_exec()
1225 vq->vring.log_guest_addr = vra->log_guest_addr; in vu_set_vring_addr_exec()
1228 if (map_ring(dev, vq)) { in vu_set_vring_addr_exec()
1233 vq->used_idx = le16toh(vq->vring.used->idx); in vu_set_vring_addr_exec()
1235 if (vq->last_avail_idx != vq->used_idx) { in vu_set_vring_addr_exec()
1240 vq->last_avail_idx, vq->used_idx, in vu_set_vring_addr_exec()
1244 vq->shadow_avail_idx = vq->last_avail_idx = vq->used_idx; in vu_set_vring_addr_exec()
1259 dev->vq[index].shadow_avail_idx = dev->vq[index].last_avail_idx = num; in vu_set_vring_base_exec()
1270 vmsg->payload.state.num = dev->vq[index].last_avail_idx; in vu_get_vring_base_exec()
1273 dev->vq[index].started = false; in vu_get_vring_base_exec()
1278 if (dev->vq[index].call_fd != -1) { in vu_get_vring_base_exec()
1279 close(dev->vq[index].call_fd); in vu_get_vring_base_exec()
1280 dev->vq[index].call_fd = -1; in vu_get_vring_base_exec()
1282 if (dev->vq[index].kick_fd != -1) { in vu_get_vring_base_exec()
1283 dev->remove_watch(dev, dev->vq[index].kick_fd); in vu_get_vring_base_exec()
1284 close(dev->vq[index].kick_fd); in vu_get_vring_base_exec()
1285 dev->vq[index].kick_fd = -1; in vu_get_vring_base_exec()
1332 vu_check_queue_inflights(VuDev *dev, VuVirtq *vq) in vu_check_queue_inflights() argument
1340 if (unlikely(!vq->inflight)) { in vu_check_queue_inflights()
1344 if (unlikely(!vq->inflight->version)) { in vu_check_queue_inflights()
1346 vq->inflight->version = INFLIGHT_VERSION; in vu_check_queue_inflights()
1350 vq->used_idx = le16toh(vq->vring.used->idx); in vu_check_queue_inflights()
1351 vq->resubmit_num = 0; in vu_check_queue_inflights()
1352 vq->resubmit_list = NULL; in vu_check_queue_inflights()
1353 vq->counter = 0; in vu_check_queue_inflights()
1355 if (unlikely(vq->inflight->used_idx != vq->used_idx)) { in vu_check_queue_inflights()
1356 vq->inflight->desc[vq->inflight->last_batch_head].inflight = 0; in vu_check_queue_inflights()
1360 vq->inflight->used_idx = vq->used_idx; in vu_check_queue_inflights()
1363 for (i = 0; i < vq->inflight->desc_num; i++) { in vu_check_queue_inflights()
1364 if (vq->inflight->desc[i].inflight == 1) { in vu_check_queue_inflights()
1365 vq->inuse++; in vu_check_queue_inflights()
1369 vq->shadow_avail_idx = vq->last_avail_idx = vq->inuse + vq->used_idx; in vu_check_queue_inflights()
1371 if (vq->inuse) { in vu_check_queue_inflights()
1372 vq->resubmit_list = calloc(vq->inuse, sizeof(VuVirtqInflightDesc)); in vu_check_queue_inflights()
1373 if (!vq->resubmit_list) { in vu_check_queue_inflights()
1377 for (i = 0; i < vq->inflight->desc_num; i++) { in vu_check_queue_inflights()
1378 if (vq->inflight->desc[i].inflight) { in vu_check_queue_inflights()
1379 vq->resubmit_list[vq->resubmit_num].index = i; in vu_check_queue_inflights()
1380 vq->resubmit_list[vq->resubmit_num].counter = in vu_check_queue_inflights()
1381 vq->inflight->desc[i].counter; in vu_check_queue_inflights()
1382 vq->resubmit_num++; in vu_check_queue_inflights()
1386 if (vq->resubmit_num > 1) { in vu_check_queue_inflights()
1387 qsort(vq->resubmit_list, vq->resubmit_num, in vu_check_queue_inflights()
1390 vq->counter = vq->resubmit_list[0].counter + 1; in vu_check_queue_inflights()
1394 if (eventfd_write(vq->kick_fd, 1)) { in vu_check_queue_inflights()
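Note: vu_check_queue_inflights() reconciles the shared inflight region with the used ring after a backend reconnect: it counts still-inflight descriptors into vq->inuse, collects them into vq->resubmit_list, and sorts the list so the newest submission sits at index 0 (hence vq->counter resumes from resubmit_list[0].counter + 1 at line 1390), while vu_queue_pop() at line 2945 replays from the tail, oldest first. A comparator consistent with that ordering might look like this sketch (name illustrative; counter wraparound handling omitted):

    /* Sketch: newest submission first, so vq->counter can resume from
     * resubmit_list[0]; wraparound of the counter is ignored here. */
    static int inflight_desc_compare_sketch(const void *a, const void *b)
    {
        const VuVirtqInflightDesc *d0 = a, *d1 = b;

        return d0->counter > d1->counter ? -1 : 1;
    }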
1413 if (dev->vq[index].kick_fd != -1) { in vu_set_vring_kick_exec()
1414 dev->remove_watch(dev, dev->vq[index].kick_fd); in vu_set_vring_kick_exec()
1415 close(dev->vq[index].kick_fd); in vu_set_vring_kick_exec()
1416 dev->vq[index].kick_fd = -1; in vu_set_vring_kick_exec()
1419 dev->vq[index].kick_fd = nofd ? -1 : vmsg->fds[0]; in vu_set_vring_kick_exec()
1420 DPRINT("Got kick_fd: %d for vq: %d\n", dev->vq[index].kick_fd, index); in vu_set_vring_kick_exec()
1422 dev->vq[index].started = true; in vu_set_vring_kick_exec()
1427 if (dev->vq[index].kick_fd != -1 && dev->vq[index].handler) { in vu_set_vring_kick_exec()
1428 dev->set_watch(dev, dev->vq[index].kick_fd, VU_WATCH_IN, in vu_set_vring_kick_exec()
1432 dev->vq[index].kick_fd, index); in vu_set_vring_kick_exec()
1435 if (vu_check_queue_inflights(dev, &dev->vq[index])) { in vu_set_vring_kick_exec()
1442 void vu_set_queue_handler(VuDev *dev, VuVirtq *vq, in vu_set_queue_handler() argument
1445 int qidx = vq - dev->vq; in vu_set_queue_handler()
1447 vq->handler = handler; in vu_set_queue_handler()
1448 if (vq->kick_fd >= 0) { in vu_set_queue_handler()
1450 dev->set_watch(dev, vq->kick_fd, VU_WATCH_IN, in vu_set_queue_handler()
1453 dev->remove_watch(dev, vq->kick_fd); in vu_set_queue_handler()
1458 bool vu_set_queue_host_notifier(VuDev *dev, VuVirtq *vq, int fd, in vu_set_queue_host_notifier() argument
1461 int qidx = vq - dev->vq; in vu_set_queue_host_notifier()
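Note: both helpers recover the queue index by pointer arithmetic against the dev->vq array (qidx = vq - dev->vq). Typical application use of vu_set_queue_handler(), sketched with the illustrative handler from the earlier note:

    /* Illustrative: attach processing when a queue starts, detach when
     * it stops. Passing NULL removes the kick-fd watch (line 1453). */
    static void my_queue_set_started(VuDev *dev, int qidx, bool started)
    {
        VuVirtq *vq = vu_get_queue(dev, qidx);

        vu_set_queue_handler(dev, vq, started ? my_queue_handler : NULL);
    }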
1607 if (dev->vq[index].call_fd != -1) { in vu_set_vring_call_exec()
1608 close(dev->vq[index].call_fd); in vu_set_vring_call_exec()
1609 dev->vq[index].call_fd = -1; in vu_set_vring_call_exec()
1612 dev->vq[index].call_fd = nofd ? -1 : vmsg->fds[0]; in vu_set_vring_call_exec()
1615 if (dev->vq[index].call_fd != -1 && eventfd_write(vmsg->fds[0], 1)) { in vu_set_vring_call_exec()
1619 DPRINT("Got call_fd: %d for vq: %d\n", dev->vq[index].call_fd, index); in vu_set_vring_call_exec()
1636 if (dev->vq[index].err_fd != -1) { in vu_set_vring_err_exec()
1637 close(dev->vq[index].err_fd); in vu_set_vring_err_exec()
1638 dev->vq[index].err_fd = -1; in vu_set_vring_err_exec()
1641 dev->vq[index].err_fd = nofd ? -1 : vmsg->fds[0]; in vu_set_vring_err_exec()
1748 dev->vq[index].enable = enable; in vu_set_vring_enable_exec()
2011 dev->vq[i].inflight = (VuVirtqInflight *)rc; in vu_set_inflight_fd()
2012 dev->vq[i].inflight->desc_num = queue_size; in vu_set_inflight_fd()
2030 dev->vq[index].handler, index); in vu_handle_vring_kick()
2032 if (!dev->vq[index].started) { in vu_handle_vring_kick()
2033 dev->vq[index].started = true; in vu_handle_vring_kick()
2040 if (dev->vq[index].handler) { in vu_handle_vring_kick()
2041 dev->vq[index].handler(dev, index); in vu_handle_vring_kick()
2199 VuVirtq *vq = &dev->vq[i]; in vu_deinit() local
2201 if (vq->call_fd != -1) { in vu_deinit()
2202 close(vq->call_fd); in vu_deinit()
2203 vq->call_fd = -1; in vu_deinit()
2206 if (vq->kick_fd != -1) { in vu_deinit()
2207 dev->remove_watch(dev, vq->kick_fd); in vu_deinit()
2208 close(vq->kick_fd); in vu_deinit()
2209 vq->kick_fd = -1; in vu_deinit()
2212 if (vq->err_fd != -1) { in vu_deinit()
2213 close(vq->err_fd); in vu_deinit()
2214 vq->err_fd = -1; in vu_deinit()
2217 if (vq->resubmit_list) { in vu_deinit()
2218 free(vq->resubmit_list); in vu_deinit()
2219 vq->resubmit_list = NULL; in vu_deinit()
2222 vq->inflight = NULL; in vu_deinit()
2246 free(dev->vq); in vu_deinit()
2247 dev->vq = NULL; in vu_deinit()
2290 dev->vq = malloc(max_queues * sizeof(dev->vq[0])); in vu_init()
2291 if (!dev->vq) { in vu_init()
2299 dev->vq[i] = (VuVirtq) { in vu_init()
2312 return &dev->vq[qidx]; in vu_get_queue()
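Note: vu_init() allocates the dev->vq array for max_queues entries (line 2290) and resets each slot with a compound literal (line 2299); vu_get_queue() is the plain accessor. A setup sketch, where socket_fd and every my_* callback are application-supplied assumptions:

    VuDev dev;

    if (!vu_init(&dev, 2 /* max_queues */, socket_fd, my_panic,
                 my_read_msg, my_set_watch, my_remove_watch, &my_iface)) {
        /* vu_init() returns false when the dev->vq allocation fails */
    }
    VuVirtq *q0 = vu_get_queue(&dev, 0);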
2316 vu_queue_enabled(VuDev *dev, VuVirtq *vq) in vu_queue_enabled() argument
2318 return vq->enable; in vu_queue_enabled()
2322 vu_queue_started(const VuDev *dev, const VuVirtq *vq) in vu_queue_started() argument
2324 return vq->started; in vu_queue_started()
2328 vring_avail_flags(VuVirtq *vq) in vring_avail_flags() argument
2330 return le16toh(vq->vring.avail->flags); in vring_avail_flags()
2334 vring_avail_idx(VuVirtq *vq) in vring_avail_idx() argument
2336 vq->shadow_avail_idx = le16toh(vq->vring.avail->idx); in vring_avail_idx()
2338 return vq->shadow_avail_idx; in vring_avail_idx()
2342 vring_avail_ring(VuVirtq *vq, int i) in vring_avail_ring() argument
2344 return le16toh(vq->vring.avail->ring[i]); in vring_avail_ring()
2348 vring_get_used_event(VuVirtq *vq) in vring_get_used_event() argument
2350 return vring_avail_ring(vq, vq->vring.num); in vring_get_used_event()
2354 virtqueue_num_heads(VuDev *dev, VuVirtq *vq, unsigned int idx) in virtqueue_num_heads() argument
2356 uint16_t num_heads = vring_avail_idx(vq) - idx; in virtqueue_num_heads()
2359 if (num_heads > vq->vring.num) { in virtqueue_num_heads()
2361 idx, vq->shadow_avail_idx); in virtqueue_num_heads()
2374 virtqueue_get_head(VuDev *dev, VuVirtq *vq, in virtqueue_get_head() argument
2379 *head = vring_avail_ring(vq, idx % vq->vring.num); in virtqueue_get_head()
2382 if (*head >= vq->vring.num) { in virtqueue_get_head()
2450 vu_queue_get_avail_bytes(VuDev *dev, VuVirtq *vq, unsigned int *in_bytes, in vu_queue_get_avail_bytes() argument
2458 idx = vq->last_avail_idx; in vu_queue_get_avail_bytes()
2461 if (!vu_is_vq_usable(dev, vq)) { in vu_queue_get_avail_bytes()
2465 while ((rc = virtqueue_num_heads(dev, vq, idx)) > 0) { in vu_queue_get_avail_bytes()
2472 max = vq->vring.num; in vu_queue_get_avail_bytes()
2474 if (!virtqueue_get_head(dev, vq, idx++, &i)) { in vu_queue_get_avail_bytes()
2477 desc = vq->vring.desc; in vu_queue_get_avail_bytes()
2560 vu_queue_avail_bytes(VuDev *dev, VuVirtq *vq, unsigned int in_bytes, in vu_queue_avail_bytes() argument
2565 vu_queue_get_avail_bytes(dev, vq, &in_total, &out_total, in vu_queue_avail_bytes()
2574 vu_queue_empty(VuDev *dev, VuVirtq *vq) in vu_queue_empty() argument
2576 if (!vu_is_vq_usable(dev, vq)) { in vu_queue_empty()
2580 if (vq->shadow_avail_idx != vq->last_avail_idx) { in vu_queue_empty()
2584 return vring_avail_idx(vq) == vq->last_avail_idx; in vu_queue_empty()
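Note: vring_avail_idx() (line 2336) caches the driver's avail index in vq->shadow_avail_idx, which lets vu_queue_empty() answer from the cache at line 2580 and only reread guest-shared memory when the cache looks drained. The same pattern in isolation, as a sketch:

    /* Sketch: consult the cached index before touching guest-shared
     * memory; field names follow the fragments above. */
    static bool queue_empty_sketch(VuVirtq *vq)
    {
        if (vq->shadow_avail_idx != vq->last_avail_idx) {
            return false;  /* cache already shows pending work */
        }
        vq->shadow_avail_idx = le16toh(vq->vring.avail->idx);  /* refresh */
        return vq->shadow_avail_idx == vq->last_avail_idx;
    }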
2588 vring_notify(VuDev *dev, VuVirtq *vq) in vring_notify() argument
2598 !vq->inuse && vu_queue_empty(dev, vq)) { in vring_notify()
2603 return !(vring_avail_flags(vq) & VRING_AVAIL_F_NO_INTERRUPT); in vring_notify()
2606 v = vq->signalled_used_valid; in vring_notify()
2607 vq->signalled_used_valid = true; in vring_notify()
2608 old = vq->signalled_used; in vring_notify()
2609 new = vq->signalled_used = vq->used_idx; in vring_notify()
2610 return !v || vring_need_event(vring_get_used_event(vq), new, old); in vring_notify()
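Note: when VIRTIO_RING_F_EVENT_IDX is negotiated, vring_notify() signals only if the used index crossed the driver's used_event slot (read via vring_get_used_event() at line 2610). The vring_need_event() test it relies on is the standard one from linux/virtio_ring.h:

    /* As defined by the virtio ring ABI: signal only if new_idx stepped
     * past event_idx since old, using wrap-safe 16-bit arithmetic. */
    static inline int vring_need_event(uint16_t event_idx,
                                       uint16_t new_idx, uint16_t old)
    {
        return (uint16_t)(new_idx - event_idx - 1) < (uint16_t)(new_idx - old);
    }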
2613 static void _vu_queue_notify(VuDev *dev, VuVirtq *vq, bool sync) in _vu_queue_notify() argument
2615 if (!vu_is_vq_usable(dev, vq)) { in _vu_queue_notify()
2619 if (!vring_notify(dev, vq)) { in _vu_queue_notify()
2624 if (vq->call_fd < 0 && in _vu_queue_notify()
2633 .index = vq - dev->vq, in _vu_queue_notify()
2651 if (eventfd_write(vq->call_fd, 1) < 0) { in _vu_queue_notify()
2656 void vu_queue_notify(VuDev *dev, VuVirtq *vq) in vu_queue_notify() argument
2658 _vu_queue_notify(dev, vq, false); in vu_queue_notify()
2661 void vu_queue_notify_sync(VuDev *dev, VuVirtq *vq) in vu_queue_notify_sync() argument
2663 _vu_queue_notify(dev, vq, true); in vu_queue_notify_sync()
2677 vring_used_flags_set_bit(VuVirtq *vq, int mask) in vring_used_flags_set_bit() argument
2681 flags = (uint16_t *)((char*)vq->vring.used + in vring_used_flags_set_bit()
2687 vring_used_flags_unset_bit(VuVirtq *vq, int mask) in vring_used_flags_unset_bit() argument
2691 flags = (uint16_t *)((char*)vq->vring.used + in vring_used_flags_unset_bit()
2697 vring_set_avail_event(VuVirtq *vq, uint16_t val) in vring_set_avail_event() argument
2701 if (!vq->notification) { in vring_set_avail_event()
2705 memcpy(&vq->vring.used->ring[vq->vring.num], &val_le, sizeof(uint16_t)); in vring_set_avail_event()
2709 vu_queue_set_notification(VuDev *dev, VuVirtq *vq, int enable) in vu_queue_set_notification() argument
2711 vq->notification = enable; in vu_queue_set_notification()
2713 vring_set_avail_event(vq, vring_avail_idx(vq)); in vu_queue_set_notification()
2715 vring_used_flags_unset_bit(vq, VRING_USED_F_NO_NOTIFY); in vu_queue_set_notification()
2717 vring_used_flags_set_bit(vq, VRING_USED_F_NO_NOTIFY); in vu_queue_set_notification()
2786 vu_queue_map_desc(VuDev *dev, VuVirtq *vq, unsigned int idx, size_t sz) in vu_queue_map_desc() argument
2788 struct vring_desc *desc = vq->vring.desc; in vu_queue_map_desc()
2791 unsigned int max = vq->vring.num; in vu_queue_map_desc()
2879 vu_queue_inflight_get(VuDev *dev, VuVirtq *vq, int desc_idx) in vu_queue_inflight_get() argument
2885 if (unlikely(!vq->inflight)) { in vu_queue_inflight_get()
2889 vq->inflight->desc[desc_idx].counter = vq->counter++; in vu_queue_inflight_get()
2890 vq->inflight->desc[desc_idx].inflight = 1; in vu_queue_inflight_get()
2896 vu_queue_inflight_pre_put(VuDev *dev, VuVirtq *vq, int desc_idx) in vu_queue_inflight_pre_put() argument
2902 if (unlikely(!vq->inflight)) { in vu_queue_inflight_pre_put()
2906 vq->inflight->last_batch_head = desc_idx; in vu_queue_inflight_pre_put()
2912 vu_queue_inflight_post_put(VuDev *dev, VuVirtq *vq, int desc_idx) in vu_queue_inflight_post_put() argument
2918 if (unlikely(!vq->inflight)) { in vu_queue_inflight_post_put()
2924 vq->inflight->desc[desc_idx].inflight = 0; in vu_queue_inflight_post_put()
2928 vq->inflight->used_idx = vq->used_idx; in vu_queue_inflight_post_put()
2934 vu_queue_pop(VuDev *dev, VuVirtq *vq, size_t sz) in vu_queue_pop() argument
2940 if (!vu_is_vq_usable(dev, vq)) { in vu_queue_pop()
2944 if (unlikely(vq->resubmit_list && vq->resubmit_num > 0)) { in vu_queue_pop()
2945 i = (--vq->resubmit_num); in vu_queue_pop()
2946 elem = vu_queue_map_desc(dev, vq, vq->resubmit_list[i].index, sz); in vu_queue_pop()
2948 if (!vq->resubmit_num) { in vu_queue_pop()
2949 free(vq->resubmit_list); in vu_queue_pop()
2950 vq->resubmit_list = NULL; in vu_queue_pop()
2956 if (vu_queue_empty(dev, vq)) { in vu_queue_pop()
2965 if (vq->inuse >= vq->vring.num) { in vu_queue_pop()
2970 if (!virtqueue_get_head(dev, vq, vq->last_avail_idx++, &head)) { in vu_queue_pop()
2975 vring_set_avail_event(vq, vq->last_avail_idx); in vu_queue_pop()
2978 elem = vu_queue_map_desc(dev, vq, head, sz); in vu_queue_pop()
2984 vq->inuse++; in vu_queue_pop()
2986 vu_queue_inflight_get(dev, vq, head); in vu_queue_pop()
2992 vu_queue_detach_element(VuDev *dev, VuVirtq *vq, VuVirtqElement *elem, in vu_queue_detach_element() argument
2995 vq->inuse--; in vu_queue_detach_element()
3000 vu_queue_unpop(VuDev *dev, VuVirtq *vq, VuVirtqElement *elem, in vu_queue_unpop() argument
3003 vq->last_avail_idx--; in vu_queue_unpop()
3004 vu_queue_detach_element(dev, vq, elem, len); in vu_queue_unpop()
3008 vu_queue_rewind(VuDev *dev, VuVirtq *vq, unsigned int num) in vu_queue_rewind() argument
3010 if (num > vq->inuse) { in vu_queue_rewind()
3013 vq->last_avail_idx -= num; in vu_queue_rewind()
3014 vq->inuse -= num; in vu_queue_rewind()
3019 void vring_used_write(VuDev *dev, VuVirtq *vq, in vring_used_write() argument
3022 struct vring_used *used = vq->vring.used; in vring_used_write()
3025 vu_log_write(dev, vq->vring.log_guest_addr + in vring_used_write()
3032 vu_log_queue_fill(VuDev *dev, VuVirtq *vq, in vu_log_queue_fill() argument
3036 struct vring_desc *desc = vq->vring.desc; in vu_log_queue_fill()
3042 max = vq->vring.num; in vu_log_queue_fill()
3091 vu_queue_fill(VuDev *dev, VuVirtq *vq, in vu_queue_fill() argument
3097 if (!vu_is_vq_usable(dev, vq)) { in vu_queue_fill()
3101 vu_log_queue_fill(dev, vq, elem, len); in vu_queue_fill()
3103 idx = (idx + vq->used_idx) % vq->vring.num; in vu_queue_fill()
3107 vring_used_write(dev, vq, &uelem, idx); in vu_queue_fill()
3111 void vring_used_idx_set(VuDev *dev, VuVirtq *vq, uint16_t val) in vring_used_idx_set() argument
3113 vq->vring.used->idx = htole16(val); in vring_used_idx_set()
3115 vq->vring.log_guest_addr + offsetof(struct vring_used, idx), in vring_used_idx_set()
3116 sizeof(vq->vring.used->idx)); in vring_used_idx_set()
3118 vq->used_idx = val; in vring_used_idx_set()
3122 vu_queue_flush(VuDev *dev, VuVirtq *vq, unsigned int count) in vu_queue_flush() argument
3126 if (!vu_is_vq_usable(dev, vq)) { in vu_queue_flush()
3133 old = vq->used_idx; in vu_queue_flush()
3135 vring_used_idx_set(dev, vq, new); in vu_queue_flush()
3136 vq->inuse -= count; in vu_queue_flush()
3137 if (unlikely((int16_t)(new - vq->signalled_used) < (uint16_t)(new - old))) { in vu_queue_flush()
3138 vq->signalled_used_valid = false; in vu_queue_flush()
3143 vu_queue_push(VuDev *dev, VuVirtq *vq, in vu_queue_push() argument
3146 vu_queue_fill(dev, vq, elem, len, 0); in vu_queue_push()
3147 vu_queue_inflight_pre_put(dev, vq, elem->index); in vu_queue_push()
3148 vu_queue_flush(dev, vq, 1); in vu_queue_push()
3149 vu_queue_inflight_post_put(dev, vq, elem->index); in vu_queue_push()
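Note: vu_queue_push() (lines 3143-3149) fixes the ordering that makes inflight tracking crash-safe: fill the used element, record last_batch_head (pre_put), publish the new used index (flush), and only then clear the descriptor's inflight flag (post_put). End to end, a backend's per-request path reduces to this sketch (process_request() is a hypothetical helper):

    VuVirtqElement *elem = vu_queue_pop(dev, vq, sizeof(*elem));
    if (elem) {
        size_t written = process_request(elem);  /* hypothetical helper */

        vu_queue_push(dev, vq, elem, written);   /* fill + flush + inflight */
        vu_queue_notify(dev, vq);                /* eventfd or in-band message */
        free(elem);
    }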