Lines matching the full identifier "g" in QEMU's vhost-user-gpu backend (contrib/vhost-user-gpu/vhost-user-gpu.c). Each hit shows the source line number, the matching source line, the enclosing function, and whether "g" is bound there as a function argument or a local.
52 static void vg_cleanup_mapping(VuGpu *g,
111 vg_sock_fd_close(VuGpu *g) in vg_sock_fd_close() argument
113 if (g->sock_fd >= 0) { in vg_sock_fd_close()
114 close(g->sock_fd); in vg_sock_fd_close()
115 g->sock_fd = -1; in vg_sock_fd_close()
122 VuGpu *g = user_data; in source_wait_cb() local
124 if (!vg_recv_msg(g, VHOST_USER_GPU_DMABUF_UPDATE, 0, NULL)) { in source_wait_cb()
129 g->wait_in = 0; in source_wait_cb()
130 vg_handle_ctrl(&g->dev.parent, 0); in source_wait_cb()
136 vg_wait_ok(VuGpu *g) in vg_wait_ok() argument
138 assert(g->wait_in == 0); in vg_wait_ok()
139 g->wait_in = g_unix_fd_add(g->sock_fd, G_IO_IN | G_IO_HUP, in vg_wait_ok()
140 source_wait_cb, g); in vg_wait_ok()
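
The hits in source_wait_cb() and vg_wait_ok() above are two halves of one pattern: park the control queue, arm a one-shot GLib watch on the vhost-user-gpu socket, and resume from the callback once the frontend has answered. A minimal sketch of that pattern, assuming only GLib; Backend, on_readable() and wait_for_reply() are illustrative names, not the backend's own:

    #include <assert.h>
    #include <glib-unix.h>

    typedef struct Backend {
        int sock_fd;
        guint wait_in;            /* nonzero while a reply is pending */
    } Backend;

    static gboolean
    on_readable(gint fd, GIOCondition condition, gpointer user_data)
    {
        Backend *b = user_data;

        /* read and validate the peer's reply here, then unblock */
        b->wait_in = 0;
        return G_SOURCE_REMOVE;   /* one-shot: detach the source */
    }

    static void
    wait_for_reply(Backend *b)
    {
        assert(b->wait_in == 0);  /* at most one outstanding wait */
        b->wait_in = g_unix_fd_add(b->sock_fd, G_IO_IN | G_IO_HUP,
                                   on_readable, b);
    }
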
191 vg_recv_msg(VuGpu *g, uint32_t expect_req, uint32_t expect_size, in vg_recv_msg() argument
196 if (vg_sock_fd_read(g->sock_fd, &req, sizeof(req)) < 0 || in vg_recv_msg()
197 vg_sock_fd_read(g->sock_fd, &flags, sizeof(flags)) < 0 || in vg_recv_msg()
198 vg_sock_fd_read(g->sock_fd, &size, sizeof(size)) < 0) { in vg_recv_msg()
206 if (size && vg_sock_fd_read(g->sock_fd, payload, size) != size) { in vg_recv_msg()
213 vg_sock_fd_close(g); in vg_recv_msg()
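
vg_recv_msg() reads a fixed header of three u32 fields (request, flags, size) and then size bytes of payload, treating any short read or mismatch as fatal for the socket. A sketch of that shape; read_full() and recv_gpu_msg() are hypothetical stand-ins for vg_sock_fd_read() and the real function:

    #include <stdint.h>
    #include <unistd.h>

    static ssize_t
    read_full(int fd, void *buf, size_t len)
    {
        size_t done = 0;

        while (done < len) {
            ssize_t r = read(fd, (char *)buf + done, len - done);

            if (r <= 0) {
                return -1;        /* EOF or error */
            }
            done += (size_t)r;
        }
        return (ssize_t)done;
    }

    static int
    recv_gpu_msg(int fd, uint32_t expect_req, uint32_t expect_size,
                 void *payload)
    {
        uint32_t req, flags, size;

        if (read_full(fd, &req, sizeof(req)) < 0 ||
            read_full(fd, &flags, sizeof(flags)) < 0 ||
            read_full(fd, &size, sizeof(size)) < 0) {
            return -1;
        }
        if (req != expect_req || size != expect_size) {
            return -1;            /* unexpected reply: caller closes fd */
        }
        if (size && read_full(fd, payload, size) < 0) {
            return -1;
        }
        return 0;
    }
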
218 virtio_gpu_find_resource(VuGpu *g, uint32_t resource_id) in virtio_gpu_find_resource() argument
222 QTAILQ_FOREACH(res, &g->reslist, next) { in virtio_gpu_find_resource()
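
The resource lookup in virtio_gpu_find_resource() is a linear scan of g->reslist keyed by resource_id. The sketch below uses the portable <sys/queue.h> TAILQ macros; QEMU's QTAILQ_FOREACH iterates the same way:

    #include <stddef.h>
    #include <stdint.h>
    #include <sys/queue.h>

    struct resource {
        uint32_t resource_id;
        TAILQ_ENTRY(resource) next;
    };
    TAILQ_HEAD(reslist, resource);

    static struct resource *
    find_resource(struct reslist *list, uint32_t resource_id)
    {
        struct resource *res;

        TAILQ_FOREACH(res, list, next) {
            if (res->resource_id == resource_id) {
                return res;
            }
        }
        return NULL;   /* callers answer with an invalid-resource error */
    }
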
231 vg_ctrl_response(VuGpu *g, in vg_ctrl_response() argument
249 vu_queue_push(&g->dev.parent, cmd->vq, &cmd->elem, s); in vg_ctrl_response()
250 vu_queue_notify(&g->dev.parent, cmd->vq); in vg_ctrl_response()
255 vg_ctrl_response_nodata(VuGpu *g, in vg_ctrl_response_nodata() argument
263 vg_ctrl_response(g, cmd, &resp, sizeof(resp)); in vg_ctrl_response_nodata()
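
vg_ctrl_response() completes a control command: copy the response into the request element's in-iov, push the element onto the used ring with vu_queue_push(), then notify the guest with vu_queue_notify() (both libvhost-user calls, as the hits show). In this sketch, struct ctrl_command and copy_to_iov() are pared-down stand-ins for the backend's command bookkeeping and iov helper:

    #include <stdint.h>
    #include <string.h>
    #include <sys/uio.h>
    #include "libvhost-user.h"   /* VuDev, VuVirtq, VuVirtqElement */

    struct ctrl_command {        /* stand-in for the backend's command state */
        VuVirtqElement elem;
        VuVirtq *vq;
    };

    static size_t
    copy_to_iov(struct iovec *iov, unsigned int iov_cnt,
                const void *buf, size_t len)
    {
        size_t done = 0;

        for (unsigned int i = 0; i < iov_cnt && done < len; i++) {
            size_t n = len - done < iov[i].iov_len ? len - done
                                                   : iov[i].iov_len;
            memcpy(iov[i].iov_base, (const char *)buf + done, n);
            done += n;
        }
        return done;
    }

    static void
    ctrl_response(VuDev *dev, struct ctrl_command *cmd,
                  const void *resp, size_t resp_len)
    {
        size_t s = copy_to_iov(cmd->elem.in_sg, cmd->elem.in_num,
                               resp, resp_len);

        vu_queue_push(dev, cmd->vq, &cmd->elem, s);   /* mark element used */
        vu_queue_notify(dev, cmd->vq);                /* kick the guest */
    }
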
354 vg_resource_create_2d(VuGpu *g, in vg_resource_create_2d() argument
370 res = virtio_gpu_find_resource(g, c2d.resource_id); in vg_resource_create_2d()
391 vugbm_buffer_create(&res->buffer, &g->gdev, c2d.width, c2d.height); in vg_resource_create_2d()
406 QTAILQ_INSERT_HEAD(&g->reslist, res, next); in vg_resource_create_2d()
410 vg_disable_scanout(VuGpu *g, int scanout_id) in vg_disable_scanout() argument
412 struct virtio_gpu_scanout *scanout = &g->scanout[scanout_id]; in vg_disable_scanout()
419 res = virtio_gpu_find_resource(g, scanout->resource_id); in vg_disable_scanout()
427 if (g->sock_fd >= 0) { in vg_disable_scanout()
433 vg_send_msg(g, &msg, -1); in vg_disable_scanout()
438 vg_resource_destroy(VuGpu *g, in vg_resource_destroy() argument
446 vg_disable_scanout(g, i); in vg_resource_destroy()
452 vg_cleanup_mapping(g, res); in vg_resource_destroy()
454 QTAILQ_REMOVE(&g->reslist, res, next); in vg_resource_destroy()
459 vg_resource_unref(VuGpu *g, in vg_resource_unref() argument
468 res = virtio_gpu_find_resource(g, unref.resource_id); in vg_resource_unref()
475 vg_resource_destroy(g, res); in vg_resource_unref()
479 vg_create_mapping_iov(VuGpu *g, in vg_create_mapping_iov() argument
509 (*iov)[i].iov_base = vu_gpa_to_va(&g->dev.parent, &len, ents[i].addr); in vg_create_mapping_iov()
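
vg_create_mapping_iov() turns the guest-physical backing entries of an attach-backing command into an iovec, translating each entry with libvhost-user's vu_gpa_to_va() and rejecting any entry that cannot be mapped in full (the length it returns shrinks when a region is truncated). A sketch under those assumptions; struct mem_entry stands in for struct virtio_gpu_mem_entry:

    #include <stdint.h>
    #include <stdlib.h>
    #include <sys/uio.h>
    #include "libvhost-user.h"

    struct mem_entry {      /* stand-in for struct virtio_gpu_mem_entry */
        uint64_t addr;
        uint32_t length;
    };

    static int
    map_backing(VuDev *dev, const struct mem_entry *ents,
                uint32_t nr_entries, struct iovec **iov)
    {
        *iov = calloc(nr_entries, sizeof(**iov));
        if (!*iov) {
            return -1;
        }
        for (uint32_t i = 0; i < nr_entries; i++) {
            uint64_t len = ents[i].length;

            (*iov)[i].iov_len = ents[i].length;
            (*iov)[i].iov_base = vu_gpa_to_va(dev, &len, ents[i].addr);
            if (!(*iov)[i].iov_base || len != ents[i].length) {
                free(*iov);      /* partial translation: reject the command */
                *iov = NULL;
                return -1;
            }
        }
        return 0;
    }
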
524 vg_resource_attach_backing(VuGpu *g, in vg_resource_attach_backing() argument
534 res = virtio_gpu_find_resource(g, ab.resource_id); in vg_resource_attach_backing()
547 ret = vg_create_mapping_iov(g, &ab, cmd, &res->iov); in vg_resource_attach_backing()
557 void vg_cleanup_mapping_iov(VuGpu *g, in vg_cleanup_mapping_iov() argument
564 vg_cleanup_mapping(VuGpu *g, in vg_cleanup_mapping() argument
567 vg_cleanup_mapping_iov(g, res->iov, res->iov_cnt); in vg_cleanup_mapping()
573 vg_resource_detach_backing(VuGpu *g, in vg_resource_detach_backing() argument
582 res = virtio_gpu_find_resource(g, detach.resource_id); in vg_resource_detach_backing()
590 vg_cleanup_mapping(g, res); in vg_resource_detach_backing()
594 vg_transfer_to_host_2d(VuGpu *g, in vg_transfer_to_host_2d() argument
607 res = virtio_gpu_find_resource(g, t2d.resource_id); in vg_transfer_to_host_2d()
653 vg_set_scanout(VuGpu *g, in vg_set_scanout() argument
672 vg_disable_scanout(g, ss.scanout_id); in vg_set_scanout()
677 res = virtio_gpu_find_resource(g, ss.resource_id); in vg_set_scanout()
699 scanout = &g->scanout[ss.scanout_id]; in vg_set_scanout()
701 ores = virtio_gpu_find_resource(g, scanout->resource_id); in vg_set_scanout()
733 vg_send_msg(g, &msg, fd); in vg_set_scanout()
746 vg_send_msg(g, &msg, -1); in vg_set_scanout()
751 vg_resource_flush(VuGpu *g, in vg_resource_flush() argument
762 res = virtio_gpu_find_resource(g, rf.resource_id); in vg_resource_flush()
794 scanout = &g->scanout[i]; in vg_resource_flush()
818 vg_send_msg(g, &vmsg, -1); in vg_resource_flush()
819 vg_wait_ok(g); in vg_resource_flush()
850 vg_send_msg(g, msg, -1); in vg_resource_flush()
949 update_cursor_data_simple(VuGpu *g, uint32_t resource_id, gpointer data) in update_cursor_data_simple() argument
953 res = virtio_gpu_find_resource(g, resource_id); in update_cursor_data_simple()
964 vg_process_cursor_cmd(VuGpu *g, struct virtio_gpu_update_cursor *cursor) in vg_process_cursor_cmd() argument
979 vg_send_msg(g, &msg, -1); in vg_process_cursor_cmd()
997 if (g->virgl) { in vg_process_cursor_cmd()
998 vg_virgl_update_cursor_data(g, cursor->resource_id, in vg_process_cursor_cmd()
1001 update_cursor_data_simple(g, cursor->resource_id, in vg_process_cursor_cmd()
1004 vg_send_msg(g, &msg, -1); in vg_process_cursor_cmd()
1016 VuGpu *g = container_of(dev, VuGpu, dev.parent); in vg_handle_cursor() local
1036 vg_process_cursor_cmd(g, &cursor); in vg_handle_cursor()
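
The queue handlers recover the full device state from the embedded parent pointer with container_of(), as the vg_handle_cursor() hit shows (the source uses container_of(dev, VuGpu, dev.parent)). A self-contained illustration of the macro; QEMU ships its own version, and the types here are stand-ins for the sketch only:

    #include <stddef.h>

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    typedef struct Parent { int unused; } Parent;  /* stand-in for VuDev */

    typedef struct Gpu {                           /* stand-in for VuGpu */
        Parent parent;
        int sock_fd;
    } Gpu;

    static void
    handle_queue(Parent *dev)
    {
        Gpu *g = container_of(dev, Gpu, parent);

        (void)g->sock_fd;   /* full device state is reachable again */
    }
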
1075 VuGpu *g = user_data; in protocol_features_cb() local
1081 if (!vg_recv_msg(g, msg.request, in protocol_features_cb()
1093 vg_send_msg(g, &msg, -1); in protocol_features_cb()
1095 g->wait_in = 0; in protocol_features_cb()
1096 vg_handle_ctrl(&g->dev.parent, 0); in protocol_features_cb()
1098 if (g->edid_inited && !(protocol_features & protocol_edid)) { in protocol_features_cb()
1104 g->use_modifiers = !!(protocol_features & protocol_dmabuf2); in protocol_features_cb()
1110 set_gpu_protocol_features(VuGpu *g) in set_gpu_protocol_features() argument
1116 vg_send_msg(g, &msg, -1); in set_gpu_protocol_features()
1117 assert(g->wait_in == 0); in set_gpu_protocol_features()
1118 g->wait_in = g_unix_fd_add(g->sock_fd, G_IO_IN | G_IO_HUP, in set_gpu_protocol_features()
1119 protocol_features_cb, g); in set_gpu_protocol_features()
1125 VuGpu *g = container_of(dev, VuGpu, dev.parent); in vg_process_msg() local
1130 g_return_val_if_fail(g->sock_fd == -1, 1); in vg_process_msg()
1131 g->sock_fd = msg->fds[0]; in vg_process_msg()
1132 set_gpu_protocol_features(g); in vg_process_msg()
1158 VuGpu *g = container_of(dev, VuGpu, dev.parent); in vg_set_features() local
1161 if (virgl && !g->virgl_inited) { in vg_set_features()
1162 if (!vg_virgl_init(g)) { in vg_set_features()
1165 g->virgl_inited = true; in vg_set_features()
1168 g->edid_inited = !!(features & (1 << VIRTIO_GPU_F_EDID)); in vg_set_features()
1170 g->virgl = virgl; in vg_set_features()
1176 VuGpu *g = container_of(dev, VuGpu, dev.parent); in vg_get_config() local
1183 g->virtio_config.num_capsets = vg_virgl_get_num_capsets(); in vg_get_config()
1186 memcpy(config, &g->virtio_config, len); in vg_get_config()
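
vg_get_config() serves a config-space read by bounding the requested length against the device config and copying from the cached structure, as the memcpy hit shows. A sketch with a stand-in layout modeled on struct virtio_gpu_config:

    #include <stdint.h>
    #include <string.h>

    struct gpu_config {          /* stand-in for struct virtio_gpu_config */
        uint32_t events_read;
        uint32_t events_clear;
        uint32_t num_scanouts;
        uint32_t num_capsets;
    };

    static int
    get_config(const struct gpu_config *dev_cfg, uint8_t *out, uint32_t len)
    {
        if (len > sizeof(*dev_cfg)) {
            return -1;           /* frontend asked for more than we have */
        }
        memcpy(out, dev_cfg, len);
        return 0;
    }
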
1196 VuGpu *g = container_of(dev, VuGpu, dev.parent); in vg_set_config() local
1200 g->virtio_config.events_read &= ~config->events_clear; in vg_set_config()
1216 vg_destroy(VuGpu *g) in vg_destroy() argument
1220 vug_deinit(&g->dev); in vg_destroy()
1222 vg_sock_fd_close(g); in vg_destroy()
1224 QTAILQ_FOREACH_SAFE(res, &g->reslist, next, tmp) { in vg_destroy()
1225 vg_resource_destroy(g, res); in vg_destroy()
1228 vugbm_device_destroy(&g->gdev); in vg_destroy()
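
vg_destroy() walks the resource list with QTAILQ_FOREACH_SAFE, whose point is that the successor is cached before the loop body destroys the current element. The same logic open-coded with portable TAILQ macros:

    #include <stdlib.h>
    #include <sys/queue.h>

    struct resource {
        TAILQ_ENTRY(resource) next;
    };
    TAILQ_HEAD(reslist, resource);

    static void
    destroy_all(struct reslist *list)
    {
        struct resource *res = TAILQ_FIRST(list);

        while (res) {
            struct resource *tmp = TAILQ_NEXT(res, next); /* cache first */

            TAILQ_REMOVE(list, res, next);
            free(res);   /* safe: the successor was saved before freeing */
            res = tmp;
        }
    }
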
1252 VuGpu g = { .sock_fd = -1, .drm_rnode_fd = -1 }; in main() local
1254 QTAILQ_INIT(&g.reslist); in main()
1255 QTAILQ_INIT(&g.fenceq); in main()
1276 g.drm_rnode_fd = qemu_drm_rendernode_open(opt_render_node); in main()
1277 if (opt_render_node && g.drm_rnode_fd == -1) { in main()
1282 vugbm_device_init(&g.gdev, g.drm_rnode_fd); in main()
1305 if (!vug_init(&g.dev, VHOST_USER_GPU_MAX_QUEUES, fd, vg_panic, &vuiface)) { in main()
1314 vg_destroy(&g); in main()
1315 if (g.drm_rnode_fd >= 0) { in main()
1316 close(g.drm_rnode_fd); in main()