/*
 * Copyright (C) 2015 Red Hat, Inc.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#ifndef VIRTIO_DRV_H
#define VIRTIO_DRV_H

#include <linux/virtio.h>
#include <linux/virtio_ids.h>
#include <linux/virtio_config.h>
#include <linux/virtio_gpu.h>

#include <drm/drm_atomic.h>
#include <drm/drm_drv.h>
#include <drm/drm_encoder.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_gem.h>
#include <drm/drm_gem_shmem_helper.h>
#include <drm/drm_ioctl.h>
#include <drm/drm_probe_helper.h>
#include <drm/virtgpu_drm.h>

#define DRIVER_NAME "virtio_gpu"
#define DRIVER_DESC "virtio GPU"
#define DRIVER_DATE "0"

#define DRIVER_MAJOR 0
#define DRIVER_MINOR 1
#define DRIVER_PATCHLEVEL 0

#define STATE_INITIALIZING 0
#define STATE_OK 1
#define STATE_ERR 2

struct virtio_gpu_object_params {
	unsigned long size;
	bool dumb;
	/* 3d */
	bool virgl;
	bool blob;

	/* classic resources only */
	uint32_t format;
	uint32_t width;
	uint32_t height;
	uint32_t target;
	uint32_t bind;
	uint32_t depth;
	uint32_t array_size;
	uint32_t last_level;
	uint32_t nr_samples;
	uint32_t flags;

	/* blob resources only */
	uint32_t ctx_id;
	uint32_t blob_mem;
	uint32_t blob_flags;
	uint64_t blob_id;
};
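/*
 * Illustrative sketch (not part of the driver): how the classic fields
 * above are typically filled for a dumb 2D buffer before the params are
 * handed to virtio_gpu_object_create().  Variable names are hypothetical.
 *
 *	struct virtio_gpu_object_params params = { 0 };
 *
 *	params.format = virtio_gpu_translate_format(DRM_FORMAT_XRGB8888);
 *	params.width  = width;
 *	params.height = height;
 *	params.size   = size;
 *	params.dumb   = true;
 */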

struct virtio_gpu_object {
	struct drm_gem_shmem_object base;
	uint32_t hw_res_handle;
	bool dumb;
	bool created;
	bool host3d_blob, guest_blob;
	uint32_t blob_mem, blob_flags;

	int uuid_state;
	uuid_t uuid;
};
#define gem_to_virtio_gpu_obj(gobj) \
	container_of((gobj), struct virtio_gpu_object, base.base)
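/*
 * Example use of the cast above (illustrative): recover the driver
 * object from the embedded GEM object handed to a GEM/DRM callback.
 *
 *	struct virtio_gpu_object *bo = gem_to_virtio_gpu_obj(gem_obj);
 */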

struct virtio_gpu_object_shmem {
	struct virtio_gpu_object base;
	struct sg_table *pages;
	uint32_t mapped;
};

struct virtio_gpu_object_vram {
	struct virtio_gpu_object base;
	uint32_t map_state;
	uint32_t map_info;
	struct drm_mm_node vram_node;
};

#define to_virtio_gpu_shmem(virtio_gpu_object) \
	container_of((virtio_gpu_object), struct virtio_gpu_object_shmem, base)

#define to_virtio_gpu_vram(virtio_gpu_object) \
	container_of((virtio_gpu_object), struct virtio_gpu_object_vram, base)
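/*
 * Illustrative use of the downcasts above: check the object type first
 * (virtio_gpu_is_vram()/virtio_gpu_is_shmem() are declared below).
 *
 *	if (virtio_gpu_is_vram(bo)) {
 *		struct virtio_gpu_object_vram *vram = to_virtio_gpu_vram(bo);
 *		...
 *	}
 */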

struct virtio_gpu_object_array {
	struct ww_acquire_ctx ticket;
	struct list_head next;
	u32 nents, total;
	struct drm_gem_object *objs[];
};
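/*
 * Sketch of a typical object-array lifecycle, based on the helpers
 * declared further down (virtgpu_gem.c section); exact ordering and
 * error handling are left to the command-submission code.
 *
 *	objs = virtio_gpu_array_alloc(1);
 *	virtio_gpu_array_add_obj(objs, gem_obj);
 *	virtio_gpu_array_lock_resv(objs);
 *	... queue a command that references objs ...
 *	virtio_gpu_array_add_fence(objs, &fence->f);
 *	virtio_gpu_array_unlock_resv(objs);
 */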

struct virtio_gpu_vbuffer;
struct virtio_gpu_device;

typedef void (*virtio_gpu_resp_cb)(struct virtio_gpu_device *vgdev,
				   struct virtio_gpu_vbuffer *vbuf);
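/*
 * Illustrative shape of a response callback (the function name is
 * hypothetical); the host response is available in vbuf->resp_buf.
 *
 *	static void my_resp_cb(struct virtio_gpu_device *vgdev,
 *			       struct virtio_gpu_vbuffer *vbuf)
 *	{
 *		...
 *	}
 */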

struct virtio_gpu_fence_driver {
	atomic64_t       last_fence_id;
	uint64_t         current_fence_id;
	uint64_t         context;
	struct list_head fences;
	spinlock_t       lock;
};

struct virtio_gpu_fence {
	struct dma_fence f;
	uint64_t fence_id;
	struct virtio_gpu_fence_driver *drv;
	struct list_head node;
};
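/*
 * Fence flow (see the virtgpu_fence.c declarations near the end of this
 * header): a fence is allocated with virtio_gpu_fence_alloc(), attached
 * to a command header via virtio_gpu_fence_emit(), and signalled from
 * virtio_gpu_fence_event_process() once the host reports its fence_id,
 * which also advances fence_drv.last_fence_id.
 */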

struct virtio_gpu_vbuffer {
	char *buf;
	int size;

	void *data_buf;
	uint32_t data_size;

	char *resp_buf;
	int resp_size;
	virtio_gpu_resp_cb resp_cb;
	void *resp_cb_data;

	struct virtio_gpu_object_array *objs;
	struct list_head list;
};

struct virtio_gpu_output {
	int index;
	struct drm_crtc crtc;
	struct drm_connector conn;
	struct drm_encoder enc;
	struct virtio_gpu_display_one info;
	struct virtio_gpu_update_cursor cursor;
	struct edid *edid;
	int cur_x;
	int cur_y;
	bool needs_modeset;
};
#define drm_crtc_to_virtio_gpu_output(x) \
	container_of(x, struct virtio_gpu_output, crtc)
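/*
 * Example use (illustrative): recover the scanout from its CRTC inside
 * a CRTC or plane helper callback.
 *
 *	struct virtio_gpu_output *output = drm_crtc_to_virtio_gpu_output(crtc);
 */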

struct virtio_gpu_framebuffer {
	struct drm_framebuffer base;
	struct virtio_gpu_fence *fence;
};
#define to_virtio_gpu_framebuffer(x) \
	container_of(x, struct virtio_gpu_framebuffer, base)

struct virtio_gpu_queue {
	struct virtqueue *vq;
	spinlock_t qlock;
	wait_queue_head_t ack_queue;
	struct work_struct dequeue_work;
};
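/*
 * Queue processing sketch: the virtqueue callbacks
 * (virtio_gpu_ctrl_ack()/virtio_gpu_cursor_ack(), declared below)
 * schedule dequeue_work, whose handlers
 * (virtio_gpu_dequeue_ctrl_func()/virtio_gpu_dequeue_cursor_func())
 * reclaim completed vbuffers and wake waiters on ack_queue.
 */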

struct virtio_gpu_drv_capset {
	uint32_t id;
	uint32_t max_version;
	uint32_t max_size;
};

struct virtio_gpu_drv_cap_cache {
	struct list_head head;
	void *caps_cache;
	uint32_t id;
	uint32_t version;
	uint32_t size;
	atomic_t is_valid;
};

struct virtio_gpu_device {
	struct device *dev;
	struct drm_device *ddev;

	struct virtio_device *vdev;

	struct virtio_gpu_output outputs[VIRTIO_GPU_MAX_SCANOUTS];
	uint32_t num_scanouts;

	struct virtio_gpu_queue ctrlq;
	struct virtio_gpu_queue cursorq;
	struct kmem_cache *vbufs;

	atomic_t pending_commands;

	struct ida	resource_ida;

	wait_queue_head_t resp_wq;
	/* current display info */
	spinlock_t display_info_lock;
	bool display_info_pending;

	struct virtio_gpu_fence_driver fence_drv;

	struct ida	ctx_id_ida;

	bool has_virgl_3d;
	bool has_edid;
	bool has_indirect;
	bool has_resource_assign_uuid;
	bool has_resource_blob;
	bool has_host_visible;
	struct virtio_shm_region host_visible_region;
	struct drm_mm host_visible_mm;

	struct work_struct config_changed_work;

	struct work_struct obj_free_work;
	spinlock_t obj_free_lock;
	struct list_head obj_free_list;

	struct virtio_gpu_drv_capset *capsets;
	uint32_t num_capsets;
	struct list_head cap_cache;

	/* protects uuid state when exporting */
	spinlock_t resource_export_lock;
	/* protects map state and host_visible_mm */
	spinlock_t host_visible_lock;
};

struct virtio_gpu_fpriv {
	uint32_t ctx_id;
	bool context_created;
	struct mutex context_lock;
};
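/*
 * Note (descriptive, based on the declarations in this header): the
 * per-file host context is set up lazily; context_lock and
 * context_created are used so virtio_gpu_create_context() (declared
 * below) creates the host context only once per ctx_id.
 */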

/* virtgpu_ioctl.c */
#define DRM_VIRTIO_NUM_IOCTLS 11
extern struct drm_ioctl_desc virtio_gpu_ioctls[DRM_VIRTIO_NUM_IOCTLS];
void virtio_gpu_create_context(struct drm_device *dev, struct drm_file *file);

/* virtgpu_kms.c */
int virtio_gpu_init(struct drm_device *dev);
void virtio_gpu_deinit(struct drm_device *dev);
void virtio_gpu_release(struct drm_device *dev);
int virtio_gpu_driver_open(struct drm_device *dev, struct drm_file *file);
void virtio_gpu_driver_postclose(struct drm_device *dev, struct drm_file *file);

/* virtgpu_gem.c */
int virtio_gpu_gem_object_open(struct drm_gem_object *obj,
			       struct drm_file *file);
void virtio_gpu_gem_object_close(struct drm_gem_object *obj,
				 struct drm_file *file);
int virtio_gpu_mode_dumb_create(struct drm_file *file_priv,
				struct drm_device *dev,
				struct drm_mode_create_dumb *args);
int virtio_gpu_mode_dumb_mmap(struct drm_file *file_priv,
			      struct drm_device *dev,
			      uint32_t handle, uint64_t *offset_p);

struct virtio_gpu_object_array *virtio_gpu_array_alloc(u32 nents);
struct virtio_gpu_object_array*
virtio_gpu_array_from_handles(struct drm_file *drm_file, u32 *handles, u32 nents);
void virtio_gpu_array_add_obj(struct virtio_gpu_object_array *objs,
			      struct drm_gem_object *obj);
int virtio_gpu_array_lock_resv(struct virtio_gpu_object_array *objs);
void virtio_gpu_array_unlock_resv(struct virtio_gpu_object_array *objs);
void virtio_gpu_array_add_fence(struct virtio_gpu_object_array *objs,
				struct dma_fence *fence);
void virtio_gpu_array_put_free(struct virtio_gpu_object_array *objs);
void virtio_gpu_array_put_free_delayed(struct virtio_gpu_device *vgdev,
				       struct virtio_gpu_object_array *objs);
void virtio_gpu_array_put_free_work(struct work_struct *work);

/* virtgpu_vq.c */
int virtio_gpu_alloc_vbufs(struct virtio_gpu_device *vgdev);
void virtio_gpu_free_vbufs(struct virtio_gpu_device *vgdev);
void virtio_gpu_cmd_create_resource(struct virtio_gpu_device *vgdev,
				    struct virtio_gpu_object *bo,
				    struct virtio_gpu_object_params *params,
				    struct virtio_gpu_object_array *objs,
				    struct virtio_gpu_fence *fence);
void virtio_gpu_cmd_unref_resource(struct virtio_gpu_device *vgdev,
				   struct virtio_gpu_object *bo);
void virtio_gpu_cmd_transfer_to_host_2d(struct virtio_gpu_device *vgdev,
					uint64_t offset,
					uint32_t width, uint32_t height,
					uint32_t x, uint32_t y,
					struct virtio_gpu_object_array *objs,
					struct virtio_gpu_fence *fence);
void virtio_gpu_cmd_resource_flush(struct virtio_gpu_device *vgdev,
				   uint32_t resource_id,
				   uint32_t x, uint32_t y,
				   uint32_t width, uint32_t height);
void virtio_gpu_cmd_set_scanout(struct virtio_gpu_device *vgdev,
				uint32_t scanout_id, uint32_t resource_id,
				uint32_t width, uint32_t height,
				uint32_t x, uint32_t y);
void virtio_gpu_object_attach(struct virtio_gpu_device *vgdev,
			      struct virtio_gpu_object *obj,
			      struct virtio_gpu_mem_entry *ents,
			      unsigned int nents);
int virtio_gpu_attach_status_page(struct virtio_gpu_device *vgdev);
int virtio_gpu_detach_status_page(struct virtio_gpu_device *vgdev);
void virtio_gpu_cursor_ping(struct virtio_gpu_device *vgdev,
			    struct virtio_gpu_output *output);
int virtio_gpu_cmd_get_display_info(struct virtio_gpu_device *vgdev);
int virtio_gpu_cmd_get_capset_info(struct virtio_gpu_device *vgdev, int idx);
int virtio_gpu_cmd_get_capset(struct virtio_gpu_device *vgdev,
			      int idx, int version,
			      struct virtio_gpu_drv_cap_cache **cache_p);
int virtio_gpu_cmd_get_edids(struct virtio_gpu_device *vgdev);
void virtio_gpu_cmd_context_create(struct virtio_gpu_device *vgdev, uint32_t id,
				   uint32_t nlen, const char *name);
void virtio_gpu_cmd_context_destroy(struct virtio_gpu_device *vgdev,
				    uint32_t id);
void virtio_gpu_cmd_context_attach_resource(struct virtio_gpu_device *vgdev,
					    uint32_t ctx_id,
					    struct virtio_gpu_object_array *objs);
void virtio_gpu_cmd_context_detach_resource(struct virtio_gpu_device *vgdev,
					    uint32_t ctx_id,
					    struct virtio_gpu_object_array *objs);
void virtio_gpu_cmd_submit(struct virtio_gpu_device *vgdev,
			   void *data, uint32_t data_size,
			   uint32_t ctx_id,
			   struct virtio_gpu_object_array *objs,
			   struct virtio_gpu_fence *fence);
void virtio_gpu_cmd_transfer_from_host_3d(struct virtio_gpu_device *vgdev,
					  uint32_t ctx_id,
					  uint64_t offset, uint32_t level,
					  uint32_t stride,
					  uint32_t layer_stride,
					  struct drm_virtgpu_3d_box *box,
					  struct virtio_gpu_object_array *objs,
					  struct virtio_gpu_fence *fence);
void virtio_gpu_cmd_transfer_to_host_3d(struct virtio_gpu_device *vgdev,
					uint32_t ctx_id,
					uint64_t offset, uint32_t level,
					uint32_t stride,
					uint32_t layer_stride,
					struct drm_virtgpu_3d_box *box,
					struct virtio_gpu_object_array *objs,
					struct virtio_gpu_fence *fence);
void
virtio_gpu_cmd_resource_create_3d(struct virtio_gpu_device *vgdev,
				  struct virtio_gpu_object *bo,
				  struct virtio_gpu_object_params *params,
				  struct virtio_gpu_object_array *objs,
				  struct virtio_gpu_fence *fence);
void virtio_gpu_ctrl_ack(struct virtqueue *vq);
void virtio_gpu_cursor_ack(struct virtqueue *vq);
void virtio_gpu_fence_ack(struct virtqueue *vq);
void virtio_gpu_dequeue_ctrl_func(struct work_struct *work);
void virtio_gpu_dequeue_cursor_func(struct work_struct *work);
void virtio_gpu_dequeue_fence_func(struct work_struct *work);

void virtio_gpu_notify(struct virtio_gpu_device *vgdev);

int
virtio_gpu_cmd_resource_assign_uuid(struct virtio_gpu_device *vgdev,
				    struct virtio_gpu_object_array *objs);

int virtio_gpu_cmd_map(struct virtio_gpu_device *vgdev,
		       struct virtio_gpu_object_array *objs, uint64_t offset);

void virtio_gpu_cmd_unmap(struct virtio_gpu_device *vgdev,
			  struct virtio_gpu_object *bo);

void
virtio_gpu_cmd_resource_create_blob(struct virtio_gpu_device *vgdev,
				    struct virtio_gpu_object *bo,
				    struct virtio_gpu_object_params *params,
				    struct virtio_gpu_mem_entry *ents,
				    uint32_t nents);
void
virtio_gpu_cmd_set_scanout_blob(struct virtio_gpu_device *vgdev,
				uint32_t scanout_id,
				struct virtio_gpu_object *bo,
				struct drm_framebuffer *fb,
				uint32_t width, uint32_t height,
				uint32_t x, uint32_t y);

/* virtgpu_display.c */
int virtio_gpu_modeset_init(struct virtio_gpu_device *vgdev);
void virtio_gpu_modeset_fini(struct virtio_gpu_device *vgdev);

/* virtgpu_plane.c */
uint32_t virtio_gpu_translate_format(uint32_t drm_fourcc);
struct drm_plane *virtio_gpu_plane_init(struct virtio_gpu_device *vgdev,
					enum drm_plane_type type,
					int index);

/* virtgpu_fence.c */
struct virtio_gpu_fence *virtio_gpu_fence_alloc(
	struct virtio_gpu_device *vgdev);
void virtio_gpu_fence_emit(struct virtio_gpu_device *vgdev,
			  struct virtio_gpu_ctrl_hdr *cmd_hdr,
			  struct virtio_gpu_fence *fence);
void virtio_gpu_fence_event_process(struct virtio_gpu_device *vdev,
				    u64 fence_id);

/* virtgpu_object.c */
void virtio_gpu_cleanup_object(struct virtio_gpu_object *bo);
struct drm_gem_object *virtio_gpu_create_object(struct drm_device *dev,
						size_t size);
int virtio_gpu_object_create(struct virtio_gpu_device *vgdev,
			     struct virtio_gpu_object_params *params,
			     struct virtio_gpu_object **bo_ptr,
			     struct virtio_gpu_fence *fence);
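/*
 * Minimal creation sketch (illustrative; error handling omitted and the
 * local variables are hypothetical):
 *
 *	struct virtio_gpu_object *bo;
 *	struct virtio_gpu_object_params params = {
 *		.size = size,
 *		.dumb = true,
 *	};
 *
 *	ret = virtio_gpu_object_create(vgdev, &params, &bo, NULL);
 */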

bool virtio_gpu_is_shmem(struct virtio_gpu_object *bo);

int virtio_gpu_resource_id_get(struct virtio_gpu_device *vgdev,
			       uint32_t *resid);
/* virtgpu_prime.c */
int virtio_gpu_resource_assign_uuid(struct virtio_gpu_device *vgdev,
				    struct virtio_gpu_object *bo);
struct dma_buf *virtgpu_gem_prime_export(struct drm_gem_object *obj,
					 int flags);
struct drm_gem_object *virtgpu_gem_prime_import(struct drm_device *dev,
						struct dma_buf *buf);
int virtgpu_gem_prime_get_uuid(struct drm_gem_object *obj,
			       uuid_t *uuid);
struct drm_gem_object *virtgpu_gem_prime_import_sg_table(
	struct drm_device *dev, struct dma_buf_attachment *attach,
	struct sg_table *sgt);

/* virtgpu_debugfs.c */
void virtio_gpu_debugfs_init(struct drm_minor *minor);

/* virtgpu_vram.c */
bool virtio_gpu_is_vram(struct virtio_gpu_object *bo);
int virtio_gpu_vram_create(struct virtio_gpu_device *vgdev,
			   struct virtio_gpu_object_params *params,
			   struct virtio_gpu_object **bo_ptr);
#endif