/*
 * Copyright (C) 2015 Red Hat, Inc.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#include "virtgpu_drv.h"
#include <drm/drm_plane_helper.h>
#include <drm/drm_atomic_helper.h>

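/* 32 bpp pixel formats advertised to userspace for primary planes */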
static const uint32_t virtio_gpu_formats[] = {
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_BGRX8888,
	DRM_FORMAT_BGRA8888,
	DRM_FORMAT_RGBX8888,
	DRM_FORMAT_RGBA8888,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_ABGR8888,
};

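/* cursor planes only advertise a single 32 bpp format with alpha */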
static const uint32_t virtio_gpu_cursor_formats[] = {
	DRM_FORMAT_ARGB8888,
};

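/* planes are allocated with kzalloc() in virtio_gpu_plane_init(), free them here */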
static void virtio_gpu_plane_destroy(struct drm_plane *plane)
{
	drm_plane_cleanup(plane);
	kfree(plane);
}

static const struct drm_plane_funcs virtio_gpu_plane_funcs = {
	.update_plane		= drm_atomic_helper_update_plane,
	.disable_plane		= drm_atomic_helper_disable_plane,
	.destroy		= virtio_gpu_plane_destroy,
	.reset			= drm_atomic_helper_plane_reset,
	.atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
	.atomic_destroy_state	= drm_atomic_helper_plane_destroy_state,
};

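/*
 * No driver-specific constraints to enforce; any plane state produced by
 * the atomic helpers is accepted as-is.
 */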
static int virtio_gpu_plane_atomic_check(struct drm_plane *plane,
					 struct drm_plane_state *state)
{
	return 0;
}

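/*
 * Atomic commit handler for the primary plane: transfer dumb buffer
 * contents to the host, point the scanout at the new framebuffer's
 * resource (or at resource 0 when the plane has no framebuffer) and
 * flush the displayed region.
 */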
static void virtio_gpu_primary_plane_update(struct drm_plane *plane,
					    struct drm_plane_state *old_state)
{
	struct drm_device *dev = plane->dev;
	struct virtio_gpu_device *vgdev = dev->dev_private;
	struct virtio_gpu_output *output = NULL;
	struct virtio_gpu_framebuffer *vgfb;
	struct virtio_gpu_object *bo;
	uint32_t handle;

	if (plane->state->crtc)
		output = drm_crtc_to_virtio_gpu_output(plane->state->crtc);
	if (old_state->crtc)
		output = drm_crtc_to_virtio_gpu_output(old_state->crtc);
	if (WARN_ON(!output))
		return;

	if (plane->state->fb) {
		vgfb = to_virtio_gpu_framebuffer(plane->state->fb);
		bo = gem_to_virtio_gpu_obj(vgfb->obj);
		handle = bo->hw_res_handle;
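		/*
		 * Dumb buffers are backed by guest memory, so the pixel
		 * data has to be transferred to the host resource before
		 * it can be displayed.
		 */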
		if (bo->dumb) {
			virtio_gpu_cmd_transfer_to_host_2d
				(vgdev, handle, 0,
				 cpu_to_le32(plane->state->src_w >> 16),
				 cpu_to_le32(plane->state->src_h >> 16),
				 cpu_to_le32(plane->state->src_x >> 16),
				 cpu_to_le32(plane->state->src_y >> 16), NULL);
		}
	} else {
		handle = 0;
	}

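	/* update the scanout and flush the displayed rectangle */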
	DRM_DEBUG("handle 0x%x, crtc %dx%d+%d+%d, src %dx%d+%d+%d\n", handle,
		  plane->state->crtc_w, plane->state->crtc_h,
		  plane->state->crtc_x, plane->state->crtc_y,
		  plane->state->src_w >> 16,
		  plane->state->src_h >> 16,
		  plane->state->src_x >> 16,
		  plane->state->src_y >> 16);
	virtio_gpu_cmd_set_scanout(vgdev, output->index, handle,
				   plane->state->src_w >> 16,
				   plane->state->src_h >> 16,
				   plane->state->src_x >> 16,
				   plane->state->src_y >> 16);
	virtio_gpu_cmd_resource_flush(vgdev, handle,
				      plane->state->src_x >> 16,
				      plane->state->src_y >> 16,
				      plane->state->src_w >> 16,
				      plane->state->src_h >> 16);
}

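/*
 * Atomic commit handler for the cursor plane: upload a new cursor image
 * when the framebuffer changed, otherwise just move the cursor to its
 * new position.
 */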
static void virtio_gpu_cursor_plane_update(struct drm_plane *plane,
					   struct drm_plane_state *old_state)
{
	struct drm_device *dev = plane->dev;
	struct virtio_gpu_device *vgdev = dev->dev_private;
	struct virtio_gpu_output *output = NULL;
	struct virtio_gpu_framebuffer *vgfb;
	struct virtio_gpu_fence *fence = NULL;
	struct virtio_gpu_object *bo = NULL;
	uint32_t handle;
	int ret = 0;

	if (plane->state->crtc)
		output = drm_crtc_to_virtio_gpu_output(plane->state->crtc);
	if (old_state->crtc)
		output = drm_crtc_to_virtio_gpu_output(old_state->crtc);
	if (WARN_ON(!output))
		return;

	if (plane->state->fb) {
		vgfb = to_virtio_gpu_framebuffer(plane->state->fb);
		bo = gem_to_virtio_gpu_obj(vgfb->obj);
		handle = bo->hw_res_handle;
	} else {
		handle = 0;
	}

	if (bo && bo->dumb && (plane->state->fb != old_state->fb)) {
		/* new cursor -- update & wait */
		virtio_gpu_cmd_transfer_to_host_2d
			(vgdev, handle, 0,
			 cpu_to_le32(plane->state->crtc_w),
			 cpu_to_le32(plane->state->crtc_h),
			 0, 0, &fence);
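		/*
		 * Attach the transfer fence to the buffer and wait for it,
		 * so the cursor update below is only sent once the image
		 * upload has completed.
		 */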
		ret = virtio_gpu_object_reserve(bo, false);
		if (!ret) {
			reservation_object_add_excl_fence(bo->tbo.resv,
							  &fence->f);
			dma_fence_put(&fence->f);
			fence = NULL;
			virtio_gpu_object_unreserve(bo);
			virtio_gpu_object_wait(bo, false);
		} else {
			/* reservation failed: drop our fence reference to avoid leaking it */
			dma_fence_put(&fence->f);
			fence = NULL;
		}
	}

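	/* a changed framebuffer needs a full cursor update including the hotspot */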
	if (plane->state->fb != old_state->fb) {
		DRM_DEBUG("update, handle %d, pos +%d+%d, hot %d,%d\n", handle,
			  plane->state->crtc_x,
			  plane->state->crtc_y,
			  plane->state->fb ? plane->state->fb->hot_x : 0,
			  plane->state->fb ? plane->state->fb->hot_y : 0);
		output->cursor.hdr.type =
			cpu_to_le32(VIRTIO_GPU_CMD_UPDATE_CURSOR);
		output->cursor.resource_id = cpu_to_le32(handle);
		if (plane->state->fb) {
			output->cursor.hot_x =
				cpu_to_le32(plane->state->fb->hot_x);
			output->cursor.hot_y =
				cpu_to_le32(plane->state->fb->hot_y);
		} else {
			output->cursor.hot_x = cpu_to_le32(0);
			output->cursor.hot_y = cpu_to_le32(0);
		}
	} else {
		DRM_DEBUG("move +%d+%d\n",
			  plane->state->crtc_x,
			  plane->state->crtc_y);
		output->cursor.hdr.type =
			cpu_to_le32(VIRTIO_GPU_CMD_MOVE_CURSOR);
	}
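	/* the cursor position is sent for both update and move requests */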
	output->cursor.pos.x = cpu_to_le32(plane->state->crtc_x);
	output->cursor.pos.y = cpu_to_le32(plane->state->crtc_y);
	virtio_gpu_cursor_ping(vgdev, output);
}

static const struct drm_plane_helper_funcs virtio_gpu_primary_helper_funcs = {
	.atomic_check		= virtio_gpu_plane_atomic_check,
	.atomic_update		= virtio_gpu_primary_plane_update,
};

static const struct drm_plane_helper_funcs virtio_gpu_cursor_helper_funcs = {
	.atomic_check		= virtio_gpu_plane_atomic_check,
	.atomic_update		= virtio_gpu_cursor_plane_update,
};

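/*
 * Allocate and register a plane for one output: cursor planes get the
 * cursor format list and helpers, all other plane types are handled as
 * primary planes.
 */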
struct drm_plane *virtio_gpu_plane_init(struct virtio_gpu_device *vgdev,
					enum drm_plane_type type,
					int index)
{
	struct drm_device *dev = vgdev->ddev;
	const struct drm_plane_helper_funcs *funcs;
	struct drm_plane *plane;
	const uint32_t *formats;
	int ret, nformats;

	plane = kzalloc(sizeof(*plane), GFP_KERNEL);
	if (!plane)
		return ERR_PTR(-ENOMEM);

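	/* pick the format list and helper vtable based on the plane type */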
	if (type == DRM_PLANE_TYPE_CURSOR) {
		formats = virtio_gpu_cursor_formats;
		nformats = ARRAY_SIZE(virtio_gpu_cursor_formats);
		funcs = &virtio_gpu_cursor_helper_funcs;
	} else {
		formats = virtio_gpu_formats;
		nformats = ARRAY_SIZE(virtio_gpu_formats);
		funcs = &virtio_gpu_primary_helper_funcs;
	}
	ret = drm_universal_plane_init(dev, plane, 1 << index,
				       &virtio_gpu_plane_funcs,
				       formats, nformats,
				       type, NULL);
	if (ret)
		goto err_plane_init;

	drm_plane_helper_add(plane, funcs);
	return plane;

err_plane_init:
	kfree(plane);
	return ERR_PTR(ret);
}