/*
 * Copyright (C) 2015 Red Hat, Inc.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#include "virtgpu_drv.h"
#include <drm/drm_plane_helper.h>
#include <drm/drm_atomic_helper.h>

static const uint32_t virtio_gpu_formats[] = {
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_BGRX8888,
	DRM_FORMAT_BGRA8888,
	DRM_FORMAT_RGBX8888,
	DRM_FORMAT_RGBA8888,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_ABGR8888,
};

static const uint32_t virtio_gpu_cursor_formats[] = {
#ifdef __BIG_ENDIAN
	DRM_FORMAT_BGRA8888,
#else
	DRM_FORMAT_ARGB8888,
#endif
};

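/*
 * Translate a DRM fourcc into the matching VIRTIO_GPU_FORMAT_* value.
 * The mapping depends on the guest's byte order, hence the separate
 * big-endian and little-endian tables below.
 */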
uint32_t virtio_gpu_translate_format(uint32_t drm_fourcc)
{
	uint32_t format;

	switch (drm_fourcc) {
#ifdef __BIG_ENDIAN
	case DRM_FORMAT_XRGB8888:
		format = VIRTIO_GPU_FORMAT_X8R8G8B8_UNORM;
		break;
	case DRM_FORMAT_ARGB8888:
		format = VIRTIO_GPU_FORMAT_A8R8G8B8_UNORM;
		break;
	case DRM_FORMAT_BGRX8888:
		format = VIRTIO_GPU_FORMAT_B8G8R8X8_UNORM;
		break;
	case DRM_FORMAT_BGRA8888:
		format = VIRTIO_GPU_FORMAT_B8G8R8A8_UNORM;
		break;
	case DRM_FORMAT_RGBX8888:
		format = VIRTIO_GPU_FORMAT_R8G8B8X8_UNORM;
		break;
	case DRM_FORMAT_RGBA8888:
		format = VIRTIO_GPU_FORMAT_R8G8B8A8_UNORM;
		break;
	case DRM_FORMAT_XBGR8888:
		format = VIRTIO_GPU_FORMAT_X8B8G8R8_UNORM;
		break;
	case DRM_FORMAT_ABGR8888:
		format = VIRTIO_GPU_FORMAT_A8B8G8R8_UNORM;
		break;
#else
	case DRM_FORMAT_XRGB8888:
		format = VIRTIO_GPU_FORMAT_B8G8R8X8_UNORM;
		break;
	case DRM_FORMAT_ARGB8888:
		format = VIRTIO_GPU_FORMAT_B8G8R8A8_UNORM;
		break;
	case DRM_FORMAT_BGRX8888:
		format = VIRTIO_GPU_FORMAT_X8R8G8B8_UNORM;
		break;
	case DRM_FORMAT_BGRA8888:
		format = VIRTIO_GPU_FORMAT_A8R8G8B8_UNORM;
		break;
	case DRM_FORMAT_RGBX8888:
		format = VIRTIO_GPU_FORMAT_X8B8G8R8_UNORM;
		break;
	case DRM_FORMAT_RGBA8888:
		format = VIRTIO_GPU_FORMAT_A8B8G8R8_UNORM;
		break;
	case DRM_FORMAT_XBGR8888:
		format = VIRTIO_GPU_FORMAT_R8G8B8X8_UNORM;
		break;
	case DRM_FORMAT_ABGR8888:
		format = VIRTIO_GPU_FORMAT_R8G8B8A8_UNORM;
		break;
#endif
	default:
		/*
		 * This should not happen; we handle every format listed
		 * in virtio_gpu_formats[].
		 */
		format = 0;
		break;
	}
	WARN_ON(format == 0);
	return format;
}

static void virtio_gpu_plane_destroy(struct drm_plane *plane)
{
	drm_plane_cleanup(plane);
	kfree(plane);
}

static const struct drm_plane_funcs virtio_gpu_plane_funcs = {
	.update_plane		= drm_atomic_helper_update_plane,
	.disable_plane		= drm_atomic_helper_disable_plane,
	.destroy		= virtio_gpu_plane_destroy,
	.reset			= drm_atomic_helper_plane_reset,
	.atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
	.atomic_destroy_state	= drm_atomic_helper_plane_destroy_state,
};

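/* No plane-specific constraints to validate; accept every atomic state. */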
static int virtio_gpu_plane_atomic_check(struct drm_plane *plane,
					 struct drm_plane_state *state)
{
	return 0;
}

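/*
 * Flush the contents of a dumb buffer to the host, then point the
 * scanout at the resource and flush the displayed region.  A NULL
 * framebuffer disables the scanout (resource handle 0).
 */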
static void virtio_gpu_primary_plane_update(struct drm_plane *plane,
					    struct drm_plane_state *old_state)
{
	struct drm_device *dev = plane->dev;
	struct virtio_gpu_device *vgdev = dev->dev_private;
	struct virtio_gpu_output *output = NULL;
	struct virtio_gpu_framebuffer *vgfb;
	struct virtio_gpu_object *bo;
	uint32_t handle;

	if (plane->state->crtc)
		output = drm_crtc_to_virtio_gpu_output(plane->state->crtc);
	if (old_state->crtc)
		output = drm_crtc_to_virtio_gpu_output(old_state->crtc);
	if (WARN_ON(!output))
		return;

	if (plane->state->fb) {
		vgfb = to_virtio_gpu_framebuffer(plane->state->fb);
		bo = gem_to_virtio_gpu_obj(vgfb->obj);
		handle = bo->hw_res_handle;
		if (bo->dumb) {
			virtio_gpu_cmd_transfer_to_host_2d
				(vgdev, handle, 0,
				 cpu_to_le32(plane->state->src_w >> 16),
				 cpu_to_le32(plane->state->src_h >> 16),
				 cpu_to_le32(plane->state->src_x >> 16),
				 cpu_to_le32(plane->state->src_y >> 16), NULL);
		}
	} else {
		handle = 0;
	}

	DRM_DEBUG("handle 0x%x, crtc %dx%d+%d+%d, src %dx%d+%d+%d\n", handle,
		  plane->state->crtc_w, plane->state->crtc_h,
		  plane->state->crtc_x, plane->state->crtc_y,
		  plane->state->src_w >> 16,
		  plane->state->src_h >> 16,
		  plane->state->src_x >> 16,
		  plane->state->src_y >> 16);
	virtio_gpu_cmd_set_scanout(vgdev, output->index, handle,
				   plane->state->src_w >> 16,
				   plane->state->src_h >> 16,
				   plane->state->src_x >> 16,
				   plane->state->src_y >> 16);
	virtio_gpu_cmd_resource_flush(vgdev, handle,
				      plane->state->src_x >> 16,
				      plane->state->src_y >> 16,
				      plane->state->src_w >> 16,
				      plane->state->src_h >> 16);
}

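/*
 * Upload a new cursor image to the host and wait for the transfer to
 * complete before updating the cursor, so the host does not display a
 * partially transferred image.  If only the position changed, just send
 * a move command.
 */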
static void virtio_gpu_cursor_plane_update(struct drm_plane *plane,
					   struct drm_plane_state *old_state)
{
	struct drm_device *dev = plane->dev;
	struct virtio_gpu_device *vgdev = dev->dev_private;
	struct virtio_gpu_output *output = NULL;
	struct virtio_gpu_framebuffer *vgfb;
	struct virtio_gpu_fence *fence = NULL;
	struct virtio_gpu_object *bo = NULL;
	uint32_t handle;
	int ret = 0;

	if (plane->state->crtc)
		output = drm_crtc_to_virtio_gpu_output(plane->state->crtc);
	if (old_state->crtc)
		output = drm_crtc_to_virtio_gpu_output(old_state->crtc);
	if (WARN_ON(!output))
		return;

	if (plane->state->fb) {
		vgfb = to_virtio_gpu_framebuffer(plane->state->fb);
		bo = gem_to_virtio_gpu_obj(vgfb->obj);
		handle = bo->hw_res_handle;
	} else {
		handle = 0;
	}

	if (bo && bo->dumb && (plane->state->fb != old_state->fb)) {
		/* new cursor -- update & wait */
		virtio_gpu_cmd_transfer_to_host_2d
			(vgdev, handle, 0,
			 cpu_to_le32(plane->state->crtc_w),
			 cpu_to_le32(plane->state->crtc_h),
			 0, 0, &fence);
		ret = virtio_gpu_object_reserve(bo, false);
		if (!ret) {
			reservation_object_add_excl_fence(bo->tbo.resv,
							  &fence->f);
			dma_fence_put(&fence->f);
			fence = NULL;
			virtio_gpu_object_unreserve(bo);
			virtio_gpu_object_wait(bo, false);
		}
	}

	if (plane->state->fb != old_state->fb) {
		DRM_DEBUG("update, handle %d, pos +%d+%d, hot %d,%d\n", handle,
			  plane->state->crtc_x,
			  plane->state->crtc_y,
			  plane->state->fb ? plane->state->fb->hot_x : 0,
			  plane->state->fb ? plane->state->fb->hot_y : 0);
		output->cursor.hdr.type =
			cpu_to_le32(VIRTIO_GPU_CMD_UPDATE_CURSOR);
		output->cursor.resource_id = cpu_to_le32(handle);
		if (plane->state->fb) {
			output->cursor.hot_x =
				cpu_to_le32(plane->state->fb->hot_x);
			output->cursor.hot_y =
				cpu_to_le32(plane->state->fb->hot_y);
		} else {
			output->cursor.hot_x = cpu_to_le32(0);
			output->cursor.hot_y = cpu_to_le32(0);
		}
	} else {
		DRM_DEBUG("move +%d+%d\n",
			  plane->state->crtc_x,
			  plane->state->crtc_y);
		output->cursor.hdr.type =
			cpu_to_le32(VIRTIO_GPU_CMD_MOVE_CURSOR);
	}
	output->cursor.pos.x = cpu_to_le32(plane->state->crtc_x);
	output->cursor.pos.y = cpu_to_le32(plane->state->crtc_y);
	virtio_gpu_cursor_ping(vgdev, output);
}

static const struct drm_plane_helper_funcs virtio_gpu_primary_helper_funcs = {
	.atomic_check		= virtio_gpu_plane_atomic_check,
	.atomic_update		= virtio_gpu_primary_plane_update,
};

static const struct drm_plane_helper_funcs virtio_gpu_cursor_helper_funcs = {
	.atomic_check		= virtio_gpu_plane_atomic_check,
	.atomic_update		= virtio_gpu_cursor_plane_update,
};

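/*
 * Allocate and register a primary or cursor plane for the given output,
 * picking the format list and helper functions based on the plane type.
 */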
struct drm_plane *virtio_gpu_plane_init(struct virtio_gpu_device *vgdev,
					enum drm_plane_type type,
					int index)
{
	struct drm_device *dev = vgdev->ddev;
	const struct drm_plane_helper_funcs *funcs;
	struct drm_plane *plane;
	const uint32_t *formats;
	int ret, nformats;

	plane = kzalloc(sizeof(*plane), GFP_KERNEL);
	if (!plane)
		return ERR_PTR(-ENOMEM);

	if (type == DRM_PLANE_TYPE_CURSOR) {
		formats = virtio_gpu_cursor_formats;
		nformats = ARRAY_SIZE(virtio_gpu_cursor_formats);
		funcs = &virtio_gpu_cursor_helper_funcs;
	} else {
		formats = virtio_gpu_formats;
		nformats = ARRAY_SIZE(virtio_gpu_formats);
		funcs = &virtio_gpu_primary_helper_funcs;
	}
	ret = drm_universal_plane_init(dev, plane, 1 << index,
				       &virtio_gpu_plane_funcs,
				       formats, nformats,
				       NULL, type, NULL);
	if (ret)
		goto err_plane_init;

	drm_plane_helper_add(plane, funcs);
	return plane;

err_plane_init:
	kfree(plane);
	return ERR_PTR(ret);
}