xref: /openbmc/linux/drivers/gpu/drm/gud/gud_pipe.c (revision 171ec346)
// SPDX-License-Identifier: MIT
/*
 * Copyright 2020 Noralf Trønnes
 */

#include <linux/dma-buf.h>
#include <linux/lz4.h>
#include <linux/usb.h>
#include <linux/workqueue.h>

#include <drm/drm_atomic.h>
#include <drm/drm_connector.h>
#include <drm/drm_damage_helper.h>
#include <drm/drm_drv.h>
#include <drm/drm_format_helper.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem_shmem_helper.h>
#include <drm/drm_print.h>
#include <drm/drm_rect.h>
#include <drm/drm_simple_kms_helper.h>
#include <drm/gud.h>

#include "gud_internal.h"

/*
 * FIXME: The driver is probably broken on Big Endian machines.
 * See discussion:
 * https://lore.kernel.org/dri-devel/CAKb7UvihLX0hgBOP3VBG7O+atwZcUVCPVuBdfmDMpg0NjXe-cQ@mail.gmail.com/
 */

static bool gud_is_big_endian(void)
{
#if defined(__BIG_ENDIAN)
	return true;
#else
	return false;
#endif
}

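/*
 * Convert an XRGB8888 rectangle to a packed 1, 2 or 4 bits-per-pixel
 * greyscale buffer (R1/R2/R4). The pixels are first converted to 8-bit grey
 * in a temporary buffer and then packed into bytes, first pixel in the most
 * significant bits. Returns the destination buffer length, or zero on
 * allocation failure.
 */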
static size_t gud_xrgb8888_to_r124(u8 *dst, const struct drm_format_info *format,
				   void *src, struct drm_framebuffer *fb,
				   struct drm_rect *rect)
{
	unsigned int block_width = drm_format_info_block_width(format, 0);
	unsigned int bits_per_pixel = 8 / block_width;
	unsigned int x, y, width, height;
	u8 pix, *pix8, *block = dst; /* Assign to silence compiler warning */
	size_t len;
	void *buf;

	WARN_ON_ONCE(format->char_per_block[0] != 1);

	/* Start on a byte boundary */
	rect->x1 = ALIGN_DOWN(rect->x1, block_width);
	width = drm_rect_width(rect);
	height = drm_rect_height(rect);
	len = drm_format_info_min_pitch(format, 0, width) * height;

	buf = kmalloc(width * height, GFP_KERNEL);
	if (!buf)
		return 0;

	drm_fb_xrgb8888_to_gray8(buf, src, fb, rect);
	pix8 = buf;

	for (y = 0; y < height; y++) {
		for (x = 0; x < width; x++) {
			unsigned int pixpos = x % block_width; /* within byte from the left */
			unsigned int pixshift = (block_width - pixpos - 1) * bits_per_pixel;

			if (!pixpos) {
				block = dst++;
				*block = 0;
			}

			pix = (*pix8++) >> (8 - bits_per_pixel);
			*block |= pix << pixshift;
		}
	}

	kfree(buf);

	return len;
}

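/*
 * Convert an XRGB8888 rectangle to a packed sub-byte colour format. Only
 * GUD_DRM_FORMAT_XRGB1111 is handled: each colour channel is reduced to its
 * most significant bit and the pixels are packed into bytes, first pixel in
 * the most significant bits. Returns the destination buffer length.
 */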
static size_t gud_xrgb8888_to_color(u8 *dst, const struct drm_format_info *format,
				    void *src, struct drm_framebuffer *fb,
				    struct drm_rect *rect)
{
	unsigned int block_width = drm_format_info_block_width(format, 0);
	unsigned int bits_per_pixel = 8 / block_width;
	u8 r, g, b, pix, *block = dst; /* Assign to silence compiler warning */
	unsigned int x, y, width;
	u32 *pix32;
	size_t len;

	/* Start on a byte boundary */
	rect->x1 = ALIGN_DOWN(rect->x1, block_width);
	width = drm_rect_width(rect);
	len = drm_format_info_min_pitch(format, 0, width) * drm_rect_height(rect);

	for (y = rect->y1; y < rect->y2; y++) {
		pix32 = src + (y * fb->pitches[0]);
		pix32 += rect->x1;

		for (x = 0; x < width; x++) {
			unsigned int pixpos = x % block_width; /* within byte from the left */
			unsigned int pixshift = (block_width - pixpos - 1) * bits_per_pixel;

			if (!pixpos) {
				block = dst++;
				*block = 0;
			}

			r = *pix32 >> 16;
			g = *pix32 >> 8;
			b = *pix32++;

			switch (format->format) {
			case GUD_DRM_FORMAT_XRGB1111:
				pix = ((r >> 7) << 2) | ((g >> 7) << 1) | (b >> 7);
				break;
			default:
				WARN_ON_ONCE(1);
				return len;
			}

			*block |= pix << pixshift;
		}
	}

	return len;
}

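/*
 * Prepare a damage rectangle for transfer: vmap the GEM object, convert the
 * pixels if the transfer format differs from the framebuffer format,
 * optionally LZ4 compress into the bulk buffer and fill out the set_buffer
 * request.
 */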
static int gud_prep_flush(struct gud_device *gdrm, struct drm_framebuffer *fb,
			  const struct drm_format_info *format, struct drm_rect *rect,
			  struct gud_set_buffer_req *req)
{
	struct dma_buf_attachment *import_attach = fb->obj[0]->import_attach;
	u8 compression = gdrm->compression;
	struct dma_buf_map map;
	void *vaddr, *buf;
	size_t pitch, len;
	int ret = 0;

	pitch = drm_format_info_min_pitch(format, 0, drm_rect_width(rect));
	len = pitch * drm_rect_height(rect);
	if (len > gdrm->bulk_len)
		return -E2BIG;

	ret = drm_gem_shmem_vmap(fb->obj[0], &map);
	if (ret)
		return ret;

	vaddr = map.vaddr + fb->offsets[0];

	if (import_attach) {
		ret = dma_buf_begin_cpu_access(import_attach->dmabuf, DMA_FROM_DEVICE);
		if (ret)
			goto vunmap;
	}
retry:
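	/* Re-entered with compression disabled if LZ4 compression fails below */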
	if (compression)
		buf = gdrm->compress_buf;
	else
		buf = gdrm->bulk_buf;

	/*
	 * Imported buffers are assumed to be write-combined and thus uncached
	 * with slow reads (at least on ARM).
	 */
	if (format != fb->format) {
		if (format->format == GUD_DRM_FORMAT_R1) {
			len = gud_xrgb8888_to_r124(buf, format, vaddr, fb, rect);
			if (!len) {
				ret = -ENOMEM;
				goto end_cpu_access;
			}
		} else if (format->format == DRM_FORMAT_RGB565) {
			drm_fb_xrgb8888_to_rgb565(buf, vaddr, fb, rect, gud_is_big_endian());
		} else {
			len = gud_xrgb8888_to_color(buf, format, vaddr, fb, rect);
		}
	} else if (gud_is_big_endian() && format->cpp[0] > 1) {
		drm_fb_swab(buf, vaddr, fb, rect, !import_attach);
	} else if (compression && !import_attach && pitch == fb->pitches[0]) {
		/* can compress directly from the framebuffer */
		buf = vaddr + rect->y1 * pitch;
	} else {
		drm_fb_memcpy(buf, vaddr, fb, rect);
	}

	memset(req, 0, sizeof(*req));
	req->x = cpu_to_le32(rect->x1);
	req->y = cpu_to_le32(rect->y1);
	req->width = cpu_to_le32(drm_rect_width(rect));
	req->height = cpu_to_le32(drm_rect_height(rect));
	req->length = cpu_to_le32(len);

	if (compression & GUD_COMPRESSION_LZ4) {
		int complen;

		complen = LZ4_compress_default(buf, gdrm->bulk_buf, len, len, gdrm->lz4_comp_mem);
		if (complen <= 0) {
			compression = 0;
			goto retry;
		}

		req->compression = GUD_COMPRESSION_LZ4;
		req->compressed_length = cpu_to_le32(complen);
	}

end_cpu_access:
	if (import_attach)
		dma_buf_end_cpu_access(import_attach->dmabuf, DMA_FROM_DEVICE);
vunmap:
	drm_gem_shmem_vunmap(fb->obj[0], &map);

	return ret;
}

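/*
 * Transfer one damage rectangle to the device: describe it with a
 * GUD_REQ_SET_BUFFER control request (skipped when the device only does
 * full updates and the previous flush succeeded) and send the pixel data on
 * the bulk endpoint.
 */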
static int gud_flush_rect(struct gud_device *gdrm, struct drm_framebuffer *fb,
			  const struct drm_format_info *format, struct drm_rect *rect)
{
	struct usb_device *usb = gud_to_usb_device(gdrm);
	struct gud_set_buffer_req req;
	int ret, actual_length;
	size_t len, trlen;

	drm_dbg(&gdrm->drm, "Flushing [FB:%d] " DRM_RECT_FMT "\n", fb->base.id, DRM_RECT_ARG(rect));

	ret = gud_prep_flush(gdrm, fb, format, rect, &req);
	if (ret)
		return ret;

	len = le32_to_cpu(req.length);

	if (req.compression)
		trlen = le32_to_cpu(req.compressed_length);
	else
		trlen = len;

	gdrm->stats_length += len;
	/* Did it wrap around? */
	if (gdrm->stats_length <= len && gdrm->stats_actual_length) {
		gdrm->stats_length = len;
		gdrm->stats_actual_length = 0;
	}
	gdrm->stats_actual_length += trlen;

	if (!(gdrm->flags & GUD_DISPLAY_FLAG_FULL_UPDATE) || gdrm->prev_flush_failed) {
		ret = gud_usb_set(gdrm, GUD_REQ_SET_BUFFER, 0, &req, sizeof(req));
		if (ret)
			return ret;
	}

	ret = usb_bulk_msg(usb, gdrm->bulk_pipe, gdrm->bulk_buf, trlen,
			   &actual_length, msecs_to_jiffies(3000));
	if (!ret && trlen != actual_length)
		ret = -EIO;
	if (ret)
		gdrm->stats_num_errors++;

	return ret;
}

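/* Reset the accumulated damage to an empty (inverted) rectangle */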
void gud_clear_damage(struct gud_device *gdrm)
{
	gdrm->damage.x1 = INT_MAX;
	gdrm->damage.y1 = INT_MAX;
	gdrm->damage.x2 = 0;
	gdrm->damage.y2 = 0;
}

static void gud_add_damage(struct gud_device *gdrm, struct drm_rect *damage)
{
	gdrm->damage.x1 = min(gdrm->damage.x1, damage->x1);
	gdrm->damage.y1 = min(gdrm->damage.y1, damage->y1);
	gdrm->damage.x2 = max(gdrm->damage.x2, damage->x2);
	gdrm->damage.y2 = max(gdrm->damage.y2, damage->y2);
}

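/* Re-add the damage and schedule the worker so a failed flush is retried once */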
static void gud_retry_failed_flush(struct gud_device *gdrm, struct drm_framebuffer *fb,
				   struct drm_rect *damage)
{
	/*
	 * pipe_update waits for the worker when the display mode is going to change.
	 * This ensures that the width and height are still the same, making it safe to
	 * add back the damage.
	 */

	mutex_lock(&gdrm->damage_lock);
	if (!gdrm->fb) {
		drm_framebuffer_get(fb);
		gdrm->fb = fb;
	}
	gud_add_damage(gdrm, damage);
	mutex_unlock(&gdrm->damage_lock);

	/* Retry only once to avoid a possible storm in case of continuous errors. */
	if (!gdrm->prev_flush_failed)
		queue_work(system_long_wq, &gdrm->work);
	gdrm->prev_flush_failed = true;
}

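/*
 * Worker that flushes the accumulated damage to the device. Rectangles that
 * don't fit in the bulk buffer are split into horizontal slices.
 */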
void gud_flush_work(struct work_struct *work)
{
	struct gud_device *gdrm = container_of(work, struct gud_device, work);
	const struct drm_format_info *format;
	struct drm_framebuffer *fb;
	struct drm_rect damage;
	unsigned int i, lines;
	int idx, ret = 0;
	size_t pitch;

	if (!drm_dev_enter(&gdrm->drm, &idx))
		return;

	mutex_lock(&gdrm->damage_lock);
	fb = gdrm->fb;
	gdrm->fb = NULL;
	damage = gdrm->damage;
	gud_clear_damage(gdrm);
	mutex_unlock(&gdrm->damage_lock);

	if (!fb)
		goto out;

	format = fb->format;
	if (format->format == DRM_FORMAT_XRGB8888 && gdrm->xrgb8888_emulation_format)
		format = gdrm->xrgb8888_emulation_format;

	/* Split update if it's too big */
	pitch = drm_format_info_min_pitch(format, 0, drm_rect_width(&damage));
	lines = drm_rect_height(&damage);

	if (gdrm->bulk_len < lines * pitch)
		lines = gdrm->bulk_len / pitch;

	for (i = 0; i < DIV_ROUND_UP(drm_rect_height(&damage), lines); i++) {
		struct drm_rect rect = damage;

		rect.y1 += i * lines;
		rect.y2 = min_t(u32, rect.y1 + lines, damage.y2);

		ret = gud_flush_rect(gdrm, fb, format, &rect);
		if (ret) {
			if (ret != -ENODEV && ret != -ECONNRESET &&
			    ret != -ESHUTDOWN && ret != -EPROTO) {
				bool prev_flush_failed = gdrm->prev_flush_failed;

				gud_retry_failed_flush(gdrm, fb, &damage);
				if (!prev_flush_failed)
					dev_err_ratelimited(fb->dev->dev,
							    "Failed to flush framebuffer: error=%d\n", ret);
			}
			break;
		}

		gdrm->prev_flush_failed = false;
	}

	drm_framebuffer_put(fb);
out:
	drm_dev_exit(idx);
}

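/* Merge the damage with any pending damage and schedule the flush worker */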
static void gud_fb_queue_damage(struct gud_device *gdrm, struct drm_framebuffer *fb,
				struct drm_rect *damage)
{
	struct drm_framebuffer *old_fb = NULL;

	mutex_lock(&gdrm->damage_lock);

	if (fb != gdrm->fb) {
		old_fb = gdrm->fb;
		drm_framebuffer_get(fb);
		gdrm->fb = fb;
	}

	gud_add_damage(gdrm, damage);

	mutex_unlock(&gdrm->damage_lock);

	queue_work(system_long_wq, &gdrm->work);

	if (old_fb)
		drm_framebuffer_put(old_fb);
}

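/*
 * Validate the new display state by sending a GUD_REQ_SET_STATE_CHECK
 * request with the mode, pixel format, connector index and property values
 * to the device.
 */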
int gud_pipe_check(struct drm_simple_display_pipe *pipe,
		   struct drm_plane_state *new_plane_state,
		   struct drm_crtc_state *new_crtc_state)
{
	struct gud_device *gdrm = to_gud_device(pipe->crtc.dev);
	struct drm_plane_state *old_plane_state = pipe->plane.state;
	const struct drm_display_mode *mode = &new_crtc_state->mode;
	struct drm_atomic_state *state = new_plane_state->state;
	struct drm_framebuffer *old_fb = old_plane_state->fb;
	struct drm_connector_state *connector_state = NULL;
	struct drm_framebuffer *fb = new_plane_state->fb;
	const struct drm_format_info *format = fb->format;
	struct drm_connector *connector;
	unsigned int i, num_properties;
	struct gud_state_req *req;
	int idx, ret;
	size_t len;

	if (WARN_ON_ONCE(!fb))
		return -EINVAL;

	if (old_plane_state->rotation != new_plane_state->rotation)
		new_crtc_state->mode_changed = true;

	if (old_fb && old_fb->format != format)
		new_crtc_state->mode_changed = true;

	if (!new_crtc_state->mode_changed && !new_crtc_state->connectors_changed)
		return 0;

	/* Only one connector is supported */
	if (hweight32(new_crtc_state->connector_mask) != 1)
		return -EINVAL;

	if (format->format == DRM_FORMAT_XRGB8888 && gdrm->xrgb8888_emulation_format)
		format = gdrm->xrgb8888_emulation_format;

	for_each_new_connector_in_state(state, connector, connector_state, i) {
		if (connector_state->crtc)
			break;
	}

	/*
	 * DRM_IOCTL_MODE_OBJ_SETPROPERTY on the rotation property will not have
	 * the connector included in the state.
	 */
	if (!connector_state) {
		struct drm_connector_list_iter conn_iter;

		drm_connector_list_iter_begin(pipe->crtc.dev, &conn_iter);
		drm_for_each_connector_iter(connector, &conn_iter) {
			if (connector->state->crtc) {
				connector_state = connector->state;
				break;
			}
		}
		drm_connector_list_iter_end(&conn_iter);
	}

	if (WARN_ON_ONCE(!connector_state))
		return -ENOENT;

	len = struct_size(req, properties,
			  GUD_PROPERTIES_MAX_NUM + GUD_CONNECTOR_PROPERTIES_MAX_NUM);
	req = kzalloc(len, GFP_KERNEL);
	if (!req)
		return -ENOMEM;

	gud_from_display_mode(&req->mode, mode);

	req->format = gud_from_fourcc(format->format);
	if (WARN_ON_ONCE(!req->format)) {
		ret = -EINVAL;
		goto out;
	}

	req->connector = drm_connector_index(connector_state->connector);

	ret = gud_connector_fill_properties(connector_state, req->properties);
	if (ret < 0)
		goto out;

	num_properties = ret;
	for (i = 0; i < gdrm->num_properties; i++) {
		u16 prop = gdrm->properties[i];
		u64 val;

		switch (prop) {
		case GUD_PROPERTY_ROTATION:
			/* DRM UAPI matches the protocol so use value directly */
			val = new_plane_state->rotation;
			break;
		default:
			WARN_ON_ONCE(1);
			ret = -EINVAL;
			goto out;
		}

		req->properties[num_properties + i].prop = cpu_to_le16(prop);
		req->properties[num_properties + i].val = cpu_to_le64(val);
		num_properties++;
	}

	if (drm_dev_enter(fb->dev, &idx)) {
		len = struct_size(req, properties, num_properties);
		ret = gud_usb_set(gdrm, GUD_REQ_SET_STATE_CHECK, 0, req, len);
		drm_dev_exit(idx);
	} else {
		ret = -ENODEV;
	}
out:
	kfree(req);

	return ret;
}

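/*
 * Apply the new display state: enable/disable the controller and display,
 * commit a previously checked state on mode or connector changes and queue
 * the plane damage for the flush worker.
 */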
void gud_pipe_update(struct drm_simple_display_pipe *pipe,
		     struct drm_plane_state *old_state)
{
	struct drm_device *drm = pipe->crtc.dev;
	struct gud_device *gdrm = to_gud_device(drm);
	struct drm_plane_state *state = pipe->plane.state;
	struct drm_framebuffer *fb = state->fb;
	struct drm_crtc *crtc = &pipe->crtc;
	struct drm_rect damage;
	int idx;

	if (crtc->state->mode_changed || !crtc->state->enable) {
		cancel_work_sync(&gdrm->work);
		mutex_lock(&gdrm->damage_lock);
		if (gdrm->fb) {
			drm_framebuffer_put(gdrm->fb);
			gdrm->fb = NULL;
		}
		gud_clear_damage(gdrm);
		mutex_unlock(&gdrm->damage_lock);
	}

	if (!drm_dev_enter(drm, &idx))
		return;

	if (!old_state->fb)
		gud_usb_set_u8(gdrm, GUD_REQ_SET_CONTROLLER_ENABLE, 1);

	if (fb && (crtc->state->mode_changed || crtc->state->connectors_changed))
		gud_usb_set(gdrm, GUD_REQ_SET_STATE_COMMIT, 0, NULL, 0);

	if (crtc->state->active_changed)
		gud_usb_set_u8(gdrm, GUD_REQ_SET_DISPLAY_ENABLE, crtc->state->active);

	if (drm_atomic_helper_damage_merged(old_state, state, &damage)) {
		if (gdrm->flags & GUD_DISPLAY_FLAG_FULL_UPDATE)
			drm_rect_init(&damage, 0, 0, fb->width, fb->height);
		gud_fb_queue_damage(gdrm, fb, &damage);
	}

	if (!crtc->state->enable)
		gud_usb_set_u8(gdrm, GUD_REQ_SET_CONTROLLER_ENABLE, 0);

	drm_dev_exit(idx);
}