/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright © 2016 Intel Corporation
 */

#include "i915_drv.h"
#include "i915_selftest.h"

#include "mock_dmabuf.h"
#include "selftests/mock_gem_device.h"

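/*
 * Smoke test for dma-buf export: create a shmem object, wrap it in a
 * dma-buf and check that the export succeeds before dropping both
 * references.
 */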
static int igt_dmabuf_export(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj;
	struct dma_buf *dmabuf;

	obj = i915_gem_object_create_shmem(i915, PAGE_SIZE);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	dmabuf = i915_gem_prime_export(&obj->base, 0);
	i915_gem_object_put(obj);
	if (IS_ERR(dmabuf)) {
		pr_err("i915_gem_prime_export failed with err=%d\n",
		       (int)PTR_ERR(dmabuf));
		return PTR_ERR(dmabuf);
	}

	dma_buf_put(dmabuf);
	return 0;
}

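/*
 * Export an object and immediately re-import it into the same device.
 * The import must hand back the original GEM object rather than create
 * a copy, and we must still be able to acquire its backing pages.
 */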
static int igt_dmabuf_import_self(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj, *import_obj;
	struct drm_gem_object *import;
	struct dma_buf *dmabuf;
	int err;

	obj = i915_gem_object_create_shmem(i915, PAGE_SIZE);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	dmabuf = i915_gem_prime_export(&obj->base, 0);
	if (IS_ERR(dmabuf)) {
		pr_err("i915_gem_prime_export failed with err=%d\n",
		       (int)PTR_ERR(dmabuf));
		err = PTR_ERR(dmabuf);
		goto out;
	}

	import = i915_gem_prime_import(&i915->drm, dmabuf);
	if (IS_ERR(import)) {
		pr_err("i915_gem_prime_import failed with err=%d\n",
		       (int)PTR_ERR(import));
		err = PTR_ERR(import);
		goto out_dmabuf;
	}
	import_obj = to_intel_bo(import);

	if (import != &obj->base) {
		pr_err("i915_gem_prime_import created a new object!\n");
		err = -EINVAL;
		goto out_import;
	}

	i915_gem_object_lock(import_obj, NULL);
	err = __i915_gem_object_get_pages(import_obj);
	i915_gem_object_unlock(import_obj);
	if (err) {
		pr_err("Same object dma-buf get_pages failed!\n");
		goto out_import;
	}

	err = 0;
out_import:
	i915_gem_object_put(import_obj);
out_dmabuf:
	dma_buf_put(dmabuf);
out:
	i915_gem_object_put(obj);
	return err;
}

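/*
 * With force_different_devices set, try to re-import an LMEM-only
 * object. Since such an object cannot be migrated to system memory for
 * the importer, the import is expected to fail with -EOPNOTSUPP.
 */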
static int igt_dmabuf_import_same_driver_lmem(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct intel_memory_region *lmem = i915->mm.regions[INTEL_REGION_LMEM];
	struct drm_i915_gem_object *obj;
	struct drm_gem_object *import;
	struct dma_buf *dmabuf;
	int err;

	if (!lmem)
		return 0;

	force_different_devices = true;

	obj = __i915_gem_object_create_user(i915, PAGE_SIZE, &lmem, 1);
	if (IS_ERR(obj)) {
		pr_err("__i915_gem_object_create_user failed with err=%ld\n",
		       PTR_ERR(obj));
		err = PTR_ERR(obj);
		goto out_ret;
	}

	dmabuf = i915_gem_prime_export(&obj->base, 0);
	if (IS_ERR(dmabuf)) {
		pr_err("i915_gem_prime_export failed with err=%ld\n",
		       PTR_ERR(dmabuf));
		err = PTR_ERR(dmabuf);
		goto out;
	}

	/*
	 * We expect an import of an LMEM-only object to fail with
	 * -EOPNOTSUPP because it can't be migrated to SMEM.
	 */
	import = i915_gem_prime_import(&i915->drm, dmabuf);
	if (!IS_ERR(import)) {
		drm_gem_object_put(import);
		pr_err("i915_gem_prime_import succeeded when it shouldn't have\n");
		err = -EINVAL;
	} else if (PTR_ERR(import) != -EOPNOTSUPP) {
		pr_err("i915_gem_prime_import failed with the wrong err=%ld\n",
		       PTR_ERR(import));
		err = PTR_ERR(import);
	} else {
		err = 0;
	}

	dma_buf_put(dmabuf);
out:
	i915_gem_object_put(obj);
out_ret:
	force_different_devices = false;
	return err;
}

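/*
 * Pretend the exporter and importer are different devices
 * (force_different_devices) so that the import creates a fresh GEM
 * object backed by the dma-buf. Check that the new object gets its
 * pages, that the exported object ends up in system memory, and that a
 * manual dma_buf_attach()/dma_buf_map_attachment() cycle plus a wait on
 * the exclusive fence completes.
 */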
static int igt_dmabuf_import_same_driver(struct drm_i915_private *i915,
					 struct intel_memory_region **regions,
					 unsigned int num_regions)
{
	struct drm_i915_gem_object *obj, *import_obj;
	struct drm_gem_object *import;
	struct dma_buf *dmabuf;
	struct dma_buf_attachment *import_attach;
	struct sg_table *st;
	long timeout;
	int err;

	force_different_devices = true;

	obj = __i915_gem_object_create_user(i915, PAGE_SIZE,
					    regions, num_regions);
	if (IS_ERR(obj)) {
		pr_err("__i915_gem_object_create_user failed with err=%ld\n",
		       PTR_ERR(obj));
		err = PTR_ERR(obj);
		goto out_ret;
	}

	dmabuf = i915_gem_prime_export(&obj->base, 0);
	if (IS_ERR(dmabuf)) {
		pr_err("i915_gem_prime_export failed with err=%ld\n",
		       PTR_ERR(dmabuf));
		err = PTR_ERR(dmabuf);
		goto out;
	}

	import = i915_gem_prime_import(&i915->drm, dmabuf);
	if (IS_ERR(import)) {
		pr_err("i915_gem_prime_import failed with err=%ld\n",
		       PTR_ERR(import));
		err = PTR_ERR(import);
		goto out_dmabuf;
	}
	import_obj = to_intel_bo(import);

	if (import == &obj->base) {
		pr_err("i915_gem_prime_import reused gem object!\n");
		err = -EINVAL;
		goto out_import;
	}

	i915_gem_object_lock(import_obj, NULL);
	err = __i915_gem_object_get_pages(import_obj);
	if (err) {
		pr_err("Different objects dma-buf get_pages failed!\n");
		i915_gem_object_unlock(import_obj);
		goto out_import;
	}

	/*
	 * If the exported object is not in system memory, something
	 * weird is going on. TODO: When p2p is supported, this is no
	 * longer considered weird.
	 */
	if (obj->mm.region != i915->mm.regions[INTEL_REGION_SMEM]) {
		pr_err("Exported dma-buf is not in system memory\n");
		err = -EINVAL;
	}

	i915_gem_object_unlock(import_obj);

	/* Now try to fake an importer */
	import_attach = dma_buf_attach(dmabuf, obj->base.dev->dev);
	if (IS_ERR(import_attach)) {
		err = PTR_ERR(import_attach);
		goto out_import;
	}

	st = dma_buf_map_attachment(import_attach, DMA_BIDIRECTIONAL);
	if (IS_ERR(st)) {
		err = PTR_ERR(st);
		goto out_detach;
	}

	timeout = dma_resv_wait_timeout(dmabuf->resv, false, true, 5 * HZ);
	if (!timeout) {
		pr_err("dmabuf wait for exclusive fence timed out.\n");
		timeout = -ETIME;
	}
	err = timeout > 0 ? 0 : timeout;
	dma_buf_unmap_attachment(import_attach, st, DMA_BIDIRECTIONAL);
out_detach:
	dma_buf_detach(dmabuf, import_attach);
out_import:
	i915_gem_object_put(import_obj);
out_dmabuf:
	dma_buf_put(dmabuf);
out:
	i915_gem_object_put(obj);
out_ret:
	force_different_devices = false;
	return err;
}

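/* Same-driver import test restricted to a system-memory-only object. */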
static int igt_dmabuf_import_same_driver_smem(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct intel_memory_region *smem = i915->mm.regions[INTEL_REGION_SMEM];

	return igt_dmabuf_import_same_driver(i915, &smem, 1);
}

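/* Same-driver import test for an object that may live in LMEM or SMEM. */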
static int igt_dmabuf_import_same_driver_lmem_smem(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct intel_memory_region *regions[2];

	if (!i915->mm.regions[INTEL_REGION_LMEM])
		return 0;

	regions[0] = i915->mm.regions[INTEL_REGION_LMEM];
	regions[1] = i915->mm.regions[INTEL_REGION_SMEM];
	return igt_dmabuf_import_same_driver(i915, regions, 2);
}

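/*
 * Import a mock (non-i915) dma-buf and verify that the resulting GEM
 * object belongs to this device and has the expected size. The
 * cross-mapping pattern checks remain compiled out until imported
 * dma-bufs can be mapped through i915_gem_object_pin_map().
 */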
static int igt_dmabuf_import(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj;
	struct dma_buf *dmabuf;
	void *obj_map, *dma_map;
	struct dma_buf_map map;
	u32 pattern[] = { 0, 0xaa, 0xcc, 0x55, 0xff };
	int err, i;

	dmabuf = mock_dmabuf(1);
	if (IS_ERR(dmabuf))
		return PTR_ERR(dmabuf);

	obj = to_intel_bo(i915_gem_prime_import(&i915->drm, dmabuf));
	if (IS_ERR(obj)) {
		pr_err("i915_gem_prime_import failed with err=%d\n",
		       (int)PTR_ERR(obj));
		err = PTR_ERR(obj);
		goto out_dmabuf;
	}

	if (obj->base.dev != &i915->drm) {
		pr_err("i915_gem_prime_import created a non-i915 object!\n");
		err = -EINVAL;
		goto out_obj;
	}

	if (obj->base.size != PAGE_SIZE) {
		pr_err("i915_gem_prime_import returned the wrong size: found %lld, expected %ld\n",
		       (long long)obj->base.size, PAGE_SIZE);
		err = -EINVAL;
		goto out_obj;
	}

	err = dma_buf_vmap(dmabuf, &map);
	dma_map = err ? NULL : map.vaddr;
	if (!dma_map) {
		pr_err("dma_buf_vmap failed\n");
		err = -ENOMEM;
		goto out_obj;
	}

	if (0) { /* Cannot yet map dmabuf */
		obj_map = i915_gem_object_pin_map(obj, I915_MAP_WB);
		if (IS_ERR(obj_map)) {
			err = PTR_ERR(obj_map);
			pr_err("i915_gem_object_pin_map failed with err=%d\n", err);
			goto out_dma_map;
		}

		for (i = 0; i < ARRAY_SIZE(pattern); i++) {
			memset(dma_map, pattern[i], PAGE_SIZE);
			if (memchr_inv(obj_map, pattern[i], PAGE_SIZE)) {
				err = -EINVAL;
				pr_err("imported vmap not all set to %x!\n", pattern[i]);
				i915_gem_object_unpin_map(obj);
				goto out_dma_map;
			}
		}

		for (i = 0; i < ARRAY_SIZE(pattern); i++) {
			memset(obj_map, pattern[i], PAGE_SIZE);
			if (memchr_inv(dma_map, pattern[i], PAGE_SIZE)) {
				err = -EINVAL;
				pr_err("exported vmap not all set to %x!\n", pattern[i]);
				i915_gem_object_unpin_map(obj);
				goto out_dma_map;
			}
		}

		i915_gem_object_unpin_map(obj);
	}

	err = 0;
out_dma_map:
	dma_buf_vunmap(dmabuf, &map);
out_obj:
	i915_gem_object_put(obj);
out_dmabuf:
	dma_buf_put(dmabuf);
	return err;
}

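/*
 * Check that an imported object keeps its dma-buf alive: drop our own
 * reference to the mock dma-buf straight after the import and make sure
 * the backing pages can still be pinned.
 */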
static int igt_dmabuf_import_ownership(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj;
	struct dma_buf *dmabuf;
	struct dma_buf_map map;
	void *ptr;
	int err;

	dmabuf = mock_dmabuf(1);
	if (IS_ERR(dmabuf))
		return PTR_ERR(dmabuf);

	err = dma_buf_vmap(dmabuf, &map);
	ptr = err ? NULL : map.vaddr;
	if (!ptr) {
		pr_err("dma_buf_vmap failed\n");
		err = -ENOMEM;
		goto err_dmabuf;
	}

	memset(ptr, 0xc5, PAGE_SIZE);
	dma_buf_vunmap(dmabuf, &map);

	obj = to_intel_bo(i915_gem_prime_import(&i915->drm, dmabuf));
	if (IS_ERR(obj)) {
		pr_err("i915_gem_prime_import failed with err=%d\n",
		       (int)PTR_ERR(obj));
		err = PTR_ERR(obj);
		goto err_dmabuf;
	}

	dma_buf_put(dmabuf);

	err = i915_gem_object_pin_pages_unlocked(obj);
	if (err) {
		pr_err("i915_gem_object_pin_pages failed with err=%d\n", err);
		goto out_obj;
	}

	err = 0;
	i915_gem_object_unpin_pages(obj);
out_obj:
	i915_gem_object_put(obj);
	return err;

err_dmabuf:
	dma_buf_put(dmabuf);
	return err;
}

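/*
 * Export a freshly created object and vmap it through the dma-buf
 * interface; the new backing store must read back as zeroes.
 */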
static int igt_dmabuf_export_vmap(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj;
	struct dma_buf *dmabuf;
	struct dma_buf_map map;
	void *ptr;
	int err;

	obj = i915_gem_object_create_shmem(i915, PAGE_SIZE);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	dmabuf = i915_gem_prime_export(&obj->base, 0);
	if (IS_ERR(dmabuf)) {
		pr_err("i915_gem_prime_export failed with err=%d\n",
		       (int)PTR_ERR(dmabuf));
		err = PTR_ERR(dmabuf);
		goto err_obj;
	}
	i915_gem_object_put(obj);

	err = dma_buf_vmap(dmabuf, &map);
	ptr = err ? NULL : map.vaddr;
	if (!ptr) {
		pr_err("dma_buf_vmap failed\n");
		err = -ENOMEM;
		goto out;
	}

	if (memchr_inv(ptr, 0, dmabuf->size)) {
		pr_err("Exported object not initialised to zero!\n");
		err = -EINVAL;
		goto out_vunmap;
	}

	memset(ptr, 0xc5, dmabuf->size);

	err = 0;
out_vunmap:
	dma_buf_vunmap(dmabuf, &map);
out:
	dma_buf_put(dmabuf);
	return err;

err_obj:
	i915_gem_object_put(obj);
	return err;
}

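/* Subtests run against a mock device; no hardware required. */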
int i915_gem_dmabuf_mock_selftests(void)
{
	static const struct i915_subtest tests[] = {
		SUBTEST(igt_dmabuf_export),
		SUBTEST(igt_dmabuf_import_self),
		SUBTEST(igt_dmabuf_import),
		SUBTEST(igt_dmabuf_import_ownership),
		SUBTEST(igt_dmabuf_export_vmap),
	};
	struct drm_i915_private *i915;
	int err;

	i915 = mock_gem_device();
	if (!i915)
		return -ENOMEM;

	err = i915_subtests(tests, i915);

	mock_destroy_device(i915);
	return err;
}

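/* Subtests that exercise the real device and its memory regions. */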
int i915_gem_dmabuf_live_selftests(struct drm_i915_private *i915)
{
	static const struct i915_subtest tests[] = {
		SUBTEST(igt_dmabuf_export),
		SUBTEST(igt_dmabuf_import_same_driver_lmem),
		SUBTEST(igt_dmabuf_import_same_driver_smem),
		SUBTEST(igt_dmabuf_import_same_driver_lmem_smem),
	};

	return i915_subtests(tests, i915);
}