/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright © 2016 Intel Corporation
 */

#include "i915_drv.h"
#include "i915_selftest.h"

#include "mock_dmabuf.h"
#include "selftests/mock_gem_device.h"

static int igt_dmabuf_export(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj;
	struct dma_buf *dmabuf;

	obj = i915_gem_object_create_shmem(i915, PAGE_SIZE);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	dmabuf = i915_gem_prime_export(&obj->base, 0);
	i915_gem_object_put(obj);
	if (IS_ERR(dmabuf)) {
		pr_err("i915_gem_prime_export failed with err=%d\n",
		       (int)PTR_ERR(dmabuf));
		return PTR_ERR(dmabuf);
	}

	dma_buf_put(dmabuf);
	return 0;
}

static int igt_dmabuf_import_self(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj, *import_obj;
	struct drm_gem_object *import;
	struct dma_buf *dmabuf;
	int err;

	obj = i915_gem_object_create_shmem(i915, PAGE_SIZE);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	dmabuf = i915_gem_prime_export(&obj->base, 0);
	if (IS_ERR(dmabuf)) {
		pr_err("i915_gem_prime_export failed with err=%d\n",
		       (int)PTR_ERR(dmabuf));
		err = PTR_ERR(dmabuf);
		goto out;
	}

	import = i915_gem_prime_import(&i915->drm, dmabuf);
	if (IS_ERR(import)) {
		pr_err("i915_gem_prime_import failed with err=%d\n",
		       (int)PTR_ERR(import));
		err = PTR_ERR(import);
		goto out_dmabuf;
	}
	import_obj = to_intel_bo(import);

	if (import != &obj->base) {
		pr_err("i915_gem_prime_import created a new object!\n");
		err = -EINVAL;
		goto out_import;
	}

	i915_gem_object_lock(import_obj, NULL);
	err = __i915_gem_object_get_pages(import_obj);
	i915_gem_object_unlock(import_obj);
	if (err) {
		pr_err("Same object dma-buf get_pages failed!\n");
		goto out_import;
	}

	err = 0;
out_import:
	i915_gem_object_put(import_obj);
out_dmabuf:
	dma_buf_put(dmabuf);
out:
	i915_gem_object_put(obj);
	return err;
}
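
/*
 * Cross-device import of an LMEM-only object. With
 * force_different_devices set, the prime import path treats the
 * buffer as coming from a foreign device; since an LMEM-only object
 * cannot be migrated to system memory for sharing, the import must
 * be rejected.
 */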
static int igt_dmabuf_import_same_driver_lmem(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct intel_memory_region *lmem = i915->mm.regions[INTEL_REGION_LMEM];
	struct drm_i915_gem_object *obj;
	struct drm_gem_object *import;
	struct dma_buf *dmabuf;
	int err;

	if (!lmem)
		return 0;

	force_different_devices = true;

	obj = __i915_gem_object_create_user(i915, PAGE_SIZE, &lmem, 1);
	if (IS_ERR(obj)) {
		pr_err("__i915_gem_object_create_user failed with err=%ld\n",
		       PTR_ERR(obj));
		err = PTR_ERR(obj);
		goto out_ret;
	}

	dmabuf = i915_gem_prime_export(&obj->base, 0);
	if (IS_ERR(dmabuf)) {
		pr_err("i915_gem_prime_export failed with err=%ld\n",
		       PTR_ERR(dmabuf));
		err = PTR_ERR(dmabuf);
		goto out;
	}

	/*
	 * We expect an import of an LMEM-only object to fail with
	 * -EOPNOTSUPP because it can't be migrated to SMEM.
	 */
	import = i915_gem_prime_import(&i915->drm, dmabuf);
	if (!IS_ERR(import)) {
		drm_gem_object_put(import);
		pr_err("i915_gem_prime_import succeeded when it shouldn't have\n");
		err = -EINVAL;
	} else if (PTR_ERR(import) != -EOPNOTSUPP) {
		pr_err("i915_gem_prime_import failed with the wrong err=%ld\n",
		       PTR_ERR(import));
		err = PTR_ERR(import);
	} else {
		err = 0;
	}

	dma_buf_put(dmabuf);
out:
	i915_gem_object_put(obj);
out_ret:
	force_different_devices = false;
	return err;
}

static int igt_dmabuf_import_same_driver(struct drm_i915_private *i915,
					 struct intel_memory_region **regions,
					 unsigned int num_regions)
{
	struct drm_i915_gem_object *obj, *import_obj;
	struct drm_gem_object *import;
	struct dma_buf *dmabuf;
	struct dma_buf_attachment *import_attach;
	struct sg_table *st;
	long timeout;
	int err;

	force_different_devices = true;

	obj = __i915_gem_object_create_user(i915, PAGE_SIZE,
					    regions, num_regions);
	if (IS_ERR(obj)) {
		pr_err("__i915_gem_object_create_user failed with err=%ld\n",
		       PTR_ERR(obj));
		err = PTR_ERR(obj);
		goto out_ret;
	}

	dmabuf = i915_gem_prime_export(&obj->base, 0);
	if (IS_ERR(dmabuf)) {
		pr_err("i915_gem_prime_export failed with err=%ld\n",
		       PTR_ERR(dmabuf));
		err = PTR_ERR(dmabuf);
		goto out;
	}

	import = i915_gem_prime_import(&i915->drm, dmabuf);
	if (IS_ERR(import)) {
		pr_err("i915_gem_prime_import failed with err=%ld\n",
		       PTR_ERR(import));
		err = PTR_ERR(import);
		goto out_dmabuf;
	}
	import_obj = to_intel_bo(import);

	if (import == &obj->base) {
		pr_err("i915_gem_prime_import reused gem object!\n");
		err = -EINVAL;
		goto out_import;
	}

	i915_gem_object_lock(import_obj, NULL);
	err = __i915_gem_object_get_pages(import_obj);
	if (err) {
		pr_err("Different objects dma-buf get_pages failed!\n");
		i915_gem_object_unlock(import_obj);
		goto out_import;
	}

	/*
	 * If the exported object is not in system memory, something
	 * weird is going on. TODO: When p2p is supported, this is no
	 * longer considered weird.
	 */
	if (obj->mm.region != i915->mm.regions[INTEL_REGION_SMEM]) {
		pr_err("Exported dma-buf is not in system memory\n");
		err = -EINVAL;
		i915_gem_object_unlock(import_obj);
		goto out_import;
	}

	i915_gem_object_unlock(import_obj);

	/* Now try a fake importer */
	import_attach = dma_buf_attach(dmabuf, obj->base.dev->dev);
	if (IS_ERR(import_attach)) {
		err = PTR_ERR(import_attach);
		goto out_import;
	}

	st = dma_buf_map_attachment(import_attach, DMA_BIDIRECTIONAL);
	if (IS_ERR(st)) {
		err = PTR_ERR(st);
		goto out_detach;
	}

	timeout = dma_resv_wait_timeout(dmabuf->resv, false, true, 5 * HZ);
	if (!timeout) {
		pr_err("dmabuf wait for exclusive fence timed out.\n");
		timeout = -ETIME;
	}
	err = timeout > 0 ? 0 : timeout;
	dma_buf_unmap_attachment(import_attach, st, DMA_BIDIRECTIONAL);
out_detach:
	dma_buf_detach(dmabuf, import_attach);
out_import:
	i915_gem_object_put(import_obj);
out_dmabuf:
	dma_buf_put(dmabuf);
out:
	i915_gem_object_put(obj);
out_ret:
	force_different_devices = false;
	return err;
}
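
/*
 * Placement lists fed into the cross-device import test above:
 * system memory only, and LMEM+SMEM, where the exporter is expected
 * to migrate the object into SMEM so that it can be shared.
 */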
static int igt_dmabuf_import_same_driver_smem(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct intel_memory_region *smem = i915->mm.regions[INTEL_REGION_SMEM];

	return igt_dmabuf_import_same_driver(i915, &smem, 1);
}

static int igt_dmabuf_import_same_driver_lmem_smem(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct intel_memory_region *regions[2];

	if (!i915->mm.regions[INTEL_REGION_LMEM])
		return 0;

	regions[0] = i915->mm.regions[INTEL_REGION_LMEM];
	regions[1] = i915->mm.regions[INTEL_REGION_SMEM];
	return igt_dmabuf_import_same_driver(i915, regions, 2);
}
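
/*
 * Import a single-page mock dma-buf and sanity-check the resulting
 * GEM object: it must belong to this device and match the dma-buf's
 * size. The vmap coherency checks stay compiled out (if (0)) until
 * the imported dma-buf can be mapped through the GEM object.
 */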
static int igt_dmabuf_import(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj;
	struct dma_buf *dmabuf;
	void *obj_map, *dma_map;
	struct dma_buf_map map;
	u32 pattern[] = { 0, 0xaa, 0xcc, 0x55, 0xff };
	int err, i;

	dmabuf = mock_dmabuf(1);
	if (IS_ERR(dmabuf))
		return PTR_ERR(dmabuf);

	obj = to_intel_bo(i915_gem_prime_import(&i915->drm, dmabuf));
	if (IS_ERR(obj)) {
		pr_err("i915_gem_prime_import failed with err=%d\n",
		       (int)PTR_ERR(obj));
		err = PTR_ERR(obj);
		goto out_dmabuf;
	}

	if (obj->base.dev != &i915->drm) {
		pr_err("i915_gem_prime_import created a non-i915 object!\n");
		err = -EINVAL;
		goto out_obj;
	}

	if (obj->base.size != PAGE_SIZE) {
		pr_err("i915_gem_prime_import returned the wrong size: found %lld, expected %ld\n",
		       (long long)obj->base.size, PAGE_SIZE);
		err = -EINVAL;
		goto out_obj;
	}

	err = dma_buf_vmap(dmabuf, &map);
	dma_map = err ? NULL : map.vaddr;
	if (!dma_map) {
		pr_err("dma_buf_vmap failed\n");
		err = -ENOMEM;
		goto out_obj;
	}

	if (0) { /* Cannot yet map dmabuf */
		obj_map = i915_gem_object_pin_map(obj, I915_MAP_WB);
		if (IS_ERR(obj_map)) {
			err = PTR_ERR(obj_map);
			pr_err("i915_gem_object_pin_map failed with err=%d\n", err);
			goto out_dma_map;
		}

		for (i = 0; i < ARRAY_SIZE(pattern); i++) {
			memset(dma_map, pattern[i], PAGE_SIZE);
			if (memchr_inv(obj_map, pattern[i], PAGE_SIZE)) {
				err = -EINVAL;
				pr_err("imported vmap not all set to %x!\n", pattern[i]);
				i915_gem_object_unpin_map(obj);
				goto out_dma_map;
			}
		}

		for (i = 0; i < ARRAY_SIZE(pattern); i++) {
			memset(obj_map, pattern[i], PAGE_SIZE);
			if (memchr_inv(dma_map, pattern[i], PAGE_SIZE)) {
				err = -EINVAL;
				pr_err("exported vmap not all set to %x!\n", pattern[i]);
				i915_gem_object_unpin_map(obj);
				goto out_dma_map;
			}
		}

		i915_gem_object_unpin_map(obj);
	}

	err = 0;
out_dma_map:
	dma_buf_vunmap(dmabuf, &map);
out_obj:
	i915_gem_object_put(obj);
out_dmabuf:
	dma_buf_put(dmabuf);
	return err;
}

static int igt_dmabuf_import_ownership(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj;
	struct dma_buf *dmabuf;
	struct dma_buf_map map;
	void *ptr;
	int err;

	dmabuf = mock_dmabuf(1);
	if (IS_ERR(dmabuf))
		return PTR_ERR(dmabuf);

	err = dma_buf_vmap(dmabuf, &map);
	ptr = err ? NULL : map.vaddr;
	if (!ptr) {
		pr_err("dma_buf_vmap failed\n");
		err = -ENOMEM;
		goto err_dmabuf;
	}

	memset(ptr, 0xc5, PAGE_SIZE);
	dma_buf_vunmap(dmabuf, &map);

	obj = to_intel_bo(i915_gem_prime_import(&i915->drm, dmabuf));
	if (IS_ERR(obj)) {
		pr_err("i915_gem_prime_import failed with err=%d\n",
		       (int)PTR_ERR(obj));
		err = PTR_ERR(obj);
		goto err_dmabuf;
	}

	dma_buf_put(dmabuf);

	err = i915_gem_object_pin_pages_unlocked(obj);
	if (err) {
		pr_err("i915_gem_object_pin_pages failed with err=%d\n", err);
		goto out_obj;
	}

	err = 0;
	i915_gem_object_unpin_pages(obj);
out_obj:
	i915_gem_object_put(obj);
	return err;

err_dmabuf:
	dma_buf_put(dmabuf);
	return err;
}
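
/*
 * vmap of an exported dma-buf: a freshly created shmem object must
 * read back as zeroes through the dma-buf mapping.
 */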
static int igt_dmabuf_export_vmap(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj;
	struct dma_buf *dmabuf;
	struct dma_buf_map map;
	void *ptr;
	int err;

	obj = i915_gem_object_create_shmem(i915, PAGE_SIZE);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	dmabuf = i915_gem_prime_export(&obj->base, 0);
	if (IS_ERR(dmabuf)) {
		pr_err("i915_gem_prime_export failed with err=%d\n",
		       (int)PTR_ERR(dmabuf));
		err = PTR_ERR(dmabuf);
		goto err_obj;
	}
	i915_gem_object_put(obj);

	err = dma_buf_vmap(dmabuf, &map);
	ptr = err ? NULL : map.vaddr;
	if (!ptr) {
		pr_err("dma_buf_vmap failed\n");
		err = -ENOMEM;
		goto out;
	}

	if (memchr_inv(ptr, 0, dmabuf->size)) {
		pr_err("Exported object not initialised to zero!\n");
		err = -EINVAL;
		goto out_vunmap;
	}

	memset(ptr, 0xc5, dmabuf->size);
	err = 0;

out_vunmap:
	dma_buf_vunmap(dmabuf, &map);
out:
	dma_buf_put(dmabuf);
	return err;

err_obj:
	i915_gem_object_put(obj);
	return err;
}

int i915_gem_dmabuf_mock_selftests(void)
{
	static const struct i915_subtest tests[] = {
		SUBTEST(igt_dmabuf_export),
		SUBTEST(igt_dmabuf_import_self),
		SUBTEST(igt_dmabuf_import),
		SUBTEST(igt_dmabuf_import_ownership),
		SUBTEST(igt_dmabuf_export_vmap),
	};
	struct drm_i915_private *i915;
	int err;

	i915 = mock_gem_device();
	if (!i915)
		return -ENOMEM;

	err = i915_subtests(tests, i915);

	mock_destroy_device(i915);
	return err;
}

int i915_gem_dmabuf_live_selftests(struct drm_i915_private *i915)
{
	static const struct i915_subtest tests[] = {
		SUBTEST(igt_dmabuf_export),
		SUBTEST(igt_dmabuf_import_same_driver_lmem),
		SUBTEST(igt_dmabuf_import_same_driver_smem),
		SUBTEST(igt_dmabuf_import_same_driver_lmem_smem),
	};

	return i915_subtests(tests, i915);
}