Lines Matching refs:bo_bucket

1913 struct kfd_criu_bo_bucket *bo_bucket; in criu_checkpoint_bos() local
1932 bo_bucket = &bo_buckets[bo_index]; in criu_checkpoint_bos()
1935 bo_bucket->gpu_id = pdd->user_gpu_id; in criu_checkpoint_bos()
1936 bo_bucket->addr = (uint64_t)kgd_mem->va; in criu_checkpoint_bos()
1937 bo_bucket->size = amdgpu_bo_size(dumper_bo); in criu_checkpoint_bos()
1938 bo_bucket->alloc_flags = (uint32_t)kgd_mem->alloc_flags; in criu_checkpoint_bos()
1941 if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_USERPTR) { in criu_checkpoint_bos()
1949 if (bo_bucket->alloc_flags in criu_checkpoint_bos()
1952 bo_bucket->alloc_flags & in criu_checkpoint_bos()
1954 &bo_bucket->dmabuf_fd); in criu_checkpoint_bos()
1958 bo_bucket->dmabuf_fd = KFD_INVALID_FD; in criu_checkpoint_bos()
1961 if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_DOORBELL) in criu_checkpoint_bos()
1962 bo_bucket->offset = KFD_MMAP_TYPE_DOORBELL | in criu_checkpoint_bos()
1964 else if (bo_bucket->alloc_flags & in criu_checkpoint_bos()
1966 bo_bucket->offset = KFD_MMAP_TYPE_MMIO | in criu_checkpoint_bos()
1969 bo_bucket->offset = amdgpu_bo_mmap_offset(dumper_bo); in criu_checkpoint_bos()
1978 bo_bucket->size, in criu_checkpoint_bos()
1979 bo_bucket->addr, in criu_checkpoint_bos()
1980 bo_bucket->offset, in criu_checkpoint_bos()
1981 bo_bucket->gpu_id, in criu_checkpoint_bos()
1982 bo_bucket->alloc_flags, in criu_checkpoint_bos()
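
Read together, the criu_checkpoint_bos() matches above populate one kfd_criu_bo_bucket per buffer object: identity fields, an exported dma-buf fd for content dumping, and an mmap offset cookie. The condensed sketch below is reconstructed from those fragments; criu_get_prime_handle(), amdgpu_ttm_tt_get_userptr(), bo_priv->user_addr and the DRM_RDWR/WRITABLE handling do not appear in the matched lines and are assumptions based on the surrounding kfd_chardev.c context, not a verbatim copy of the kernel source.

	/* Sketch: fill one checkpoint bucket for the BO being dumped. */
	bo_bucket = &bo_buckets[bo_index];

	bo_bucket->gpu_id      = pdd->user_gpu_id;
	bo_bucket->addr        = (uint64_t)kgd_mem->va;
	bo_bucket->size        = amdgpu_bo_size(dumper_bo);
	bo_bucket->alloc_flags = (uint32_t)kgd_mem->alloc_flags;

	/* Userptr BOs: record the CPU address so restore can re-pin the same
	 * user pages (assumed helper; not part of the matched lines). */
	if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_USERPTR)
		amdgpu_ttm_tt_get_userptr(&dumper_bo->tbo, &bo_priv->user_addr);

	/* Only VRAM and GTT BOs are exported as dma-bufs so CRIU can dump
	 * their contents; every other BO type gets an invalid fd. */
	if (bo_bucket->alloc_flags &
	    (KFD_IOC_ALLOC_MEM_FLAGS_VRAM | KFD_IOC_ALLOC_MEM_FLAGS_GTT)) {
		ret = criu_get_prime_handle(kgd_mem,
					    bo_bucket->alloc_flags &
					    KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE ?
					    DRM_RDWR : 0,
					    &bo_bucket->dmabuf_fd);
		if (ret)
			goto exit;
	} else {
		bo_bucket->dmabuf_fd = KFD_INVALID_FD;
	}

	/* Doorbell and MMIO-remap BOs get a synthetic mmap offset that encodes
	 * the mapping type and GPU id; regular BOs use the real mmap offset. */
	if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_DOORBELL)
		bo_bucket->offset = KFD_MMAP_TYPE_DOORBELL |
				    KFD_MMAP_GPU_ID(pdd->dev->id);
	else if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_MMIO_REMAP)
		bo_bucket->offset = KFD_MMAP_TYPE_MMIO |
				    KFD_MMAP_GPU_ID(pdd->dev->id);
	else
		bo_bucket->offset = amdgpu_bo_mmap_offset(dumper_bo);
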
2292 struct kfd_criu_bo_bucket *bo_bucket, in criu_restore_memory_of_gpu() argument
2301 if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_DOORBELL) { in criu_restore_memory_of_gpu()
2302 if (bo_bucket->size != in criu_restore_memory_of_gpu()
2309 } else if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_MMIO_REMAP) { in criu_restore_memory_of_gpu()
2311 if (bo_bucket->size != PAGE_SIZE) { in criu_restore_memory_of_gpu()
2320 } else if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_USERPTR) { in criu_restore_memory_of_gpu()
2324 ret = amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu(pdd->dev->adev, bo_bucket->addr, in criu_restore_memory_of_gpu()
2325 bo_bucket->size, pdd->drm_priv, kgd_mem, in criu_restore_memory_of_gpu()
2326 &offset, bo_bucket->alloc_flags, criu_resume); in criu_restore_memory_of_gpu()
2332 bo_bucket->size, bo_bucket->addr, offset); in criu_restore_memory_of_gpu()
2346 if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_DOORBELL) in criu_restore_memory_of_gpu()
2347 bo_bucket->restored_offset = KFD_MMAP_TYPE_DOORBELL | KFD_MMAP_GPU_ID(pdd->dev->id); in criu_restore_memory_of_gpu()
2348 if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_MMIO_REMAP) { in criu_restore_memory_of_gpu()
2349 bo_bucket->restored_offset = KFD_MMAP_TYPE_MMIO | KFD_MMAP_GPU_ID(pdd->dev->id); in criu_restore_memory_of_gpu()
2350 } else if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_GTT) { in criu_restore_memory_of_gpu()
2351 bo_bucket->restored_offset = offset; in criu_restore_memory_of_gpu()
2352 } else if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_VRAM) { in criu_restore_memory_of_gpu()
2353 bo_bucket->restored_offset = offset; in criu_restore_memory_of_gpu()
2355 WRITE_ONCE(pdd->vram_usage, pdd->vram_usage + bo_bucket->size); in criu_restore_memory_of_gpu()
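
The criu_restore_memory_of_gpu() matches show the inverse path: validate the bucket against the BO type, re-create the BO at its original GPU VA, and report the offset user space must mmap after restore. The condensed sketch below keeps only the calls visible in the matched lines plus the bucket logic; the doorbell slice-size helper and the userptr offset source are assumptions taken from the surrounding code.

	/* Sketch: re-create one BO from its checkpoint bucket. */
	if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_DOORBELL) {
		/* Doorbell BOs must match the per-process doorbell slice size
		 * (assumed helper; not in the matched lines). */
		if (bo_bucket->size != kfd_doorbell_process_slice(pdd->dev->kfd))
			return -EINVAL;
	} else if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_MMIO_REMAP) {
		/* MMIO-remap BOs are always exactly one page. */
		if (bo_bucket->size != PAGE_SIZE)
			return -EINVAL;
	} else if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_USERPTR) {
		/* Userptr BOs are re-created at the saved CPU address
		 * (assumed field; not in the matched lines). */
		offset = bo_priv->user_addr;
	}

	/* Re-create the BO at the same GPU VA it had at checkpoint time. */
	ret = amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu(pdd->dev->adev, bo_bucket->addr,
						      bo_bucket->size, pdd->drm_priv, kgd_mem,
						      &offset, bo_bucket->alloc_flags, criu_resume);
	if (ret)
		return ret;

	/* Tell user space where to mmap the restored BO. */
	if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_DOORBELL)
		bo_bucket->restored_offset = KFD_MMAP_TYPE_DOORBELL |
					     KFD_MMAP_GPU_ID(pdd->dev->id);
	if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_MMIO_REMAP) {
		bo_bucket->restored_offset = KFD_MMAP_TYPE_MMIO |
					     KFD_MMAP_GPU_ID(pdd->dev->id);
	} else if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_GTT) {
		bo_bucket->restored_offset = offset;
	} else if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_VRAM) {
		bo_bucket->restored_offset = offset;
		/* VRAM BOs count against the process's VRAM usage again. */
		WRITE_ONCE(pdd->vram_usage, pdd->vram_usage + bo_bucket->size);
	}
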
2361 struct kfd_criu_bo_bucket *bo_bucket, in criu_restore_bo() argument
2370 bo_bucket->size, bo_bucket->addr, bo_bucket->gpu_id, bo_bucket->alloc_flags, in criu_restore_bo()
2373 pdd = kfd_process_device_data_by_id(p, bo_bucket->gpu_id); in criu_restore_bo()
2379 ret = criu_restore_memory_of_gpu(pdd, bo_bucket, bo_priv, &kgd_mem); in criu_restore_bo()
2411 if (bo_bucket->alloc_flags in criu_restore_bo()
2414 &bo_bucket->dmabuf_fd); in criu_restore_bo()
2418 bo_bucket->dmabuf_fd = KFD_INVALID_FD; in criu_restore_bo()
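
Finally, the criu_restore_bo() matches tie the pieces together: resolve the bucket's gpu_id to a process device, restore the memory via criu_restore_memory_of_gpu(), and re-export a dma-buf fd for VRAM/GTT BOs so CRIU can write the saved contents back. A condensed sketch follows; the per-GPU mapping loop between the two steps is omitted, and criu_get_prime_handle()/DRM_RDWR are assumptions from the surrounding code rather than lines matched above.

	/* Sketch: restore a single BO described by bo_bucket and bo_priv. */
	pdd = kfd_process_device_data_by_id(p, bo_bucket->gpu_id);
	if (!pdd)
		return -ENODEV;

	/* Re-create and validate the BO on that GPU (see sketch above). */
	ret = criu_restore_memory_of_gpu(pdd, bo_bucket, bo_priv, &kgd_mem);
	if (ret)
		return ret;

	/* ... map the BO on every GPU recorded in bo_priv (omitted) ... */

	/* Export VRAM/GTT BOs as dma-bufs so CRIU can copy the saved contents
	 * back in; other BO types need no content restore. */
	if (bo_bucket->alloc_flags &
	    (KFD_IOC_ALLOC_MEM_FLAGS_VRAM | KFD_IOC_ALLOC_MEM_FLAGS_GTT)) {
		ret = criu_get_prime_handle(kgd_mem, DRM_RDWR,
					    &bo_bucket->dmabuf_fd);
		if (ret)
			return ret;
	} else {
		bo_bucket->dmabuf_fd = KFD_INVALID_FD;
	}
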