Lines matching full:region (one hit per line: file line number, matched source line, enclosing function)

24 #include "hw/vfio/vfio-region.h"
40 VFIORegion *region = opaque; in vfio_region_write() local
41 VFIODevice *vbasedev = region->vbasedev; in vfio_region_write()
68 ret = vbasedev->io_ops->region_write(vbasedev, region->nr, in vfio_region_write()
69 addr, size, &buf, region->post_wr); in vfio_region_write()
71 error_report("%s(%s:region%d+0x%"HWADDR_PRIx", 0x%"PRIx64 in vfio_region_write()
73 __func__, vbasedev->name, region->nr, in vfio_region_write()
77 trace_vfio_region_write(vbasedev->name, region->nr, addr, data, size); in vfio_region_write()
93 VFIORegion *region = opaque; in vfio_region_read() local
94 VFIODevice *vbasedev = region->vbasedev; in vfio_region_read()
104 ret = vbasedev->io_ops->region_read(vbasedev, region->nr, addr, size, &buf); in vfio_region_read()
106 error_report("%s(%s:region%d+0x%"HWADDR_PRIx", %d) failed: %s", in vfio_region_read()
107 __func__, vbasedev->name, region->nr, in vfio_region_read()
129 trace_vfio_region_read(vbasedev->name, region->nr, addr, size, data); in vfio_region_read()
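The hits above come from the trapped (slow-path) accessors: each access is forwarded through vbasedev->io_ops->region_read()/region_write(), reported on error, and traced. A minimal sketch of how such a pair is typically bundled into a MemoryRegionOps table and later attached via the memory_region_init_io() call seen in vfio_region_setup(); the field values and the header name are assumptions, not copied from this file.

```c
#include "qemu/osdep.h"
/* MemoryRegionOps comes from QEMU's memory API header
 * ("exec/memory.h" or "system/memory.h" depending on the tree). */

/* Sketch only: wires the accessors above into a MemoryRegionOps table. */
static const MemoryRegionOps vfio_region_ops_sketch = {
    .read  = vfio_region_read,            /* trapped read, via io_ops  */
    .write = vfio_region_write,           /* trapped write, via io_ops */
    .endianness = DEVICE_LITTLE_ENDIAN,   /* assumed; check the real table */
    .valid = {
        .min_access_size = 1,
        .max_access_size = 8,
    },
};
```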
151 static int vfio_setup_region_sparse_mmaps(VFIORegion *region, in vfio_setup_region_sparse_mmaps() argument
165 trace_vfio_region_sparse_mmap_header(region->vbasedev->name, in vfio_setup_region_sparse_mmaps()
166 region->nr, sparse->nr_areas); in vfio_setup_region_sparse_mmaps()
168 region->mmaps = g_new0(VFIOMmap, sparse->nr_areas); in vfio_setup_region_sparse_mmaps()
175 region->mmaps[j].offset = sparse->areas[i].offset; in vfio_setup_region_sparse_mmaps()
176 region->mmaps[j].size = sparse->areas[i].size; in vfio_setup_region_sparse_mmaps()
181 region->nr_mmaps = j; in vfio_setup_region_sparse_mmaps()
182 region->mmaps = g_realloc(region->mmaps, j * sizeof(VFIOMmap)); in vfio_setup_region_sparse_mmaps()
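vfio_setup_region_sparse_mmaps() walks the sparse-mmap capability attached to the region info and copies each mappable area into region->mmaps[], then trims the array to the areas actually kept. For reference, a sketch of the kernel UAPI layout it walks, reproduced from memory from <linux/vfio.h> (VFIO_REGION_INFO_CAP_SPARSE_MMAP); verify against the installed headers.

```c
#include <linux/types.h>

/* Generic capability chain header embedded in struct vfio_region_info. */
struct vfio_info_cap_header {
    __u16 id;        /* e.g. VFIO_REGION_INFO_CAP_SPARSE_MMAP */
    __u16 version;
    __u32 next;      /* offset of the next capability, 0 if last */
};

/* One mmap-able window inside the region. */
struct vfio_region_sparse_mmap_area {
    __u64 offset;    /* offset within the region */
    __u64 size;
};

/* The capability itself: a count followed by the area array. */
struct vfio_region_info_cap_sparse_mmap {
    struct vfio_info_cap_header header;
    __u32 nr_areas;
    __u32 reserved;
    struct vfio_region_sparse_mmap_area areas[];
};
```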
187 int vfio_region_setup(Object *obj, VFIODevice *vbasedev, VFIORegion *region, in vfio_region_setup() argument
198 region->vbasedev = vbasedev; in vfio_region_setup()
199 region->flags = info->flags; in vfio_region_setup()
200 region->size = info->size; in vfio_region_setup()
201 region->fd_offset = info->offset; in vfio_region_setup()
202 region->nr = index; in vfio_region_setup()
203 region->post_wr = false; in vfio_region_setup()
205 if (region->size) { in vfio_region_setup()
206 region->mem = g_new0(MemoryRegion, 1); in vfio_region_setup()
207 memory_region_init_io(region->mem, obj, &vfio_region_ops, in vfio_region_setup()
208 region, name, region->size); in vfio_region_setup()
211 region->flags & VFIO_REGION_INFO_FLAG_MMAP) { in vfio_region_setup()
213 ret = vfio_setup_region_sparse_mmaps(region, info); in vfio_region_setup()
216 region->nr_mmaps = 1; in vfio_region_setup()
217 region->mmaps = g_new0(VFIOMmap, region->nr_mmaps); in vfio_region_setup()
218 region->mmaps[0].offset = 0; in vfio_region_setup()
219 region->mmaps[0].size = region->size; in vfio_region_setup()
225 region->flags, region->fd_offset, region->size); in vfio_region_setup()
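vfio_region_setup() fills the VFIORegion from the region info (flags, size, fd_offset, index), creates the trapped MemoryRegion, and pre-computes the mmap windows (sparse if advertised, otherwise one window covering the whole region). A sketch of the call sequence a device model might use; the trailing index/name parameters of vfio_region_setup() are inferred from the hits above (region->nr = index, name passed to memory_region_init_io()) rather than copied from the header, and the helper name is hypothetical.

```c
/* Sketch, assuming the usual QEMU error-report and glib helpers. */
static int vfio_bar_probe_sketch(Object *owner, VFIODevice *vbasedev,
                                 VFIORegion *region, int index)
{
    g_autofree char *name = g_strdup_printf("%s region %d",
                                            vbasedev->name, index);
    int ret = vfio_region_setup(owner, vbasedev, region, index, name);

    if (ret) {
        return ret;
    }

    /*
     * Try to map the fast-path windows; if this fails, guest accesses
     * still work through the trapped vfio_region_read/write path.
     */
    if (vfio_region_mmap(region)) {
        warn_report("%s: region %d not mmap'd, using trapped access",
                    vbasedev->name, index);
    }
    return 0;
}
```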
229 static void vfio_subregion_unmap(VFIORegion *region, int index) in vfio_subregion_unmap() argument
231 trace_vfio_region_unmap(memory_region_name(&region->mmaps[index].mem), in vfio_subregion_unmap()
232 region->mmaps[index].offset, in vfio_subregion_unmap()
233 region->mmaps[index].offset + in vfio_subregion_unmap()
234 region->mmaps[index].size - 1); in vfio_subregion_unmap()
235 memory_region_del_subregion(region->mem, &region->mmaps[index].mem); in vfio_subregion_unmap()
236 munmap(region->mmaps[index].mmap, region->mmaps[index].size); in vfio_subregion_unmap()
237 object_unparent(OBJECT(&region->mmaps[index].mem)); in vfio_subregion_unmap()
238 region->mmaps[index].mmap = NULL; in vfio_subregion_unmap()
241 int vfio_region_mmap(VFIORegion *region) in vfio_region_mmap() argument
247 if (!region->mem) { in vfio_region_mmap()
251 prot |= region->flags & VFIO_REGION_INFO_FLAG_READ ? PROT_READ : 0; in vfio_region_mmap()
252 prot |= region->flags & VFIO_REGION_INFO_FLAG_WRITE ? PROT_WRITE : 0; in vfio_region_mmap()
254 for (i = 0; i < region->nr_mmaps; i++) { in vfio_region_mmap()
255 size_t align = MIN(1ULL << ctz64(region->mmaps[i].size), 1 * GiB); in vfio_region_mmap()
266 * the region size here can exceed host memory, therefore we manually in vfio_region_mmap()
269 map_base = mmap(0, region->mmaps[i].size + align, PROT_NONE, in vfio_region_mmap()
276 fd = vfio_device_get_region_fd(region->vbasedev, region->nr); in vfio_region_mmap()
280 munmap(map_align + region->mmaps[i].size, in vfio_region_mmap()
283 region->mmaps[i].mmap = mmap(map_align, region->mmaps[i].size, prot, in vfio_region_mmap()
285 region->fd_offset + in vfio_region_mmap()
286 region->mmaps[i].offset); in vfio_region_mmap()
287 if (region->mmaps[i].mmap == MAP_FAILED) { in vfio_region_mmap()
293 memory_region_name(region->mem), i); in vfio_region_mmap()
294 memory_region_init_ram_device_ptr(&region->mmaps[i].mem, in vfio_region_mmap()
295 memory_region_owner(region->mem), in vfio_region_mmap()
296 name, region->mmaps[i].size, in vfio_region_mmap()
297 region->mmaps[i].mmap); in vfio_region_mmap()
299 memory_region_add_subregion(region->mem, region->mmaps[i].offset, in vfio_region_mmap()
300 &region->mmaps[i].mem); in vfio_region_mmap()
302 trace_vfio_region_mmap(memory_region_name(&region->mmaps[i].mem), in vfio_region_mmap()
303 region->mmaps[i].offset, in vfio_region_mmap()
304 region->mmaps[i].offset + in vfio_region_mmap()
305 region->mmaps[i].size - 1); in vfio_region_mmap()
311 trace_vfio_region_mmap_fault(memory_region_name(region->mem), i, in vfio_region_mmap()
312 region->fd_offset + region->mmaps[i].offset, in vfio_region_mmap()
313 region->fd_offset + region->mmaps[i].offset + in vfio_region_mmap()
314 region->mmaps[i].size - 1, ret); in vfio_region_mmap()
316 region->mmaps[i].mmap = NULL; in vfio_region_mmap()
319 vfio_subregion_unmap(region, i); in vfio_region_mmap()
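vfio_region_mmap() aligns each window by first reserving an oversized PROT_NONE area, trimming the slack with munmap(), and then placing the real MAP_FIXED mapping of the region fd at the aligned address; on failure it unwinds the subregions already added. A standalone sketch of that over-reserve-then-trim pattern in plain POSIX terms (no QEMU types); the real code also handles MAP_FAILED at each step and feeds the result into memory_region_init_ram_device_ptr().

```c
#include <stdint.h>
#include <sys/types.h>
#include <sys/mman.h>

/* align must be a nonzero power of two, as in the ctz64()-derived value above. */
static void *mmap_aligned_sketch(int fd, off_t offset, size_t size, size_t align)
{
    /* Reserve an oversized, inaccessible window we can carve from. */
    uint8_t *base = mmap(NULL, size + align, PROT_NONE,
                         MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    if (base == MAP_FAILED) {
        return MAP_FAILED;
    }

    /* Round up to the requested alignment inside the reservation. */
    uint8_t *aligned = (uint8_t *)(((uintptr_t)base + align - 1) & ~(align - 1));

    /* Give back the slack before and after the aligned range. */
    if (aligned > base) {
        munmap(base, aligned - base);
    }
    munmap(aligned + size, base + size + align - (aligned + size));

    /* Place the real, shared mapping of the device fd at the aligned address. */
    return mmap(aligned, size, PROT_READ | PROT_WRITE,
                MAP_SHARED | MAP_FIXED, fd, offset);
}
```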
325 void vfio_region_unmap(VFIORegion *region) in vfio_region_unmap() argument
329 if (!region->mem) { in vfio_region_unmap()
333 for (i = 0; i < region->nr_mmaps; i++) { in vfio_region_unmap()
334 if (region->mmaps[i].mmap) { in vfio_region_unmap()
335 vfio_subregion_unmap(region, i); in vfio_region_unmap()
340 void vfio_region_exit(VFIORegion *region) in vfio_region_exit() argument
344 if (!region->mem) { in vfio_region_exit()
348 for (i = 0; i < region->nr_mmaps; i++) { in vfio_region_exit()
349 if (region->mmaps[i].mmap) { in vfio_region_exit()
350 memory_region_del_subregion(region->mem, &region->mmaps[i].mem); in vfio_region_exit()
354 trace_vfio_region_exit(region->vbasedev->name, region->nr); in vfio_region_exit()
357 void vfio_region_finalize(VFIORegion *region) in vfio_region_finalize() argument
361 if (!region->mem) { in vfio_region_finalize()
365 for (i = 0; i < region->nr_mmaps; i++) { in vfio_region_finalize()
366 if (region->mmaps[i].mmap) { in vfio_region_finalize()
367 munmap(region->mmaps[i].mmap, region->mmaps[i].size); in vfio_region_finalize()
368 object_unparent(OBJECT(&region->mmaps[i].mem)); in vfio_region_finalize()
372 object_unparent(OBJECT(region->mem)); in vfio_region_finalize()
374 g_free(region->mem); in vfio_region_finalize()
375 g_free(region->mmaps); in vfio_region_finalize()
377 trace_vfio_region_finalize(region->vbasedev->name, region->nr); in vfio_region_finalize()
379 region->mem = NULL; in vfio_region_finalize()
380 region->mmaps = NULL; in vfio_region_finalize()
381 region->nr_mmaps = 0; in vfio_region_finalize()
382 region->size = 0; in vfio_region_finalize()
383 region->flags = 0; in vfio_region_finalize()
384 region->nr = 0; in vfio_region_finalize()
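The hits above show the two-stage teardown: vfio_region_exit() only detaches the mmap subregions from region->mem, while vfio_region_finalize() later unmaps the windows, unparents and frees the MemoryRegions, and resets the struct. A short sketch of the implied ordering; the comment about when finalize is safe is an assumption based on the split, not a statement from this file.

```c
/* Sketch of the teardown order implied by the two functions above. */
vfio_region_exit(region);      /* detach mmap subregions from region->mem   */
/* ... once the device's memory regions are no longer referenced ... */
vfio_region_finalize(region);  /* munmap windows, free mem/mmaps, reset it  */
```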
387 void vfio_region_mmaps_set_enabled(VFIORegion *region, bool enabled) in vfio_region_mmaps_set_enabled() argument
391 if (!region->mem) { in vfio_region_mmaps_set_enabled()
395 for (i = 0; i < region->nr_mmaps; i++) { in vfio_region_mmaps_set_enabled()
396 if (region->mmaps[i].mmap) { in vfio_region_mmaps_set_enabled()
397 memory_region_set_enabled(&region->mmaps[i].mem, enabled); in vfio_region_mmaps_set_enabled()
401 trace_vfio_region_mmaps_set_enabled(memory_region_name(region->mem), in vfio_region_mmaps_set_enabled()
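vfio_region_mmaps_set_enabled() flips memory_region_set_enabled() on every mapped window, switching the region between direct mmap access and the trapped vfio_region_read/write path. A minimal usage sketch; the reason given for trapping is illustrative, not taken from this file.

```c
/* Sketch: temporarily force trapped access to a region, then restore mmaps. */
vfio_region_mmaps_set_enabled(region, false);  /* every access goes via io_ops */
/* ... do work that requires each access to be observed ... */
vfio_region_mmaps_set_enabled(region, true);   /* direct mmap fast path again  */
```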