Lines Matching refs:dmirror
83 struct dmirror *dmirror; member
90 struct dmirror { struct
147 dmirror_select_device(struct dmirror *dmirror) in dmirror_select_device() argument
149 return (dmirror->mdevice->zone_device_type == in dmirror_select_device()
163 struct dmirror *dmirror; in dmirror_fops_open() local
167 dmirror = kzalloc(sizeof(*dmirror), GFP_KERNEL); in dmirror_fops_open()
168 if (dmirror == NULL) in dmirror_fops_open()
171 dmirror->mdevice = container_of(cdev, struct dmirror_device, cdevice); in dmirror_fops_open()
172 mutex_init(&dmirror->mutex); in dmirror_fops_open()
173 xa_init(&dmirror->pt); in dmirror_fops_open()
175 ret = mmu_interval_notifier_insert(&dmirror->notifier, current->mm, in dmirror_fops_open()
178 kfree(dmirror); in dmirror_fops_open()
182 filp->private_data = dmirror; in dmirror_fops_open()
188 struct dmirror *dmirror = filp->private_data; in dmirror_fops_release() local
190 mmu_interval_notifier_remove(&dmirror->notifier); in dmirror_fops_release()
191 xa_destroy(&dmirror->pt); in dmirror_fops_release()
192 kfree(dmirror); in dmirror_fops_release()
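
The dmirror_fops_open()/dmirror_fops_release() references above outline the per-open lifecycle: allocate a struct dmirror, initialise its mutex and xarray "page table", register an mmu interval notifier against the opening task's mm, and undo all of that on release. The listing appears to come from the kernel's HMM self-test driver (lib/test_hmm.c); the C sketch below only reconstructs the pattern around the matched lines, so the struct layouts, the 0..ULONG_MAX notifier range and the ops-table name dmirror_min_ops are assumptions rather than verbatim driver code.

/*
 * Sketch only: per-open state for a mirrored address space.  Fields and
 * names not shown in the matched lines are assumptions.
 */
#include <linux/cdev.h>
#include <linux/fs.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/mmu_notifier.h>
#include <linux/mutex.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/xarray.h>

struct dmirror_device {
        struct cdev             cdevice;
        unsigned int            zone_device_type;  /* private vs. coherent */
};

struct dmirror {
        struct dmirror_device           *mdevice;
        struct xarray                   pt;     /* mirrored "page table" */
        struct mmu_interval_notifier    notifier;
        struct mutex                    mutex;  /* protects pt */
};

/* .invalidate callback: see the sketch after dmirror_interval_invalidate() below. */
static const struct mmu_interval_notifier_ops dmirror_min_ops;

static int dmirror_fops_open(struct inode *inode, struct file *filp)
{
        struct cdev *cdev = inode->i_cdev;
        struct dmirror *dmirror;
        int ret;

        dmirror = kzalloc(sizeof(*dmirror), GFP_KERNEL);
        if (dmirror == NULL)
                return -ENOMEM;

        dmirror->mdevice = container_of(cdev, struct dmirror_device, cdevice);
        mutex_init(&dmirror->mutex);
        xa_init(&dmirror->pt);

        /* Mirror the whole address space of the opening task. */
        ret = mmu_interval_notifier_insert(&dmirror->notifier, current->mm,
                                           0, ULONG_MAX & PAGE_MASK,
                                           &dmirror_min_ops);
        if (ret) {
                kfree(dmirror);
                return ret;
        }

        filp->private_data = dmirror;
        return 0;
}

static int dmirror_fops_release(struct inode *inode, struct file *filp)
{
        struct dmirror *dmirror = filp->private_data;

        mmu_interval_notifier_remove(&dmirror->notifier);
        xa_destroy(&dmirror->pt);
        kfree(dmirror);
        return 0;
}

The xarray deliberately takes no references on the pages it records; keeping it coherent is entirely the job of the interval-notifier callback sketched next.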
207 static int dmirror_do_fault(struct dmirror *dmirror, struct hmm_range *range) in dmirror_do_fault() argument
233 entry = xa_store(&dmirror->pt, pfn, entry, GFP_ATOMIC); in dmirror_do_fault()
241 static void dmirror_do_update(struct dmirror *dmirror, unsigned long start, in dmirror_do_update() argument
252 xa_for_each_range(&dmirror->pt, pfn, entry, start >> PAGE_SHIFT, in dmirror_do_update()
254 xa_erase(&dmirror->pt, pfn); in dmirror_do_update()
261 struct dmirror *dmirror = container_of(mni, struct dmirror, notifier); in dmirror_interval_invalidate() local
268 range->owner == dmirror->mdevice) in dmirror_interval_invalidate()
272 mutex_lock(&dmirror->mutex); in dmirror_interval_invalidate()
273 else if (!mutex_trylock(&dmirror->mutex)) in dmirror_interval_invalidate()
277 dmirror_do_update(dmirror, range->start, range->end); in dmirror_interval_invalidate()
279 mutex_unlock(&dmirror->mutex); in dmirror_interval_invalidate()
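
The dmirror_interval_invalidate() references show the other half of that contract: when the CPU page tables change, the callback takes dmirror->mutex (only trylock when the invalidation may not block), bumps the notifier sequence, and drops the stale xarray entries for the range; invalidations raised by this device's own migrations are skipped. A hedged sketch, building on the struct dmirror from the previous block (mmu_interval_set_seq() and the ops-table wiring are the parts not shown in the matched lines):

static void dmirror_do_update(struct dmirror *dmirror, unsigned long start,
                              unsigned long end)
{
        unsigned long pfn;
        void *entry;

        /* The xarray holds no page references, so just drop the entries. */
        xa_for_each_range(&dmirror->pt, pfn, entry, start >> PAGE_SHIFT,
                          end >> PAGE_SHIFT)
                xa_erase(&dmirror->pt, pfn);
}

static bool dmirror_interval_invalidate(struct mmu_interval_notifier *mni,
                                        const struct mmu_notifier_range *range,
                                        unsigned long cur_seq)
{
        struct dmirror *dmirror = container_of(mni, struct dmirror, notifier);

        /*
         * Migrations of this device's own pages are already handled by the
         * migration path itself, so skip them here.
         */
        if (range->event == MMU_NOTIFY_MIGRATE &&
            range->owner == dmirror->mdevice)
                return true;

        if (mmu_notifier_range_blockable(range))
                mutex_lock(&dmirror->mutex);
        else if (!mutex_trylock(&dmirror->mutex))
                return false;   /* non-blocking invalidate, lock contended */

        mmu_interval_set_seq(mni, cur_seq);
        dmirror_do_update(dmirror, range->start, range->end);

        mutex_unlock(&dmirror->mutex);
        return true;
}

static const struct mmu_interval_notifier_ops dmirror_min_ops = {
        .invalidate = dmirror_interval_invalidate,
};

Returning false on a contended trylock is what lets non-blocking invalidators (such as the OOM reaper) back off instead of sleeping on the driver lock.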
287 static int dmirror_range_fault(struct dmirror *dmirror, in dmirror_range_fault() argument
290 struct mm_struct *mm = dmirror->notifier.mm; in dmirror_range_fault()
311 mutex_lock(&dmirror->mutex); in dmirror_range_fault()
314 mutex_unlock(&dmirror->mutex); in dmirror_range_fault()
320 ret = dmirror_do_fault(dmirror, range); in dmirror_range_fault()
322 mutex_unlock(&dmirror->mutex); in dmirror_range_fault()
327 static int dmirror_fault(struct dmirror *dmirror, unsigned long start, in dmirror_fault() argument
330 struct mm_struct *mm = dmirror->notifier.mm; in dmirror_fault()
334 .notifier = &dmirror->notifier, in dmirror_fault()
339 .dev_private_owner = dmirror->mdevice, in dmirror_fault()
351 ret = dmirror_range_fault(dmirror, &range); in dmirror_fault()
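
dmirror_range_fault() and dmirror_fault() above follow the standard hmm_range_fault() retry loop: sample the interval-notifier sequence, walk and fault the range under mmap_read_lock(), then commit the result only while holding dmirror->mutex and only if mmu_interval_read_retry() confirms no invalidation raced in between. A condensed sketch under the same assumptions as the earlier blocks; the real driver also chunks the range into 64-PFN batches and pins the mm with mmget_not_zero(), which is elided here, and dmirror_do_fault() is reduced to a bare xa_store() per returned PFN.

#include <linux/hmm.h>
#include <linux/jiffies.h>
#include <linux/mm.h>

/* Minimal dmirror_do_fault(): record one xarray entry per faulted PFN
 * (the real driver also tags entries as writable vs. read-only).
 * Caller holds dmirror->mutex.
 */
static int dmirror_do_fault(struct dmirror *dmirror, struct hmm_range *range)
{
        unsigned long *pfns = range->hmm_pfns;
        unsigned long pfn;

        for (pfn = range->start >> PAGE_SHIFT;
             pfn < (range->end >> PAGE_SHIFT); pfn++, pfns++) {
                void *entry;

                entry = xa_store(&dmirror->pt, pfn, hmm_pfn_to_page(*pfns),
                                 GFP_ATOMIC);
                if (xa_is_err(entry))
                        return xa_err(entry);
        }
        return 0;
}

static int dmirror_range_fault(struct dmirror *dmirror,
                               struct hmm_range *range)
{
        struct mm_struct *mm = dmirror->notifier.mm;
        unsigned long timeout =
                jiffies + msecs_to_jiffies(HMM_RANGE_DEFAULT_TIMEOUT);
        int ret;

        while (true) {
                if (time_after(jiffies, timeout))
                        return -EBUSY;

                /* Sample the interval-notifier sequence before the walk. */
                range->notifier_seq = mmu_interval_read_begin(range->notifier);
                mmap_read_lock(mm);
                ret = hmm_range_fault(range);
                mmap_read_unlock(mm);
                if (ret) {
                        if (ret == -EBUSY)
                                continue;       /* the mm changed; retry */
                        return ret;
                }

                /* Commit only if no invalidation ran since read_begin(). */
                mutex_lock(&dmirror->mutex);
                if (mmu_interval_read_retry(range->notifier,
                                            range->notifier_seq)) {
                        mutex_unlock(&dmirror->mutex);
                        continue;
                }
                break;
        }

        ret = dmirror_do_fault(dmirror, range);
        mutex_unlock(&dmirror->mutex);
        return ret;
}

static int dmirror_fault(struct dmirror *dmirror, unsigned long start,
                         unsigned long end, bool write)
{
        unsigned long pfns[64];
        struct hmm_range range = {
                .notifier               = &dmirror->notifier,
                .hmm_pfns               = pfns,
                .default_flags          = HMM_PFN_REQ_FAULT |
                                          (write ? HMM_PFN_REQ_WRITE : 0),
                .dev_private_owner      = dmirror->mdevice,
                .start                  = start,
                .end                    = min_t(unsigned long, end,
                                                start + (ARRAY_SIZE(pfns) <<
                                                         PAGE_SHIFT)),
        };

        return dmirror_range_fault(dmirror, &range);
}

Taking the driver mutex before the read_retry() check is what guarantees the xarray can never hold an entry the invalidate callback has not had a chance to clear.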
360 static int dmirror_do_read(struct dmirror *dmirror, unsigned long start, in dmirror_do_read() argument
372 entry = xa_load(&dmirror->pt, pfn); in dmirror_do_read()
386 static int dmirror_read(struct dmirror *dmirror, struct hmm_dmirror_cmd *cmd) in dmirror_read() argument
403 mutex_lock(&dmirror->mutex); in dmirror_read()
404 ret = dmirror_do_read(dmirror, start, end, &bounce); in dmirror_read()
405 mutex_unlock(&dmirror->mutex); in dmirror_read()
410 ret = dmirror_fault(dmirror, start, end, false); in dmirror_read()
426 static int dmirror_do_write(struct dmirror *dmirror, unsigned long start, in dmirror_do_write() argument
438 entry = xa_load(&dmirror->pt, pfn); in dmirror_do_write()
452 static int dmirror_write(struct dmirror *dmirror, struct hmm_dmirror_cmd *cmd) in dmirror_write() argument
474 mutex_lock(&dmirror->mutex); in dmirror_write()
475 ret = dmirror_do_write(dmirror, start, end, &bounce); in dmirror_write()
476 mutex_unlock(&dmirror->mutex); in dmirror_write()
481 ret = dmirror_fault(dmirror, start, end, true); in dmirror_write()
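
The read and write ioctl paths above share one shape: copy through the pages already recorded in the xarray while holding dmirror->mutex, and if any page is missing (-ENOENT), call dmirror_fault() for the range (write = true on the write side) and retry. The real driver stages data through a bounce buffer tied to the ioctl command and only refaults the unread tail; the sketch below simplifies that to a kernel buffer, shows only the read side, and dmirror_read_range() is a hypothetical wrapper standing in for dmirror_read(). It reuses the struct dmirror and dmirror_fault() from the earlier sketches.

#include <linux/highmem.h>

/* Copy mirrored pages for [start, end) into buf; -ENOENT if any page is
 * not (yet) in the xarray.  Caller holds dmirror->mutex.
 */
static int dmirror_do_read(struct dmirror *dmirror, unsigned long start,
                           unsigned long end, void *buf)
{
        unsigned long pfn;

        for (pfn = start >> PAGE_SHIFT; pfn < (end >> PAGE_SHIFT); pfn++) {
                void *entry = xa_load(&dmirror->pt, pfn);
                struct page *page = xa_untag_pointer(entry);

                if (!page)
                        return -ENOENT;

                memcpy_from_page(buf, page, 0, PAGE_SIZE);
                buf += PAGE_SIZE;
        }
        return 0;
}

static int dmirror_read_range(struct dmirror *dmirror, unsigned long start,
                              unsigned long end, void *buf)
{
        int ret;

        for (;;) {
                mutex_lock(&dmirror->mutex);
                ret = dmirror_do_read(dmirror, start, end, buf);
                mutex_unlock(&dmirror->mutex);
                if (ret != -ENOENT)
                        return ret;

                /* Nothing mirrored yet: fault the range read-only, retry. */
                ret = dmirror_fault(dmirror, start, end, false);
                if (ret)
                        return ret;
        }
}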
640 struct dmirror *dmirror) in dmirror_migrate_alloc_and_copy() argument
642 struct dmirror_device *mdevice = dmirror->mdevice; in dmirror_migrate_alloc_and_copy()
682 rpage->zone_device_data = dmirror; in dmirror_migrate_alloc_and_copy()
693 static int dmirror_check_atomic(struct dmirror *dmirror, unsigned long start, in dmirror_check_atomic() argument
701 entry = xa_load(&dmirror->pt, pfn); in dmirror_check_atomic()
710 struct page **pages, struct dmirror *dmirror) in dmirror_atomic_map() argument
716 mutex_lock(&dmirror->mutex); in dmirror_atomic_map()
726 entry = xa_store(&dmirror->pt, pfn, entry, GFP_ATOMIC); in dmirror_atomic_map()
728 mutex_unlock(&dmirror->mutex); in dmirror_atomic_map()
735 mutex_unlock(&dmirror->mutex); in dmirror_atomic_map()
740 struct dmirror *dmirror) in dmirror_migrate_finalize_and_map() argument
749 mutex_lock(&dmirror->mutex); in dmirror_migrate_finalize_and_map()
766 entry = xa_store(&dmirror->pt, pfn, entry, GFP_ATOMIC); in dmirror_migrate_finalize_and_map()
768 mutex_unlock(&dmirror->mutex); in dmirror_migrate_finalize_and_map()
773 mutex_unlock(&dmirror->mutex); in dmirror_migrate_finalize_and_map()
777 static int dmirror_exclusive(struct dmirror *dmirror, in dmirror_exclusive() argument
782 struct mm_struct *mm = dmirror->notifier.mm; in dmirror_exclusive()
814 mapped = dmirror_atomic_map(addr, next, pages, dmirror); in dmirror_exclusive()
835 mutex_lock(&dmirror->mutex); in dmirror_exclusive()
836 ret = dmirror_do_read(dmirror, start, end, &bounce); in dmirror_exclusive()
837 mutex_unlock(&dmirror->mutex); in dmirror_exclusive()
850 struct dmirror *dmirror) in dmirror_devmem_fault_alloc_and_copy() argument
877 xa_erase(&dmirror->pt, addr >> PAGE_SHIFT); in dmirror_devmem_fault_alloc_and_copy()
900 static int dmirror_migrate_to_system(struct dmirror *dmirror, in dmirror_migrate_to_system() argument
905 struct mm_struct *mm = dmirror->notifier.mm; in dmirror_migrate_to_system()
939 args.pgmap_owner = dmirror->mdevice; in dmirror_migrate_to_system()
940 args.flags = dmirror_select_device(dmirror); in dmirror_migrate_to_system()
947 dmirror_devmem_fault_alloc_and_copy(&args, dmirror); in dmirror_migrate_to_system()
960 static int dmirror_migrate_to_device(struct dmirror *dmirror, in dmirror_migrate_to_device() argument
965 struct mm_struct *mm = dmirror->notifier.mm; in dmirror_migrate_to_device()
999 args.pgmap_owner = dmirror->mdevice; in dmirror_migrate_to_device()
1006 dmirror_migrate_alloc_and_copy(&args, dmirror); in dmirror_migrate_to_device()
1008 dmirror_migrate_finalize_and_map(&args, dmirror); in dmirror_migrate_to_device()
1021 mutex_lock(&dmirror->mutex); in dmirror_migrate_to_device()
1022 ret = dmirror_do_read(dmirror, start, end, &bounce); in dmirror_migrate_to_device()
1023 mutex_unlock(&dmirror->mutex); in dmirror_migrate_to_device()
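
dmirror_migrate_to_device() above is a standard migrate_vma sequence: fill in a struct migrate_vma with the VMA, the range, src/dst PFN arrays and pgmap_owner, call migrate_vma_setup(), let the driver allocate device pages and copy the data (dmirror_migrate_alloc_and_copy()), migrate_vma_pages(), record the new device pages in the xarray (dmirror_migrate_finalize_and_map()), and finish with migrate_vma_finalize(). The sketch below shows that skeleton for a single chunk of one VMA via a hypothetical helper; the two copy/record helpers are left as stubs because their bodies are not in the matched lines, and MIGRATE_VMA_SELECT_SYSTEM is the assumed selection flag for the RAM-to-device direction. dmirror_migrate_to_system() runs the same sequence in reverse, selecting the device's own pages with the flag returned by dmirror_select_device().

#include <linux/migrate.h>
#include <linux/mm.h>

/* Driver-specific helpers, elided in this sketch: the first allocates
 * device pages and copies the source data into them (filling args->dst),
 * the second records the new device pages in dmirror->pt under the mutex.
 */
static void dmirror_migrate_alloc_and_copy(struct migrate_vma *args,
                                           struct dmirror *dmirror)
{
        /* elided */
}

static void dmirror_migrate_finalize_and_map(struct migrate_vma *args,
                                             struct dmirror *dmirror)
{
        /* elided */
}

static int dmirror_migrate_chunk_to_device(struct dmirror *dmirror,
                                           struct vm_area_struct *vma,
                                           unsigned long start,
                                           unsigned long end)
{
        unsigned long src_pfns[64] = { 0 };
        unsigned long dst_pfns[64] = { 0 };
        struct migrate_vma args = {
                .vma            = vma,
                .start          = start,
                .end            = end,          /* at most 64 pages per call */
                .src            = src_pfns,
                .dst            = dst_pfns,
                .pgmap_owner    = dmirror->mdevice,
                .flags          = MIGRATE_VMA_SELECT_SYSTEM,
        };
        int ret;

        ret = migrate_vma_setup(&args);
        if (ret)
                return ret;

        dmirror_migrate_alloc_and_copy(&args, dmirror);
        migrate_vma_pages(&args);
        dmirror_migrate_finalize_and_map(&args, dmirror);
        migrate_vma_finalize(&args);

        return 0;
}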
1039 static void dmirror_mkentry(struct dmirror *dmirror, struct hmm_range *range, in dmirror_mkentry() argument
1056 if (dmirror->mdevice == dmirror_page_to_device(page)) in dmirror_mkentry()
1062 if (dmirror->mdevice == dmirror_page_to_device(page)) in dmirror_mkentry()
1086 struct dmirror *dmirror = dmi->dmirror; in dmirror_snapshot_invalidate() local
1089 mutex_lock(&dmirror->mutex); in dmirror_snapshot_invalidate()
1090 else if (!mutex_trylock(&dmirror->mutex)) in dmirror_snapshot_invalidate()
1099 mutex_unlock(&dmirror->mutex); in dmirror_snapshot_invalidate()
1107 static int dmirror_range_snapshot(struct dmirror *dmirror, in dmirror_range_snapshot() argument
1111 struct mm_struct *mm = dmirror->notifier.mm; in dmirror_range_snapshot()
1119 notifier.dmirror = dmirror; in dmirror_range_snapshot()
1145 mutex_lock(&dmirror->mutex); in dmirror_range_snapshot()
1148 mutex_unlock(&dmirror->mutex); in dmirror_range_snapshot()
1156 dmirror_mkentry(dmirror, range, perm + i, range->hmm_pfns[i]); in dmirror_range_snapshot()
1158 mutex_unlock(&dmirror->mutex); in dmirror_range_snapshot()
1164 static int dmirror_snapshot(struct dmirror *dmirror, in dmirror_snapshot() argument
1167 struct mm_struct *mm = dmirror->notifier.mm; in dmirror_snapshot()
1177 .dev_private_owner = dmirror->mdevice, in dmirror_snapshot()
1202 ret = dmirror_range_snapshot(dmirror, &range, perm); in dmirror_snapshot()
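
The snapshot path reuses the same hmm_range_fault() machinery but with no fault-request flags set, so it only reports what is currently mapped, and dmirror_mkentry() folds each returned hmm_pfn into a small permission code, distinguishing pages that sit in this mirror's own device memory from everything else. A sketch of that per-PFN decode, again reusing the struct dmirror from the first block; the DMIRROR_PROT_* codes and the dmirror_page_to_device() signature are assumptions standing in for the test's uapi values, and device-coherent pages are ignored for brevity.

#include <linux/hmm.h>
#include <linux/memremap.h>
#include <linux/mm.h>

/*
 * For a snapshot, the hmm_range is set up as in the fault sketch above but
 * with default_flags == 0, so hmm_range_fault() only reports current state
 * and never faults anything in.
 */

/* Hypothetical permission codes in the spirit of the test's uapi values. */
enum {
        DMIRROR_PROT_NONE = 0,
        DMIRROR_PROT_ERROR,
        DMIRROR_PROT_READ,
        DMIRROR_PROT_WRITE,
        DMIRROR_PROT_ZERO,
        DMIRROR_PROT_DEV_LOCAL,         /* device page owned by this mirror */
        DMIRROR_PROT_DEV_REMOTE,        /* device page owned by another device */
};

/* Driver helper: map a device page back to its owning dmirror_device. */
static struct dmirror_device *dmirror_page_to_device(struct page *page);

static unsigned char dmirror_mkentry(struct dmirror *dmirror,
                                     unsigned long hmm_pfn)
{
        struct page *page;

        if (hmm_pfn & HMM_PFN_ERROR)
                return DMIRROR_PROT_ERROR;
        if (!(hmm_pfn & HMM_PFN_VALID))
                return DMIRROR_PROT_NONE;

        page = hmm_pfn_to_page(hmm_pfn);
        if (is_device_private_page(page))
                return dmirror_page_to_device(page) == dmirror->mdevice ?
                        DMIRROR_PROT_DEV_LOCAL : DMIRROR_PROT_DEV_REMOTE;

        if (is_zero_pfn(page_to_pfn(page)))
                return DMIRROR_PROT_ZERO;

        return (hmm_pfn & HMM_PFN_WRITE) ? DMIRROR_PROT_WRITE :
                                           DMIRROR_PROT_READ;
}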
1305 struct dmirror *dmirror; in dmirror_fops_unlocked_ioctl() local
1308 dmirror = filp->private_data; in dmirror_fops_unlocked_ioctl()
1309 if (!dmirror) in dmirror_fops_unlocked_ioctl()
1325 ret = dmirror_read(dmirror, &cmd); in dmirror_fops_unlocked_ioctl()
1329 ret = dmirror_write(dmirror, &cmd); in dmirror_fops_unlocked_ioctl()
1333 ret = dmirror_migrate_to_device(dmirror, &cmd); in dmirror_fops_unlocked_ioctl()
1337 ret = dmirror_migrate_to_system(dmirror, &cmd); in dmirror_fops_unlocked_ioctl()
1341 ret = dmirror_exclusive(dmirror, &cmd); in dmirror_fops_unlocked_ioctl()
1345 ret = dmirror_check_atomic(dmirror, cmd.addr, in dmirror_fops_unlocked_ioctl()
1350 ret = dmirror_snapshot(dmirror, &cmd); in dmirror_fops_unlocked_ioctl()
1354 dmirror_device_remove_chunks(dmirror->mdevice); in dmirror_fops_unlocked_ioctl()
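
dmirror_fops_unlocked_ioctl() above is a plain dispatcher: fetch the per-open struct dmirror from filp->private_data, copy the command block in from user space, switch on the ioctl number to one of the handlers listed, and copy the updated command block back out. A skeleton of that shape; struct dmirror_cmd and the DMIRROR_IOC_* numbers below are illustrative stand-ins rather than the test's real uapi definitions, and only three of the commands from the listing are spelled out.

#include <linux/fs.h>
#include <linux/ioctl.h>
#include <linux/types.h>
#include <linux/uaccess.h>

/* Illustrative stand-in for the test's uapi command block. */
struct dmirror_cmd {
        __u64 addr;
        __u64 ptr;
        __u64 npages;
        __u64 cpages;
        __u64 faults;
};

#define DMIRROR_IOC_READ        _IOWR('H', 0x00, struct dmirror_cmd)
#define DMIRROR_IOC_WRITE       _IOWR('H', 0x01, struct dmirror_cmd)
#define DMIRROR_IOC_SNAPSHOT    _IOWR('H', 0x02, struct dmirror_cmd)

static int dmirror_read(struct dmirror *dmirror, struct dmirror_cmd *cmd);
static int dmirror_write(struct dmirror *dmirror, struct dmirror_cmd *cmd);
static int dmirror_snapshot(struct dmirror *dmirror, struct dmirror_cmd *cmd);

static long dmirror_fops_unlocked_ioctl(struct file *filp,
                                        unsigned int command,
                                        unsigned long arg)
{
        void __user *uarg = (void __user *)arg;
        struct dmirror_cmd cmd;
        struct dmirror *dmirror;
        int ret;

        dmirror = filp->private_data;
        if (!dmirror)
                return -EINVAL;

        if (copy_from_user(&cmd, uarg, sizeof(cmd)))
                return -EFAULT;

        switch (command) {
        case DMIRROR_IOC_READ:
                ret = dmirror_read(dmirror, &cmd);
                break;
        case DMIRROR_IOC_WRITE:
                ret = dmirror_write(dmirror, &cmd);
                break;
        case DMIRROR_IOC_SNAPSHOT:
                ret = dmirror_snapshot(dmirror, &cmd);
                break;
        /* ... migrate to/from device, exclusive, check-atomic, release ... */
        default:
                return -EINVAL;
        }
        if (ret)
                return ret;

        /* Handlers report faults/cpages back through the command block. */
        if (copy_to_user(uarg, &cmd, sizeof(cmd)))
                return -EFAULT;
        return 0;
}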
1428 struct dmirror *dmirror; in dmirror_devmem_fault() local
1437 dmirror = rpage->zone_device_data; in dmirror_devmem_fault()
1445 args.pgmap_owner = dmirror->mdevice; in dmirror_devmem_fault()
1446 args.flags = dmirror_select_device(dmirror); in dmirror_devmem_fault()
1452 ret = dmirror_devmem_fault_alloc_and_copy(&args, dmirror); in dmirror_devmem_fault()
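
Finally, dmirror_devmem_fault() is the dev_pagemap_ops->migrate_to_ram callback: when the CPU touches one of the driver's device pages, it recovers the owning struct dmirror via zone_device_data, sets up a one-page migrate_vma back to system memory (selecting its own pages through pgmap_owner and dmirror_select_device()), copies the data back and drops the mirrored entry (dmirror_devmem_fault_alloc_and_copy()), then finalizes. A sketch of that path, reusing the earlier struct dmirror; the first zone_device_data hop (faulting page to backing page) and the helper signatures are assumptions, while the second hop matches the reference at 1437 above.

#include <linux/memremap.h>
#include <linux/migrate.h>
#include <linux/mm.h>

/* Driver helpers referenced below; declarations only, bodies elided.
 * dmirror_select_device() is assumed to return the matching
 * MIGRATE_VMA_SELECT_DEVICE_PRIVATE or _DEVICE_COHERENT flag.
 */
static int dmirror_select_device(struct dmirror *dmirror);
static vm_fault_t dmirror_devmem_fault_alloc_and_copy(struct migrate_vma *args,
                                                      struct dmirror *dmirror);

static vm_fault_t dmirror_devmem_fault(struct vm_fault *vmf)
{
        unsigned long src_pfns = 0;
        unsigned long dst_pfns = 0;
        struct migrate_vma args = { 0 };
        struct page *rpage;
        struct dmirror *dmirror;
        vm_fault_t ret;

        /*
         * The faulting device page points at its backing page (rpage),
         * which in turn points back at the owning mirror (cf. 682/1437).
         */
        rpage = vmf->page->zone_device_data;
        dmirror = rpage->zone_device_data;

        /* Migrate just the faulting page back to system memory. */
        args.vma = vmf->vma;
        args.start = vmf->address;
        args.end = args.start + PAGE_SIZE;
        args.src = &src_pfns;
        args.dst = &dst_pfns;
        args.pgmap_owner = dmirror->mdevice;
        args.flags = dmirror_select_device(dmirror);
        args.fault_page = vmf->page;

        if (migrate_vma_setup(&args))
                return VM_FAULT_SIGBUS;

        /* Allocate a system page, copy the data, drop the xarray entry. */
        ret = dmirror_devmem_fault_alloc_and_copy(&args, dmirror);
        if (ret)
                return ret;

        migrate_vma_pages(&args);
        migrate_vma_finalize(&args);
        return 0;
}

/* Wired up through dev_pagemap_ops (the driver also supplies .page_free). */
static const struct dev_pagemap_ops dmirror_devmem_ops = {
        .migrate_to_ram = dmirror_devmem_fault,
};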