Lines matching refs:cur_gfn in arch/s390/kvm/kvm-s390.c

674 gfn_t cur_gfn, last_gfn; in kvm_arch_sync_dirty_log() local
680 cur_gfn = memslot->base_gfn; in kvm_arch_sync_dirty_log()
682 for (; cur_gfn <= last_gfn; cur_gfn += _PAGE_ENTRIES) { in kvm_arch_sync_dirty_log()
683 gaddr = gfn_to_gpa(cur_gfn); in kvm_arch_sync_dirty_log()
684 vmaddr = gfn_to_hva_memslot(memslot, cur_gfn); in kvm_arch_sync_dirty_log()
692 mark_page_dirty(kvm, cur_gfn + i); in kvm_arch_sync_dirty_log()
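
The first group is the dirty-log synchronization loop: cur_gfn walks the memslot one segment (_PAGE_ENTRIES guest frames) at a time, each step is translated into a guest physical address (gaddr) and a host virtual address (vmaddr), and every page the gmap reports as dirty in that segment is passed to mark_page_dirty(). An approximate reconstruction of the loop follows; the bitmap handling and the gmap_sync_dirty_log_pmd() call are filled in from context, not from the matched lines, and should be read as a sketch rather than the verbatim upstream body.

/* Approximate reconstruction, not the verbatim in-tree function. */
void kvm_arch_sync_dirty_log(struct kvm *kvm, struct kvm_memory_slot *memslot)
{
        DECLARE_BITMAP(bitmap, _PAGE_ENTRIES);
        struct gmap *gmap = kvm->arch.gmap;
        unsigned long gaddr, vmaddr;
        gfn_t cur_gfn, last_gfn;
        int i;

        /* Walk the slot one segment (_PAGE_ENTRIES guest frames) at a time. */
        cur_gfn = memslot->base_gfn;
        last_gfn = memslot->base_gfn + memslot->npages;
        for (; cur_gfn <= last_gfn; cur_gfn += _PAGE_ENTRIES) {
                gaddr = gfn_to_gpa(cur_gfn);                   /* guest physical */
                vmaddr = gfn_to_hva_memslot(memslot, cur_gfn); /* host virtual   */
                if (kvm_is_error_hva(vmaddr))
                        continue;

                /* Ask the gmap which pages of this segment were dirtied ... */
                bitmap_zero(bitmap, _PAGE_ENTRIES);
                gmap_sync_dirty_log_pmd(gmap, bitmap, gaddr, vmaddr);
                /* ... and propagate each of them into the generic KVM dirty log. */
                for (i = 0; i < _PAGE_ENTRIES; i++)
                        if (test_bit(i, bitmap))
                                mark_page_dirty(kvm, cur_gfn + i);
        }
}
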
2196 unsigned long pgstev, hva, cur_gfn = args->start_gfn; in kvm_s390_peek_cmma() local
2200 hva = gfn_to_hva(kvm, cur_gfn); in kvm_s390_peek_cmma()
2210 cur_gfn++; in kvm_s390_peek_cmma()
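
kvm_s390_peek_cmma() handles the KVM_S390_CMMA_PEEK case: cur_gfn is a plain linear cursor starting at args->start_gfn, and for each consecutive frame the PGSTE is read and its CMMA usage/NODAT bits are stored into the result buffer until bufsize values have been collected. An approximate reconstruction follows; the PGSTE masking and the error handling are filled in from context, not from the matched lines.

/* Approximate reconstruction, not the verbatim in-tree function. */
static int kvm_s390_peek_cmma(struct kvm *kvm, struct kvm_s390_cmma_log *args,
                              u8 *res, unsigned long bufsize)
{
        unsigned long pgstev, hva, cur_gfn = args->start_gfn;

        args->count = 0;
        while (args->count < bufsize) {
                hva = gfn_to_hva(kvm, cur_gfn);
                /* Fail only if nothing has been copied yet. */
                if (kvm_is_error_hva(hva))
                        return args->count ? 0 : -EFAULT;
                /* Read the PGSTE for this frame; treat a lookup failure as "clean". */
                if (get_pgste(kvm->mm, hva, &pgstev) < 0)
                        pgstev = 0;
                /* Keep only the CMMA usage state and NODAT bits. */
                res[args->count++] = (pgstev >> 24) & 0x43;
                cur_gfn++;
        }
        return 0;
}
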
2223 unsigned long cur_gfn) in kvm_s390_next_dirty_cmma() argument
2225 struct kvm_memory_slot *ms = gfn_to_memslot_approx(slots, cur_gfn); in kvm_s390_next_dirty_cmma()
2226 unsigned long ofs = cur_gfn - ms->base_gfn; in kvm_s390_next_dirty_cmma()
2229 if (ms->base_gfn + ms->npages <= cur_gfn) { in kvm_s390_next_dirty_cmma()
2239 if (cur_gfn < ms->base_gfn) in kvm_s390_next_dirty_cmma()
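
kvm_s390_next_dirty_cmma() takes cur_gfn as an argument and returns the guest frame number of the next bit set in the per-memslot CMMA ("second") dirty bitmap, at or after cur_gfn. The two matched comparisons adjust the search offset when gfn_to_memslot_approx() hands back a slot that ends before cur_gfn, or that starts after it. Below is a simplified single-slot sketch of that logic; the helper name is mine, not the kernel's, and the real function keeps walking further memslots (with slot-iteration code that differs across kernel versions) when the current slot has no bit set.

/* Illustrative single-slot helper, not the in-tree function. */
static unsigned long next_dirty_cmma_in_slot(struct kvm_memslots *slots,
                                             unsigned long cur_gfn)
{
        struct kvm_memory_slot *ms = gfn_to_memslot_approx(slots, cur_gfn);
        unsigned long ofs = cur_gfn - ms->base_gfn;

        if (ms->base_gfn + ms->npages <= cur_gfn)
                ofs = ms->npages;       /* cur_gfn is past this slot */
        else if (cur_gfn < ms->base_gfn)
                ofs = 0;                /* cur_gfn sits in a hole before this slot */

        /* Next set bit in the slot's CMMA dirty bitmap, or npages if none. */
        ofs = find_next_bit(kvm_second_dirty_bitmap(ms), ms->npages, ofs);
        return ms->base_gfn + ofs;
}
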
2253 unsigned long mem_end, cur_gfn, next_gfn, hva, pgstev; in kvm_s390_get_cmma() local
2260 cur_gfn = kvm_s390_next_dirty_cmma(slots, args->start_gfn); in kvm_s390_get_cmma()
2261 ms = gfn_to_memslot(kvm, cur_gfn); in kvm_s390_get_cmma()
2263 args->start_gfn = cur_gfn; in kvm_s390_get_cmma()
2266 next_gfn = kvm_s390_next_dirty_cmma(slots, cur_gfn + 1); in kvm_s390_get_cmma()
2270 hva = gfn_to_hva(kvm, cur_gfn); in kvm_s390_get_cmma()
2274 if (test_and_clear_bit(cur_gfn - ms->base_gfn, kvm_second_dirty_bitmap(ms))) in kvm_s390_get_cmma()
2281 if (next_gfn > cur_gfn + KVM_S390_MAX_BIT_DISTANCE) in kvm_s390_get_cmma()
2284 if (cur_gfn == next_gfn) in kvm_s390_get_cmma()
2285 next_gfn = kvm_s390_next_dirty_cmma(slots, cur_gfn + 1); in kvm_s390_get_cmma()
2290 cur_gfn++; in kvm_s390_get_cmma()
2292 if (cur_gfn - ms->base_gfn >= ms->npages) { in kvm_s390_get_cmma()
2293 ms = gfn_to_memslot(kvm, cur_gfn); in kvm_s390_get_cmma()
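
kvm_s390_get_cmma() is the non-peek path used while migration mode is active: it starts at the first dirty frame at or after args->start_gfn, stores one CMMA value per frame into the caller's buffer while clearing that frame's bit in the slot's CMMA dirty bitmap, and keeps a look-ahead next_gfn so it can stop when the next dirty frame is more than KVM_S390_MAX_BIT_DISTANCE away, past the end of guest memory, or beyond the buffer. The matches at 2290-2293 advance the cursor and re-resolve the memslot at a slot boundary. A condensed reconstruction of the main loop follows; mem_end (one past the last guest frame), bufsize, and res come from the enclosing function and are not among the matched lines, and the details outside the matches are approximations.

        /* Condensed reconstruction of the kvm_s390_get_cmma() main loop. */
        cur_gfn = kvm_s390_next_dirty_cmma(slots, args->start_gfn);
        ms = gfn_to_memslot(kvm, cur_gfn);
        args->count = 0;
        args->start_gfn = cur_gfn;
        if (!ms)
                return 0;
        next_gfn = kvm_s390_next_dirty_cmma(slots, cur_gfn + 1);

        while (args->count < bufsize) {
                hva = gfn_to_hva(kvm, cur_gfn);
                if (kvm_is_error_hva(hva))
                        return 0;
                /* Clear this frame's CMMA dirty bit; account for it only if it was set. */
                if (test_and_clear_bit(cur_gfn - ms->base_gfn,
                                       kvm_second_dirty_bitmap(ms)))
                        atomic64_dec(&kvm->arch.cmma_dirty_pages);
                if (get_pgste(kvm->mm, hva, &pgstev) < 0)
                        pgstev = 0;
                res[args->count++] = (pgstev >> 24) & 0x43;

                /* Stop if the next dirty frame is too far away to be worth the gap. */
                if (next_gfn > cur_gfn + KVM_S390_MAX_BIT_DISTANCE)
                        return 0;
                /* Caught up with the look-ahead: find the dirty frame after it. */
                if (cur_gfn == next_gfn)
                        next_gfn = kvm_s390_next_dirty_cmma(slots, cur_gfn + 1);
                /* Stop at the end of guest memory or when the buffer would be full. */
                if (next_gfn >= mem_end || next_gfn - args->start_gfn >= bufsize)
                        return 0;

                cur_gfn++;
                /* Crossed a memslot boundary: resolve the new slot. */
                if (cur_gfn - ms->base_gfn >= ms->npages) {
                        ms = gfn_to_memslot(kvm, cur_gfn);
                        if (!ms)
                                return 0;
                }
        }
        return 0;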