Lines Matching refs:arch (cross-reference listing: each entry gives the source line number, the matching line, and the enclosing function)

44 	if (kvm->arch.online_vcores == 1 && local_paca->kvm_hstate.kvm_vcpu)  in global_invalidates()
56 cpumask_setall(&kvm->arch.need_tlb_flush); in global_invalidates()
58 cpumask_clear_cpu(cpu, &kvm->arch.need_tlb_flush); in global_invalidates()
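The three lines above are the local-vs-global flush decision in global_invalidates(): with a single vcore whose vcpu is loaded on this CPU, a core-local tlbiel suffices, provided every other core is first marked as potentially holding stale translations for this guest. A minimal sketch of that logic, using a plain bitmask as a stand-in for the kernel's cpumask_t (names and simplifications are mine, not the kernel's):

    static unsigned long need_tlb_flush;   /* stand-in for kvm->arch.need_tlb_flush */

    /* Returns nonzero when a broadcast tlbie is required.  On the
     * local path, mark every CPU stale and then unmark ourselves,
     * since the upcoming core-local tlbiel covers this core only. */
    static int global_invalidates_sketch(int online_vcores,
                                         int vcpu_loaded_here, int cpu)
    {
        int global = !(online_vcores == 1 && vcpu_loaded_here);

        if (!global) {
            need_tlb_flush = ~0UL;             /* cpumask_setall()    */
            need_tlb_flush &= ~(1UL << cpu);   /* cpumask_clear_cpu() */
        }
        return global;
    }

The real function additionally folds sibling threads onto one bit on POWER9, where the TLB is shared per core.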
76 head = &kvm->arch.hpt.rev[i]; in kvmppc_add_revmap_chain()
79 tail = &kvm->arch.hpt.rev[head->back]; in kvmppc_add_revmap_chain()
142 rmap = real_vmalloc_addr(&memslot->arch.rmap[gfn - memslot->base_gfn]); in revmap_for_hpte()
166 next = real_vmalloc_addr(&kvm->arch.hpt.rev[rev->forw]); in remove_revmap_chain()
167 prev = real_vmalloc_addr(&kvm->arch.hpt.rev[rev->back]); in remove_revmap_chain()
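These entries trace the reverse-mapping chain: each guest page's rmap word (located via memslot->arch.rmap[gfn - base_gfn], line 142) heads a circular doubly-linked list threaded through kvm->arch.hpt.rev[] by HPTE index, and real_vmalloc_addr() is needed because these handlers run in real mode while the rev array lives in vmalloc space. A simplified sketch of the linking step, with no locking, no KVMPPC_RMAP_* bit encoding, and no real-mode remapping:

    /* Simplified revmap entry; the kernel's also carries the guest
     * view of the HPTE (guest_rpte). */
    struct revmap_entry {
        unsigned int forw, back;   /* HPTE indices, circular list */
    };

    /* Link pte_index into the circle whose current head is head_index. */
    static void revmap_link(struct revmap_entry *rev,
                            unsigned int head_index, unsigned int pte_index)
    {
        struct revmap_entry *head = &rev[head_index];
        struct revmap_entry *tail = &rev[head->back];
        struct revmap_entry *e = &rev[pte_index];

        e->forw = head_index;
        e->back = head->back;
        head->back = pte_index;
        tail->forw = pte_index;
    }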
243 rmap = &memslot->arch.rmap[slot_fn]; in kvmppc_do_h_enter()
303 if (pte_index >= kvmppc_hpt_npte(&kvm->arch.hpt)) in kvmppc_do_h_enter()
307 hpte = (__be64 *)(kvm->arch.hpt.virt + (pte_index << 4)); in kvmppc_do_h_enter()
338 hpte = (__be64 *)(kvm->arch.hpt.virt + (pte_index << 4)); in kvmppc_do_h_enter()
355 rev = &kvm->arch.hpt.rev[pte_index]; in kvmppc_do_h_enter()
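kvmppc_do_h_enter() shows the HPT addressing pattern that recurs through the rest of this listing: an HPTE is two big-endian doublewords, 16 bytes, so a pte_index becomes a byte offset from kvm->arch.hpt.virt by shifting left 4, after a bounds check against kvmppc_hpt_npte(). A stand-alone sketch (the struct and names are illustrative, not the kernel's):

    #include <stdint.h>
    #include <stddef.h>

    /* Stand-in for the fields used from kvm->arch.hpt. */
    struct hpt_info {
        void *virt;           /* base of the guest hashed page table */
        unsigned long npte;   /* what kvmppc_hpt_npte() would return */
    };

    /* Index -> address; out-of-range indices become H_PARAMETER in
     * the real hcall handlers (NULL here). */
    static uint64_t *hpte_ptr(struct hpt_info *hpt, unsigned long pte_index)
    {
        if (pte_index >= hpt->npte)
            return NULL;
        return (uint64_t *)((char *)hpt->virt + (pte_index << 4));
    }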
405 vcpu->arch.pgdir, true, in kvmppc_h_enter()
406 &vcpu->arch.regs.gpr[4]); in kvmppc_h_enter()
469 "r" (rbvalues[i]), "r" (kvm->arch.lpid)); in do_tlbies()
472 fixup_tlbie_lpid(rbvalues[i - 1], kvm->arch.lpid); in do_tlbies()
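do_tlbies() issues one broadcast tlbie per page, each tagged with the guest's LPID from kvm->arch.lpid, then re-issues the last one through fixup_tlbie_lpid() to work around POWER9 tlbie errata. A loose sketch of the control flow, with stub functions standing in for the inline assembly:

    /* Hypothetical stand-ins for the tlbie inline asm in do_tlbies(). */
    static void tlbie_lpid(unsigned long rb, unsigned int lpid)
    {
        (void)rb; (void)lpid;   /* asm volatile(PPC_TLBIE_5(...)) in the kernel */
    }
    static void fixup_tlbie_lpid(unsigned long rb, unsigned int lpid)
    {
        (void)rb; (void)lpid;   /* POWER9 errata: extra tlbie + sync */
    }

    static void flush_guest_tlb_global(unsigned long *rbvalues, int npages,
                                       unsigned int lpid)
    {
        for (int i = 0; i < npages; i++)
            tlbie_lpid(rbvalues[i], lpid);
        fixup_tlbie_lpid(rbvalues[npages - 1], lpid);
        /* the real code brackets this with ptesync / eieio; tlbsync; ptesync */
    }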
496 if (pte_index >= kvmppc_hpt_npte(&kvm->arch.hpt)) in kvmppc_do_h_remove()
498 hpte = (__be64 *)(kvm->arch.hpt.virt + (pte_index << 4)); in kvmppc_do_h_remove()
514 rev = real_vmalloc_addr(&kvm->arch.hpt.rev[pte_index]); in kvmppc_do_h_remove()
537 atomic64_inc(&kvm->arch.mmio_update); in kvmppc_do_h_remove()
551 &vcpu->arch.regs.gpr[4]); in kvmppc_h_remove()
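The hcall handlers return their H_* status in r3 and any outputs in r4 onward; the thin kvmppc_h_enter() and kvmppc_h_remove() wrappers simply pass &vcpu->arch.regs.gpr[4] as the output array (lines 406 and 551), so on H_SUCCESS the removed HPTE's two doublewords land in guest r4/r5. Sketched:

    struct guest_regs { unsigned long gpr[32]; };

    /* kvmppc_do_h_remove() writes the old HPTE through the
     * two-element array it was handed, which aliases &gpr[4]. */
    static void report_removed_hpte(struct guest_regs *regs,
                                    unsigned long v, unsigned long r)
    {
        regs->gpr[4] = v;   /* first doubleword (valid/AVPN bits)  */
        regs->gpr[5] = r;   /* second doubleword (RPN/permissions) */
    }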
558 unsigned long *args = &vcpu->arch.regs.gpr[4]; in kvmppc_h_bulk_remove()
585 pte_index >= kvmppc_hpt_npte(&kvm->arch.hpt)) { in kvmppc_h_bulk_remove()
591 hp = (__be64 *) (kvm->arch.hpt.virt + (pte_index << 4)); in kvmppc_h_bulk_remove()
628 rev = real_vmalloc_addr(&kvm->arch.hpt.rev[pte_index]); in kvmppc_h_bulk_remove()
637 atomic64_inc(&kvm->arch.mmio_update); in kvmppc_h_bulk_remove()
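kvmppc_h_bulk_remove() reads its whole parameter block from &gpr[4] (line 558): up to four (control, avpn) pairs in r4..r11, where the top two bits of each control word select the request type and a per-entry response code is written back in place. A sketch of the argument walk; the constants are quoted from memory from the kernel's hvcall.h, and the real decode also packs flags and the pte_index into the control word (hence the bounds check at line 585):

    #define H_BULK_REMOVE_TYPE     0xc000000000000000UL  /* type mask */
    #define H_BULK_REMOVE_REQUEST  0x4000000000000000UL
    #define H_BULK_REMOVE_END      0xc000000000000000UL
    #define BULK_MAX_PAIRS         4

    static void bulk_remove_walk(unsigned long args[BULK_MAX_PAIRS * 2])
    {
        for (int i = 0; i < BULK_MAX_PAIRS; i++) {
            unsigned long *hp = &args[i * 2];

            if ((*hp & H_BULK_REMOVE_TYPE) == H_BULK_REMOVE_END)
                break;      /* guest marked end of list */
            if ((*hp & H_BULK_REMOVE_TYPE) != H_BULK_REMOVE_REQUEST)
                break;      /* kernel flags H_PARAMETER here */
            /* ...remove the HPTE indexed by the low bits of *hp,
               then write the response code back into *hp... */
        }
    }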
685 if (pte_index >= kvmppc_hpt_npte(&kvm->arch.hpt)) in kvmppc_h_protect()
688 hpte = (__be64 *)(kvm->arch.hpt.virt + (pte_index << 4)); in kvmppc_h_protect()
708 rev = real_vmalloc_addr(&kvm->arch.hpt.rev[pte_index]); in kvmppc_h_protect()
739 atomic64_inc(&kvm->arch.mmio_update); in kvmppc_h_protect()
756 if (pte_index >= kvmppc_hpt_npte(&kvm->arch.hpt)) in kvmppc_h_read()
762 rev = real_vmalloc_addr(&kvm->arch.hpt.rev[pte_index]); in kvmppc_h_read()
764 hpte = (__be64 *)(kvm->arch.hpt.virt + (pte_index << 4)); in kvmppc_h_read()
779 vcpu->arch.regs.gpr[4 + i * 2] = v; in kvmppc_h_read()
780 vcpu->arch.regs.gpr[5 + i * 2] = r; in kvmppc_h_read()
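kvmppc_h_read() returns n HPTEs as (V, R) pairs starting at r4, with n = 4 when the H_READ_4 flag is set (in which case the kernel also aligns pte_index down to a multiple of 4). The store layout, as a sketch:

    /* Store n (V, R) pairs into the guest's return registers,
     * matching the gpr[4 + i*2] / gpr[5 + i*2] lines above. */
    static void store_hpte_pairs(unsigned long *gpr, int n,
                                 const unsigned long v[],
                                 const unsigned long r[])
    {
        for (int i = 0; i < n; i++) {
            gpr[4 + i * 2] = v[i];   /* r4, r6, r8, r10 */
            gpr[5 + i * 2] = r[i];   /* r5, r7, r9, r11 */
        }
    }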
798 if (pte_index >= kvmppc_hpt_npte(&kvm->arch.hpt)) in kvmppc_h_clear_ref()
801 rev = real_vmalloc_addr(&kvm->arch.hpt.rev[pte_index]); in kvmppc_h_clear_ref()
802 hpte = (__be64 *)(kvm->arch.hpt.virt + (pte_index << 4)); in kvmppc_h_clear_ref()
827 vcpu->arch.regs.gpr[4] = gr; in kvmppc_h_clear_ref()
846 if (pte_index >= kvmppc_hpt_npte(&kvm->arch.hpt)) in kvmppc_h_clear_mod()
849 rev = real_vmalloc_addr(&kvm->arch.hpt.rev[pte_index]); in kvmppc_h_clear_mod()
850 hpte = (__be64 *)(kvm->arch.hpt.virt + (pte_index << 4)); in kvmppc_h_clear_mod()
875 vcpu->arch.regs.gpr[4] = gr; in kvmppc_h_clear_mod()
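kvmppc_h_clear_ref() and kvmppc_h_clear_mod() share one shape: fetch the guest view of the HPTE's second doubleword, clear the referenced (R) or changed (C) bit, and return the old value in r4 so the guest can sample and clear in one call. A sketch, with the bit masks as defined in mmu-hash.h (quoted from memory):

    #define HPTE_R_R 0x0000000000000100UL   /* referenced */
    #define HPTE_R_C 0x0000000000000080UL   /* changed */

    /* Clear one status bit and return the prior value, which the
     * handlers place in gpr[4] (lines 827 and 875). */
    static unsigned long clear_status_bit(unsigned long *hpte_r,
                                          unsigned long bit)
    {
        unsigned long old = *hpte_r;

        *hpte_r = old & ~bit;   /* bit is HPTE_R_R or HPTE_R_C */
        return old;             /* ends up in guest r4 */
    }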
1070 entry = &vcpu->arch.mmio_cache.entry[i]; in mmio_cache_search()
1084 unsigned int index = vcpu->arch.mmio_cache.index; in next_mmio_cache_entry()
1086 vcpu->arch.mmio_cache.index++; in next_mmio_cache_entry()
1087 if (vcpu->arch.mmio_cache.index == MMIO_HPTE_CACHE_SIZE) in next_mmio_cache_entry()
1088 vcpu->arch.mmio_cache.index = 0; in next_mmio_cache_entry()
1090 return &vcpu->arch.mmio_cache.entry[index]; in next_mmio_cache_entry()
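vcpu->arch.mmio_cache is a small ring of recently faulted MMIO HPTEs: mmio_cache_search() scans it linearly, and next_mmio_cache_entry() recycles slots round-robin, wrapping index at MMIO_HPTE_CACHE_SIZE. A self-contained sketch, with the entry fields trimmed to essentials and the size constant taken as 4 per the kernel headers at the time of writing:

    #define MMIO_HPTE_CACHE_SIZE 4

    struct mmio_hpte_cache_entry {
        unsigned long hpte_v, hpte_r;   /* subset of the kernel's fields */
    };

    struct mmio_hpte_cache {
        struct mmio_hpte_cache_entry entry[MMIO_HPTE_CACHE_SIZE];
        unsigned int index;             /* next slot to recycle */
    };

    /* When the ring is full, the oldest cached translation is
     * silently overwritten. */
    static struct mmio_hpte_cache_entry *
    next_entry(struct mmio_hpte_cache *cache)
    {
        unsigned int i = cache->index;

        cache->index++;
        if (cache->index == MMIO_HPTE_CACHE_SIZE)
            cache->index = 0;
        return &cache->entry[i];
    }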
1126 hash = (vsid ^ ((eaddr & somask) >> pshift)) & kvmppc_hpt_mask(&kvm->arch.hpt); in kvmppc_hv_find_lock_hpte()
1137 hpte = (__be64 *)(kvm->arch.hpt.virt + (hash << 7)); in kvmppc_hv_find_lock_hpte()
1173 hash = hash ^ kvmppc_hpt_mask(&kvm->arch.hpt); in kvmppc_hv_find_lock_hpte()
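kvmppc_hv_find_lock_hpte() shows the classic HPT hash: the primary hash is the VSID XORed with the page number within the segment, masked by kvmppc_hpt_mask(); a hash bucket (HPTEG) holds 8 HPTEs of 16 bytes, hence the << 7 byte offset at line 1137; and the secondary bucket (line 1173) is reached by XORing the hash with the mask. As a sketch:

    /* somask is the segment-offset mask ((1<<28)-1 or (1<<40)-1 for
     * 256MB/1TB segments) and pshift the page-size shift, both taken
     * from the SLB entry in the real code. */
    static unsigned long hpteg_byte_offset(unsigned long vsid,
                                           unsigned long eaddr,
                                           unsigned long somask,
                                           unsigned int pshift,
                                           unsigned long hpt_mask,
                                           int secondary)
    {
        unsigned long hash = (vsid ^ ((eaddr & somask) >> pshift)) & hpt_mask;

        if (secondary)
            hash = hash ^ hpt_mask;   /* complement within the mask */
        return hash << 7;             /* 8 entries * 16 bytes per group */
    }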
1207 mmio_update = atomic64_read(&kvm->arch.mmio_update); in kvmppc_hpte_hv_fault()
1222 hpte = (__be64 *)(kvm->arch.hpt.virt + (index << 4)); in kvmppc_hpte_hv_fault()
1229 rev = real_vmalloc_addr(&kvm->arch.hpt.rev[index]); in kvmppc_hpte_hv_fault()
1241 key = (vcpu->arch.shregs.msr & MSR_PR) ? SLB_VSID_KP : SLB_VSID_KS; in kvmppc_hpte_hv_fault()
1258 if (data && (vcpu->arch.shregs.msr & MSR_DR)) { in kvmppc_hpte_hv_fault()
1259 unsigned int perm = hpte_get_skey_perm(gr, vcpu->arch.amr); in kvmppc_hpte_hv_fault()
1267 vcpu->arch.pgfault_addr = addr; in kvmppc_hpte_hv_fault()
1268 vcpu->arch.pgfault_index = index; in kvmppc_hpte_hv_fault()
1269 vcpu->arch.pgfault_hpte[0] = v; in kvmppc_hpte_hv_fault()
1270 vcpu->arch.pgfault_hpte[1] = r; in kvmppc_hpte_hv_fault()
1271 vcpu->arch.pgfault_cache = cache_entry; in kvmppc_hpte_hv_fault()
1294 if (data && (vcpu->arch.shregs.msr & MSR_IR)) in kvmppc_hpte_hv_fault()
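Finally, the atomic64_inc(&kvm->arch.mmio_update) lines earlier (537, 637, 739) pair with the atomic64_read() at line 1207: mmio_update acts as a generation counter, bumped whenever an MMIO HPTE is removed or modified, and kvmppc_hpte_hv_fault() only trusts a cached entry whose stored snapshot still matches, so stale translations are caught without eagerly invalidating the cache. The pattern, sketched with C11 atomics as a stand-in for the kernel's atomic64_t:

    #include <stdatomic.h>

    struct cached_mmio_hpte {
        unsigned long long mmio_update;   /* snapshot taken when cached */
    };

    /* Valid only while no remove/protect has bumped the global
     * counter since this entry was filled in. */
    static _Bool cache_entry_valid(const struct cached_mmio_hpte *e,
                                   atomic_ullong *mmio_update)
    {
        return e->mmio_update == atomic_load(mmio_update);
    }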