Lines Matching refs:vmid

20 void kvm_riscv_local_hfence_gvma_vmid_gpa(unsigned long vmid, in kvm_riscv_local_hfence_gvma_vmid_gpa() argument
27 kvm_riscv_local_hfence_gvma_vmid_all(vmid); in kvm_riscv_local_hfence_gvma_vmid_gpa()
35 : : "r" (pos >> 2), "r" (vmid) : "memory"); in kvm_riscv_local_hfence_gvma_vmid_gpa()
40 : : "r" (pos >> 2), "r" (vmid) : "memory"); in kvm_riscv_local_hfence_gvma_vmid_gpa()
44 void kvm_riscv_local_hfence_gvma_vmid_all(unsigned long vmid) in kvm_riscv_local_hfence_gvma_vmid_all() argument
46 asm volatile(HFENCE_GVMA(zero, %0) : : "r" (vmid) : "memory"); in kvm_riscv_local_hfence_gvma_vmid_all()
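The matches above are the local G-stage fence helpers (they appear to come from the Linux kernel's RISC-V KVM TLB code, arch/riscv/kvm/tlb.c). They show the core pattern: HFENCE.GVMA takes the guest physical address shifted right by 2 in rs1 and the VMID in rs2, and the ranged variant falls back to a full per-VMID flush when the range is too large to fence page by page. A minimal sketch of how the matched lines fit together; everything outside those lines (the extra parameters, the PTRS_PER_PTE threshold, the HFENCE_GVMA()/BIT() macros and gpa_t type) is assumed from the surrounding kernel source:

    /* Sketch: flush G-stage TLB entries for one VMID over a GPA range. */
    void kvm_riscv_local_hfence_gvma_vmid_gpa(unsigned long vmid,
                                              gpa_t gpa, gpa_t gpsz,
                                              unsigned long order)
    {
        gpa_t pos;

        /* Too many pages to fence individually: flush everything for this VMID. */
        if (PTRS_PER_PTE < (gpsz >> order)) {
            kvm_riscv_local_hfence_gvma_vmid_all(vmid);
            return;
        }

        for (pos = gpa; pos < (gpa + gpsz); pos += BIT(order))
            asm volatile (HFENCE_GVMA(%0, %1)
                          : : "r" (pos >> 2), "r" (vmid) : "memory");
    }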
77 void kvm_riscv_local_hfence_vvma_asid_gva(unsigned long vmid, in kvm_riscv_local_hfence_vvma_asid_gva() argument
86 kvm_riscv_local_hfence_vvma_asid_all(vmid, asid); in kvm_riscv_local_hfence_vvma_asid_gva()
90 hgatp = csr_swap(CSR_HGATP, vmid << HGATP_VMID_SHIFT); in kvm_riscv_local_hfence_vvma_asid_gva()
107 void kvm_riscv_local_hfence_vvma_asid_all(unsigned long vmid, in kvm_riscv_local_hfence_vvma_asid_all() argument
112 hgatp = csr_swap(CSR_HGATP, vmid << HGATP_VMID_SHIFT); in kvm_riscv_local_hfence_vvma_asid_all()
119 void kvm_riscv_local_hfence_vvma_gva(unsigned long vmid, in kvm_riscv_local_hfence_vvma_gva() argument
126 kvm_riscv_local_hfence_vvma_all(vmid); in kvm_riscv_local_hfence_vvma_gva()
130 hgatp = csr_swap(CSR_HGATP, vmid << HGATP_VMID_SHIFT); in kvm_riscv_local_hfence_vvma_gva()
147 void kvm_riscv_local_hfence_vvma_all(unsigned long vmid) in kvm_riscv_local_hfence_vvma_all() argument
151 hgatp = csr_swap(CSR_HGATP, vmid << HGATP_VMID_SHIFT); in kvm_riscv_local_hfence_vvma_all()
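The VVMA matches all share one trick: HFENCE.VVMA implicitly operates on the VMID currently programmed in hgatp, so each helper temporarily swaps the target VMID into CSR_HGATP, issues the fence(s), and then restores the old value. A sketch of the simplest variant below; the HFENCE_VVMA() macro and csr_write() are assumptions based on the surrounding kernel code (csr_swap() and HGATP_VMID_SHIFT appear in the matched lines), and the ASID/GVA-range variants differ only in the operands passed to the fence:

    /* Sketch: flush all VS-stage TLB entries for one VMID. */
    void kvm_riscv_local_hfence_vvma_all(unsigned long vmid)
    {
        unsigned long hgatp;

        /* Point hgatp at the target VMID while fencing, then restore it. */
        hgatp = csr_swap(CSR_HGATP, vmid << HGATP_VMID_SHIFT);

        asm volatile (HFENCE_VVMA(zero, zero) : : : "memory");

        csr_write(CSR_HGATP, hgatp);
    }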
160 unsigned long vmid; in kvm_riscv_local_tlb_sanitize() local
177 vmid = READ_ONCE(vcpu->kvm->arch.vmid.vmid); in kvm_riscv_local_tlb_sanitize()
178 kvm_riscv_local_hfence_gvma_vmid_all(vmid); in kvm_riscv_local_tlb_sanitize()
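The sanitize helper is where a vCPU that moved to a different host CPU clears potentially stale G-stage entries: it reads the VM-wide VMID with READ_ONCE() (the value can be reassigned concurrently on a VMID rollover) and does a full per-VMID flush. Roughly as below; the early-out guard on last_exit_cpu is an assumption about the surrounding code, not part of the matched lines:

    /* Sketch: flush stale G-stage entries when a vCPU lands on a new host CPU. */
    void kvm_riscv_local_tlb_sanitize(struct kvm_vcpu *vcpu)
    {
        unsigned long vmid;

        /* Assumed guard: nothing to do if the vCPU stayed on the same host CPU. */
        if (vcpu->arch.last_exit_cpu == vcpu->cpu)
            return;

        vmid = READ_ONCE(vcpu->kvm->arch.vmid.vmid);
        kvm_riscv_local_hfence_gvma_vmid_all(vmid);
    }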
189 struct kvm_vmid *vmid; in kvm_riscv_hfence_gvma_vmid_all_process() local
191 vmid = &vcpu->kvm->arch.vmid; in kvm_riscv_hfence_gvma_vmid_all_process()
192 kvm_riscv_local_hfence_gvma_vmid_all(READ_ONCE(vmid->vmid)); in kvm_riscv_hfence_gvma_vmid_all_process()
197 struct kvm_vmid *vmid; in kvm_riscv_hfence_vvma_all_process() local
199 vmid = &vcpu->kvm->arch.vmid; in kvm_riscv_hfence_vvma_all_process()
200 kvm_riscv_local_hfence_vvma_all(READ_ONCE(vmid->vmid)); in kvm_riscv_hfence_vvma_all_process()
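The two _process helpers are thin wrappers run when a flush has been requested for this vCPU: each re-reads the VM's current VMID and calls the corresponding local fence. A sketch of one of them, assuming struct kvm_vmid exposes its value in a vmid field as the matched lines imply:

    /* Sketch: handle a remotely requested "flush all G-stage entries" fence. */
    void kvm_riscv_hfence_gvma_vmid_all_process(struct kvm_vcpu *vcpu)
    {
        struct kvm_vmid *vmid = &vcpu->kvm->arch.vmid;

        kvm_riscv_local_hfence_gvma_vmid_all(READ_ONCE(vmid->vmid));
    }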
255 struct kvm_vmid *v = &vcpu->kvm->arch.vmid; in kvm_riscv_hfence_process()
263 READ_ONCE(v->vmid), in kvm_riscv_hfence_process()
269 READ_ONCE(v->vmid), d.asid, in kvm_riscv_hfence_process()
275 READ_ONCE(v->vmid), d.asid); in kvm_riscv_hfence_process()
280 READ_ONCE(v->vmid), in kvm_riscv_hfence_process()
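Finally, kvm_riscv_hfence_process() drains a per-vCPU queue of fence requests and dispatches each one to the matching local helper, reading the VMID fresh with READ_ONCE() for every request. A condensed sketch; the request structure (type/addr/size/order fields), the KVM_RISCV_HFENCE_* constant names, and the vcpu_hfence_dequeue() helper are assumptions based on how the matched call sites are shaped (only d.asid appears in the matches):

    /* Sketch: drain queued hfence requests and issue the matching local fences. */
    void kvm_riscv_hfence_process(struct kvm_vcpu *vcpu)
    {
        struct kvm_riscv_hfence d = { 0 };
        struct kvm_vmid *v = &vcpu->kvm->arch.vmid;

        while (vcpu_hfence_dequeue(vcpu, &d)) {
            switch (d.type) {
            case KVM_RISCV_HFENCE_GVMA_VMID_GPA:
                kvm_riscv_local_hfence_gvma_vmid_gpa(READ_ONCE(v->vmid),
                                                     d.addr, d.size, d.order);
                break;
            case KVM_RISCV_HFENCE_VVMA_ASID_GVA:
                kvm_riscv_local_hfence_vvma_asid_gva(READ_ONCE(v->vmid), d.asid,
                                                     d.addr, d.size, d.order);
                break;
            case KVM_RISCV_HFENCE_VVMA_ASID_ALL:
                kvm_riscv_local_hfence_vvma_asid_all(READ_ONCE(v->vmid), d.asid);
                break;
            case KVM_RISCV_HFENCE_VVMA_GVA:
                kvm_riscv_local_hfence_vvma_gva(READ_ONCE(v->vmid),
                                                d.addr, d.size, d.order);
                break;
            default:
                break;
            }
        }
    }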