Lines matching refs:vmid in arch/arm64/kvm/vmid.c (the KVM/arm64 VMID allocator). File line numbers are preserved below; lines not containing the identifier are elided by the search, which is why some statements appear split.
Macro definitions:

   32  #define vmid2idx(vmid)  ((vmid) & ~VMID_MASK)
   42  #define vmid_gen_match(vmid) \
   43          (!(((vmid) ^ atomic64_read(&vmid_generation)) >> kvm_arm_vmid_bits))
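A VMID here is a 64-bit value that packs the hardware VMID into the low kvm_arm_vmid_bits and a rollover generation into the bits above it: vmid2idx() strips the generation, and vmid_gen_match() XORs away equal generation bits and shifts out the index field. A minimal userspace sketch of that encoding, assuming a fixed 16-bit VMID field (the real width is probed at boot) and a plain variable standing in for the vmid_generation atomic:

/* Sketch: versioned-VMID encoding. All constants are assumptions. */
#include <stdint.h>
#include <stdio.h>

#define VMID_BITS        16
#define VMID_FIELD_MASK  ((UINT64_C(1) << VMID_BITS) - 1)

static uint64_t current_generation = UINT64_C(2) << VMID_BITS; /* e.g. gen 2 */

/* Low bits: the bitmap index / hardware VMID. */
static uint64_t vmid2idx(uint64_t vmid) { return vmid & VMID_FIELD_MASK; }

/* Generations match iff the bits above VMID_BITS are identical:
 * XOR cancels equal bits, the shift discards the index field. */
static int vmid_gen_match(uint64_t vmid)
{
	return !((vmid ^ current_generation) >> VMID_BITS);
}

int main(void)
{
	uint64_t fresh = current_generation | 0x2a;         /* current gen, idx 42 */
	uint64_t stale = (UINT64_C(1) << VMID_BITS) | 0x2a; /* older generation */

	printf("fresh: idx=%llu match=%d\n",
	       (unsigned long long)vmid2idx(fresh), vmid_gen_match(fresh)); /* 42 1 */
	printf("stale: idx=%llu match=%d\n",
	       (unsigned long long)vmid2idx(stale), vmid_gen_match(stale)); /* 42 0 */
	return 0;
}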
In flush_context():

   48    u64 vmid;
   53    vmid = atomic64_xchg_relaxed(&per_cpu(active_vmids, cpu), 0);
   56    if (vmid == 0)
   57            vmid = per_cpu(reserved_vmids, cpu);
   58    __set_bit(vmid2idx(vmid), vmid_map);
   59    per_cpu(reserved_vmids, cpu) = vmid;
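flush_context() runs at rollover: it clears the allocation bitmap, then walks every CPU, atomically grabbing and zeroing that CPU's active VMID (falling back to its previous reservation when nothing was active), pinning the value into the fresh bitmap, and recording it as reserved so running guests keep their VMIDs across the generation bump. A simplified single-threaded model, assuming 4 CPUs, an 8-bit VMID space, and plain arrays in place of the kernel's per-CPU atomics (the real function also ends with a broadcast TLB/I-cache flush):

/* Sketch: rollover flush. NR_CPUS and VMID_BITS are assumptions. */
#include <stdint.h>
#include <string.h>

#define NR_CPUS    4
#define VMID_BITS  8
#define NUM_VMIDS  (1u << VMID_BITS)

static uint64_t active_vmids[NR_CPUS];    /* per-CPU atomics in the kernel */
static uint64_t reserved_vmids[NR_CPUS];
static unsigned char vmid_map[NUM_VMIDS]; /* one byte per bit, for clarity */

static unsigned vmid2idx(uint64_t vmid)
{
	return (unsigned)(vmid & (NUM_VMIDS - 1));
}

static void flush_context(void)
{
	memset(vmid_map, 0, sizeof(vmid_map));

	for (int cpu = 0; cpu < NR_CPUS; cpu++) {
		/* atomic64_xchg_relaxed() in the kernel: grab and clear. */
		uint64_t vmid = active_vmids[cpu];
		active_vmids[cpu] = 0;

		/* Idle CPU: keep whatever it reserved last rollover. */
		if (vmid == 0)
			vmid = reserved_vmids[cpu];

		vmid_map[vmid2idx(vmid)] = 1; /* pin it in the new bitmap */
		reserved_vmids[cpu] = vmid;
	}
	/* The kernel follows this with the broadcast TLB/I-cache flush. */
}

int main(void)
{
	active_vmids[0] = 0x2a;   /* CPU 0 is running a guest with idx 42 */
	flush_context();
	return !(vmid_map[0x2a] && reserved_vmids[0] == 0x2a);
}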
In check_update_reserved_vmid():

   72  static bool check_update_reserved_vmid(u64 vmid, u64 newvmid)
   83          if (per_cpu(reserved_vmids, cpu) == vmid) {
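check_update_reserved_vmid() is the first rescue attempt in new_vmid(): if some CPU still holds the VM's stale VMID in reserved_vmids, every matching reservation is rewritten to newvmid (same index, current generation), so the VM keeps its hardware VMID without touching the bitmap. A sketch in the same simplified model (plain arrays for per-CPU data; NR_CPUS assumed):

/* Sketch: reserved-VMID rescue across a generation bump. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define NR_CPUS 4

static uint64_t reserved_vmids[NR_CPUS];

/* Returns true if the stale vmid was still reserved somewhere; all
 * matching slots move to newvmid (same index, current generation). */
static bool check_update_reserved_vmid(uint64_t vmid, uint64_t newvmid)
{
	bool hit = false;

	for (int cpu = 0; cpu < NR_CPUS; cpu++) {
		if (reserved_vmids[cpu] == vmid) {
			hit = true;
			reserved_vmids[cpu] = newvmid;
		}
	}
	return hit;
}

int main(void)
{
	reserved_vmids[2] = 0x12a;                                 /* gen 1, idx 42 */
	printf("%d\n", check_update_reserved_vmid(0x12a, 0x22a));  /* prints 1 */
	return 0;
}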
In new_vmid():

   95  u64 vmid = atomic64_read(&kvm_vmid->id);
   98  if (vmid != 0) {
   99          u64 newvmid = generation | (vmid & ~VMID_MASK);
  101          if (check_update_reserved_vmid(vmid, newvmid)) {
  106          if (!__test_and_set_bit(vmid2idx(vmid), vmid_map)) {
  112  vmid = find_next_zero_bit(vmid_map, NUM_USER_VMIDS, cur_idx);
  113  if (vmid != NUM_USER_VMIDS)
  122  vmid = find_next_zero_bit(vmid_map, NUM_USER_VMIDS, 1);
  125  __set_bit(vmid, vmid_map);
  126  cur_idx = vmid;
  127  vmid = idx2vmid(vmid) | generation;
  128  atomic64_set(&kvm_vmid->id, vmid);
  129  return vmid;
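new_vmid() as a whole prefers, in order: re-stamping the VM's old VMID with the current generation (via the reserved rescue, or by reclaiming its index if still free in the bitmap); otherwise the next free index after the last allocation (cur_idx); otherwise bumping the generation, flushing, and rescanning from index 1, since index 0 is never handed out. A condensed userspace model, assuming an 8-bit VMID space, with the helpers from the earlier sketches stubbed down so the block stands alone:

/* Sketch: allocation order in new_vmid(). Constants are assumptions. */
#include <stdbool.h>
#include <stdint.h>
#include <string.h>

#define VMID_BITS  8
#define NUM_VMIDS  (1u << VMID_BITS)
#define FIRST_GEN  ((uint64_t)NUM_VMIDS)  /* generation lives above the index */
#define IDX_MASK   ((uint64_t)(NUM_VMIDS - 1))

static uint64_t vmid_generation = FIRST_GEN;
static bool vmid_map[NUM_VMIDS] = { [0] = true }; /* VMID 0 never allocated */
static unsigned cur_idx = 1;

/* Stub for the reserved-VMID rescue modeled in the earlier sketch. */
static bool check_update_reserved_vmid(uint64_t vmid, uint64_t newvmid)
{
	(void)vmid; (void)newvmid;
	return false;
}

/* Stub for the rollover flush modeled earlier; only resets the map here. */
static void flush_context(void)
{
	memset(vmid_map, 0, sizeof(vmid_map));
	vmid_map[0] = true;
}

static unsigned find_next_zero(unsigned from)
{
	while (from < NUM_VMIDS && vmid_map[from])
		from++;
	return from;
}

static uint64_t new_vmid(uint64_t old)
{
	uint64_t generation = vmid_generation;

	if (old != 0) {
		uint64_t newvmid = generation | (old & IDX_MASK);

		/* A CPU still reserves the old VMID: just re-stamp it. */
		if (check_update_reserved_vmid(old, newvmid))
			return newvmid;

		/* The old index is free this generation: reclaim it. */
		if (!vmid_map[old & IDX_MASK]) {
			vmid_map[old & IDX_MASK] = true;
			return newvmid;
		}
	}

	unsigned idx = find_next_zero(cur_idx);
	if (idx == NUM_VMIDS) {
		/* Space exhausted: open a new generation and flush. */
		vmid_generation += FIRST_GEN;
		generation = vmid_generation;
		flush_context();
		idx = find_next_zero(1); /* succeeds: more VMIDs than CPUs */
	}

	vmid_map[idx] = true;
	cur_idx = idx;
	return generation | idx;
}

int main(void)
{
	uint64_t v = new_vmid(0);        /* fresh VM: generation 1, index 1 */
	return !(v == (FIRST_GEN | 1));
}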
In kvm_arm_vmid_update():

  141  u64 vmid, old_active_vmid;
  143  vmid = atomic64_read(&kvm_vmid->id);
  156  if (old_active_vmid != 0 && vmid_gen_match(vmid) &&
  158      old_active_vmid, vmid))        /* line 157 (the cmpxchg call) not matched */
  164  vmid = atomic64_read(&kvm_vmid->id);
  165  if (!vmid_gen_match(vmid))
  166          vmid = new_vmid(kvm_vmid);
  168  atomic64_set(this_cpu_ptr(&active_vmids), vmid);
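kvm_arm_vmid_update() tries a lockless fast path first: rollover zeroes each CPU's active_vmids slot, so observing a nonzero slot, a matching generation, and a successful cmpxchg together prove no rollover intervened, and the vCPU can run without taking the lock. Only on failure does it fall back to the locked slow path that re-reads the VMID and calls new_vmid() if the generation is stale. A sketch using C11 atomics and a single slot in place of per-CPU state; note the kernel variant checks the value returned by its cmpxchg against zero rather than for exact success, which works because rollover (writing zero) is the only concurrent writer:

/* Sketch: lockless fast path with locked fallback. Names are assumptions. */
#include <stdatomic.h>
#include <stdint.h>

#define VMID_BITS  8
#define FIRST_GEN  ((uint64_t)1 << VMID_BITS)

static _Atomic uint64_t vmid_generation = FIRST_GEN;
static _Atomic uint64_t active_vmid;  /* one slot here; per-CPU in the kernel */

static int vmid_gen_match(uint64_t vmid)
{
	return !((vmid ^ atomic_load(&vmid_generation)) >> VMID_BITS);
}

/* Stub for the locked path: take the lock, re-read the VMID, and call
 * new_vmid() (modeled earlier) when the generation is stale. */
static uint64_t slow_path(uint64_t vmid)
{
	return vmid_gen_match(vmid) ? vmid : (atomic_load(&vmid_generation) | 1);
}

static uint64_t vmid_update(uint64_t vmid)
{
	uint64_t old = atomic_load(&active_vmid);

	/*
	 * Fast path: a nonzero slot proves no rollover zeroed it, the
	 * generation check proves the VMID is current, and the cmpxchg
	 * proves nothing raced in between. No lock taken.
	 */
	if (old != 0 && vmid_gen_match(vmid) &&
	    atomic_compare_exchange_strong(&active_vmid, &old, vmid))
		return vmid;

	/* Slow path: lock, revalidate the generation, maybe reallocate. */
	vmid = slow_path(vmid);
	atomic_store(&active_vmid, vmid);
	return vmid;
}

int main(void)
{
	uint64_t v = vmid_update(FIRST_GEN | 1); /* slot was 0: slow path */
	return !(v == vmid_update(v));           /* now the fast path hits */
}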