Lines Matching defs:vcpu

247 static struct kvm_mmu_role_regs vcpu_to_role_regs(struct kvm_vcpu *vcpu) in vcpu_to_role_regs()
258 static unsigned long get_guest_cr3(struct kvm_vcpu *vcpu) in get_guest_cr3()
263 static inline unsigned long kvm_mmu_get_guest_pgd(struct kvm_vcpu *vcpu, in kvm_mmu_get_guest_pgd()
296 static void mark_mmio_spte(struct kvm_vcpu *vcpu, u64 *sptep, u64 gfn, in mark_mmio_spte()
320 static bool check_mmio_spte(struct kvm_vcpu *vcpu, u64 spte) in check_mmio_spte()
644 static inline bool is_tdp_mmu_active(struct kvm_vcpu *vcpu) in is_tdp_mmu_active()
649 static void walk_shadow_page_lockless_begin(struct kvm_vcpu *vcpu) in walk_shadow_page_lockless_begin()
668 static void walk_shadow_page_lockless_end(struct kvm_vcpu *vcpu) in walk_shadow_page_lockless_end()
683 static int mmu_topup_memory_caches(struct kvm_vcpu *vcpu, bool maybe_indirect) in mmu_topup_memory_caches()
706 static void mmu_free_memory_caches(struct kvm_vcpu *vcpu) in mmu_free_memory_caches()
901 static struct kvm_memory_slot *gfn_to_memslot_dirty_bitmap(struct kvm_vcpu *vcpu, in gfn_to_memslot_dirty_bitmap()
1430 static bool kvm_vcpu_write_protect_gfn(struct kvm_vcpu *vcpu, u64 gfn) in kvm_vcpu_write_protect_gfn()
1659 static void rmap_add(struct kvm_vcpu *vcpu, const struct kvm_memory_slot *slot, in rmap_add()
1912 static bool kvm_sync_page_check(struct kvm_vcpu *vcpu, struct kvm_mmu_page *sp) in kvm_sync_page_check()
1945 static int kvm_sync_spte(struct kvm_vcpu *vcpu, struct kvm_mmu_page *sp, int i) in kvm_sync_spte()
1953 static int __kvm_sync_page(struct kvm_vcpu *vcpu, struct kvm_mmu_page *sp) in __kvm_sync_page()
1981 static int kvm_sync_page(struct kvm_vcpu *vcpu, struct kvm_mmu_page *sp, in kvm_sync_page()
2087 static int mmu_sync_children(struct kvm_vcpu *vcpu, in mmu_sync_children()
2146 struct kvm_vcpu *vcpu, in kvm_mmu_find_shadow_page()
2267 struct kvm_vcpu *vcpu, in __kvm_mmu_get_shadow_page()
2288 static struct kvm_mmu_page *kvm_mmu_get_shadow_page(struct kvm_vcpu *vcpu, in kvm_mmu_get_shadow_page()
2347 static struct kvm_mmu_page *kvm_mmu_get_child_sp(struct kvm_vcpu *vcpu, in kvm_mmu_get_child_sp()
2361 struct kvm_vcpu *vcpu, hpa_t root, in shadow_walk_init_using_root()
2390 struct kvm_vcpu *vcpu, u64 addr) in shadow_walk_init()
2458 static void link_shadow_page(struct kvm_vcpu *vcpu, u64 *sptep, in link_shadow_page()
2464 static void validate_direct_spte(struct kvm_vcpu *vcpu, u64 *sptep, in validate_direct_spte()
2708 static int make_mmu_pages_available(struct kvm_vcpu *vcpu) in make_mmu_pages_available()
2769 static int kvm_mmu_unprotect_page_virt(struct kvm_vcpu *vcpu, gva_t gva) in kvm_mmu_unprotect_page_virt()
2900 static int mmu_set_spte(struct kvm_vcpu *vcpu, struct kvm_memory_slot *slot, in mmu_set_spte()
2971 static int direct_pte_prefetch_many(struct kvm_vcpu *vcpu, in direct_pte_prefetch_many()
2999 static void __direct_pte_prefetch(struct kvm_vcpu *vcpu, in __direct_pte_prefetch()
3024 static void direct_pte_prefetch(struct kvm_vcpu *vcpu, u64 *sptep) in direct_pte_prefetch()
3161 void kvm_mmu_hugepage_adjust(struct kvm_vcpu *vcpu, struct kvm_page_fault *fault) in kvm_mmu_hugepage_adjust()
3217 static int direct_map(struct kvm_vcpu *vcpu, struct kvm_page_fault *fault) in direct_map()
3268 static int kvm_handle_error_pfn(struct kvm_vcpu *vcpu, struct kvm_page_fault *fault) in kvm_handle_error_pfn()
3291 static int kvm_handle_noslot_fault(struct kvm_vcpu *vcpu, in kvm_handle_noslot_fault()
3361 static bool fast_pf_fix_direct_spte(struct kvm_vcpu *vcpu, in fast_pf_fix_direct_spte()
3407 static u64 *fast_pf_get_last_sptep(struct kvm_vcpu *vcpu, gpa_t gpa, u64 *spte) in fast_pf_get_last_sptep()
3424 static int fast_page_fault(struct kvm_vcpu *vcpu, struct kvm_page_fault *fault) in fast_page_fault()
3642 static hpa_t mmu_alloc_root(struct kvm_vcpu *vcpu, gfn_t gfn, int quadrant, in mmu_alloc_root()
3660 static int mmu_alloc_direct_roots(struct kvm_vcpu *vcpu) in mmu_alloc_direct_roots()
3768 static int mmu_alloc_shadow_roots(struct kvm_vcpu *vcpu) in mmu_alloc_shadow_roots()
3886 static int mmu_alloc_special_roots(struct kvm_vcpu *vcpu) in mmu_alloc_special_roots()
3993 void kvm_mmu_sync_roots(struct kvm_vcpu *vcpu) in kvm_mmu_sync_roots()
4034 void kvm_mmu_sync_prev_roots(struct kvm_vcpu *vcpu) in kvm_mmu_sync_prev_roots()
4047 static gpa_t nonpaging_gva_to_gpa(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, in nonpaging_gva_to_gpa()
4056 static bool mmio_info_in_cache(struct kvm_vcpu *vcpu, u64 addr, bool direct) in mmio_info_in_cache()
4077 static int get_walk(struct kvm_vcpu *vcpu, u64 addr, u64 *sptes, int *root_level) in get_walk()
4097 static bool get_mmio_spte(struct kvm_vcpu *vcpu, u64 addr, u64 *sptep) in get_mmio_spte()
4146 static int handle_mmio_page_fault(struct kvm_vcpu *vcpu, u64 addr, bool direct) in handle_mmio_page_fault()
4180 static bool page_fault_handle_page_track(struct kvm_vcpu *vcpu, in page_fault_handle_page_track()
4199 static void shadow_page_table_clear_flood(struct kvm_vcpu *vcpu, gva_t addr) in shadow_page_table_clear_flood()
4210 static u32 alloc_apf_token(struct kvm_vcpu *vcpu) in alloc_apf_token()
4221 static bool kvm_arch_setup_async_pf(struct kvm_vcpu *vcpu, gpa_t cr2_or_gpa, in kvm_arch_setup_async_pf()
4235 void kvm_arch_async_page_ready(struct kvm_vcpu *vcpu, struct kvm_async_pf *work) in kvm_arch_async_page_ready()
4254 static int __kvm_faultin_pfn(struct kvm_vcpu *vcpu, struct kvm_page_fault *fault) in __kvm_faultin_pfn()
4315 static int kvm_faultin_pfn(struct kvm_vcpu *vcpu, struct kvm_page_fault *fault, in kvm_faultin_pfn()
4340 static bool is_page_fault_stale(struct kvm_vcpu *vcpu, in is_page_fault_stale()
4364 static int direct_page_fault(struct kvm_vcpu *vcpu, struct kvm_page_fault *fault) in direct_page_fault()
4405 static int nonpaging_page_fault(struct kvm_vcpu *vcpu, in nonpaging_page_fault()
4413 int kvm_handle_page_fault(struct kvm_vcpu *vcpu, u64 error_code, in kvm_handle_page_fault()
4447 static int kvm_tdp_mmu_page_fault(struct kvm_vcpu *vcpu, in kvm_tdp_mmu_page_fault()
4482 int kvm_tdp_page_fault(struct kvm_vcpu *vcpu, struct kvm_page_fault *fault) in kvm_tdp_page_fault()
4619 void kvm_mmu_new_pgd(struct kvm_vcpu *vcpu, gpa_t new_pgd) in kvm_mmu_new_pgd()
4665 static bool sync_mmio_spte(struct kvm_vcpu *vcpu, u64 *sptep, gfn_t gfn, in sync_mmio_spte()
4783 static void reset_guest_rsvds_bits_mask(struct kvm_vcpu *vcpu, in reset_guest_rsvds_bits_mask()
4832 static void reset_rsvds_bits_mask_ept(struct kvm_vcpu *vcpu, in reset_rsvds_bits_mask_ept()
4850 static void reset_shadow_zero_bits_mask(struct kvm_vcpu *vcpu, in reset_shadow_zero_bits_mask()
5085 static void reset_guest_paging_metadata(struct kvm_vcpu *vcpu, in reset_guest_paging_metadata()
5110 static union kvm_cpu_role kvm_calc_cpu_role(struct kvm_vcpu *vcpu, in kvm_calc_cpu_role()
5150 void __kvm_mmu_refresh_passthrough_bits(struct kvm_vcpu *vcpu, in __kvm_mmu_refresh_passthrough_bits()
5165 static inline int kvm_mmu_get_tdp_level(struct kvm_vcpu *vcpu) in kvm_mmu_get_tdp_level()
5179 kvm_calc_tdp_mmu_root_page_role(struct kvm_vcpu *vcpu, in kvm_calc_tdp_mmu_root_page_role()
5197 static void init_kvm_tdp_mmu(struct kvm_vcpu *vcpu, in init_kvm_tdp_mmu()
5226 static void shadow_mmu_init_context(struct kvm_vcpu *vcpu, struct kvm_mmu *context, in shadow_mmu_init_context()
5248 static void kvm_init_shadow_mmu(struct kvm_vcpu *vcpu, in kvm_init_shadow_mmu()
5273 void kvm_init_shadow_npt_mmu(struct kvm_vcpu *vcpu, unsigned long cr0, in kvm_init_shadow_npt_mmu()
5300 kvm_calc_shadow_ept_root_page_role(struct kvm_vcpu *vcpu, bool accessed_dirty, in kvm_calc_shadow_ept_root_page_role()
5324 void kvm_init_shadow_ept_mmu(struct kvm_vcpu *vcpu, bool execonly, in kvm_init_shadow_ept_mmu()
5353 static void init_kvm_softmmu(struct kvm_vcpu *vcpu, in init_kvm_softmmu()
5365 static void init_kvm_nested_mmu(struct kvm_vcpu *vcpu, in init_kvm_nested_mmu()
5404 void kvm_init_mmu(struct kvm_vcpu *vcpu) in kvm_init_mmu()
5418 void kvm_mmu_after_set_cpuid(struct kvm_vcpu *vcpu) in kvm_mmu_after_set_cpuid()
5447 void kvm_mmu_reset_context(struct kvm_vcpu *vcpu) in kvm_mmu_reset_context()
5454 int kvm_mmu_load(struct kvm_vcpu *vcpu) in kvm_mmu_load()
5487 void kvm_mmu_unload(struct kvm_vcpu *vcpu) in kvm_mmu_unload()
5542 void kvm_mmu_free_obsolete_roots(struct kvm_vcpu *vcpu) in kvm_mmu_free_obsolete_roots()
5548 static u64 mmu_pte_write_fetch_gpte(struct kvm_vcpu *vcpu, gpa_t *gpa, in mmu_pte_write_fetch_gpte()
5647 void kvm_mmu_track_write(struct kvm_vcpu *vcpu, gpa_t gpa, const u8 *new, in kvm_mmu_track_write()
5696 int noinline kvm_mmu_page_fault(struct kvm_vcpu *vcpu, gpa_t cr2_or_gpa, u64 error_code, in kvm_mmu_page_fault()
5768 static void __kvm_mmu_invalidate_addr(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, in __kvm_mmu_invalidate_addr()
5805 void kvm_mmu_invalidate_addr(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, in kvm_mmu_invalidate_addr()
5834 void kvm_mmu_invlpg(struct kvm_vcpu *vcpu, gva_t gva) in kvm_mmu_invlpg()
5852 void kvm_mmu_invpcid_gva(struct kvm_vcpu *vcpu, gva_t gva, unsigned long pcid) in kvm_mmu_invpcid_gva()
5966 static int __kvm_mmu_create(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu) in __kvm_mmu_create()
6019 int kvm_mmu_create(struct kvm_vcpu *vcpu) in kvm_mmu_create()
6938 void kvm_mmu_destroy(struct kvm_vcpu *vcpu) in kvm_mmu_destroy()
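
For reference, every entry above is a function definition whose matched line carries a struct kvm_vcpu * parameter. The fragment below is a minimal, self-contained sketch of that definition shape; the opaque struct declaration and the helper name are illustrative stand-ins, not code taken from mmu.c.

    #include <stdbool.h>
    #include <stddef.h>

    struct kvm_vcpu;  /* opaque stand-in for the kernel's per-vCPU state */

    /* Illustrative helper: static, with the vCPU as its first argument,
     * mirroring the pattern of the definitions listed above.  The body
     * only checks the pointer so the stand-in compiles on its own. */
    static bool example_vcpu_helper(struct kvm_vcpu *vcpu)
    {
            return vcpu != NULL;
    }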