xref: /openbmc/linux/arch/x86/kvm/mmu.h (revision 22246614)
#ifndef __KVM_X86_MMU_H
#define __KVM_X86_MMU_H

#include <linux/kvm_host.h>

/*
 * Page-table geometry: index bits consumed per level, and the number of
 * entries in one table page.  64-bit (and PAE) entries are 8 bytes, so a
 * 4K page holds 2^9 of them; legacy 32-bit entries are 4 bytes, 2^10 per page.
 */
#define PT64_PT_BITS 9
#define PT64_ENT_PER_PAGE (1 << PT64_PT_BITS)
#define PT32_PT_BITS 10
#define PT32_ENT_PER_PAGE (1 << PT32_PT_BITS)

#define PT_WRITABLE_SHIFT 1

/* Architectural x86 page-table-entry flag bits (low 12 bits of a PTE). */
#define PT_PRESENT_MASK (1ULL << 0)
#define PT_WRITABLE_MASK (1ULL << PT_WRITABLE_SHIFT)
#define PT_USER_MASK (1ULL << 2)
#define PT_PWT_MASK (1ULL << 3)
#define PT_PCD_MASK (1ULL << 4)
#define PT_ACCESSED_MASK (1ULL << 5)
#define PT_DIRTY_MASK (1ULL << 6)
/*
 * Bit 7 is overloaded by the architecture: in a directory entry it is the
 * page-size (PS) bit, in a 4K leaf PTE it is the PAT bit.  Both masks are
 * therefore intentionally identical.
 */
#define PT_PAGE_SIZE_MASK (1ULL << 7)
#define PT_PAT_MASK (1ULL << 7)
#define PT_GLOBAL_MASK (1ULL << 8)
#define PT64_NX_SHIFT 63
#define PT64_NX_MASK (1ULL << PT64_NX_SHIFT)

#define PT_PAT_SHIFT 7
/* PAT bit position in a large-page directory entry (bit 12, not bit 7). */
#define PT_DIR_PAT_SHIFT 12
#define PT_DIR_PAT_MASK (1ULL << PT_DIR_PAT_SHIFT)

/*
 * PSE-36: in a 32-bit 4M page-directory entry, bits 13..16 carry physical
 * address bits 32..35 (see Intel SDM, 36-bit PSE paging).
 */
#define PT32_DIR_PSE36_SIZE 4
#define PT32_DIR_PSE36_SHIFT 13
#define PT32_DIR_PSE36_MASK \
	(((1ULL << PT32_DIR_PSE36_SIZE) - 1) << PT32_DIR_PSE36_SHIFT)

/* Shadow page-table depth for each guest paging mode. */
#define PT64_ROOT_LEVEL 4
#define PT32_ROOT_LEVEL 2
#define PT32E_ROOT_LEVEL 3
39 static inline void kvm_mmu_free_some_pages(struct kvm_vcpu *vcpu)
40 {
41 	if (unlikely(vcpu->kvm->arch.n_free_mmu_pages < KVM_MIN_FREE_MMU_PAGES))
42 		__kvm_mmu_free_some_pages(vcpu);
43 }
44 
45 static inline int kvm_mmu_reload(struct kvm_vcpu *vcpu)
46 {
47 	if (likely(vcpu->arch.mmu.root_hpa != INVALID_PAGE))
48 		return 0;
49 
50 	return kvm_mmu_load(vcpu);
51 }
52 
/*
 * Nonzero when the guest has long mode enabled (EFER.LME set in the
 * shadowed guest EFER); returns the raw EFER_LME bit, not a 0/1 value.
 * Long-mode guests are impossible on a 32-bit host, so this compiles
 * to a constant 0 there.
 */
static inline int is_long_mode(struct kvm_vcpu *vcpu)
{
#ifdef CONFIG_X86_64
	return vcpu->arch.shadow_efer & EFER_LME;
#else
	return 0;
#endif
}
61 
62 static inline int is_pae(struct kvm_vcpu *vcpu)
63 {
64 	return vcpu->arch.cr4 & X86_CR4_PAE;
65 }
66 
67 static inline int is_pse(struct kvm_vcpu *vcpu)
68 {
69 	return vcpu->arch.cr4 & X86_CR4_PSE;
70 }
71 
72 static inline int is_paging(struct kvm_vcpu *vcpu)
73 {
74 	return vcpu->arch.cr0 & X86_CR0_PG;
75 }
76 
77 #endif
78