Lines Matching +full:gpa +full:- +full:1

39 #define PAE_PTE_PAGE_MASK       ((-1llu << 12) & ((1llu << 52) - 1))
40 #define PAE_PTE_LARGE_PAGE_MASK ((-1llu << 21) & ((1llu << 52) - 1))
41 #define PAE_PTE_SUPER_PAGE_MASK ((-1llu << 30) & ((1llu << 52) - 1))
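These three masks keep the physical-frame bits of a PAE page-table entry and drop both the low flag/offset bits and bits 52-63 (reserved and NX territory): bits 51:12 for a 4 KiB page, 51:21 for a 2 MiB large page, 51:30 for a 1 GiB page. A minimal standalone check of the arithmetic (not part of the file):

    #include <stdio.h>

    #define PAE_PTE_PAGE_MASK       ((-1llu << 12) & ((1llu << 52) - 1))
    #define PAE_PTE_LARGE_PAGE_MASK ((-1llu << 21) & ((1llu << 52) - 1))
    #define PAE_PTE_SUPER_PAGE_MASK ((-1llu << 30) & ((1llu << 52) - 1))

    int main(void)
    {
        printf("%016llx\n", PAE_PTE_PAGE_MASK);       /* 000ffffffffff000 */
        printf("%016llx\n", PAE_PTE_LARGE_PAGE_MASK); /* 000fffffffe00000 */
        printf("%016llx\n", PAE_PTE_SUPER_PAGE_MASK); /* 000fffffc0000000 */
        return 0;
    }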
45 uint64_t gpa; member
68 return (addr >> (level_shift * (level - 1) + 12)) & ((1 << level_shift) - 1); in gpt_entry()
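gpt_entry() extracts the table index for a walk level: PAE tables hold 512 eight-byte entries (9 index bits), legacy 32-bit tables hold 1024 four-byte entries (10 index bits), with the 12-bit page offset sitting below level 1. A sketch of the helper with uint64_t standing in for QEMU's target_ulong; at level 1 under PAE this reduces to (addr >> 12) & 0x1ff:

    #include <stdint.h>
    #include <stdbool.h>

    /* Index into the page table at 'level' for address 'addr': PAE
     * uses 9 index bits per level, legacy 32-bit paging uses 10. */
    static int gpt_entry(uint64_t addr, int level, bool pae)
    {
        int level_shift = pae ? 9 : 10;
        return (addr >> (level_shift * (level - 1) + 12)) & ((1 << level_shift) - 1);
    }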
83 uint64_t gpa = pt->pte[level] & page_mask; in get_pt_entry() local
86 gpa = pt->pte[level]; in get_pt_entry()
89 index = gpt_entry(pt->gva, level, pae); in get_pt_entry()
90 address_space_read(&address_space_memory, gpa + index * pte_size(pae), in get_pt_entry()
93 pt->pte[level - 1] = pte; in get_pt_entry()
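get_pt_entry() fetches the next-lower entry from guest memory: the current entry's frame bits give the table base, gpt_entry() gives the index, and the result is stored one level down. A minimal sketch of the same step, with a flat guest_ram buffer and a hypothetical struct walk standing in for address_space_read() and struct gpt_translation:

    #include <string.h>

    /* Mirrors the gpt_translation fields matched above (gva, gpa, pte[]). */
    struct walk {
        uint64_t gva;
        uint64_t gpa;
        uint64_t pte[5];
    };

    static void fetch_next_level(const uint8_t *guest_ram, struct walk *pt,
                                 int level, bool pae)
    {
        uint64_t page_mask = pae ? PAE_PTE_PAGE_MASK : 0xfffff000u;
        uint64_t table_gpa = pt->pte[level] & page_mask;
        int entry_size = pae ? 8 : 4;           /* pte_size(pae) */
        int index = gpt_entry(pt->gva, level, pae);
        uint64_t pte = 0;

        /* Read one entry; a 4-byte legacy entry zero-extends into pte. */
        memcpy(&pte, guest_ram + table_gpa + (uint64_t)index * entry_size,
               entry_size);
        pt->pte[level - 1] = pte;
    }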
102 uint64_t pte = pt->pte[level]; in test_pt_entry()
104 if (pt->write_access) { in test_pt_entry()
105 pt->err_code |= MMU_PAGE_WT; in test_pt_entry()
107 if (pt->user_access) { in test_pt_entry()
108 pt->err_code |= MMU_PAGE_US; in test_pt_entry()
110 if (pt->exec_access) { in test_pt_entry()
111 pt->err_code |= MMU_PAGE_NX; in test_pt_entry()
123 pt->err_code |= MMU_PAGE_PT; in test_pt_entry()
127 pt->err_code |= MMU_PAGE_PT; in test_pt_entry()
130 uint32_t cr0 = rvmcs(cpu->accel->fd, VMCS_GUEST_CR0); in test_pt_entry()
133 if (pt->write_access && !pte_write_access(pte)) { in test_pt_entry()
138 if (pt->user_access && !pte_user_access(pte)) { in test_pt_entry()
142 if (pae && pt->exec_access && !pte_exec_access(pte)) { in test_pt_entry()
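test_pt_entry() first accumulates the attempted access into err_code (MMU_PAGE_WT, MMU_PAGE_US, MMU_PAGE_NX), then rejects the walk if the entry forbids that access; the VMCS_GUEST_CR0 read suggests the write check is gated on CR0.WP. A hedged sketch of just the permission test, with explicit architectural bit positions standing in for the pte_*_access() helpers:

    #define PTE_W  (1ull << 1)      /* writable */
    #define PTE_U  (1ull << 2)      /* user-accessible */
    #define PTE_NX (1ull << 63)     /* no-execute (PAE format only) */
    #define CR0_WP (1u << 16)

    static bool access_allowed(uint64_t pte, bool write, bool user,
                               bool exec, bool pae, uint32_t cr0)
    {
        if ((cr0 & CR0_WP) && write && !(pte & PTE_W)) {
            return false;
        }
        if (user && !(pte & PTE_U)) {
            return false;
        }
        if (pae && exec && (pte & PTE_NX)) {
            return false;
        }
        return true;
    }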
159 VM_PANIC_ON(!pte_large_page(pt->pte[largeness])) in large_page_gpa()
161 /* 1GiB large page */ in large_page_gpa()
163 return (pt->pte[2] & PAE_PTE_SUPER_PAGE_MASK) | (pt->gva & 0x3fffffff); in large_page_gpa()
166 VM_PANIC_ON(largeness != 1) in large_page_gpa()
170 return (pt->pte[1] & PAE_PTE_LARGE_PAGE_MASK) | (pt->gva & 0x1fffff); in large_page_gpa()
174 return pse_pte_to_page(pt->pte[1]) | (pt->gva & 0x3fffff); in large_page_gpa()
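large_page_gpa() splices the frame bits of the terminal entry with the low GVA bits: a 30-bit offset (0x3fffffff) for a 1 GiB PAE page, 21 bits (0x1fffff) for a 2 MiB PAE page, and 22 bits (0x3fffff) for a non-PAE 4 MiB PSE page. A worked check of the 2 MiB case, reusing PAE_PTE_LARGE_PAGE_MASK from line 40 above (values are illustrative):

    #include <assert.h>

    int main(void)
    {
        uint64_t pde = 0x40000000ull | 0x83;    /* frame 0x40000000, P+W+PS */
        uint64_t gva = 0x7f0000123456ull;
        uint64_t gpa = (pde & PAE_PTE_LARGE_PAGE_MASK) | (gva & 0x1fffff);

        assert(gpa == 0x40123456ull);           /* frame | 21-bit offset */
        return 0;
    }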
184 target_ulong cr3 = rvmcs(cpu->accel->fd, VMCS_GUEST_CR3); in walk_gpt()
190 pt->pte[top_level] = pae ? (cr3 & PAE_CR3_MASK) : (cr3 & LEGACY_CR3_MASK); in walk_gpt()
191 pt->gva = addr; in walk_gpt()
192 pt->user_access = (err_code & MMU_PAGE_US); in walk_gpt()
193 pt->write_access = (err_code & MMU_PAGE_WT); in walk_gpt()
194 pt->exec_access = (err_code & MMU_PAGE_NX); in walk_gpt()
196 for (level = top_level; level > 0; level--) { in walk_gpt()
199 if (!test_pt_entry(cpu, pt, level - 1, &largeness, pae)) { in walk_gpt()
209 pt->gpa = (pt->pte[0] & page_mask) | (pt->gva & 0xfff); in walk_gpt()
211 pt->gpa = large_page_gpa(pt, pae, largeness); in walk_gpt()
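walk_gpt() seeds the top-level entry from CR3 (a 32-byte-aligned PDPT base under 32-bit PAE, a 4 KiB-aligned page directory otherwise), records the attempted access from the fault error code, then walks downward, stopping early when test_pt_entry() reports a large page. A simplified sketch of the loop using the hypothetical helpers above; large pages and permission checks are elided down to the present-bit test:

    static bool walk_sketch(const uint8_t *guest_ram, struct walk *pt,
                            uint64_t cr3, uint64_t gva, bool pae)
    {
        int top_level = pae ? 3 : 2;            /* 32-bit PAE vs. legacy */
        uint64_t page_mask = pae ? PAE_PTE_PAGE_MASK : 0xfffff000u;
        int level;

        pt->gva = gva;
        pt->pte[top_level] = pae ? (cr3 & ~0x1full) : (cr3 & ~0xfffull);

        for (level = top_level; level > 0; level--) {
            fetch_next_level(guest_ram, pt, level, pae);
            if (!(pt->pte[level - 1] & 1)) {    /* present bit clear */
                return false;
            }
        }
        pt->gpa = (pt->pte[0] & page_mask) | (pt->gva & 0xfff);
        return true;
    }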
218 bool mmu_gva_to_gpa(CPUState *cpu, target_ulong gva, uint64_t *gpa) in mmu_gva_to_gpa() argument
225 *gpa = gva; in mmu_gva_to_gpa()
231 *gpa = pt.gpa; in mmu_gva_to_gpa()
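mmu_gva_to_gpa() short-circuits when paging is off (*gpa = gva, since with CR0.PG clear guest-virtual equals guest-physical) and otherwise returns the walk result. The shape implied by the matches, as a hedged sketch chaining the helpers above:

    static bool gva_to_gpa_sketch(const uint8_t *guest_ram, uint64_t cr3,
                                  uint32_t cr0, bool pae,
                                  uint64_t gva, uint64_t *gpa)
    {
        struct walk pt;

        if (!(cr0 & (1u << 31))) {              /* CR0.PG clear */
            *gpa = gva;
            return true;
        }
        if (!walk_sketch(guest_ram, &pt, cr3, gva, pae)) {
            return false;
        }
        *gpa = pt.gpa;
        return true;
    }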
240 uint64_t gpa; in vmx_write_mem() local
244 int copy = MIN(bytes, 0x1000 - (gva & 0xfff)); in vmx_write_mem()
246 if (!mmu_gva_to_gpa(cpu, gva, &gpa)) { in vmx_write_mem()
249 address_space_write(&address_space_memory, gpa, in vmx_write_mem()
253 bytes -= copy; in vmx_write_mem()
261 uint64_t gpa; in vmx_read_mem() local
265 int copy = MIN(bytes, 0x1000 - (gva & 0xfff)); in vmx_read_mem()
267 if (!mmu_gva_to_gpa(cpu, gva, &gpa)) { in vmx_read_mem()
270 address_space_read(&address_space_memory, gpa, MEMTXATTRS_UNSPECIFIED, in vmx_read_mem()
273 bytes -= copy; in vmx_read_mem()
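Both copy loops split the transfer at 4 KiB boundaries because pages contiguous in GVA space need not be contiguous in GPA space; MIN(bytes, 0x1000 - (gva & 0xfff)) is exactly the room left on the current page, so each iteration translates once and copies at most one page. A sketch of the shared chunking pattern (read direction shown; the real code panics rather than returning on a failed translation):

    #define MIN(a, b) ((a) < (b) ? (a) : (b))

    static void copy_from_guest(uint8_t *dst, const uint8_t *guest_ram,
                                uint64_t cr3, uint32_t cr0, bool pae,
                                uint64_t gva, int bytes)
    {
        while (bytes > 0) {
            int copy = MIN(bytes, (int)(0x1000 - (gva & 0xfff)));
            uint64_t gpa;

            if (!gva_to_gpa_sketch(guest_ram, cr3, cr0, pae, gva, &gpa)) {
                return;
            }
            memcpy(dst, guest_ram + gpa, copy);

            dst += copy;
            gva += copy;
            bytes -= copy;
        }
    }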