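/*
 * Guest x86 state helpers: read segment descriptors, call gates and CPU
 * mode bits (protected/real/v8086/long mode, paging, PAE) from the VMCS
 * via rvmcs(), and translate segment-relative addresses to linear ones.
 */
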
#include "cpu.h"

static uint32_t x86_segment_access_rights(struct x86_segment_descriptor *var)
{
    uint32_t ar;

    /* a not-present segment is reported as "unusable" (bit 16 set) */
    if (!var->p) {
        ar = 1 << 16;
        return ar;
    }

    /* pack the descriptor fields into the VMX access-rights layout */
    ar = var->type & 15;
    ar |= (var->s & 1) << 4;
    ar |= (var->dpl & 3) << 5;
    ar |= (var->p & 1) << 7;
    ar |= (var->avl & 1) << 12;
    ar |= (var->l & 1) << 13;
    ar |= (var->db & 1) << 14;
    ar |= (var->g & 1) << 15;
    return ar;
}

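/*
 * Segment descriptors are fetched from the guest's GDT or LDT: the TI bit
 * of the selector picks the table, and the selector index scaled by 8
 * (each descriptor is 8 bytes) gives the byte offset, which must lie
 * below the table limit.
 */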
bool x86_read_segment_descriptor(CPUState *cpu,
                                 struct x86_segment_descriptor *desc,
                                 x68_segment_selector sel)
{
    target_ulong base;
    uint32_t limit;

    if (GDT_SEL == sel.ti) {
        base = rvmcs(cpu->accel->fd, VMCS_GUEST_GDTR_BASE);
        limit = rvmcs(cpu->accel->fd, VMCS_GUEST_GDTR_LIMIT);
    } else {
        base = rvmcs(cpu->accel->fd, VMCS_GUEST_LDTR_BASE);
        limit = rvmcs(cpu->accel->fd, VMCS_GUEST_LDTR_LIMIT);
    }

    if (sel.index * 8 >= limit) {
        return false;
    }

    vmx_read_mem(cpu, desc, base + sel.index * 8, sizeof(*desc));
    return true;
}

bool x86_write_segment_descriptor(CPUState *cpu,
                                  struct x86_segment_descriptor *desc,
                                  x68_segment_selector sel)
{
    target_ulong base;
    uint32_t limit;

    if (GDT_SEL == sel.ti) {
        base = rvmcs(cpu->accel->fd, VMCS_GUEST_GDTR_BASE);
        limit = rvmcs(cpu->accel->fd, VMCS_GUEST_GDTR_LIMIT);
    } else {
        base = rvmcs(cpu->accel->fd, VMCS_GUEST_LDTR_BASE);
        limit = rvmcs(cpu->accel->fd, VMCS_GUEST_LDTR_LIMIT);
    }

    if (sel.index * 8 >= limit) {
        return false;
    }

    vmx_write_mem(cpu, base + sel.index * 8, desc, sizeof(*desc));
    return true;
}

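/*
 * Gate descriptors come from the guest IDT: the gate (vector) number
 * scaled by 8 gives the offset, bounds-checked against the IDT limit.
 */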
bool x86_read_call_gate(CPUState *cpu, struct x86_call_gate *idt_desc,
                        int gate)
{
    target_ulong base = rvmcs(cpu->accel->fd, VMCS_GUEST_IDTR_BASE);
    uint32_t limit = rvmcs(cpu->accel->fd, VMCS_GUEST_IDTR_LIMIT);

    if (gate * 8 >= limit) {
        return false;
    }

    vmx_read_mem(cpu, idt_desc, base + gate * 8, sizeof(*idt_desc));
    return true;
}

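/*
 * CPU mode predicates, derived from guest control registers and flags:
 * CR0.PE selects protected mode (real mode otherwise), EFLAGS.VM selects
 * virtual-8086 mode, EFER.LMA indicates long mode is active, CS.L picks
 * 64-bit code within long mode, CR0.PG enables paging and CR4.PAE enables
 * Physical Address Extension.
 */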
bool x86_is_protected(CPUState *cpu)
{
    uint64_t cr0 = rvmcs(cpu->accel->fd, VMCS_GUEST_CR0);
    return cr0 & CR0_PE_MASK;
}

bool x86_is_real(CPUState *cpu)
{
    return !x86_is_protected(cpu);
}

bool x86_is_v8086(CPUState *cpu)
{
    X86CPU *x86_cpu = X86_CPU(cpu);
    CPUX86State *env = &x86_cpu->env;
    return x86_is_protected(cpu) && (env->eflags & VM_MASK);
}

bool x86_is_long_mode(CPUState *cpu)
{
    return rvmcs(cpu->accel->fd, VMCS_GUEST_IA32_EFER) & MSR_EFER_LMA;
}

bool x86_is_long64_mode(CPUState *cpu)
{
    struct vmx_segment desc;
    vmx_read_segment_descriptor(cpu, &desc, R_CS);

    /* bit 13 of the access rights is the CS.L (64-bit code) flag */
    return x86_is_long_mode(cpu) && ((desc.ar >> 13) & 1);
}

bool x86_is_paging_mode(CPUState *cpu)
{
    uint64_t cr0 = rvmcs(cpu->accel->fd, VMCS_GUEST_CR0);
    return cr0 & CR0_PG_MASK;
}

bool x86_is_pae_enabled(CPUState *cpu)
{
    uint64_t cr4 = rvmcs(cpu->accel->fd, VMCS_GUEST_CR4);
    return cr4 & CR4_PAE_MASK;
}

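/*
 * Linear-address helpers: a linear address is the segment base (read from
 * the VMCS) plus the effective address, truncated to the operand size for
 * 16- and 32-bit accesses.
 */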
target_ulong linear_addr(CPUState *cpu, target_ulong addr, X86Seg seg)
{
    return vmx_read_segment_base(cpu, seg) + addr;
}

target_ulong linear_addr_size(CPUState *cpu, target_ulong addr, int size,
                              X86Seg seg)
{
    /* truncate the effective address to the operand size first */
    switch (size) {
    case 2:
        addr = (uint16_t)addr;
        break;
    case 4:
        addr = (uint32_t)addr;
        break;
    default:
        break;
    }
    return linear_addr(cpu, addr, seg);
}

target_ulong linear_rip(CPUState *cpu, target_ulong rip)
{
    return linear_addr(cpu, rip, R_CS);
}