Lines matching full:vcpu in KVM's x86 SMM emulation code. The leading number on each match is the source file's own line number; the trailing annotation names the enclosing function.

112 void kvm_smm_changed(struct kvm_vcpu *vcpu, bool entering_smm)  in kvm_smm_changed()  argument
114 trace_kvm_smm_transition(vcpu->vcpu_id, vcpu->arch.smbase, entering_smm); in kvm_smm_changed()
117 vcpu->arch.hflags |= HF_SMM_MASK; in kvm_smm_changed()
119 vcpu->arch.hflags &= ~(HF_SMM_MASK | HF_SMM_INSIDE_NMI_MASK); in kvm_smm_changed()
122 kvm_make_request(KVM_REQ_EVENT, vcpu); in kvm_smm_changed()
129 vcpu->arch.pdptrs_from_userspace = false; in kvm_smm_changed()
132 kvm_mmu_reset_context(vcpu); in kvm_smm_changed()
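Stitched back together, the transition helper plausibly reads as below; the if/else structure around entering_smm and the comments are inferred here, not part of the matched output.

void kvm_smm_changed(struct kvm_vcpu *vcpu, bool entering_smm)
{
	trace_kvm_smm_transition(vcpu->vcpu_id, vcpu->arch.smbase, entering_smm);

	if (entering_smm) {
		vcpu->arch.hflags |= HF_SMM_MASK;
	} else {
		vcpu->arch.hflags &= ~(HF_SMM_MASK | HF_SMM_INSIDE_NMI_MASK);

		/* Re-check for a latched INIT or SMI on the way out. */
		kvm_make_request(KVM_REQ_EVENT, vcpu);

		/* PDPTRs supplied by userspace must be re-read from guest memory. */
		vcpu->arch.pdptrs_from_userspace = false;
	}

	/* Entering or leaving SMM changes the effective address space. */
	kvm_mmu_reset_context(vcpu);
}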
136 void process_smi(struct kvm_vcpu *vcpu) in process_smi() argument
138 vcpu->arch.smi_pending = true; in process_smi()
139 kvm_make_request(KVM_REQ_EVENT, vcpu); in process_smi()
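The two matches above are the whole body; a minimal reconstruction (comment added):

void process_smi(struct kvm_vcpu *vcpu)
{
	/* Latch the SMI; actual injection happens on the next KVM_REQ_EVENT pass. */
	vcpu->arch.smi_pending = true;
	kvm_make_request(KVM_REQ_EVENT, vcpu);
}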
156 static void enter_smm_save_seg_32(struct kvm_vcpu *vcpu, in enter_smm_save_seg_32() argument
162 kvm_get_segment(vcpu, &seg, n); in enter_smm_save_seg_32()
170 static void enter_smm_save_seg_64(struct kvm_vcpu *vcpu, in enter_smm_save_seg_64() argument
176 kvm_get_segment(vcpu, &seg, n); in enter_smm_save_seg_64()
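Both save helpers follow the same pattern: read the live segment with kvm_get_segment() and copy it into the SMRAM image. A sketch of the 32-bit variant is below; the kvm_smm_seg_state_32 type and the enter_smm_get_segment_flags() helper are assumptions (only the signature and the kvm_get_segment() call appear in the matches), and the 64-bit variant differs mainly in keeping the selector inside the state struct.

static void enter_smm_save_seg_32(struct kvm_vcpu *vcpu,
				  struct kvm_smm_seg_state_32 *state,
				  u32 *selector, int n)
{
	struct kvm_segment seg;

	kvm_get_segment(vcpu, &seg, n);
	*selector    = seg.selector;
	state->base  = seg.base;
	state->limit = seg.limit;
	state->flags = enter_smm_get_segment_flags(&seg);	/* pack attributes */
}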
184 static void enter_smm_save_state_32(struct kvm_vcpu *vcpu, in enter_smm_save_state_32() argument
191 smram->cr0 = kvm_read_cr0(vcpu); in enter_smm_save_state_32()
192 smram->cr3 = kvm_read_cr3(vcpu); in enter_smm_save_state_32()
193 smram->eflags = kvm_get_rflags(vcpu); in enter_smm_save_state_32()
194 smram->eip = kvm_rip_read(vcpu); in enter_smm_save_state_32()
197 smram->gprs[i] = kvm_register_read_raw(vcpu, i); in enter_smm_save_state_32()
199 kvm_get_dr(vcpu, 6, &val); in enter_smm_save_state_32()
201 kvm_get_dr(vcpu, 7, &val); in enter_smm_save_state_32()
204 enter_smm_save_seg_32(vcpu, &smram->tr, &smram->tr_sel, VCPU_SREG_TR); in enter_smm_save_state_32()
205 enter_smm_save_seg_32(vcpu, &smram->ldtr, &smram->ldtr_sel, VCPU_SREG_LDTR); in enter_smm_save_state_32()
207 static_call(kvm_x86_get_gdt)(vcpu, &dt); in enter_smm_save_state_32()
211 static_call(kvm_x86_get_idt)(vcpu, &dt); in enter_smm_save_state_32()
215 enter_smm_save_seg_32(vcpu, &smram->es, &smram->es_sel, VCPU_SREG_ES); in enter_smm_save_state_32()
216 enter_smm_save_seg_32(vcpu, &smram->cs, &smram->cs_sel, VCPU_SREG_CS); in enter_smm_save_state_32()
217 enter_smm_save_seg_32(vcpu, &smram->ss, &smram->ss_sel, VCPU_SREG_SS); in enter_smm_save_state_32()
219 enter_smm_save_seg_32(vcpu, &smram->ds, &smram->ds_sel, VCPU_SREG_DS); in enter_smm_save_state_32()
220 enter_smm_save_seg_32(vcpu, &smram->fs, &smram->fs_sel, VCPU_SREG_FS); in enter_smm_save_state_32()
221 enter_smm_save_seg_32(vcpu, &smram->gs, &smram->gs_sel, VCPU_SREG_GS); in enter_smm_save_state_32()
223 smram->cr4 = kvm_read_cr4(vcpu); in enter_smm_save_state_32()
225 smram->smbase = vcpu->arch.smbase; in enter_smm_save_state_32()
227 smram->int_shadow = static_call(kvm_x86_get_interrupt_shadow)(vcpu); in enter_smm_save_state_32()
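The GPR and debug-register matches above are only fragments; in context they are a short loop plus two kvm_get_dr() reads whose results are truncated into the 32-bit SMRAM fields. Sketch (the dr6/dr7 field names are an assumption):

	for (i = 0; i < 8; i++)
		smram->gprs[i] = kvm_register_read_raw(vcpu, i);

	kvm_get_dr(vcpu, 6, &val);
	smram->dr6 = (u32)val;
	kvm_get_dr(vcpu, 7, &val);
	smram->dr7 = (u32)val;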
231 static void enter_smm_save_state_64(struct kvm_vcpu *vcpu, in enter_smm_save_state_64() argument
239 smram->gprs[15 - i] = kvm_register_read_raw(vcpu, i); in enter_smm_save_state_64()
241 smram->rip = kvm_rip_read(vcpu); in enter_smm_save_state_64()
242 smram->rflags = kvm_get_rflags(vcpu); in enter_smm_save_state_64()
245 kvm_get_dr(vcpu, 6, &val); in enter_smm_save_state_64()
247 kvm_get_dr(vcpu, 7, &val); in enter_smm_save_state_64()
250 smram->cr0 = kvm_read_cr0(vcpu); in enter_smm_save_state_64()
251 smram->cr3 = kvm_read_cr3(vcpu); in enter_smm_save_state_64()
252 smram->cr4 = kvm_read_cr4(vcpu); in enter_smm_save_state_64()
254 smram->smbase = vcpu->arch.smbase; in enter_smm_save_state_64()
257 smram->efer = vcpu->arch.efer; in enter_smm_save_state_64()
259 enter_smm_save_seg_64(vcpu, &smram->tr, VCPU_SREG_TR); in enter_smm_save_state_64()
261 static_call(kvm_x86_get_idt)(vcpu, &dt); in enter_smm_save_state_64()
265 enter_smm_save_seg_64(vcpu, &smram->ldtr, VCPU_SREG_LDTR); in enter_smm_save_state_64()
267 static_call(kvm_x86_get_gdt)(vcpu, &dt); in enter_smm_save_state_64()
271 enter_smm_save_seg_64(vcpu, &smram->es, VCPU_SREG_ES); in enter_smm_save_state_64()
272 enter_smm_save_seg_64(vcpu, &smram->cs, VCPU_SREG_CS); in enter_smm_save_state_64()
273 enter_smm_save_seg_64(vcpu, &smram->ss, VCPU_SREG_SS); in enter_smm_save_state_64()
274 enter_smm_save_seg_64(vcpu, &smram->ds, VCPU_SREG_DS); in enter_smm_save_state_64()
275 enter_smm_save_seg_64(vcpu, &smram->fs, VCPU_SREG_FS); in enter_smm_save_state_64()
276 enter_smm_save_seg_64(vcpu, &smram->gs, VCPU_SREG_GS); in enter_smm_save_state_64()
278 smram->int_shadow = static_call(kvm_x86_get_interrupt_shadow)(vcpu); in enter_smm_save_state_64()
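The gprs[15 - i] store above implies that the 64-bit SMRAM layout keeps R15 at the lowest array index, so the save loop walks the registers in reverse:

	for (i = 0; i < 16; i++)
		smram->gprs[15 - i] = kvm_register_read_raw(vcpu, i);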
282 void enter_smm(struct kvm_vcpu *vcpu) in enter_smm() argument
294 if (guest_cpuid_has(vcpu, X86_FEATURE_LM)) in enter_smm()
295 enter_smm_save_state_64(vcpu, &smram.smram64); in enter_smm()
298 enter_smm_save_state_32(vcpu, &smram.smram32); in enter_smm()
301 * Give enter_smm() a chance to make ISA-specific changes to the vCPU in enter_smm()
308 if (static_call(kvm_x86_enter_smm)(vcpu, &smram)) in enter_smm()
311 kvm_smm_changed(vcpu, true); in enter_smm()
313 if (kvm_vcpu_write_guest(vcpu, vcpu->arch.smbase + 0xfe00, &smram, sizeof(smram))) in enter_smm()
316 if (static_call(kvm_x86_get_nmi_mask)(vcpu)) in enter_smm()
317 vcpu->arch.hflags |= HF_SMM_INSIDE_NMI_MASK; in enter_smm()
319 static_call(kvm_x86_set_nmi_mask)(vcpu, true); in enter_smm()
321 kvm_set_rflags(vcpu, X86_EFLAGS_FIXED); in enter_smm()
322 kvm_rip_write(vcpu, 0x8000); in enter_smm()
324 static_call(kvm_x86_set_interrupt_shadow)(vcpu, 0); in enter_smm()
326 cr0 = vcpu->arch.cr0 & ~(X86_CR0_PE | X86_CR0_EM | X86_CR0_TS | X86_CR0_PG); in enter_smm()
327 static_call(kvm_x86_set_cr0)(vcpu, cr0); in enter_smm()
328 vcpu->arch.cr0 = cr0; in enter_smm()
330 static_call(kvm_x86_set_cr4)(vcpu, 0); in enter_smm()
334 static_call(kvm_x86_set_idt)(vcpu, &dt); in enter_smm()
336 if (WARN_ON_ONCE(kvm_set_dr(vcpu, 7, DR7_FIXED_1))) in enter_smm()
339 cs.selector = (vcpu->arch.smbase >> 4) & 0xffff; in enter_smm()
340 cs.base = vcpu->arch.smbase; in enter_smm()
357 kvm_set_segment(vcpu, &cs, VCPU_SREG_CS); in enter_smm()
358 kvm_set_segment(vcpu, &ds, VCPU_SREG_DS); in enter_smm()
359 kvm_set_segment(vcpu, &ds, VCPU_SREG_ES); in enter_smm()
360 kvm_set_segment(vcpu, &ds, VCPU_SREG_FS); in enter_smm()
361 kvm_set_segment(vcpu, &ds, VCPU_SREG_GS); in enter_smm()
362 kvm_set_segment(vcpu, &ds, VCPU_SREG_SS); in enter_smm()
365 if (guest_cpuid_has(vcpu, X86_FEATURE_LM)) in enter_smm()
366 if (static_call(kvm_x86_set_efer)(vcpu, 0)) in enter_smm()
370 kvm_update_cpuid_runtime(vcpu); in enter_smm()
371 kvm_mmu_reset_context(vcpu); in enter_smm()
374 kvm_vm_dead(vcpu->kvm); in enter_smm()
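Between the cs.selector/cs.base assignments and the kvm_set_segment() calls, the listing skips the lines that build the rest of the SMM entry environment, because they do not mention vcpu. A plausible reconstruction of that gap follows: CS and a template DS descriptor are set up as flat, present, ring-0, 16-bit segments (the exact attribute values are an assumption based on what a real-mode-like flat segment requires). Any failure on this path ends in kvm_vm_dead(), since the vCPU state is already half-switched and no longer consistent.

	cs.selector = (vcpu->arch.smbase >> 4) & 0xffff;
	cs.base     = vcpu->arch.smbase;

	ds.selector = 0;
	ds.base     = 0;

	/* Flat 4 GiB, read/write, present, ring 0, 16-bit (db = 0, l = 0). */
	cs.limit    = ds.limit    = 0xffffffff;
	cs.type     = ds.type     = 0x3;
	cs.dpl      = ds.dpl      = 0;
	cs.db       = ds.db       = 0;
	cs.s        = ds.s        = 1;
	cs.l        = ds.l        = 0;
	cs.g        = ds.g        = 1;
	cs.present  = ds.present  = 1;
	cs.unusable = ds.unusable = 0;

	kvm_set_segment(vcpu, &cs, VCPU_SREG_CS);
	kvm_set_segment(vcpu, &ds, VCPU_SREG_DS);
	kvm_set_segment(vcpu, &ds, VCPU_SREG_ES);
	kvm_set_segment(vcpu, &ds, VCPU_SREG_FS);
	kvm_set_segment(vcpu, &ds, VCPU_SREG_GS);
	kvm_set_segment(vcpu, &ds, VCPU_SREG_SS);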
392 static int rsm_load_seg_32(struct kvm_vcpu *vcpu, in rsm_load_seg_32() argument
402 kvm_set_segment(vcpu, &desc, n); in rsm_load_seg_32()
408 static int rsm_load_seg_64(struct kvm_vcpu *vcpu, in rsm_load_seg_64() argument
418 kvm_set_segment(vcpu, &desc, n); in rsm_load_seg_64()
423 static int rsm_enter_protected_mode(struct kvm_vcpu *vcpu, in rsm_enter_protected_mode() argument
436 bad = kvm_set_cr3(vcpu, cr3); in rsm_enter_protected_mode()
445 bad = kvm_set_cr4(vcpu, cr4 & ~X86_CR4_PCIDE); in rsm_enter_protected_mode()
449 bad = kvm_set_cr0(vcpu, cr0); in rsm_enter_protected_mode()
454 bad = kvm_set_cr4(vcpu, cr4); in rsm_enter_protected_mode()
458 bad = kvm_set_cr3(vcpu, cr3 | pcid); in rsm_enter_protected_mode()
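The five control-register writes above implement a specific ordering constraint, reconstructed below: CR3 is loaded first with the PCID bits temporarily cleared (CR4.PCIDE may only be set while CR3[11:0] is zero), then CR4 without PCIDE so that PAE is enabled before paging, then CR0, and only afterwards are PCIDE and the saved PCID put back. This is a sketch; the u64 parameter types and the X86EMUL_* return codes are assumptions based on the surrounding emulator convention.

static int rsm_enter_protected_mode(struct kvm_vcpu *vcpu,
				    u64 cr0, u64 cr3, u64 cr4)
{
	u64 pcid = 0;
	int bad;

	/* CR4.PCIDE can only be set later if CR3[11:0] is zero now. */
	if (cr4 & X86_CR4_PCIDE) {
		pcid = cr3 & 0xfff;
		cr3 &= ~0xfff;
	}

	bad = kvm_set_cr3(vcpu, cr3);
	if (bad)
		return X86EMUL_UNHANDLEABLE;

	/* Enable PAE before CR0.PG; PCIDE has to wait until long mode is active. */
	bad = kvm_set_cr4(vcpu, cr4 & ~X86_CR4_PCIDE);
	if (bad)
		return X86EMUL_UNHANDLEABLE;

	bad = kvm_set_cr0(vcpu, cr0);
	if (bad)
		return X86EMUL_UNHANDLEABLE;

	if (cr4 & X86_CR4_PCIDE) {
		bad = kvm_set_cr4(vcpu, cr4);
		if (bad)
			return X86EMUL_UNHANDLEABLE;
		if (pcid) {
			bad = kvm_set_cr3(vcpu, cr3 | pcid);
			if (bad)
				return X86EMUL_UNHANDLEABLE;
		}
	}

	return X86EMUL_CONTINUE;
}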
471 struct kvm_vcpu *vcpu = ctxt->vcpu; in rsm_load_state_32() local
481 if (kvm_set_dr(vcpu, 6, smstate->dr6)) in rsm_load_state_32()
483 if (kvm_set_dr(vcpu, 7, smstate->dr7)) in rsm_load_state_32()
486 rsm_load_seg_32(vcpu, &smstate->tr, smstate->tr_sel, VCPU_SREG_TR); in rsm_load_state_32()
487 rsm_load_seg_32(vcpu, &smstate->ldtr, smstate->ldtr_sel, VCPU_SREG_LDTR); in rsm_load_state_32()
491 static_call(kvm_x86_set_gdt)(vcpu, &dt); in rsm_load_state_32()
495 static_call(kvm_x86_set_idt)(vcpu, &dt); in rsm_load_state_32()
497 rsm_load_seg_32(vcpu, &smstate->es, smstate->es_sel, VCPU_SREG_ES); in rsm_load_state_32()
498 rsm_load_seg_32(vcpu, &smstate->cs, smstate->cs_sel, VCPU_SREG_CS); in rsm_load_state_32()
499 rsm_load_seg_32(vcpu, &smstate->ss, smstate->ss_sel, VCPU_SREG_SS); in rsm_load_state_32()
501 rsm_load_seg_32(vcpu, &smstate->ds, smstate->ds_sel, VCPU_SREG_DS); in rsm_load_state_32()
502 rsm_load_seg_32(vcpu, &smstate->fs, smstate->fs_sel, VCPU_SREG_FS); in rsm_load_state_32()
503 rsm_load_seg_32(vcpu, &smstate->gs, smstate->gs_sel, VCPU_SREG_GS); in rsm_load_state_32()
505 vcpu->arch.smbase = smstate->smbase; in rsm_load_state_32()
507 r = rsm_enter_protected_mode(vcpu, smstate->cr0, in rsm_load_state_32()
513 static_call(kvm_x86_set_interrupt_shadow)(vcpu, 0); in rsm_load_state_32()
523 struct kvm_vcpu *vcpu = ctxt->vcpu; in rsm_load_state_64() local
533 if (kvm_set_dr(vcpu, 6, smstate->dr6)) in rsm_load_state_64()
535 if (kvm_set_dr(vcpu, 7, smstate->dr7)) in rsm_load_state_64()
538 vcpu->arch.smbase = smstate->smbase; in rsm_load_state_64()
540 if (kvm_set_msr(vcpu, MSR_EFER, smstate->efer & ~EFER_LMA)) in rsm_load_state_64()
543 rsm_load_seg_64(vcpu, &smstate->tr, VCPU_SREG_TR); in rsm_load_state_64()
547 static_call(kvm_x86_set_idt)(vcpu, &dt); in rsm_load_state_64()
549 rsm_load_seg_64(vcpu, &smstate->ldtr, VCPU_SREG_LDTR); in rsm_load_state_64()
553 static_call(kvm_x86_set_gdt)(vcpu, &dt); in rsm_load_state_64()
555 r = rsm_enter_protected_mode(vcpu, smstate->cr0, smstate->cr3, smstate->cr4); in rsm_load_state_64()
559 rsm_load_seg_64(vcpu, &smstate->es, VCPU_SREG_ES); in rsm_load_state_64()
560 rsm_load_seg_64(vcpu, &smstate->cs, VCPU_SREG_CS); in rsm_load_state_64()
561 rsm_load_seg_64(vcpu, &smstate->ss, VCPU_SREG_SS); in rsm_load_state_64()
562 rsm_load_seg_64(vcpu, &smstate->ds, VCPU_SREG_DS); in rsm_load_state_64()
563 rsm_load_seg_64(vcpu, &smstate->fs, VCPU_SREG_FS); in rsm_load_state_64()
564 rsm_load_seg_64(vcpu, &smstate->gs, VCPU_SREG_GS); in rsm_load_state_64()
566 static_call(kvm_x86_set_interrupt_shadow)(vcpu, 0); in rsm_load_state_64()
575 struct kvm_vcpu *vcpu = ctxt->vcpu; in emulator_leave_smm() local
581 smbase = vcpu->arch.smbase; in emulator_leave_smm()
583 ret = kvm_vcpu_read_guest(vcpu, smbase + 0xfe00, smram.bytes, sizeof(smram)); in emulator_leave_smm()
587 if ((vcpu->arch.hflags & HF_SMM_INSIDE_NMI_MASK) == 0) in emulator_leave_smm()
588 static_call(kvm_x86_set_nmi_mask)(vcpu, false); in emulator_leave_smm()
590 kvm_smm_changed(vcpu, false); in emulator_leave_smm()
594 * CR0/CR3/CR4/EFER. It's all a bit more complicated if the vCPU in emulator_leave_smm()
598 if (guest_cpuid_has(vcpu, X86_FEATURE_LM)) { in emulator_leave_smm()
603 cr4 = kvm_read_cr4(vcpu); in emulator_leave_smm()
605 kvm_set_cr4(vcpu, cr4 & ~X86_CR4_PCIDE); in emulator_leave_smm()
611 kvm_set_segment(vcpu, &cs_desc, VCPU_SREG_CS); in emulator_leave_smm()
616 cr0 = kvm_read_cr0(vcpu); in emulator_leave_smm()
618 kvm_set_cr0(vcpu, cr0 & ~(X86_CR0_PG | X86_CR0_PE)); in emulator_leave_smm()
621 if (guest_cpuid_has(vcpu, X86_FEATURE_LM)) { in emulator_leave_smm()
625 cr4 = kvm_read_cr4(vcpu); in emulator_leave_smm()
627 kvm_set_cr4(vcpu, cr4 & ~X86_CR4_PAE); in emulator_leave_smm()
631 kvm_set_msr(vcpu, MSR_EFER, efer); in emulator_leave_smm()
636 * Give leave_smm() a chance to make ISA-specific changes to the vCPU in emulator_leave_smm()
640 if (static_call(kvm_x86_leave_smm)(vcpu, &smram)) in emulator_leave_smm()
644 if (guest_cpuid_has(vcpu, X86_FEATURE_LM)) in emulator_leave_smm()
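The emulator_leave_smm() matches between the two CR4 reads describe the "drop back to real mode first" step: before the saved SMRAM image can be loaded, a long-mode-capable vCPU has to leave paging and long mode in a legal order. A hedged reconstruction of that stretch (the conditionals, the local cs_desc, and its attribute values are assumptions; only the reads and writes themselves appear in the matches):

	/* Zero CR4.PCIDE before CR0.PG can be cleared. */
	cr4 = kvm_read_cr4(vcpu);
	if (cr4 & X86_CR4_PCIDE)
		kvm_set_cr4(vcpu, cr4 & ~X86_CR4_PCIDE);

	/* Install a 32-bit code segment so that clearing CR0.PG also clears EFER.LMA. */
	memset(&cs_desc, 0, sizeof(cs_desc));
	cs_desc.type = 0xb;
	cs_desc.s = cs_desc.g = cs_desc.present = 1;
	kvm_set_segment(vcpu, &cs_desc, VCPU_SREG_CS);

	/* Leave paging and protected mode. */
	cr0 = kvm_read_cr0(vcpu);
	if (cr0 & X86_CR0_PE)
		kvm_set_cr0(vcpu, cr0 & ~(X86_CR0_PG | X86_CR0_PE));

	/* Clear CR4.PAE before dropping EFER.LME, then clear EFER entirely. */
	cr4 = kvm_read_cr4(vcpu);
	if (cr4 & X86_CR4_PAE)
		kvm_set_cr4(vcpu, cr4 & ~X86_CR4_PAE);
	efer = 0;
	kvm_set_msr(vcpu, MSR_EFER, efer);

Only after this does the vendor leave_smm() hook run, and the final X86_FEATURE_LM check above then selects between rsm_load_state_64() and rsm_load_state_32() to reload the saved state.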