--- emulate.c (95a0c7c2d6cfde3fb5fdb713428ed0df4d6bdd58)
+++ emulate.c (f5caf621ee357279e759c0911daf6d55c7d36f03)
 /******************************************************************************
  * emulate.c
  *
  * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
  *
  * Copyright (c) 2005 Keir Fraser
  *
  * Linux coding style, mod r/m decoder, segment base fixes, real-mode

--- 14 unchanged lines hidden ---

 #include <linux/kvm_host.h>
 #include "kvm_cache_regs.h"
 #include <asm/kvm_emulate.h>
 #include <linux/stringify.h>
 #include <asm/debugreg.h>

 #include "x86.h"
 #include "tss.h"
+#include "mmu.h"

 /*
  * Operand types
  */
 #define OpNone             0ull
 #define OpImplicit         1ull  /* No generic decode */
 #define OpReg              2ull  /* Register */
 #define OpMem              3ull  /* Memory */

--- 644 unchanged lines hidden ---

                                        bool write, bool fetch,
                                        enum x86emul_mode mode, ulong *linear)
 {
         struct desc_struct desc;
         bool usable;
         ulong la;
         u32 lim;
         u16 sel;
+        u8  va_bits;

         la = seg_base(ctxt, addr.seg) + addr.ea;
         *max_size = 0;
         switch (mode) {
         case X86EMUL_MODE_PROT64:
                 *linear = la;
-                if (is_noncanonical_address(la))
+                va_bits = ctxt_virt_addr_bits(ctxt);
+                if (get_canonical(la, va_bits) != la)
                         goto bad;

-                *max_size = min_t(u64, ~0u, (1ull << 48) - la);
+                *max_size = min_t(u64, ~0u, (1ull << va_bits) - la);
                 if (size > *max_size)
                         goto bad;
                 break;
         default:
                 *linear = la = (u32)la;
                 usable = ctxt->ops->get_segment(ctxt, &sel, &desc, NULL,
                                                 addr.seg);
                 if (!usable)
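
The hunk above stops hard-coding a 48-bit canonical check: with LA57 (5-level paging) a guest can have 57 virtual-address bits, so __linearize() now asks for this vCPU's width and checks canonicality, and clamps *max_size, against that boundary. A minimal sketch of the two helpers the new code calls, assuming their usual KVM definitions (the real ones live in KVM's x86 headers and elsewhere in emulate.c; shown here only for orientation):

static inline u64 get_canonical(u64 la, u8 vaddr_bits)
{
        /* sign-extend the low vaddr_bits bits; la is canonical iff unchanged */
        return ((s64)la << (64 - vaddr_bits)) >> (64 - vaddr_bits);
}

static inline u8 ctxt_virt_addr_bits(struct x86_emulate_ctxt *ctxt)
{
        /* 57 virtual-address bits when CR4.LA57 is set, 48 otherwise */
        return (ctxt->ops->get_cr(ctxt, 4) & X86_CR4_LA57) ? 57 : 48;
}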

--- 1034 unchanged lines hidden ---

                         if (ret != X86EMUL_CONTINUE)
                                 return ret;
                 }
         } else if (ctxt->mode == X86EMUL_MODE_PROT64) {
                 ret = ctxt->ops->read_std(ctxt, desc_addr+8, &base3,
                                 sizeof(base3), &ctxt->exception);
                 if (ret != X86EMUL_CONTINUE)
                         return ret;
-                if (is_noncanonical_address(get_desc_base(&seg_desc) |
-                                ((u64)base3 << 32)))
+                if (emul_is_noncanonical_address(get_desc_base(&seg_desc) |
+                                ((u64)base3 << 32), ctxt))
                         return emulate_gp(ctxt, 0);
         }
 load:
         ctxt->ops->set_segment(ctxt, selector, &seg_desc, base3, seg);
         if (desc)
                 *desc = seg_desc;
         return X86EMUL_CONTINUE;
 exception:
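
emul_is_noncanonical_address() is the context-aware replacement for the bare is_noncanonical_address() used before; the old helper had no access to the emulation context and therefore had to assume a fixed 48-bit virtual address space. A sketch of what the wrapper amounts to, assuming the helpers shown earlier:

static inline bool emul_is_noncanonical_address(u64 la,
                                                struct x86_emulate_ctxt *ctxt)
{
        /* non-canonical if sign-extending the guest's va width changes la */
        return get_canonical(la, ctxt_virt_addr_bits(ctxt)) != la;
}

The same substitution is made further down for the SYSEXIT RCX/RDX checks and for the LGDT/LIDT descriptor-table base check.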

--- 567 unchanged lines hidden ---

 }

 static int emulator_has_longmode(struct x86_emulate_ctxt *ctxt)
 {
         u32 eax, ebx, ecx, edx;

         eax = 0x80000001;
         ecx = 0;
-        ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx);
+        ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, false);
         return edx & bit(X86_FEATURE_LM);
 }

 #define GET_SMSTATE(type, smbase, offset)                                 \
         ({                                                                \
          type __val;                                                      \
          int r = ctxt->ops->read_phys(ctxt, smbase + offset, &__val,      \
                                       sizeof(__val));                     \
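
Every ->get_cpuid() call site in this diff gains a trailing bool. Judging only from the call sites shown here, the emulator's internal feature probes (long mode, vendor string, MOVBE, FXSR, MAXPHYADDR) pass false, while the guest-visible CPUID instruction in em_cpuid() passes true, and the callback's return value reports whether a matching leaf was found (the CR3 hunk below tests it). A sketch of the extended callback as it would sit in struct x86_emulate_ops; the parameter name check_limit and the exact return type are assumptions:

        bool (*get_cpuid)(struct x86_emulate_ctxt *ctxt,
                          u32 *eax, u32 *ebx, u32 *ecx, u32 *edx,
                          bool check_limit);   /* name assumed */

Presumably, when the flag is set, out-of-range leaves get the architectural CPUID leaf-limit fallback instead of a verbatim lookup.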

--- 286 unchanged lines hidden ---

         ss->avl = 0;
 }

 static bool vendor_intel(struct x86_emulate_ctxt *ctxt)
 {
         u32 eax, ebx, ecx, edx;

         eax = ecx = 0;
-        ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx);
+        ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, false);
         return ebx == X86EMUL_CPUID_VENDOR_GenuineIntel_ebx
                 && ecx == X86EMUL_CPUID_VENDOR_GenuineIntel_ecx
                 && edx == X86EMUL_CPUID_VENDOR_GenuineIntel_edx;
 }

 static bool em_syscall_is_enabled(struct x86_emulate_ctxt *ctxt)
 {
         const struct x86_emulate_ops *ops = ctxt->ops;
         u32 eax, ebx, ecx, edx;

         /*
          * syscall should always be enabled in longmode - so only become
          * vendor specific (cpuid) if other modes are active...
          */
         if (ctxt->mode == X86EMUL_MODE_PROT64)
                 return true;

         eax = 0x00000000;
         ecx = 0x00000000;
-        ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx);
+        ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, false);
         /*
          * Intel ("GenuineIntel")
          * remark: Intel CPUs only support "syscall" in 64bit
          * longmode. Also an 64bit guest with a
          * 32bit compat-app running will #UD !! While this
          * behaviour can be fixed (by emulating) into AMD
          * response - CPUs of AMD can't behave like Intel.
          */

--- 167 unchanged lines hidden ---

                 break;
         case X86EMUL_MODE_PROT64:
                 cs_sel = (u16)(msr_data + 32);
                 if (msr_data == 0x0)
                         return emulate_gp(ctxt, 0);
                 ss_sel = cs_sel + 8;
                 cs.d = 0;
                 cs.l = 1;
-                if (is_noncanonical_address(rcx) ||
-                    is_noncanonical_address(rdx))
+                if (emul_is_noncanonical_address(rcx, ctxt) ||
+                    emul_is_noncanonical_address(rdx, ctxt))
                         return emulate_gp(ctxt, 0);
                 break;
         }
         cs_sel |= SEGMENT_RPL_MASK;
         ss_sel |= SEGMENT_RPL_MASK;

         ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
         ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);

--- 693 unchanged lines hidden ---

 static int em_movbe(struct x86_emulate_ctxt *ctxt)
 {
         u32 ebx, ecx, edx, eax = 1;
         u16 tmp;

         /*
          * Check MOVBE is set in the guest-visible CPUID leaf.
          */
-        ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx);
+        ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, false);
         if (!(ecx & FFL(MOVBE)))
                 return emulate_ud(ctxt);

         switch (ctxt->op_bytes) {
         case 2:
                 /*
                  * From MOVBE definition: "...When the operand size is 16 bits,
                  * the upper word of the destination register remains unchanged

--- 188 unchanged lines hidden ---

         if (ctxt->mode == X86EMUL_MODE_PROT64)
                 ctxt->op_bytes = 8;
         rc = read_descriptor(ctxt, ctxt->src.addr.mem,
                              &desc_ptr.size, &desc_ptr.address,
                              ctxt->op_bytes);
         if (rc != X86EMUL_CONTINUE)
                 return rc;
         if (ctxt->mode == X86EMUL_MODE_PROT64 &&
-            is_noncanonical_address(desc_ptr.address))
+            emul_is_noncanonical_address(desc_ptr.address, ctxt))
                 return emulate_gp(ctxt, 0);
         if (lgdt)
                 ctxt->ops->set_gdt(ctxt, &desc_ptr);
         else
                 ctxt->ops->set_idt(ctxt, &desc_ptr);
         /* Disable writeback. */
         ctxt->dst.type = OP_NONE;
         return X86EMUL_CONTINUE;

--- 92 unchanged lines hidden ---

         ctxt->ops->get_msr(ctxt, MSR_MISC_FEATURES_ENABLES, &msr);
         if (msr & MSR_MISC_FEATURES_ENABLES_CPUID_FAULT &&
             ctxt->ops->cpl(ctxt)) {
                 return emulate_gp(ctxt, 0);
         }

         eax = reg_read(ctxt, VCPU_REGS_RAX);
         ecx = reg_read(ctxt, VCPU_REGS_RCX);
-        ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx);
+        ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, true);
         *reg_write(ctxt, VCPU_REGS_RAX) = eax;
         *reg_write(ctxt, VCPU_REGS_RBX) = ebx;
         *reg_write(ctxt, VCPU_REGS_RCX) = ecx;
         *reg_write(ctxt, VCPU_REGS_RDX) = edx;
         return X86EMUL_CONTINUE;
 }

 static int em_sahf(struct x86_emulate_ctxt *ctxt)

--- 42 unchanged lines hidden ---

         ctxt->dst.val = (s32) ctxt->src.val;
         return X86EMUL_CONTINUE;
 }

 static int check_fxsr(struct x86_emulate_ctxt *ctxt)
 {
         u32 eax = 1, ebx, ecx = 0, edx;

-        ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx);
+        ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, false);
         if (!(edx & FFL(FXSR)))
                 return emulate_ud(ctxt);

         if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
                 return emulate_nm(ctxt);

         /*
          * Don't emulate a case that should never be hit, instead of working

--- 156 unchanged lines hidden ---

                         return emulate_gp(ctxt, 0);

                 break;
         }
         case 3: {
                 u64 rsvd = 0;

                 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
-                if (efer & EFER_LMA)
-                        rsvd = CR3_L_MODE_RESERVED_BITS & ~CR3_PCID_INVD;
+                if (efer & EFER_LMA) {
+                        u64 maxphyaddr;
+                        u32 eax = 0x80000008;

+                        if (ctxt->ops->get_cpuid(ctxt, &eax, NULL, NULL,
+                                                 NULL, false))
+                                maxphyaddr = eax & 0xff;
+                        else
+                                maxphyaddr = 36;
+                        rsvd = rsvd_bits(maxphyaddr, 62);
+                }
+
                 if (new_val & rsvd)
                         return emulate_gp(ctxt, 0);

                 break;
         }
         case 4: {
                 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);

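
Rather than the fixed CR3_L_MODE_RESERVED_BITS mask, the reserved-bit check for a long-mode CR3 load is now derived from the guest's physical-address width: CPUID leaf 0x80000008 reports MAXPHYADDR in EAX[7:0], 36 is used as the fallback when the leaf is unavailable, and bits [MAXPHYADDR, 62] are treated as reserved (bit 63 stays writable, matching the old mask's & ~CR3_PCID_INVD carve-out for the PCID no-flush bit). rsvd_bits() is what the #include "mmu.h" added at the top of this diff brings in; a sketch of its usual definition plus a worked example:

/* build a mask with bits s..e (inclusive) set */
static inline u64 rsvd_bits(int s, int e)
{
        return ((1ULL << (e - s + 1)) - 1) << s;
}

/* e.g. for a guest reporting MAXPHYADDR = 46:          */
/*     rsvd_bits(46, 62) == 0x7fffc00000000000          */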

--- 1168 unchanged lines hidden ---

                                       struct operand *op)
 {
         if (op->type == OP_MM)
                 read_mmx_reg(ctxt, &op->mm_val, op->addr.mm);
 }

 static int fastop(struct x86_emulate_ctxt *ctxt, void (*fop)(struct fastop *))
 {
-        register void *__sp asm(_ASM_SP);
         ulong flags = (ctxt->eflags & EFLAGS_MASK) | X86_EFLAGS_IF;

         if (!(ctxt->d & ByteOp))
                 fop += __ffs(ctxt->dst.bytes) * FASTOP_SIZE;

         asm("push %[flags]; popf; call *%[fastop]; pushf; pop %[flags]\n"
             : "+a"(ctxt->dst.val), "+d"(ctxt->src.val), [flags]"+D"(flags),
-              [fastop]"+S"(fop), "+r"(__sp)
+              [fastop]"+S"(fop), ASM_CALL_CONSTRAINT
5296 : "c"(ctxt->src2.val));
5297
5298 ctxt->eflags = (ctxt->eflags & ~EFLAGS_MASK) | (flags & EFLAGS_MASK);
5299 if (!fop) /* exception is returned in fop variable */
5300 return emulate_de(ctxt);
5301 return X86EMUL_CONTINUE;
5302}
5303
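The inline asm in fastop() contains a call instruction, so the hand-rolled stack-pointer operand (a local register void *__sp asm(_ASM_SP) passed as "+r"(__sp)) is dropped in favour of the generic ASM_CALL_CONSTRAINT output constraint. A sketch of what that macro boils down to in the x86 asm headers; treat the details as illustrative rather than a definition:

/* a global register variable pinned to the stack pointer register */
register unsigned long current_stack_pointer asm(_ASM_SP);

/*
 * Listing it as an in/out operand of any inline asm that performs a call
 * keeps the compiler (GCC, and especially Clang) from scheduling the asm
 * before the function's stack frame has been set up.
 */
#define ASM_CALL_CONSTRAINT "+r" (current_stack_pointer)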

--- 384 unchanged lines hidden ---
5307 : "c"(ctxt->src2.val));
5308
5309 ctxt->eflags = (ctxt->eflags & ~EFLAGS_MASK) | (flags & EFLAGS_MASK);
5310 if (!fop) /* exception is returned in fop variable */
5311 return emulate_de(ctxt);
5312 return X86EMUL_CONTINUE;
5313}
5314

--- 384 unchanged lines hidden ---