#include "qemu/host-utils.h"
#include "exec/exec-all.h"
#include "tcg/tcg-op.h"
#include "tcg/tcg-op-gvec.h"
#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "helper-tcg.h"
#include "decode-new.h"

#include "exec/helper-info.c.inc"
    int8_t override; /* -1 if no override, else R_CS, R_DS, etc. */
    uint8_t vex_v;   /* vex vvvv register, without 1's complement. */
    bool vex_w;      /* used by AVX even on 32-bit processors */
/* The environment in which user-only runs is constrained. */
#ifdef CONFIG_USER_ONLY
#define PE(S)     true
#define CPL(S)    3
#define IOPL(S)   0
#define SVME(S)   false
#define GUEST(S)  false
#else
#define PE(S)     (((S)->flags & HF_PE_MASK) != 0)
#define CPL(S)    ((S)->cpl)
#define IOPL(S)   ((S)->iopl)
#define SVME(S)   (((S)->flags & HF_SVME_MASK) != 0)
#define GUEST(S)  (((S)->flags & HF_GUEST_MASK) != 0)
#endif

#if defined(CONFIG_USER_ONLY) && defined(TARGET_X86_64)
#define VM86(S)   false
#define CODE32(S) true
#define SS32(S)   true
#define ADDSEG(S) false
#else
#define VM86(S)   (((S)->flags & HF_VM_MASK) != 0)
#define CODE32(S) (((S)->flags & HF_CS32_MASK) != 0)
#define SS32(S)   (((S)->flags & HF_SS32_MASK) != 0)
#define ADDSEG(S) (((S)->flags & HF_ADDSEG_MASK) != 0)
#endif

#if !defined(TARGET_X86_64)
#define CODE64(S) false
#elif defined(CONFIG_USER_ONLY)
#define CODE64(S) true
#else
#define CODE64(S) (((S)->flags & HF_CS64_MASK) != 0)
#endif

#ifdef TARGET_X86_64
#define LMA(S)    (((S)->flags & HF_LMA_MASK) != 0)
#else
#define LMA(S)    false
#endif

#ifdef TARGET_X86_64
#define REX_PREFIX(S)  (((S)->prefix & PREFIX_REX) != 0)
#define REX_W(S)       ((S)->vex_w)
#define REX_R(S)       ((S)->rex_r + 0)
#define REX_X(S)       ((S)->rex_x + 0)
#define REX_B(S)       ((S)->rex_b + 0)
#else
#define REX_PREFIX(S)  false
#define REX_W(S)       false
#define REX_R(S)       0
#define REX_X(S)       0
#define REX_B(S)       0
#endif
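/*
 * Note on the macros above (preprocessor guards restored from the obvious
 * pairing of constant and flags-based definitions): whenever a definition
 * collapses to a constant, e.g. in a 64-bit user-only build, the compiler
 * can constant-fold entire mode checks in the translator, so a test like
 * "if (!CODE64(s))" disappears completely from the generated binary.
 */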
/*
 * Many sysemu-only helpers are not reachable for user-only.
 */

static void gen_jmp_rel(DisasContext *s, MemOp ot, int diff, int tb_num);
static void gen_jmp_rel_csize(DisasContext *s, int diff, int tb_num);
static void gen_exception_gpf(DisasContext *s);
static void set_cc_op_1(DisasContext *s, CCOp op, bool dirty)
{
    int dead;

    if (s->cc_op == op) {
        return;
    }

    /* Discard CC computation that will no longer be used. */
    dead = cc_op_live(s->cc_op) & ~cc_op_live(op);
    /* ... discard the dead cc_dst/cc_src/cc_src2 temporaries ... */
    if (dead & USES_CC_SRCT) {
        tcg_gen_discard_tl(s->cc_srcT);
    }

    if (dirty && s->cc_op == CC_OP_DYNAMIC) {
        tcg_gen_discard_i32(cpu_cc_op);
    }
    s->cc_op_dirty = dirty;
    s->cc_op = op;
}

static void set_cc_op(DisasContext *s, CCOp op)
{
    set_cc_op_1(s, op, op != CC_OP_DYNAMIC);
}

static void assume_cc_op(DisasContext *s, CCOp op)
{
    set_cc_op_1(s, op, false);
}

static void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op_dirty) {
        tcg_gen_movi_i32(cpu_cc_op, s->cc_op);
        s->cc_op_dirty = false;
    }
}
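/*
 * The pattern above implements QEMU's lazy condition codes: instead of
 * materializing EFLAGS after every arithmetic operation, the translator
 * records which operation last set the flags (s->cc_op) together with its
 * operands, and only spills cc_op to the CPUX86State field (via
 * gen_update_cc_op) when translated code may be left or when the flags
 * must be computed dynamically.
 */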
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
/*
 * ... [AH, CH, DH, BH], ie "bits 15..8 of register N-4".  Return
 * true when the encoding designates such a high-byte register.
 */
static inline bool byte_reg_is_xH(DisasContext *s, int reg)
{
    /* Any time the REX prefix is present, byte registers are uniform. */
    if (reg < 4 || REX_PREFIX(s)) {
        return false;
    }
    return true;
}

/* Select the size of a push/pop operation. */
static inline MemOp mo_pushpop(DisasContext *s, MemOp ot)
{
    if (CODE64(s)) {
        return ot == MO_16 ? MO_16 : MO_64;
    } else {
        return ot;
    }
}

/* Select the size of the stack pointer. */
static inline MemOp mo_stacksize(DisasContext *s)
{
    return CODE64(s) ? MO_64 : SS32(s) ? MO_32 : MO_16;
}
/*
 * Compute the result of writing t0 to the OT-sized register REG.
 *
 * If DEST is NULL, store the result into the register and return the
 * register's TCGv.
 *
 * If DEST is not NULL, store the result into DEST and return the
 * register's TCGv.
 */
static TCGv gen_op_deposit_reg_v(DisasContext *s, MemOp ot, int reg, TCGv dest, TCGv t0)
{
    /* ... MO_8 case ... */
    if (byte_reg_is_xH(s, reg)) {
        dest = dest ? dest : cpu_regs[reg - 4];
        tcg_gen_deposit_tl(dest, cpu_regs[reg - 4], t0, 8, 8);
        return cpu_regs[reg - 4];
    }
    /* ... remaining operand sizes elided ... */
}
static void gen_op_mov_reg_v(DisasContext *s, MemOp ot, int reg, TCGv t0)
{
    gen_op_deposit_reg_v(s, ot, reg, NULL, t0);
}

void gen_op_mov_v_reg(DisasContext *s, MemOp ot, TCGv t0, int reg)
{
    if (ot == MO_8 && byte_reg_is_xH(s, reg)) {
        tcg_gen_extract_tl(t0, cpu_regs[reg - 4], 8, 8);
    } else {
        tcg_gen_mov_tl(t0, cpu_regs[reg]);
    }
}
static void gen_add_A0_im(DisasContext *s, int val)
{
    tcg_gen_addi_tl(s->A0, s->A0, val);
    if (!CODE64(s)) {
        tcg_gen_ext32u_tl(s->A0, s->A0);
    }
}

static inline void gen_op_jmp_v(DisasContext *s, TCGv dest)
{
    tcg_gen_mov_tl(cpu_eip, dest);
    s->pc_save = -1;
}
void gen_op_add_reg_im(DisasContext *s, MemOp size, int reg, int32_t val)
{
    tcg_gen_addi_tl(s->tmp0, cpu_regs[reg], val);
    gen_op_mov_reg_v(s, size, reg, s->tmp0);
}

static inline void gen_op_add_reg(DisasContext *s, MemOp size, int reg, TCGv val)
{
    tcg_gen_add_tl(s->tmp0, cpu_regs[reg], val);
    gen_op_mov_reg_v(s, size, reg, s->tmp0);
}

static inline void gen_op_ld_v(DisasContext *s, int idx, TCGv t0, TCGv a0)
{
    tcg_gen_qemu_ld_tl(t0, a0, s->mem_index, idx | MO_LE);
}

static inline void gen_op_st_v(DisasContext *s, int idx, TCGv t0, TCGv a0)
{
    tcg_gen_qemu_st_tl(t0, a0, s->mem_index, idx | MO_LE);
}
static void gen_update_eip_next(DisasContext *s)
{
    assert(s->pc_save != -1);
    if (tb_cflags(s->base.tb) & CF_PCREL) {
        tcg_gen_addi_tl(cpu_eip, cpu_eip, s->pc - s->pc_save);
    } else if (CODE64(s)) {
        tcg_gen_movi_tl(cpu_eip, s->pc);
    } else {
        tcg_gen_movi_tl(cpu_eip, (uint32_t)(s->pc - s->cs_base));
    }
    s->pc_save = s->pc;
}

static void gen_update_eip_cur(DisasContext *s)
{
    assert(s->pc_save != -1);
    if (tb_cflags(s->base.tb) & CF_PCREL) {
        tcg_gen_addi_tl(cpu_eip, cpu_eip, s->base.pc_next - s->pc_save);
    } else if (CODE64(s)) {
        tcg_gen_movi_tl(cpu_eip, s->base.pc_next);
    } else {
        tcg_gen_movi_tl(cpu_eip, (uint32_t)(s->base.pc_next - s->cs_base));
    }
    s->pc_save = s->base.pc_next;
}

static int cur_insn_len(DisasContext *s)
{
    return s->pc - s->base.pc_next;
}

static TCGv_i32 cur_insn_len_i32(DisasContext *s)
{
    return tcg_constant_i32(cur_insn_len(s));
}
static TCGv_i32 eip_next_i32(DisasContext *s)
{
    assert(s->pc_save != -1);
    /*
     * This function has two users: lcall_real (always 16-bit mode), and
     * iret_protected (16, 32, or 64-bit mode).  IRET only uses the value
     * when EFLAGS.NT is set, which is illegal in 64-bit mode, which is
     * why passing a 32-bit value isn't broken.  To avoid using this where
     * we shouldn't, return -1 in 64-bit mode so that execution goes into
     * the weeds quickly.
     */
    if (CODE64(s)) {
        return tcg_constant_i32(-1);
    }
    if (tb_cflags(s->base.tb) & CF_PCREL) {
        TCGv_i32 ret = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(ret, cpu_eip);
        tcg_gen_addi_i32(ret, ret, s->pc - s->pc_save);
        return ret;
    } else {
        return tcg_constant_i32(s->pc - s->cs_base);
    }
}
static TCGv eip_next_tl(DisasContext *s)
{
    assert(s->pc_save != -1);
    if (tb_cflags(s->base.tb) & CF_PCREL) {
        TCGv ret = tcg_temp_new();
        tcg_gen_addi_tl(ret, cpu_eip, s->pc - s->pc_save);
        return ret;
    } else if (CODE64(s)) {
        return tcg_constant_tl(s->pc);
    } else {
        return tcg_constant_tl((uint32_t)(s->pc - s->cs_base));
    }
}

static TCGv eip_cur_tl(DisasContext *s)
{
    assert(s->pc_save != -1);
    if (tb_cflags(s->base.tb) & CF_PCREL) {
        TCGv ret = tcg_temp_new();
        tcg_gen_addi_tl(ret, cpu_eip, s->base.pc_next - s->pc_save);
        return ret;
    } else if (CODE64(s)) {
        return tcg_constant_tl(s->base.pc_next);
    } else {
        return tcg_constant_tl((uint32_t)(s->base.pc_next - s->cs_base));
    }
}
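/*
 * All of the eip helpers above share one invariant: s->pc_save tracks the
 * guest PC value that cpu_eip is known to hold at this point in the TB.
 * With CF_PCREL, EIP updates are then emitted as relative additions,
 * keeping the generated code position-independent so a TB can be reused
 * at different virtual addresses.
 */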
/*
 * Compute SEG:REG into DEST.  SEG is selected from the override segment
 * (OVR_SEG) and the default segment (DEF_SEG).  OVR_SEG may be -1 to
 * indicate no override.
 */
static void gen_lea_v_seg_dest(DisasContext *s, MemOp aflag, TCGv dest, TCGv a0,
                               int def_seg, int ovr_seg)
{
    /* ... 32-bit addressing: the default segment applies only with ADDSEG ... */
    if (ovr_seg < 0 && ADDSEG(s)) {
        ovr_seg = def_seg;
    }
    /* ... 16-bit addressing always consults ADDSEG ... */
    if (ADDSEG(s)) {
        ovr_seg = def_seg;
    }
    /*
     * ... finally the segment base is added; the result is kept
     * zero-extended to 32 bits except with 64-bit addressing, with a
     * dedicated "} else if (CODE64(s))" arm for 32-bit addresses in
     * long mode ...
     */
}

static void gen_lea_v_seg(DisasContext *s, TCGv a0,
                          int def_seg, int ovr_seg)
{
    gen_lea_v_seg_dest(s, s->aflag, s->A0, a0, def_seg, ovr_seg);
}

static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    gen_lea_v_seg(s, cpu_regs[R_ESI], R_DS, s->override);
}

static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
    gen_lea_v_seg(s, cpu_regs[R_EDI], R_ES, -1);
}

static inline TCGv gen_compute_Dshift(DisasContext *s, MemOp ot)
{
    /* ... return +/-(1 << ot) according to EFLAGS.DF; body elided ... */
}
static void gen_op_j_ecx(DisasContext *s, TCGCond cond, TCGLabel *label1)
{
    TCGv tmp = gen_ext_tl(NULL, cpu_regs[R_ECX], s->aflag, false);

    tcg_gen_brcondi_tl(cond, tmp, 0, label1);
}

static inline void gen_op_jz_ecx(DisasContext *s, TCGLabel *label1)
{
    gen_op_j_ecx(s, TCG_COND_EQ, label1);
}

static inline void gen_op_jnz_ecx(DisasContext *s, TCGLabel *label1)
{
    gen_op_j_ecx(s, TCG_COND_NE, label1);
}

/* Check I/O permissions and SVM intercepts for an OT-sized access to
   the given port; returns false if the instruction must be aborted. */
static bool gen_check_io(DisasContext *s, MemOp ot, TCGv_i32 port,
                         uint32_t svm_flags)
{
#ifdef CONFIG_USER_ONLY
    /* I/O port permissions are not emulated for user-only. */
    gen_exception_gpf(s);
    return false;
#else
    if (PE(s) && (CPL(s) > IOPL(s) || VM86(s))) {
        gen_helper_check_io(tcg_env, port, tcg_constant_i32(1 << ot));
    }
    if (GUEST(s)) {
        gen_update_cc_op(s);
        gen_update_eip_cur(s);
        if (s->prefix & (PREFIX_REPZ | PREFIX_REPNZ)) {
            svm_flags |= SVM_IOIO_REP_MASK;
        }
        /* ... */
        gen_helper_svm_check_io(tcg_env, port,
                                tcg_constant_i32(svm_flags),
                                cur_insn_len_i32(s));
    }
    return true;
#endif
}
static void gen_movs(DisasContext *s, MemOp ot)
{
    TCGv dshift;

    gen_string_movl_A0_ESI(s);
    gen_op_ld_v(s, ot, s->T0, s->A0);
    gen_string_movl_A0_EDI(s);
    gen_op_st_v(s, ot, s->T0, s->A0);

    dshift = gen_compute_Dshift(s, ot);
    gen_op_add_reg(s, s->aflag, R_ESI, dshift);
    gen_op_add_reg(s, s->aflag, R_EDI, dshift);
}
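/*
 * All the string instructions (MOVS above, STOS/LODS/SCAS/CMPS below)
 * follow the same shape: compute DS:ESI and/or ES:EDI into A0, perform
 * the access, then advance ESI/EDI by the DF-dependent "dshift" of
 * +/-(1 << ot).
 */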
/* Compute all of EFLAGS into reg. */
static void gen_mov_eflags(DisasContext *s, TCGv reg)
{
    TCGv dst, src1, src2;
    TCGv_i32 cc_op;
    int live;

    if (s->cc_op == CC_OP_EFLAGS) {
        tcg_gen_mov_tl(reg, cpu_cc_src);
        return;
    }

    /* Take care to not read values that are not live. */
    live = cc_op_live(s->cc_op) & ~USES_CC_SRCT;
    /* ... substitute zero for any dead operand before calling the helper ... */

    if (s->cc_op != CC_OP_DYNAMIC) {
        cc_op = tcg_constant_i32(s->cc_op);
    } else {
        cc_op = cpu_cc_op;
    }
    gen_helper_cc_compute_all(reg, dst, src1, src2, cc_op);
}

static void gen_compute_eflags(DisasContext *s)
{
    gen_mov_eflags(s, cpu_cc_src);
    set_cc_op(s, CC_OP_EFLAGS);
}

static CCPrepare gen_prepare_sign_nz(TCGv src, MemOp size)
{
    /* ... */
    return (CCPrepare) { .cond = TCG_COND_TSTNE, .reg = src,
                         .imm = 1ull << ((8 << size) - 1) };
}
/* compute eflags.C, trying to store it in reg if not NULL */
static CCPrepare gen_prepare_eflags_c(DisasContext *s, TCGv reg)
{
    MemOp size;

    switch (s->cc_op) {
    case CC_OP_SUBB ... CC_OP_SUBQ:
        /* (DATA_TYPE)CC_SRCT < (DATA_TYPE)CC_SRC */
        size = s->cc_op - CC_OP_SUBB;
        tcg_gen_ext_tl(s->cc_srcT, s->cc_srcT, size);
        /* ... */
        return (CCPrepare) { .cond = TCG_COND_LTU, .reg = s->cc_srcT,
                             .reg2 = cpu_cc_src, .use_reg2 = true };

    case CC_OP_ADDB ... CC_OP_ADDQ:
        size = cc_op_size(s->cc_op);
        /* ... */

    case CC_OP_SHLB ... CC_OP_SHLQ:
        /* (CC_SRC >> (DATA_BITS - 1)) & 1 */
        size = cc_op_size(s->cc_op);
        return gen_prepare_sign_nz(cpu_cc_src, size);

    /* ... further cases, each keyed on size = cc_op_size(s->cc_op) ... */

    case CC_OP_DYNAMIC:
        gen_update_cc_op(s);
        /* ... fall back on the cc helper ... */
    }
}
/* compute eflags.P, trying to store it in reg if not NULL */
static CCPrepare gen_prepare_eflags_p(DisasContext *s, TCGv reg)
{
    gen_compute_eflags(s);
    return (CCPrepare) { .cond = TCG_COND_TSTNE, .reg = cpu_cc_src,
                         .imm = CC_P };
}

/* compute eflags.S, trying to store it in reg if not NULL */
static CCPrepare gen_prepare_eflags_s(DisasContext *s, TCGv reg)
{
    switch (s->cc_op) {
    case CC_OP_DYNAMIC:
        gen_compute_eflags(s);
        /* FALLTHRU */
    case CC_OP_EFLAGS:
        /* ... test CC_S in cpu_cc_src ... */
    default:
        return gen_prepare_sign_nz(cpu_cc_dst, cc_op_size(s->cc_op));
    }
}

/* compute eflags.O, trying to store it in reg if not NULL */
static CCPrepare gen_prepare_eflags_o(DisasContext *s, TCGv reg)
{
    switch (s->cc_op) {
    /* ... cases where the overflow bit is directly available elided ... */
    default:
        gen_compute_eflags(s);
        /* ... test CC_O in cpu_cc_src ... */
    }
}

/* compute eflags.Z, trying to store it in reg if not NULL */
static CCPrepare gen_prepare_eflags_z(DisasContext *s, TCGv reg)
{
    switch (s->cc_op) {
    case CC_OP_DYNAMIC:
        gen_update_cc_op(s);
        /* ... */
    default:
        {
            MemOp size = cc_op_size(s->cc_op);
            /* ... test whether cpu_cc_dst is zero at this operand size ... */
        }
    }
}
/* return how to compute jump opcode 'b'.  'cc_op' must be set to insn cc_op */
static CCPrepare gen_prepare_cc(DisasContext *s, int b, TCGv reg)
{
    int inv, jcc_op, cond;
    MemOp size;
    CCPrepare cc;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    switch (s->cc_op) {
    case CC_OP_SUBB ... CC_OP_SUBQ:
        /* We optimize relational operators for the cmp/jcc case. */
        size = cc_op_size(s->cc_op);
        switch (jcc_op) {
        case JCC_BE:
            tcg_gen_ext_tl(s->cc_srcT, s->cc_srcT, size);
            /* ... */
            cc = (CCPrepare) { .cond = TCG_COND_LEU, .reg = s->cc_srcT,
                               .reg2 = cpu_cc_src, .use_reg2 = true };
            break;
        case JCC_L:
            cond = TCG_COND_LT;
            goto fast_jcc_l;
        case JCC_LE:
            cond = TCG_COND_LE;
        fast_jcc_l:
            tcg_gen_ext_tl(s->cc_srcT, s->cc_srcT, size | MO_SIGN);
            /* ... */
            cc = (CCPrepare) { .cond = cond, .reg = s->cc_srcT,
                               .reg2 = cpu_cc_src, .use_reg2 = true };
            break;
        default:
            goto slow_jcc;
        }
        break;

    case CC_OP_LOGICB ... CC_OP_LOGICQ:
        /* Mostly used for test+jump */
        size = s->cc_op - CC_OP_LOGICB;
        /* ... */

    default:
    slow_jcc:
        /* This actually generates good code for JC, JZ and JS. */
        switch (jcc_op) {
        case JCC_O:
            cc = gen_prepare_eflags_o(s, reg);
            break;
        case JCC_B:
            cc = gen_prepare_eflags_c(s, reg);
            break;
        case JCC_Z:
            cc = gen_prepare_eflags_z(s, reg);
            break;
        case JCC_BE:
            gen_compute_eflags(s);
            /* ... test CC_Z | CC_C in cpu_cc_src ... */
            break;
        case JCC_S:
            cc = gen_prepare_eflags_s(s, reg);
            break;
        case JCC_P:
            cc = gen_prepare_eflags_p(s, reg);
            break;
        case JCC_L:
            gen_compute_eflags(s);
            /* ... */
            tcg_gen_addi_tl(reg, cpu_cc_src, CC_O - CC_S);
            /* ... */
            break;
        default:
        case JCC_LE:
            gen_compute_eflags(s);
            /* ... */
            tcg_gen_addi_tl(reg, cpu_cc_src, CC_O - CC_S);
            /* ... */
            break;
        }
        break;
    }

    if (inv) {
        cc.cond = tcg_invert_cond(cc.cond);
    }
    return cc;
}
static void gen_setcc1(DisasContext *s, int b, TCGv reg)
{
    CCPrepare cc = gen_prepare_cc(s, b, reg);
    /* ... emit setcond to materialize cc into reg as 0/1 ... */
}

/* compute eflags.C into reg */
static inline void gen_compute_eflags_c(DisasContext *s, TCGv reg)
{
    gen_setcc1(s, JCC_B << 1, reg);
}

/* Generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. */
static inline void gen_jcc1_noeob(DisasContext *s, int b, TCGLabel *l1)
{
    CCPrepare cc = gen_prepare_cc(s, b, NULL);
    /* ... emit brcond on cc ... */
}

/* Like gen_jcc1_noeob, but one or both branches will call gen_jmp_rel,
   so make sure cc_op is clean first. */
static inline void gen_jcc1(DisasContext *s, int b, TCGLabel *l1)
{
    CCPrepare cc = gen_prepare_cc(s, b, NULL);

    gen_update_cc_op(s);
    /* ... emit brcond on cc ... */
}
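/*
 * CCPrepare is the glue between the lazy-flags machinery and its
 * consumers: gen_prepare_cc() reduces "jump opcode b under the current
 * cc_op" to a TCG condition plus one or two registers and an immediate,
 * which gen_setcc1/gen_jcc1 then turn into a single setcond or brcond
 * without materializing the full EFLAGS value in the common case.
 */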
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem. */
static TCGLabel *gen_jz_ecx_string(DisasContext *s)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();

    gen_update_cc_op(s);
    gen_op_jnz_ecx(s, l1);
    gen_set_label(l2);
    gen_jmp_rel_csize(s, 0, 1);
    gen_set_label(l1);
    return l2;
}
static void gen_stos(DisasContext *s, MemOp ot)
{
    gen_string_movl_A0_EDI(s);
    gen_op_st_v(s, ot, s->T0, s->A0);
    gen_op_add_reg(s, s->aflag, R_EDI, gen_compute_Dshift(s, ot));
}

static void gen_lods(DisasContext *s, MemOp ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_v(s, ot, s->T0, s->A0);
    gen_op_mov_reg_v(s, ot, R_EAX, s->T0);
    gen_op_add_reg(s, s->aflag, R_ESI, gen_compute_Dshift(s, ot));
}

static void gen_scas(DisasContext *s, MemOp ot)
{
    gen_string_movl_A0_EDI(s);
    gen_op_ld_v(s, ot, s->T1, s->A0);
    tcg_gen_mov_tl(cpu_cc_src, s->T1);
    tcg_gen_mov_tl(s->cc_srcT, s->T0);
    tcg_gen_sub_tl(cpu_cc_dst, s->T0, s->T1);
    set_cc_op(s, CC_OP_SUBB + ot);

    gen_op_add_reg(s, s->aflag, R_EDI, gen_compute_Dshift(s, ot));
}

static void gen_cmps(DisasContext *s, MemOp ot)
{
    TCGv dshift;

    gen_string_movl_A0_EDI(s);
    gen_op_ld_v(s, ot, s->T1, s->A0);
    gen_string_movl_A0_ESI(s);
    gen_op_ld_v(s, ot, s->T0, s->A0);
    tcg_gen_mov_tl(cpu_cc_src, s->T1);
    tcg_gen_mov_tl(s->cc_srcT, s->T0);
    tcg_gen_sub_tl(cpu_cc_dst, s->T0, s->T1);
    set_cc_op(s, CC_OP_SUBB + ot);

    dshift = gen_compute_Dshift(s, ot);
    gen_op_add_reg(s, s->aflag, R_ESI, dshift);
    gen_op_add_reg(s, s->aflag, R_EDI, dshift);
}
static void gen_bpt_io(DisasContext *s, TCGv_i32 t_port, int ot)
{
    if (s->flags & HF_IOBPT_MASK) {
#ifdef CONFIG_USER_ONLY
        /* user-mode cpu should not be in IOBPT mode */
        g_assert_not_reached();
#else
        TCGv_i32 t_size = tcg_constant_i32(1 << ot);
        TCGv t_next = eip_next_tl(s);
        gen_helper_bpt_io(tcg_env, t_port, t_size, t_next);
#endif /* CONFIG_USER_ONLY */
    }
}

static void gen_ins(DisasContext *s, MemOp ot)
{
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    tcg_gen_movi_tl(s->T0, 0);
    gen_op_st_v(s, ot, s->T0, s->A0);
    tcg_gen_trunc_tl_i32(s->tmp2_i32, cpu_regs[R_EDX]);
    tcg_gen_andi_i32(s->tmp2_i32, s->tmp2_i32, 0xffff);
    gen_helper_in_func(ot, s->T0, s->tmp2_i32);
    gen_op_st_v(s, ot, s->T0, s->A0);
    gen_op_add_reg(s, s->aflag, R_EDI, gen_compute_Dshift(s, ot));
    gen_bpt_io(s, s->tmp2_i32, ot);
}

static void gen_outs(DisasContext *s, MemOp ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_v(s, ot, s->T0, s->A0);

    tcg_gen_trunc_tl_i32(s->tmp2_i32, cpu_regs[R_EDX]);
    tcg_gen_andi_i32(s->tmp2_i32, s->tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(s->tmp3_i32, s->T0);
    gen_helper_out_func(ot, s->tmp2_i32, s->tmp3_i32);
    gen_op_add_reg(s, s->aflag, R_ESI, gen_compute_Dshift(s, ot));
    gen_bpt_io(s, s->tmp2_i32, ot);
}
static void gen_repz(DisasContext *s, MemOp ot,
                     void (*fn)(DisasContext *s, MemOp ot))
{
    TCGLabel *l2;

    l2 = gen_jz_ecx_string(s);
    fn(s, ot);
    gen_op_add_reg_im(s, s->aflag, R_ECX, -1);
    /*
     * A loop would cause two single step exceptions if ECX == 1
     * before rep string_insn.
     */
    if (s->repz_opt) {
        gen_op_jz_ecx(s, l2);
    }
    gen_jmp_rel_csize(s, -cur_insn_len(s), 0);
}

static void gen_repz_nz(DisasContext *s, MemOp ot,
                        void (*fn)(DisasContext *s, MemOp ot))
{
    TCGLabel *l2;
    int nz = (s->prefix & PREFIX_REPNZ) ? 1 : 0;

    l2 = gen_jz_ecx_string(s);
    fn(s, ot);
    gen_op_add_reg_im(s, s->aflag, R_ECX, -1);
    gen_jcc1(s, (JCC_Z << 1) | (nz ^ 1), l2);
    if (s->repz_opt) {
        gen_op_jz_ecx(s, l2);
    }
    /*
     * ... since the result of the string op is not reused here, there
     * is no control flow junction - no need to set CC_OP_DYNAMIC.
     */
    gen_jmp_rel_csize(s, -cur_insn_len(s), 0);
}
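/*
 * REP translation strategy: one iteration of the string operation is
 * translated per execution of this block.  ECX is decremented, the
 * termination conditions branch out to l2 (which jumps to the next
 * instruction), and otherwise control reaches a jump back to the current
 * instruction, so the loop is driven by re-entering the same block.
 */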
static void gen_exception(DisasContext *s, int trapno)
{
    gen_update_cc_op(s);
    gen_update_eip_cur(s);
    gen_helper_raise_exception(tcg_env, tcg_constant_i32(trapno));
    s->base.is_jmp = DISAS_NORETURN;
}

/* Generate #UD for the current instruction. */
static void gen_illegal_opcode(DisasContext *s)
{
    gen_exception(s, EXCP06_ILLOP);
}

/* Generate #GP for the current instruction. */
static void gen_exception_gpf(DisasContext *s)
{
    gen_exception(s, EXCP0D_GPF);
}

/* Check for cpl == 0; if not, raise #GP and return false. */
static bool check_cpl0(DisasContext *s)
{
    if (CPL(s) == 0) {
        return true;
    }
    gen_exception_gpf(s);
    return false;
}
static void gen_shiftd_rm_T1(DisasContext *s, MemOp ot,
                             bool is_right, TCGv count)
{
    target_ulong mask = (ot == MO_64 ? 63 : 31);

    switch (ot) {
    case MO_16:
        /* Note: we implement the Intel behaviour for shift count > 16.
           This means "shrdw C, B, A" shifts A:B:A >> C.  Build the B:A
           portion by constructing it as a 32-bit value. */
        if (is_right) {
            tcg_gen_deposit_tl(s->tmp0, s->T0, s->T1, 16, 16);
            tcg_gen_mov_tl(s->T1, s->T0);
            tcg_gen_mov_tl(s->T0, s->tmp0);
        } else {
            tcg_gen_deposit_tl(s->T1, s->T0, s->T1, 16, 16);
        }
        /* FALLTHRU */
#ifdef TARGET_X86_64
    case MO_32:
        /* Concatenate the two 32-bit values and use a 64-bit shift. */
        tcg_gen_subi_tl(s->tmp0, count, 1);
        if (is_right) {
            tcg_gen_concat_tl_i64(s->T0, s->T0, s->T1);
            tcg_gen_shr_i64(s->tmp0, s->T0, s->tmp0);
            tcg_gen_shr_i64(s->T0, s->T0, count);
        } else {
            tcg_gen_concat_tl_i64(s->T0, s->T1, s->T0);
            tcg_gen_shl_i64(s->tmp0, s->T0, s->tmp0);
            tcg_gen_shl_i64(s->T0, s->T0, count);
            tcg_gen_shri_i64(s->tmp0, s->tmp0, 32);
            tcg_gen_shri_i64(s->T0, s->T0, 32);
        }
        break;
#endif
    default:
        tcg_gen_subi_tl(s->tmp0, count, 1);
        if (is_right) {
            tcg_gen_shr_tl(s->tmp0, s->T0, s->tmp0);

            tcg_gen_subfi_tl(s->tmp4, mask + 1, count);
            tcg_gen_shr_tl(s->T0, s->T0, count);
            tcg_gen_shl_tl(s->T1, s->T1, s->tmp4);
        } else {
            tcg_gen_shl_tl(s->tmp0, s->T0, s->tmp0);
            if (ot == MO_16) {
                /* Only needed if count > 16, for Intel behaviour. */
                tcg_gen_subfi_tl(s->tmp4, 33, count);
                tcg_gen_shr_tl(s->tmp4, s->T1, s->tmp4);
                tcg_gen_or_tl(s->tmp0, s->tmp0, s->tmp4);
            }

            tcg_gen_subfi_tl(s->tmp4, mask + 1, count);
            tcg_gen_shl_tl(s->T0, s->T0, count);
            tcg_gen_shr_tl(s->T1, s->T1, s->tmp4);
        }
        tcg_gen_movi_tl(s->tmp4, 0);
        tcg_gen_movcond_tl(TCG_COND_EQ, s->T1, count, s->tmp4,
                           s->tmp4, s->T1);
        tcg_gen_or_tl(s->T0, s->T0, s->T1);
        break;
    }
}
static uint64_t advance_pc(CPUX86State *env, DisasContext *s, int num_bytes)
{
    uint64_t pc = s->pc;

    /* This is a subsequent insn that crosses a page boundary. */
    if (s->base.num_insns > 1 &&
        !is_same_page(&s->base, s->pc + num_bytes - 1)) {
        siglongjmp(s->jmpbuf, 2);
    }

    s->pc += num_bytes;
    if (unlikely(cur_insn_len(s) > X86_MAX_INSN_LENGTH)) {
        /* If the instruction's 16th byte is on a different page than the 1st, a
         * page fault on the second page wins over the general protection fault
         * caused by the instruction being too long.
         * This can happen even if the operand is only one byte long!
         */
        if (((s->pc - 1) ^ (pc - 1)) & TARGET_PAGE_MASK) {
            (void)translator_ldub(env, &s->base,
                                  (s->pc - 1) & TARGET_PAGE_MASK);
        }
        siglongjmp(s->jmpbuf, 1);
    }

    return pc;
}
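/*
 * x86 caps instruction length at X86_MAX_INSN_LENGTH (15) bytes; anything
 * longer must raise #GP even if every prefix byte is individually legal.
 * advance_pc() enforces this during decode, and uses siglongjmp to unwind
 * out of the decoder both for that case and for TB-crossing page faults.
 */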
static inline uint8_t x86_ldub_code(CPUX86State *env, DisasContext *s)
{
    return translator_ldub(env, &s->base, advance_pc(env, s, 1));
}

static inline uint16_t x86_lduw_code(CPUX86State *env, DisasContext *s)
{
    return translator_lduw(env, &s->base, advance_pc(env, s, 2));
}

static inline uint32_t x86_ldl_code(CPUX86State *env, DisasContext *s)
{
    return translator_ldl(env, &s->base, advance_pc(env, s, 4));
}

#ifdef TARGET_X86_64
static inline uint64_t x86_ldq_code(CPUX86State *env, DisasContext *s)
{
    return translator_ldq(env, &s->base, advance_pc(env, s, 8));
}
#endif
/* Decode the ModRM byte, plus any SIB byte and displacement, into an
   AddressParts descriptor.  No TCG code is generated here. */
static AddressParts gen_lea_modrm_0(CPUX86State *env, DisasContext *s,
                                    int modrm, bool is_vsib)
{
    int def_seg, base, index, scale, mod, rm;
    target_long disp;
    bool havesib;

    def_seg = R_DS;
    index = -1;
    scale = 0;
    disp = 0;

    mod = (modrm >> 6) & 3;
    rm = modrm & 7;
    base = rm | REX_B(s);

    if (mod == 3) {
        /* Normally filtered out earlier, but including this path
           simplifies multi-byte nop, as well as bndcl, bndcu, bndcn. */
        goto done;
    }

    switch (s->aflag) {
    case MO_64:
    case MO_32:
        havesib = false;
        if (rm == 4) {
            int code = x86_ldub_code(env, s);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            if (index == 4 && !is_vsib) {
                index = -1; /* no index */
            }
            base = (code & 7) | REX_B(s);
            havesib = true;
        }

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                base = -1;
                disp = (int32_t)x86_ldl_code(env, s);
                if (CODE64(s) && !havesib) {
                    base = -2;
                    disp += s->pc + s->rip_offset;
                }
            }
            break;
        case 1:
            disp = (int8_t)x86_ldub_code(env, s);
            break;
        default:
        case 2:
            disp = (int32_t)x86_ldl_code(env, s);
            break;
        }

        /* For correct popl handling with esp. */
        if (base == R_ESP && s->popl_esp_hack) {
            disp += s->popl_esp_hack;
        }
        /* ... EBP/ESP bases default to the SS segment ... */
        break;

    case MO_16:
        if (mod == 0) {
            if (rm == 6) {
                base = -1;
                disp = x86_lduw_code(env, s);
                break;
            }
        } else if (mod == 1) {
            disp = (int8_t)x86_ldub_code(env, s);
        } else {
            disp = (int16_t)x86_lduw_code(env, s);
        }
        /* ... 16-bit base/index register pairs elided ... */
        break;

    default:
        g_assert_not_reached();
    }

 done:
    return (AddressParts){ def_seg, base, index, scale, disp };
}
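/*
 * gen_lea_modrm_0() is pure decode: it consumes ModRM/SIB/displacement
 * bytes and fills an AddressParts (def_seg, base, index, scale, disp)
 * without emitting TCG ops.  base == -1 means no base register, and
 * base == -2 marks a RIP-relative operand; the actual TCG address
 * computation happens later in gen_lea_modrm_1().
 */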
/* Compute the address, with a minimum number of TCG ops. */
static TCGv gen_lea_modrm_1(DisasContext *s, AddressParts a, bool is_vsib)
{
    TCGv ea = NULL;

    if (a.index >= 0 && !is_vsib) {
        if (a.scale == 0) {
            ea = cpu_regs[a.index];
        } else {
            tcg_gen_shli_tl(s->A0, cpu_regs[a.index], a.scale);
            ea = s->A0;
        }
        if (a.base >= 0) {
            tcg_gen_add_tl(s->A0, ea, cpu_regs[a.base]);
            ea = s->A0;
        }
    } else if (a.base >= 0) {
        ea = cpu_regs[a.base];
    }
    if (!ea) {
        if (tb_cflags(s->base.tb) & CF_PCREL && a.base == -2) {
            /* With cpu_eip ~= pc_save, the expression is pc-relative. */
            tcg_gen_addi_tl(s->A0, cpu_eip, a.disp - s->pc_save);
        } else {
            tcg_gen_movi_tl(s->A0, a.disp);
        }
        ea = s->A0;
    } else if (a.disp != 0) {
        tcg_gen_addi_tl(s->A0, ea, a.disp);
        ea = s->A0;
    }

    return ea;
}
static void gen_bndck(DisasContext *s, X86DecodedInsn *decode,
                      TCGCond cond, TCGv_i64 bndv)
{
    TCGv ea = gen_lea_modrm_1(s, decode->mem, false);

    tcg_gen_extu_tl_i64(s->tmp1_i64, ea);
    if (!CODE64(s)) {
        tcg_gen_ext32u_i64(s->tmp1_i64, s->tmp1_i64);
    }
    tcg_gen_setcond_i64(cond, s->tmp1_i64, s->tmp1_i64, bndv);
    tcg_gen_extrl_i64_i32(s->tmp2_i32, s->tmp1_i64);
    gen_helper_bndck(tcg_env, s->tmp2_i32);
}
/* Load the value of the ModRM operand (register or memory) into T0. */
static void gen_ld_modrm(DisasContext *s, X86DecodedInsn *decode, MemOp ot)
{
    int modrm = s->modrm;
    int mod, rm;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        gen_op_mov_v_reg(s, ot, s->T0, rm);
    } else {
        gen_lea_modrm(s, decode);
        gen_op_ld_v(s, ot, s->T0, s->A0);
    }
}

/* Store T0 to the ModRM operand (register or memory). */
static void gen_st_modrm(DisasContext *s, X86DecodedInsn *decode, MemOp ot)
{
    int modrm = s->modrm;
    int mod, rm;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        gen_op_mov_reg_v(s, ot, rm, s->T0);
    } else {
        gen_lea_modrm(s, decode);
        gen_op_st_v(s, ot, s->T0, s->A0);
    }
}
static target_ulong insn_get_addr(CPUX86State *env, DisasContext *s, MemOp ot)
{
    target_ulong ret;

    switch (ot) {
    case MO_8:
        ret = x86_ldub_code(env, s);
        break;
    case MO_16:
        ret = x86_lduw_code(env, s);
        break;
    case MO_32:
        ret = x86_ldl_code(env, s);
        break;
#ifdef TARGET_X86_64
    case MO_64:
        ret = x86_ldq_code(env, s);
        break;
#endif
    default:
        g_assert_not_reached();
    }
    return ret;
}

static inline uint32_t insn_get(CPUX86State *env, DisasContext *s, MemOp ot)
{
    uint32_t ret;

    switch (ot) {
    case MO_8:
        ret = x86_ldub_code(env, s);
        break;
    case MO_16:
        ret = x86_lduw_code(env, s);
        break;
    case MO_32:
    default:
        ret = x86_ldl_code(env, s);
        break;
    }
    return ret;
}

static target_long insn_get_signed(CPUX86State *env, DisasContext *s, MemOp ot)
{
    target_long ret;

    switch (ot) {
    case MO_8:
        ret = (int8_t) x86_ldub_code(env, s);
        break;
    case MO_16:
        ret = (int16_t) x86_lduw_code(env, s);
        break;
    case MO_32:
        ret = (int32_t) x86_ldl_code(env, s);
        break;
#ifdef TARGET_X86_64
    case MO_64:
        ret = x86_ldq_code(env, s);
        break;
#endif
    default:
        g_assert_not_reached();
    }
    return ret;
}
static void gen_conditional_jump_labels(DisasContext *s, target_long diff,
                                        TCGLabel *not_taken, TCGLabel *taken)
{
    if (not_taken) {
        gen_set_label(not_taken);
    }
    gen_jmp_rel_csize(s, 0, 1);
    gen_set_label(taken);
    gen_jmp_rel(s, s->dflag, diff, 0);
}

static void gen_jcc(DisasContext *s, int b, int diff)
{
    TCGLabel *l1 = gen_new_label();

    gen_jcc1(s, b, l1);
    gen_conditional_jump_labels(s, diff, NULL, l1);
}

static void gen_cmovcc1(DisasContext *s, int b, TCGv dest, TCGv src)
{
    CCPrepare cc = gen_prepare_cc(s, b, NULL);
    /* ... emit movcond on cc ... */
}
static void gen_op_movl_seg_real(DisasContext *s, X86Seg seg_reg, TCGv seg)
{
    /* ... store the selector and compute the real-mode base (selector << 4) ... */
}

/* Move SRC to seg_reg and compute if the CPU state may change.  Never
   call this function with seg_reg == R_CS. */
static void gen_movl_seg(DisasContext *s, X86Seg seg_reg, TCGv src)
{
    if (PE(s) && !VM86(s)) {
        tcg_gen_trunc_tl_i32(s->tmp2_i32, src);
        gen_helper_load_seg(tcg_env, tcg_constant_i32(seg_reg), s->tmp2_i32);
        /* Abort translation because the addseg or ss32 value may change,
           and because loading SS must inhibit interrupts for the next
           instruction. */
        if (seg_reg == R_SS) {
            s->base.is_jmp = DISAS_EOB_INHIBIT_IRQ;
        } else if (CODE32(s) && seg_reg < R_FS) {
            s->base.is_jmp = DISAS_EOB_NEXT;
        }
    } else {
        gen_op_movl_seg_real(s, seg_reg, src);
        if (seg_reg == R_SS) {
            s->base.is_jmp = DISAS_EOB_INHIBIT_IRQ;
        }
    }
}
static void gen_far_call(DisasContext *s)
{
    TCGv_i32 new_cs = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(new_cs, s->T1);
    if (PE(s) && !VM86(s)) {
        gen_helper_lcall_protected(tcg_env, new_cs, s->T0,
                                   tcg_constant_i32(s->dflag - 1),
                                   eip_next_tl(s));
    } else {
        TCGv_i32 new_eip = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(new_eip, s->T0);
        gen_helper_lcall_real(tcg_env, new_cs, new_eip,
                              tcg_constant_i32(s->dflag - 1),
                              eip_next_i32(s));
    }
    s->base.is_jmp = DISAS_JUMP;
}

static void gen_far_jmp(DisasContext *s)
{
    if (PE(s) && !VM86(s)) {
        TCGv_i32 new_cs = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(new_cs, s->T1);
        gen_helper_ljmp_protected(tcg_env, new_cs, s->T0,
                                  eip_next_tl(s));
    } else {
        gen_op_movl_seg_real(s, R_CS, s->T1);
        gen_op_jmp_v(s, s->T0);
    }
    s->base.is_jmp = DISAS_JUMP;
}
static void gen_svm_check_intercept(DisasContext *s, uint32_t type)
{
    /* no SVM activated; fast case */
    if (likely(!GUEST(s))) {
        return;
    }
    gen_helper_svm_check_intercept(tcg_env, tcg_constant_i32(type));
}

static inline void gen_stack_update(DisasContext *s, int addend)
{
    gen_op_add_reg_im(s, mo_stacksize(s), R_ESP, addend);
}

static void gen_lea_ss_ofs(DisasContext *s, TCGv dest, TCGv src, target_ulong offset)
{
    if (offset) {
        tcg_gen_addi_tl(dest, src, offset);
        src = dest;
    }
    gen_lea_v_seg_dest(s, mo_stacksize(s), dest, src, R_SS, -1);
}

/* Generate a push.  It depends on ss32, addseg and dflag. */
static void gen_push_v(DisasContext *s, TCGv val)
{
    MemOp d_ot = mo_pushpop(s, s->dflag);
    MemOp a_ot = mo_stacksize(s);
    int size = 1 << d_ot;
    TCGv new_esp = tcg_temp_new();

    tcg_gen_subi_tl(new_esp, cpu_regs[R_ESP], size);

    gen_lea_ss_ofs(s, s->A0, new_esp, 0);
    gen_op_st_v(s, d_ot, val, s->A0);
    gen_op_mov_reg_v(s, a_ot, R_ESP, new_esp);
}

/* two step pop is necessary for precise exceptions */
static MemOp gen_pop_T0(DisasContext *s)
{
    MemOp d_ot = mo_pushpop(s, s->dflag);

    gen_lea_ss_ofs(s, s->T0, cpu_regs[R_ESP], 0);
    gen_op_ld_v(s, d_ot, s->T0, s->T0);

    return d_ot;
}

static inline void gen_pop_update(DisasContext *s, MemOp ot)
{
    gen_stack_update(s, 1 << ot);
}
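/*
 * Note the ordering in gen_push_v/gen_pop_T0: the memory access is
 * generated before ESP is updated, so a faulting push or pop leaves the
 * stack pointer untouched and the instruction can be restarted precisely.
 */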
static void gen_pusha(DisasContext *s)
{
    MemOp d_ot = s->dflag;
    int size = 1 << d_ot;
    int i;

    for (i = 0; i < 8; i++) {
        gen_lea_ss_ofs(s, s->A0, cpu_regs[R_ESP], (i - 8) * size);
        gen_op_st_v(s, d_ot, cpu_regs[7 - i], s->A0);
    }

    gen_stack_update(s, -8 * size);
}

static void gen_popa(DisasContext *s)
{
    MemOp d_ot = s->dflag;
    int size = 1 << d_ot;
    int i;

    for (i = 0; i < 8; i++) {
        /* ESP is not reloaded */
        if (7 - i == R_ESP) {
            continue;
        }
        gen_lea_ss_ofs(s, s->A0, cpu_regs[R_ESP], i * size);
        gen_op_ld_v(s, d_ot, s->T0, s->A0);
        gen_op_mov_reg_v(s, d_ot, 7 - i, s->T0);
    }

    gen_stack_update(s, 8 * size);
}
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    MemOp d_ot = mo_pushpop(s, s->dflag);
    MemOp a_ot = mo_stacksize(s);
    int size = 1 << d_ot;

    /* Push BP; compute FrameTemp into T1. */
    tcg_gen_subi_tl(s->T1, cpu_regs[R_ESP], size);
    gen_lea_ss_ofs(s, s->A0, s->T1, 0);
    gen_op_st_v(s, d_ot, cpu_regs[R_EBP], s->A0);

    level &= 31;
    if (level != 0) {
        int i;

        /* Copy level-1 pointers from the previous frame. */
        for (i = 1; i < level; ++i) {
            gen_lea_ss_ofs(s, s->A0, cpu_regs[R_EBP], -size * i);
            gen_op_ld_v(s, d_ot, s->tmp0, s->A0);

            gen_lea_ss_ofs(s, s->A0, s->T1, -size * i);
            gen_op_st_v(s, d_ot, s->tmp0, s->A0);
        }

        /* Push the current FrameTemp as the last level. */
        gen_lea_ss_ofs(s, s->A0, s->T1, -size * level);
        gen_op_st_v(s, d_ot, s->T1, s->A0);
    }

    /* Copy the FrameTemp value to EBP. */
    gen_op_mov_reg_v(s, d_ot, R_EBP, s->T1);

    /* Compute the final value of ESP. */
    tcg_gen_subi_tl(s->T1, s->T1, esp_addend + size * level);
    gen_op_mov_reg_v(s, a_ot, R_ESP, s->T1);
}

static void gen_leave(DisasContext *s)
{
    MemOp d_ot = mo_pushpop(s, s->dflag);
    MemOp a_ot = mo_stacksize(s);

    gen_lea_ss_ofs(s, s->A0, cpu_regs[R_EBP], 0);
    gen_op_ld_v(s, d_ot, s->T0, s->A0);

    tcg_gen_addi_tl(s->T1, cpu_regs[R_EBP], 1 << d_ot);

    gen_op_mov_reg_v(s, d_ot, R_EBP, s->T0);
    gen_op_mov_reg_v(s, a_ot, R_ESP, s->T1);
}
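/*
 * gen_enter() mirrors the hardware ENTER semantics: push the old frame
 * pointer, optionally copy level-1 saved frame pointers from the previous
 * frame (the "display" used by nested procedures), push the new frame
 * pointer itself, and finally reserve esp_addend bytes of locals.
 */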
/* Similar, except the assumption here is that we don't decode
   the instruction at all -- either a missing opcode, an unimplemented
   feature, or just a bogus instruction stream. */
static void gen_unknown_opcode(CPUX86State *env, DisasContext *s)
{
    gen_illegal_opcode(s);

    if (qemu_loglevel_mask(LOG_UNIMP)) {
        FILE *logfile = qemu_log_trylock();
        if (logfile) {
            target_ulong pc = s->base.pc_next, end = s->pc;

            fprintf(logfile, "ILLOPC: " TARGET_FMT_lx ":", pc);
            for (; pc < end; ++pc) {
                fprintf(logfile, " %02x", translator_ldub(env, &s->base, pc));
            }
            fprintf(logfile, "\n");
            qemu_log_unlock(logfile);
        }
    }
}

/* An interrupt is different from an exception because of the
   privilege checks. */
static void gen_interrupt(DisasContext *s, uint8_t intno)
{
    gen_update_cc_op(s);
    gen_update_eip_cur(s);
    gen_helper_raise_interrupt(tcg_env, tcg_constant_i32(intno),
                               cur_insn_len_i32(s));
    s->base.is_jmp = DISAS_NORETURN;
}

static void gen_set_hflag(DisasContext *s, uint32_t mask)
{
    if ((s->flags & mask) == 0) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_ld_i32(t, tcg_env, offsetof(CPUX86State, hflags));
        tcg_gen_ori_i32(t, t, mask);
        tcg_gen_st_i32(t, tcg_env, offsetof(CPUX86State, hflags));
        s->flags |= mask;
    }
}

static void gen_reset_hflag(DisasContext *s, uint32_t mask)
{
    if (s->flags & mask) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_ld_i32(t, tcg_env, offsetof(CPUX86State, hflags));
        tcg_gen_andi_i32(t, t, ~mask);
        tcg_gen_st_i32(t, tcg_env, offsetof(CPUX86State, hflags));
        s->flags &= ~mask;
    }
}

static void gen_set_eflags(DisasContext *s, target_ulong mask)
{
    TCGv t = tcg_temp_new();

    tcg_gen_ld_tl(t, tcg_env, offsetof(CPUX86State, eflags));
    tcg_gen_ori_tl(t, t, mask);
    tcg_gen_st_tl(t, tcg_env, offsetof(CPUX86State, eflags));
}

static void gen_reset_eflags(DisasContext *s, target_ulong mask)
{
    TCGv t = tcg_temp_new();

    tcg_gen_ld_tl(t, tcg_env, offsetof(CPUX86State, eflags));
    tcg_gen_andi_tl(t, t, ~mask);
    tcg_gen_st_tl(t, tcg_env, offsetof(CPUX86State, eflags));
}

/* Clear BND registers during legacy branches. */
static void gen_bnd_jmp(DisasContext *s)
{
    /* Clear the registers only if BND prefix is missing, MPX is enabled,
       and if the BNDREGs are known to be in use (non-zero) already. */
    if ((s->prefix & PREFIX_REPNZ) == 0
        && (s->flags & HF_MPX_EN_MASK) != 0
        && (s->flags & HF_MPX_IU_MASK) != 0) {
        gen_helper_bnd_jmp(tcg_env);
    }
}
/*
 * Generate an end of block, including common tasks such as generating
 * single step traps and resetting the RF flag.
 */
static void gen_eob(DisasContext *s, int mode)
{
    gen_update_cc_op(s);

    /* If several instructions disable interrupts, only the first does it. */
    if (s->flags & HF_INHIBIT_IRQ_MASK) {
        gen_reset_hflag(s, HF_INHIBIT_IRQ_MASK);
    } else if (mode == DISAS_EOB_INHIBIT_IRQ) {
        gen_set_hflag(s, HF_INHIBIT_IRQ_MASK);
    }

    if (s->base.tb->flags & HF_RF_MASK) {
        gen_reset_eflags(s, RF_MASK);
    }
    if (mode == DISAS_EOB_RECHECK_TF) {
        /* ... re-test EFLAGS.TF in a helper, then exit ... */
    } else if ((s->flags & HF_TF_MASK) && mode != DISAS_EOB_INHIBIT_IRQ) {
        gen_helper_single_step(tcg_env);
    } else if (mode == DISAS_JUMP) {
        tcg_gen_lookup_and_goto_ptr();
    } else {
        tcg_gen_exit_tb(NULL, 0);
    }

    s->base.is_jmp = DISAS_NORETURN;
}
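/*
 * Every translated instruction that may change privileged or
 * translation-relevant state funnels through gen_eob(): the mode argument
 * (DISAS_EOB_NEXT, DISAS_EOB_INHIBIT_IRQ, DISAS_JUMP, ...) selects how
 * the block ends, from a plain exit to the main loop up to an indirect
 * goto-ptr lookup for computed jumps.
 */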
/* Jump to eip+diff, truncating the result to OT. */
static void gen_jmp_rel(DisasContext *s, MemOp ot, int diff, int tb_num)
{
    bool use_goto_tb = s->jmp_opt;
    target_ulong mask = -1;
    target_ulong new_pc = s->pc + diff;
    target_ulong new_eip = new_pc - s->cs_base;

    assert(!s->cc_op_dirty);

    /* In 64-bit mode, operand size is fixed at 64 bits. */
    if (!CODE64(s)) {
        if (ot == MO_16) {
            mask = 0xffff;
            if (tb_cflags(s->base.tb) & CF_PCREL && CODE32(s)) {
                use_goto_tb = false;
            }
        } else {
            mask = 0xffffffff;
        }
    }
    new_eip &= mask;

    if (tb_cflags(s->base.tb) & CF_PCREL) {
        tcg_gen_addi_tl(cpu_eip, cpu_eip, new_pc - s->pc_save);
        /* ... mask cpu_eip unless the branch provably stays in the page ... */
        if (!use_goto_tb || !is_same_page(&s->base, new_pc)) {
            tcg_gen_andi_tl(cpu_eip, cpu_eip, mask);
            use_goto_tb = false;
        }
    } else if (!CODE64(s)) {
        new_pc = (uint32_t)(new_eip + s->cs_base);
    }

    if (use_goto_tb && translator_use_goto_tb(&s->base, new_pc)) {
        /* Direct jump to the same or adjacent page. */
        tcg_gen_goto_tb(tb_num);
        if (!(tb_cflags(s->base.tb) & CF_PCREL)) {
            tcg_gen_movi_tl(cpu_eip, new_eip);
        }
        tcg_gen_exit_tb(s->base.tb, tb_num);
        s->base.is_jmp = DISAS_NORETURN;
    } else {
        if (!(tb_cflags(s->base.tb) & CF_PCREL)) {
            tcg_gen_movi_tl(cpu_eip, new_eip);
        }
        if (s->jmp_opt) {
            gen_eob(s, DISAS_JUMP);      /* jump to another page */
        } else {
            gen_eob(s, DISAS_EOB_ONLY);  /* exit to main loop */
        }
    }
}

/* Jump to eip+diff, truncating to the current code size. */
static void gen_jmp_rel_csize(DisasContext *s, int diff, int tb_num)
{
    /* CODE64 ignores the OT argument, so we need not consider it. */
    gen_jmp_rel(s, CODE32(s) ? MO_32 : MO_16, diff, tb_num);
}
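/*
 * The goto_tb path above is what makes direct branches fast: two TBs on
 * the same guest page can be chained so execution flows between them
 * without returning to the main loop, while cross-page or non-optimized
 * jumps fall back to gen_eob() and a TB lookup.
 */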
static inline void gen_ldq_env_A0(DisasContext *s, int offset)
{
    tcg_gen_qemu_ld_i64(s->tmp1_i64, s->A0, s->mem_index, MO_LEUQ);
    tcg_gen_st_i64(s->tmp1_i64, tcg_env, offset);
}

static inline void gen_stq_env_A0(DisasContext *s, int offset)
{
    tcg_gen_ld_i64(s->tmp1_i64, tcg_env, offset);
    tcg_gen_qemu_st_i64(s->tmp1_i64, s->A0, s->mem_index, MO_LEUQ);
}

static inline void gen_ldo_env_A0(DisasContext *s, int offset, bool align)
{
    MemOp atom = (s->cpuid_ext_features & CPUID_EXT_AVX
                  ? MO_ATOM_IFALIGN : MO_ATOM_IFALIGN_PAIR);
    MemOp mop = MO_128 | MO_LE | atom | (align ? MO_ALIGN_16 : 0);
    int mem_index = s->mem_index;
    TCGv_i128 t = tcg_temp_new_i128();

    tcg_gen_qemu_ld_i128(t, s->A0, mem_index, mop);
    tcg_gen_st_i128(t, tcg_env, offset);
}

static inline void gen_sto_env_A0(DisasContext *s, int offset, bool align)
{
    MemOp atom = (s->cpuid_ext_features & CPUID_EXT_AVX
                  ? MO_ATOM_IFALIGN : MO_ATOM_IFALIGN_PAIR);
    MemOp mop = MO_128 | MO_LE | atom | (align ? MO_ALIGN_16 : 0);
    int mem_index = s->mem_index;
    TCGv_i128 t = tcg_temp_new_i128();

    tcg_gen_ld_i128(t, tcg_env, offset);
    tcg_gen_qemu_st_i128(t, s->A0, mem_index, mop);
}

static void gen_ldy_env_A0(DisasContext *s, int offset, bool align)
{
    MemOp mop = MO_128 | MO_LE | MO_ATOM_IFALIGN_PAIR;
    int mem_index = s->mem_index;
    TCGv_i128 t0 = tcg_temp_new_i128();
    TCGv_i128 t1 = tcg_temp_new_i128();

    tcg_gen_qemu_ld_i128(t0, s->A0, mem_index, mop | (align ? MO_ALIGN_32 : 0));
    tcg_gen_addi_tl(s->tmp0, s->A0, 16);
    tcg_gen_qemu_ld_i128(t1, s->tmp0, mem_index, mop);

    /* ... store t0/t1 into the low and high halves of the YMM register ... */
}

static void gen_sty_env_A0(DisasContext *s, int offset, bool align)
{
    MemOp mop = MO_128 | MO_LE | MO_ATOM_IFALIGN_PAIR;
    int mem_index = s->mem_index;
    TCGv_i128 t = tcg_temp_new_i128();

    /* ... load the low half of the YMM register ... */
    tcg_gen_qemu_st_i128(t, s->A0, mem_index, mop | (align ? MO_ALIGN_32 : 0));
    tcg_gen_addi_tl(s->tmp0, s->A0, 16);
    /* ... load the high half ... */
    tcg_gen_qemu_st_i128(t, s->tmp0, mem_index, mop);
}
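/*
 * 256-bit YMM accesses are emitted as two 128-bit operations: only the
 * first carries the 32-byte alignment check, and MO_ATOM_IFALIGN_PAIR
 * reflects that the architecture guarantees at most 16-byte atomicity
 * for these accesses.
 */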
static void gen_x87(DisasContext *s, X86DecodedInsn *decode)
{
    bool update_fip = true;
    int b = decode->b;
    int modrm = s->modrm;
    int mod, rm, op;

    if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
        /* if CR0.EM or CR0.TS are set, generate an FPU exception */
        gen_exception(s, EXCP07_PREX);
        return;
    }
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;
    op = ((b & 7) << 3) | ((modrm >> 3) & 7);
    if (mod != 3) {
        /* memory op */
        TCGv ea = gen_lea_modrm_1(s, decode->mem, false);
        TCGv last_addr = tcg_temp_new();
        bool update_fdp = true;

        tcg_gen_mov_tl(last_addr, ea);
        gen_lea_v_seg(s, ea, decode->mem.def_seg, s->override);
        switch (op) {
        case 0x00 ... 0x07: /* fxxxs */
        case 0x10 ... 0x17: /* fixxxl */
        case 0x20 ... 0x27: /* fxxxl */
        case 0x30 ... 0x37: /* fixxx */
            /* Arithmetic with a memory operand: load it into FT0 first. */
            switch (op >> 4) {
            case 0:
                tcg_gen_qemu_ld_i32(s->tmp2_i32, s->A0,
                                    s->mem_index, MO_LEUL);
                gen_helper_flds_FT0(tcg_env, s->tmp2_i32);
                break;
            case 1:
                tcg_gen_qemu_ld_i32(s->tmp2_i32, s->A0,
                                    s->mem_index, MO_LEUL);
                gen_helper_fildl_FT0(tcg_env, s->tmp2_i32);
                break;
            case 2:
                tcg_gen_qemu_ld_i64(s->tmp1_i64, s->A0,
                                    s->mem_index, MO_LEUQ);
                gen_helper_fldl_FT0(tcg_env, s->tmp1_i64);
                break;
            case 3:
            default:
                tcg_gen_qemu_ld_i32(s->tmp2_i32, s->A0,
                                    s->mem_index, MO_LESW);
                gen_helper_fildl_FT0(tcg_env, s->tmp2_i32);
                break;
            }
            /* ... then emit the arithmetic helper (fadd/fmul/...) ... */
            break;
        case 0x08: /* flds */
        case 0x0a: /* fsts */
        case 0x0b: /* fstps */
        case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
        case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
        case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
            switch (op & 7) {
            case 0:
                /* fld into ST0 */
                switch (op >> 4) {
                case 0:
                    tcg_gen_qemu_ld_i32(s->tmp2_i32, s->A0,
                                        s->mem_index, MO_LEUL);
                    gen_helper_flds_ST0(tcg_env, s->tmp2_i32);
                    break;
                case 1:
                    tcg_gen_qemu_ld_i32(s->tmp2_i32, s->A0,
                                        s->mem_index, MO_LEUL);
                    gen_helper_fildl_ST0(tcg_env, s->tmp2_i32);
                    break;
                case 2:
                    tcg_gen_qemu_ld_i64(s->tmp1_i64, s->A0,
                                        s->mem_index, MO_LEUQ);
                    gen_helper_fldl_ST0(tcg_env, s->tmp1_i64);
                    break;
                case 3:
                default:
                    tcg_gen_qemu_ld_i32(s->tmp2_i32, s->A0,
                                        s->mem_index, MO_LESW);
                    gen_helper_fildl_ST0(tcg_env, s->tmp2_i32);
                    break;
                }
                break;
            case 1:
                /* fisttp (truncating store with pop) */
                switch (op >> 4) {
                case 1:
                    gen_helper_fisttl_ST0(s->tmp2_i32, tcg_env);
                    tcg_gen_qemu_st_i32(s->tmp2_i32, s->A0,
                                        s->mem_index, MO_LEUL);
                    break;
                case 2:
                    gen_helper_fisttll_ST0(s->tmp1_i64, tcg_env);
                    tcg_gen_qemu_st_i64(s->tmp1_i64, s->A0,
                                        s->mem_index, MO_LEUQ);
                    break;
                case 3:
                default:
                    gen_helper_fistt_ST0(s->tmp2_i32, tcg_env);
                    tcg_gen_qemu_st_i32(s->tmp2_i32, s->A0,
                                        s->mem_index, MO_LEUW);
                    break;
                }
                gen_helper_fpop(tcg_env);
                break;
            default:
                /* fst/fstp, fist/fistp */
                switch (op >> 4) {
                case 0:
                    gen_helper_fsts_ST0(s->tmp2_i32, tcg_env);
                    tcg_gen_qemu_st_i32(s->tmp2_i32, s->A0,
                                        s->mem_index, MO_LEUL);
                    break;
                case 1:
                    gen_helper_fistl_ST0(s->tmp2_i32, tcg_env);
                    tcg_gen_qemu_st_i32(s->tmp2_i32, s->A0,
                                        s->mem_index, MO_LEUL);
                    break;
                case 2:
                    gen_helper_fstl_ST0(s->tmp1_i64, tcg_env);
                    tcg_gen_qemu_st_i64(s->tmp1_i64, s->A0,
                                        s->mem_index, MO_LEUQ);
                    break;
                case 3:
                default:
                    gen_helper_fist_ST0(s->tmp2_i32, tcg_env);
                    tcg_gen_qemu_st_i32(s->tmp2_i32, s->A0,
                                        s->mem_index, MO_LEUW);
                    break;
                }
                if ((op & 7) == 3) {
                    gen_helper_fpop(tcg_env);
                }
                break;
            }
            break;
        case 0x0c: /* fldenv mem */
            gen_helper_fldenv(tcg_env, s->A0,
                              tcg_constant_i32(s->dflag - 1));
            update_fip = update_fdp = false;
            break;
        case 0x0d: /* fldcw mem */
            tcg_gen_qemu_ld_i32(s->tmp2_i32, s->A0,
                                s->mem_index, MO_LEUW);
            gen_helper_fldcw(tcg_env, s->tmp2_i32);
            update_fip = update_fdp = false;
            break;
        case 0x0e: /* fnstenv mem */
            gen_helper_fstenv(tcg_env, s->A0,
                              tcg_constant_i32(s->dflag - 1));
            update_fip = update_fdp = false;
            break;
        case 0x0f: /* fnstcw mem */
            gen_helper_fnstcw(s->tmp2_i32, tcg_env);
            tcg_gen_qemu_st_i32(s->tmp2_i32, s->A0,
                                s->mem_index, MO_LEUW);
            update_fip = update_fdp = false;
            break;
        case 0x1d: /* fldt mem */
            gen_helper_fldt_ST0(tcg_env, s->A0);
            break;
        case 0x1f: /* fstpt mem */
            gen_helper_fstt_ST0(tcg_env, s->A0);
            gen_helper_fpop(tcg_env);
            break;
        case 0x2c: /* frstor mem */
            gen_helper_frstor(tcg_env, s->A0,
                              tcg_constant_i32(s->dflag - 1));
            update_fip = update_fdp = false;
            break;
        case 0x2e: /* fnsave mem */
            gen_helper_fsave(tcg_env, s->A0,
                             tcg_constant_i32(s->dflag - 1));
            update_fip = update_fdp = false;
            break;
        case 0x2f: /* fnstsw mem */
            gen_helper_fnstsw(s->tmp2_i32, tcg_env);
            tcg_gen_qemu_st_i32(s->tmp2_i32, s->A0,
                                s->mem_index, MO_LEUW);
            update_fip = update_fdp = false;
            break;
        case 0x3c: /* fbld */
            gen_helper_fbld_ST0(tcg_env, s->A0);
            break;
        case 0x3e: /* fbstp */
            gen_helper_fbst_ST0(tcg_env, s->A0);
            gen_helper_fpop(tcg_env);
            break;
        case 0x3d: /* fildll */
            tcg_gen_qemu_ld_i64(s->tmp1_i64, s->A0,
                                s->mem_index, MO_LEUQ);
            gen_helper_fildll_ST0(tcg_env, s->tmp1_i64);
            break;
        case 0x3f: /* fistpll */
            gen_helper_fistll_ST0(s->tmp1_i64, tcg_env);
            tcg_gen_qemu_st_i64(s->tmp1_i64, s->A0,
                                s->mem_index, MO_LEUQ);
            gen_helper_fpop(tcg_env);
            break;
        default:
            goto illegal_op;
        }
        if (update_fdp) {
            int last_seg = s->override >= 0 ? s->override : decode->mem.def_seg;

            tcg_gen_ld_i32(s->tmp2_i32, tcg_env,
                           offsetof(CPUX86State,
                                    segs[last_seg].selector));
            tcg_gen_st16_i32(s->tmp2_i32, tcg_env,
                             offsetof(CPUX86State, fpds));
            tcg_gen_st_tl(last_addr, tcg_env,
                          offsetof(CPUX86State, fpdp));
        }
    } else {
        /* register float ops */
        int opreg = rm;

        switch (op) {
        /* ... arithmetic, load-constant and transcendental register ops
           elided; one of them brackets its helper with ... */
            translator_io_start(&s->base);
        /* ... */

        /* fucomi/fcomi and their popping variants all follow the same
           pattern, differing only in the helper invoked (and a trailing
           gen_helper_fpop for the "p" forms): */
            if (!(s->cpuid_features & CPUID_CMOV)) {
                goto illegal_op;
            }
            gen_update_cc_op(s);
            /* ... gen_helper_fucomi_ST0_FT0 or gen_helper_fcomi_ST0_FT0 ... */
            assume_cc_op(s, CC_OP_EFLAGS);
            break;

        case 0x3c: /* df/4 aka fnstsw ax */
            gen_helper_fnstsw(s->tmp2_i32, tcg_env);
            tcg_gen_extu_i32_tl(s->T0, s->tmp2_i32);
            gen_op_mov_reg_v(s, MO_16, R_EAX, s->T0);
            break;

        /* fcmovXX st0, stN */
            if (!(s->cpuid_features & CPUID_CMOV)) {
                goto illegal_op;
            }
            {
                /* op1 is the jcc condition derived from the opcode. */
                TCGLabel *l1 = gen_new_label();
                gen_jcc1_noeob(s, op1, l1);
                gen_helper_fmov_ST0_STN(tcg_env, tcg_constant_i32(opreg));
                gen_set_label(l1);
            }
            break;

        default:
            goto illegal_op;
        }
    }

    if (update_fip) {
        tcg_gen_ld_i32(s->tmp2_i32, tcg_env,
                       offsetof(CPUX86State, segs[R_CS].selector));
        tcg_gen_st16_i32(s->tmp2_i32, tcg_env,
                         offsetof(CPUX86State, fpcs));
        tcg_gen_st_tl(eip_cur_tl(s),
                      tcg_env, offsetof(CPUX86State, fpip));
    }
    return;

 illegal_op:
    gen_illegal_opcode(s);
}
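/*
 * Note how gen_x87() tracks update_fip/update_fdp: most x87 operations
 * record the last code and data pointers (fpip/fpdp plus their selectors)
 * for a later FSAVE/FXSAVE, but environment and control-word accesses are
 * architecturally excluded from updating them.
 */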
static void gen_multi0F(DisasContext *s, X86DecodedInsn *decode)
{
    int prefixes = s->prefix;
    MemOp dflag = s->dflag;
    int b = decode->b + 0x100;
    int modrm = s->modrm;
    int reg, mod, rm, op;
    MemOp ot;

    /* now check op code */
    switch (b) {
    case 0x1c7: /* RDSEED, RDPID with f3 prefix */
        mod = (modrm >> 6) & 3;
        switch ((modrm >> 3) & 7) {
        case 7:
            if (mod != 3 ||
                (s->prefix & PREFIX_REPNZ)) {
                goto illegal_op;
            }
            if (s->prefix & PREFIX_REPZ) {
                /* rdpid */
                if (!(s->cpuid_7_0_ecx_features & CPUID_7_0_ECX_RDPID)) {
                    goto illegal_op;
                }
                gen_helper_rdpid(s->T0, tcg_env);
                rm = (modrm & 7) | REX_B(s);
                gen_op_mov_reg_v(s, dflag, rm, s->T0);
                break;
            } else {
                /* rdseed */
                if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_RDSEED)) {
                    goto illegal_op;
                }
                goto do_rdrand;
            }

        case 6: /* rdrand */
            if (mod != 3 ||
                (s->prefix & (PREFIX_REPZ | PREFIX_REPNZ)) ||
                !(s->cpuid_ext_features & CPUID_EXT_RDRAND)) {
                goto illegal_op;
            }
        do_rdrand:
            translator_io_start(&s->base);
            gen_helper_rdrand(s->T0, tcg_env);
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_reg_v(s, dflag, rm, s->T0);
            assume_cc_op(s, CC_OP_EFLAGS);
            break;

        default:
            goto illegal_op;
        }
        break;
    case 0x100:
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch (op) {
        case 0: /* sldt */
            if (!PE(s) || VM86(s))
                goto illegal_op;
            if (s->flags & HF_UMIP_MASK && !check_cpl0(s)) {
                break;
            }
            gen_svm_check_intercept(s, SVM_EXIT_LDTR_READ);
            tcg_gen_ld32u_tl(s->T0, tcg_env,
                             offsetof(CPUX86State, ldt.selector));
            ot = mod == 3 ? dflag : MO_16;
            gen_st_modrm(s, decode, ot);
            break;
        case 2: /* lldt */
            if (!PE(s) || VM86(s))
                goto illegal_op;
            if (check_cpl0(s)) {
                gen_svm_check_intercept(s, SVM_EXIT_LDTR_WRITE);
                gen_ld_modrm(s, decode, MO_16);
                tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0);
                gen_helper_lldt(tcg_env, s->tmp2_i32);
            }
            break;
        case 1: /* str */
            if (!PE(s) || VM86(s))
                goto illegal_op;
            if (s->flags & HF_UMIP_MASK && !check_cpl0(s)) {
                break;
            }
            gen_svm_check_intercept(s, SVM_EXIT_TR_READ);
            tcg_gen_ld32u_tl(s->T0, tcg_env,
                             offsetof(CPUX86State, tr.selector));
            ot = mod == 3 ? dflag : MO_16;
            gen_st_modrm(s, decode, ot);
            break;
        case 3: /* ltr */
            if (!PE(s) || VM86(s))
                goto illegal_op;
            if (check_cpl0(s)) {
                gen_svm_check_intercept(s, SVM_EXIT_TR_WRITE);
                gen_ld_modrm(s, decode, MO_16);
                tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0);
                gen_helper_ltr(tcg_env, s->tmp2_i32);
            }
            break;
        case 4: /* verr */
        case 5: /* verw */
            if (!PE(s) || VM86(s))
                goto illegal_op;
            gen_ld_modrm(s, decode, MO_16);
            gen_update_cc_op(s);
            if (op == 4) {
                gen_helper_verr(tcg_env, s->T0);
            } else {
                gen_helper_verw(tcg_env, s->T0);
            }
            assume_cc_op(s, CC_OP_EFLAGS);
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x101:
        switch (modrm) {
        CASE_MODRM_MEM_OP(0): /* sgdt */
            if (s->flags & HF_UMIP_MASK && !check_cpl0(s)) {
                break;
            }
            gen_svm_check_intercept(s, SVM_EXIT_GDTR_READ);
            gen_lea_modrm(s, decode);
            tcg_gen_ld32u_tl(s->T0,
                             tcg_env, offsetof(CPUX86State, gdt.limit));
            gen_op_st_v(s, MO_16, s->T0, s->A0);
            gen_add_A0_im(s, 2);
            tcg_gen_ld_tl(s->T0, tcg_env, offsetof(CPUX86State, gdt.base));
            /*
             * NB: Despite a confusing description in the CPU manual,
             * all 32-bits are written regardless of operand size.
             */
            gen_op_st_v(s, CODE64(s) + MO_32, s->T0, s->A0);
            break;
        case 0xc8: /* monitor */
            if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) || CPL(s) != 0) {
                goto illegal_op;
            }
            gen_update_cc_op(s);
            gen_update_eip_cur(s);
            gen_lea_v_seg(s, cpu_regs[R_EAX], R_DS, s->override);
            gen_helper_monitor(tcg_env, s->A0);
            break;

        case 0xc9: /* mwait */
            if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) || CPL(s) != 0) {
                goto illegal_op;
            }
            gen_update_cc_op(s);
            gen_update_eip_cur(s);
            gen_helper_mwait(tcg_env, cur_insn_len_i32(s));
            s->base.is_jmp = DISAS_NORETURN;
            break;
        case 0xca: /* clac */
            if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_SMAP)
                || CPL(s) != 0) {
                goto illegal_op;
            }
            gen_reset_eflags(s, AC_MASK);
            s->base.is_jmp = DISAS_EOB_NEXT;
            break;

        case 0xcb: /* stac */
            if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_SMAP)
                || CPL(s) != 0) {
                goto illegal_op;
            }
            gen_set_eflags(s, AC_MASK);
            s->base.is_jmp = DISAS_EOB_NEXT;
            break;
        CASE_MODRM_MEM_OP(1): /* sidt */
            if (s->flags & HF_UMIP_MASK && !check_cpl0(s)) {
                break;
            }
            gen_svm_check_intercept(s, SVM_EXIT_IDTR_READ);
            gen_lea_modrm(s, decode);
            tcg_gen_ld32u_tl(s->T0, tcg_env, offsetof(CPUX86State, idt.limit));
            gen_op_st_v(s, MO_16, s->T0, s->A0);
            gen_add_A0_im(s, 2);
            tcg_gen_ld_tl(s->T0, tcg_env, offsetof(CPUX86State, idt.base));
            /*
             * NB: Despite a confusing description in the CPU manual,
             * all 32-bits are written regardless of operand size.
             */
            gen_op_st_v(s, CODE64(s) + MO_32, s->T0, s->A0);
            break;
        case 0xd0: /* xgetbv */
            if ((s->cpuid_ext_features & CPUID_EXT_XSAVE) == 0
                || (s->prefix & (PREFIX_DATA | PREFIX_REPZ | PREFIX_REPNZ))) {
                goto illegal_op;
            }
            tcg_gen_trunc_tl_i32(s->tmp2_i32, cpu_regs[R_ECX]);
            gen_helper_xgetbv(s->tmp1_i64, tcg_env, s->tmp2_i32);
            tcg_gen_extr_i64_tl(cpu_regs[R_EAX], cpu_regs[R_EDX], s->tmp1_i64);
            break;

        case 0xd1: /* xsetbv */
            if ((s->cpuid_ext_features & CPUID_EXT_XSAVE) == 0
                || (s->prefix & (PREFIX_DATA | PREFIX_REPZ | PREFIX_REPNZ))) {
                goto illegal_op;
            }
            gen_svm_check_intercept(s, SVM_EXIT_XSETBV);
            if (!check_cpl0(s)) {
                break;
            }
            tcg_gen_concat_tl_i64(s->tmp1_i64, cpu_regs[R_EAX],
                                  cpu_regs[R_EDX]);
            tcg_gen_trunc_tl_i32(s->tmp2_i32, cpu_regs[R_ECX]);
            gen_helper_xsetbv(tcg_env, s->tmp2_i32, s->tmp1_i64);
            /* End TB because translation flags may change. */
            s->base.is_jmp = DISAS_EOB_NEXT;
            break;
        case 0xd8: /* VMRUN */
            if (!SVME(s) || !PE(s)) {
                goto illegal_op;
            }
            if (!check_cpl0(s)) {
                break;
            }
            gen_update_cc_op(s);
            gen_update_eip_cur(s);
            /*
             * Reloads INHIBIT_IRQ mask as well as TF and RF with guest
             * state.  The usual gen_eob() handling is performed on vmexit
             * after host state is reloaded.
             */
            gen_helper_vmrun(tcg_env, tcg_constant_i32(s->aflag - 1),
                             cur_insn_len_i32(s));
            tcg_gen_exit_tb(NULL, 0);
            s->base.is_jmp = DISAS_NORETURN;
            break;

        case 0xd9: /* VMMCALL */
            if (!SVME(s)) {
                goto illegal_op;
            }
            gen_update_cc_op(s);
            gen_update_eip_cur(s);
            gen_helper_vmmcall(tcg_env);
            break;

        case 0xda: /* VMLOAD */
            if (!SVME(s) || !PE(s)) {
                goto illegal_op;
            }
            if (!check_cpl0(s)) {
                break;
            }
            gen_update_cc_op(s);
            gen_update_eip_cur(s);
            gen_helper_vmload(tcg_env, tcg_constant_i32(s->aflag - 1));
            break;

        case 0xdb: /* VMSAVE */
            if (!SVME(s) || !PE(s)) {
                goto illegal_op;
            }
            if (!check_cpl0(s)) {
                break;
            }
            gen_update_cc_op(s);
            gen_update_eip_cur(s);
            gen_helper_vmsave(tcg_env, tcg_constant_i32(s->aflag - 1));
            break;

        case 0xdc: /* STGI */
            if ((!SVME(s) && !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT))
                || !PE(s)) {
                goto illegal_op;
            }
            if (!check_cpl0(s)) {
                break;
            }
            gen_update_cc_op(s);
            gen_helper_stgi(tcg_env);
            s->base.is_jmp = DISAS_EOB_NEXT;
            break;

        case 0xdd: /* CLGI */
            if (!SVME(s) || !PE(s)) {
                goto illegal_op;
            }
            if (!check_cpl0(s)) {
                break;
            }
            gen_update_cc_op(s);
            gen_update_eip_cur(s);
            gen_helper_clgi(tcg_env);
            break;

        case 0xde: /* SKINIT */
            if ((!SVME(s) && !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT))
                || !PE(s)) {
                goto illegal_op;
            }
            gen_svm_check_intercept(s, SVM_EXIT_SKINIT);
            /* If not intercepted, not implemented -- raise #UD. */
            goto illegal_op;

        case 0xdf: /* INVLPGA */
            if (!SVME(s) || !PE(s)) {
                goto illegal_op;
            }
            if (!check_cpl0(s)) {
                break;
            }
            gen_svm_check_intercept(s, SVM_EXIT_INVLPGA);
            if (s->aflag == MO_64) {
                tcg_gen_mov_tl(s->A0, cpu_regs[R_EAX]);
            } else {
                tcg_gen_ext32u_tl(s->A0, cpu_regs[R_EAX]);
            }
            gen_helper_flush_page(tcg_env, s->A0);
            s->base.is_jmp = DISAS_EOB_NEXT;
            break;
        CASE_MODRM_MEM_OP(2): /* lgdt */
            if (!check_cpl0(s)) {
                break;
            }
            gen_svm_check_intercept(s, SVM_EXIT_GDTR_WRITE);
            gen_lea_modrm(s, decode);
            gen_op_ld_v(s, MO_16, s->T1, s->A0);
            gen_add_A0_im(s, 2);
            gen_op_ld_v(s, CODE64(s) + MO_32, s->T0, s->A0);
            if (dflag == MO_16) {
                tcg_gen_andi_tl(s->T0, s->T0, 0xffffff);
            }
            tcg_gen_st_tl(s->T0, tcg_env, offsetof(CPUX86State, gdt.base));
            tcg_gen_st32_tl(s->T1, tcg_env, offsetof(CPUX86State, gdt.limit));
            break;

        CASE_MODRM_MEM_OP(3): /* lidt */
            if (!check_cpl0(s)) {
                break;
            }
            gen_svm_check_intercept(s, SVM_EXIT_IDTR_WRITE);
            gen_lea_modrm(s, decode);
            gen_op_ld_v(s, MO_16, s->T1, s->A0);
            gen_add_A0_im(s, 2);
            gen_op_ld_v(s, CODE64(s) + MO_32, s->T0, s->A0);
            if (dflag == MO_16) {
                tcg_gen_andi_tl(s->T0, s->T0, 0xffffff);
            }
            tcg_gen_st_tl(s->T0, tcg_env, offsetof(CPUX86State, idt.base));
            tcg_gen_st32_tl(s->T1, tcg_env, offsetof(CPUX86State, idt.limit));
            break;
        CASE_MODRM_OP(4): /* smsw */
            if (s->flags & HF_UMIP_MASK && !check_cpl0(s)) {
                break;
            }
            gen_svm_check_intercept(s, SVM_EXIT_READ_CR0);
            tcg_gen_ld_tl(s->T0, tcg_env, offsetof(CPUX86State, cr[0]));
            /*
             * In 32-bit mode, the higher 16 bits of the destination
             * register are undefined.  In practice CR0[31:0] is stored
             * just like in 64-bit mode.
             */
            mod = (modrm >> 6) & 3;
            ot = (mod != 3 ? MO_16 : s->dflag);
            gen_st_modrm(s, decode, ot);
            break;
        case 0xee: /* rdpkru */
            if (s->prefix & (PREFIX_DATA | PREFIX_REPZ | PREFIX_REPNZ)) {
                goto illegal_op;
            }
            tcg_gen_trunc_tl_i32(s->tmp2_i32, cpu_regs[R_ECX]);
            gen_helper_rdpkru(s->tmp1_i64, tcg_env, s->tmp2_i32);
            tcg_gen_extr_i64_tl(cpu_regs[R_EAX], cpu_regs[R_EDX], s->tmp1_i64);
            break;
        case 0xef: /* wrpkru */
            if (s->prefix & (PREFIX_DATA | PREFIX_REPZ | PREFIX_REPNZ)) {
                goto illegal_op;
            }
            tcg_gen_concat_tl_i64(s->tmp1_i64, cpu_regs[R_EAX],
                                  cpu_regs[R_EDX]);
            tcg_gen_trunc_tl_i32(s->tmp2_i32, cpu_regs[R_ECX]);
            gen_helper_wrpkru(tcg_env, s->tmp2_i32, s->tmp1_i64);
            break;
        /* LMSW */
        if (!check_cpl0(s)) {
            break;
        }
        gen_svm_check_intercept(s, SVM_EXIT_WRITE_CR0);
        gen_ld_modrm(s, decode, MO_16);
        /*
         * Only the 4 lower bits of CR0 are modified.
         * PE cannot be set to zero if already set to one.
         */
        tcg_gen_ld_tl(s->T1, tcg_env, offsetof(CPUX86State, cr[0]));
        tcg_gen_andi_tl(s->T0, s->T0, 0xf);
        tcg_gen_andi_tl(s->T1, s->T1, ~0xe);
        tcg_gen_or_tl(s->T0, s->T0, s->T1);
        gen_helper_write_crN(tcg_env, tcg_constant_i32(0), s->T0);
        s->base.is_jmp = DISAS_EOB_NEXT;
        break;
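        /*
         * Illustrative sketch (values invented): the mask sequence
         * above computes
         *     new_cr0 = (old_cr0 & ~0xe) | (src & 0xf);
         * e.g. old_cr0 = 0x80000011 and src = 0x0000 still yield
         * 0x80000011, because ~0xe preserves bit 0: LMSW can set
         * CR0.PE but never clear it.
         */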
        /* INVLPG */
        if (!check_cpl0(s)) {
            break;
        }
        gen_svm_check_intercept(s, SVM_EXIT_INVLPG);
        gen_lea_modrm(s, decode);
        gen_helper_flush_page(tcg_env, s->A0);
        s->base.is_jmp = DISAS_EOB_NEXT;
        break;
        /* SWAPGS */
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            if (check_cpl0(s)) {
                tcg_gen_mov_tl(s->T0, cpu_seg_base[R_GS]);
                tcg_gen_ld_tl(cpu_seg_base[R_GS], tcg_env,
                              offsetof(CPUX86State, kernelgsbase));
                tcg_gen_st_tl(s->T0, tcg_env,
                              offsetof(CPUX86State, kernelgsbase));
            }
            break;
        }
#endif
        goto illegal_op;
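        /*
         * Illustrative note (not from the source): via the s->T0
         * temporary this is a plain exchange,
         *     tmp = GS.base;
         *     GS.base = MSR_KERNEL_GS_BASE;
         *     MSR_KERNEL_GS_BASE = tmp;
         * which is how kernels swap in their per-CPU base on entry.
         */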
        /* RDTSCP */
        if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP)) {
            goto illegal_op;
        }
        gen_update_cc_op(s);
        gen_update_eip_cur(s);
        translator_io_start(&s->base);
        gen_helper_rdtsc(tcg_env);
        gen_helper_rdpid(s->T0, tcg_env);
        gen_op_mov_reg_v(s, dflag, R_ECX, s->T0);
        break;
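        /*
         * Illustrative note (not from the source): architecturally
         * RDTSCP is RDTSC plus IA32_TSC_AUX, i.e.
         *     EDX:EAX = TSC;  ECX = TSC_AUX;
         * which is why the rdpid helper is reused here to fetch
         * TSC_AUX into ECX after the rdtsc helper runs.
         */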
        /* BNDCL, BNDCU, BNDMOV (load form), BNDLDX */
        if (s->flags & HF_MPX_EN_MASK) {
            modrm = s->modrm;
            reg = ((modrm >> 3) & 7) | REX_R(s);
            mod = (modrm >> 6) & 3;
            if (s->prefix & PREFIX_REPZ) {
                /* bndcl */
                if (reg >= 4
                    || s->aflag == MO_16) {
                    goto illegal_op;
                }
                gen_bndck(s, decode, TCG_COND_LTU, cpu_bndl[reg]);
            } else if (s->prefix & PREFIX_REPNZ) {
                /* bndcu */
                if (reg >= 4
                    || s->aflag == MO_16) {
                    goto illegal_op;
                }
                TCGv_i64 notu = tcg_temp_new_i64();
                tcg_gen_not_i64(notu, cpu_bndu[reg]);
                gen_bndck(s, decode, TCG_COND_GTU, notu);
            } else if (s->prefix & PREFIX_DATA) {
                /* bndmov -- from reg/mem */
                if (reg >= 4 || s->aflag == MO_16) {
                    goto illegal_op;
                }
                if (mod == 3) {
                    int reg2 = (modrm & 7) | REX_B(s);
                    if (reg2 >= 4) {
                        goto illegal_op;
                    }
                    if (s->flags & HF_MPX_IU_MASK) {
                        tcg_gen_mov_i64(cpu_bndl[reg], cpu_bndl[reg2]);
                        tcg_gen_mov_i64(cpu_bndu[reg], cpu_bndu[reg2]);
                    }
                } else {
                    gen_lea_modrm(s, decode);
                    if (CODE64(s)) {
                        tcg_gen_qemu_ld_i64(cpu_bndl[reg], s->A0,
                                            s->mem_index, MO_LEUQ);
                        tcg_gen_addi_tl(s->A0, s->A0, 8);
                        tcg_gen_qemu_ld_i64(cpu_bndu[reg], s->A0,
                                            s->mem_index, MO_LEUQ);
                    } else {
                        tcg_gen_qemu_ld_i64(cpu_bndl[reg], s->A0,
                                            s->mem_index, MO_LEUL);
                        tcg_gen_addi_tl(s->A0, s->A0, 4);
                        tcg_gen_qemu_ld_i64(cpu_bndu[reg], s->A0,
                                            s->mem_index, MO_LEUL);
                    }
                    /* bnd registers are now in-use */
                    gen_set_hflag(s, HF_MPX_IU_MASK);
                }
            } else if (mod != 3) {
                /* bndldx */
                AddressParts a = decode->mem;
                if (reg >= 4
                    || s->aflag == MO_16
                    || a.base < -1) {
                    goto illegal_op;
                }
                if (a.base >= 0) {
                    tcg_gen_addi_tl(s->A0, cpu_regs[a.base], a.disp);
                } else {
                    tcg_gen_movi_tl(s->A0, 0);
                }
                gen_lea_v_seg(s, s->A0, a.def_seg, s->override);
                if (a.index >= 0) {
                    tcg_gen_mov_tl(s->T0, cpu_regs[a.index]);
                } else {
                    tcg_gen_movi_tl(s->T0, 0);
                }
                if (CODE64(s)) {
                    gen_helper_bndldx64(cpu_bndl[reg], tcg_env, s->A0, s->T0);
                    tcg_gen_ld_i64(cpu_bndu[reg], tcg_env,
                                   offsetof(CPUX86State, mmx_t0.MMX_Q(0)));
                } else {
                    gen_helper_bndldx32(cpu_bndu[reg], tcg_env, s->A0, s->T0);
                    tcg_gen_ext32u_i64(cpu_bndl[reg], cpu_bndu[reg]);
                    tcg_gen_shri_i64(cpu_bndu[reg], cpu_bndu[reg], 32);
                }
                /* bnd registers are now in-use */
                gen_set_hflag(s, HF_MPX_IU_MASK);
            }
        }
        break;
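        /*
         * Illustrative sketch (not from the source): MPX keeps the
         * upper bound in one's-complement form, so an all-zero BND
         * register means [0, ~0], i.e. the whole address space.
         * Conceptually:
         *     bndl = lower_bound;
         *     bndu = ~upper_bound;
         * That is why bndcu complements cpu_bndu before comparing,
         * and why BNDMK below stores the complemented effective
         * address into cpu_bndu.
         */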
        /* BNDMK, BNDCN, BNDMOV (store form), BNDSTX */
        if (s->flags & HF_MPX_EN_MASK) {
            modrm = s->modrm;
            reg = ((modrm >> 3) & 7) | REX_R(s);
            mod = (modrm >> 6) & 3;
            if (mod != 3 && (s->prefix & PREFIX_REPZ)) {
                /* bndmk */
                if (reg >= 4
                    || s->aflag == MO_16) {
                    goto illegal_op;
                }
                AddressParts a = decode->mem;
                if (a.base >= 0) {
                    tcg_gen_extu_tl_i64(cpu_bndl[reg], cpu_regs[a.base]);
                    if (!CODE64(s)) {
                        tcg_gen_ext32u_i64(cpu_bndl[reg], cpu_bndl[reg]);
                    }
                } else if (a.base == -1) {
                    /* no base register sets the lower bound to 0 */
                    tcg_gen_movi_i64(cpu_bndl[reg], 0);
                } else {
                    /* rip-relative generates #ud */
                    goto illegal_op;
                }
                tcg_gen_not_tl(s->A0, gen_lea_modrm_1(s, decode->mem, false));
                if (!CODE64(s)) {
                    tcg_gen_ext32u_tl(s->A0, s->A0);
                }
                tcg_gen_extu_tl_i64(cpu_bndu[reg], s->A0);
                /* bnd registers are now in-use */
                gen_set_hflag(s, HF_MPX_IU_MASK);
            } else if (s->prefix & PREFIX_REPNZ) {
                /* bndcn */
                if (reg >= 4
                    || s->aflag == MO_16) {
                    goto illegal_op;
                }
                gen_bndck(s, decode, TCG_COND_GTU, cpu_bndu[reg]);
            } else if (s->prefix & PREFIX_DATA) {
                /* bndmov -- to reg/mem */
                if (reg >= 4 || s->aflag == MO_16) {
                    goto illegal_op;
                }
                if (mod == 3) {
                    int reg2 = (modrm & 7) | REX_B(s);
                    if (reg2 >= 4) {
                        goto illegal_op;
                    }
                    if (s->flags & HF_MPX_IU_MASK) {
                        tcg_gen_mov_i64(cpu_bndl[reg2], cpu_bndl[reg]);
                        tcg_gen_mov_i64(cpu_bndu[reg2], cpu_bndu[reg]);
                    }
                } else {
                    gen_lea_modrm(s, decode);
                    if (CODE64(s)) {
                        tcg_gen_qemu_st_i64(cpu_bndl[reg], s->A0,
                                            s->mem_index, MO_LEUQ);
                        tcg_gen_addi_tl(s->A0, s->A0, 8);
                        tcg_gen_qemu_st_i64(cpu_bndu[reg], s->A0,
                                            s->mem_index, MO_LEUQ);
                    } else {
                        tcg_gen_qemu_st_i64(cpu_bndl[reg], s->A0,
                                            s->mem_index, MO_LEUL);
                        tcg_gen_addi_tl(s->A0, s->A0, 4);
                        tcg_gen_qemu_st_i64(cpu_bndu[reg], s->A0,
                                            s->mem_index, MO_LEUL);
                    }
                }
            } else if (mod != 3) {
                /* bndstx */
                AddressParts a = decode->mem;
                if (reg >= 4
                    || s->aflag == MO_16
                    || a.base < -1) {
                    goto illegal_op;
                }
                if (a.base >= 0) {
                    tcg_gen_addi_tl(s->A0, cpu_regs[a.base], a.disp);
                } else {
                    tcg_gen_movi_tl(s->A0, 0);
                }
                gen_lea_v_seg(s, s->A0, a.def_seg, s->override);
                if (a.index >= 0) {
                    tcg_gen_mov_tl(s->T0, cpu_regs[a.index]);
                } else {
                    tcg_gen_movi_tl(s->T0, 0);
                }
                if (CODE64(s)) {
                    gen_helper_bndstx64(tcg_env, s->A0, s->T0,
                                        cpu_bndl[reg], cpu_bndu[reg]);
                } else {
                    gen_helper_bndstx32(tcg_env, s->A0, s->T0,
                                        cpu_bndl[reg], cpu_bndu[reg]);
                }
            }
        }
        break;

    default:
        goto illegal_op;
    }
    return;
 illegal_op:
    gen_illegal_opcode(s);
}
#include "decode-new.c.inc"
static void i386_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    CPUX86State *env = cpu_env(cpu);
    uint32_t flags = dc->base.tb->flags;
    uint32_t cflags = tb_cflags(dc->base.tb);
    int cpl = (flags >> HF_CPL_SHIFT) & 3;
    int iopl = (flags >> IOPL_SHIFT) & 3;

    dc->cs_base = dc->base.tb->cs_base;
    dc->pc_save = dc->base.pc_next;
    dc->flags = flags;
#ifndef CONFIG_USER_ONLY
    dc->cpl = cpl;
    dc->iopl = iopl;
#endif

    /* We make some simplifying assumptions; validate they're correct. */
    g_assert(PE(dc) == ((flags & HF_PE_MASK) != 0));
    g_assert(CPL(dc) == cpl);
    g_assert(IOPL(dc) == iopl);
    g_assert(VM86(dc) == ((flags & HF_VM_MASK) != 0));
    g_assert(CODE32(dc) == ((flags & HF_CS32_MASK) != 0));
    g_assert(CODE64(dc) == ((flags & HF_CS64_MASK) != 0));
    g_assert(SS32(dc) == ((flags & HF_SS32_MASK) != 0));
    g_assert(LMA(dc) == ((flags & HF_LMA_MASK) != 0));
    g_assert(ADDSEG(dc) == ((flags & HF_ADDSEG_MASK) != 0));
    g_assert(SVME(dc) == ((flags & HF_SVME_MASK) != 0));
    g_assert(GUEST(dc) == ((flags & HF_GUEST_MASK) != 0));

    dc->cc_op = CC_OP_DYNAMIC;
    dc->cc_op_dirty = false;
    dc->mem_index = cpu_mmu_index(cpu, false);
    dc->cpuid_features = env->features[FEAT_1_EDX];
    dc->cpuid_ext_features = env->features[FEAT_1_ECX];
    dc->cpuid_ext2_features = env->features[FEAT_8000_0001_EDX];
    dc->cpuid_ext3_features = env->features[FEAT_8000_0001_ECX];
    dc->cpuid_7_0_ebx_features = env->features[FEAT_7_0_EBX];
    dc->cpuid_7_0_ecx_features = env->features[FEAT_7_0_ECX];
    dc->cpuid_7_1_eax_features = env->features[FEAT_7_1_EAX];
    dc->cpuid_xsave_features = env->features[FEAT_XSAVE];
    dc->jmp_opt = !((cflags & CF_NO_GOTO_TB) ||
                    (flags & (HF_RF_MASK | HF_TF_MASK | HF_INHIBIT_IRQ_MASK)));
    /*
     * If jmp_opt, we want to handle each string instruction individually.
     * For icount also disable repz optimization so that each iteration
     * is accounted separately.
     */
    dc->repz_opt = !dc->jmp_opt && !(cflags & CF_USE_ICOUNT);

    dc->T0 = tcg_temp_new();
    dc->T1 = tcg_temp_new();
    dc->A0 = tcg_temp_new();

    dc->tmp0 = tcg_temp_new();
    dc->tmp1_i64 = tcg_temp_new_i64();
    dc->tmp2_i32 = tcg_temp_new_i32();
    dc->tmp3_i32 = tcg_temp_new_i32();
    dc->tmp4 = tcg_temp_new();
    dc->cc_srcT = tcg_temp_new();
}
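/*
 * Illustrative sketch (standalone; names invented, not the QEMU API):
 * the CC_OP_DYNAMIC initialization above arms the "lazy flags" scheme.
 * Rather than updating EFLAGS after every instruction, the translator
 * records which operation last set the flags plus its operands, and a
 * flag is only materialized when something actually reads it:
 */
typedef struct LazyFlagsSketch {
    int op;                 /* which operation last set the flags */
    uint64_t dst;           /* the result of that operation */
} LazyFlagsSketch;

static inline bool lazy_zf_sketch(const LazyFlagsSketch *lf)
{
    /* ZF falls out of the recorded result; no EFLAGS write happened. */
    return lf->dst == 0;
}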
static void i386_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    target_ulong pc_arg = dc->base.pc_next;

    dc->prev_insn_start = dc->base.insn_start;
    dc->prev_insn_end = tcg_last_op();
    if (tb_cflags(dcbase->tb) & CF_PCREL) {
        pc_arg &= ~TARGET_PAGE_MASK;
    }
    tcg_gen_insn_start(pc_arg, dc->cc_op);
}
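/*
 * Illustrative sketch (standalone, hypothetical name): with CF_PCREL
 * the recorded PC is reduced to its offset within the guest page, so
 * the translated block can be reused at any virtual address where the
 * same page is mapped:
 */
static inline uint64_t pcrel_insn_pc_sketch(uint64_t pc, uint64_t page_mask)
{
    /* page_mask plays the role of TARGET_PAGE_MASK, ~(page_size - 1) */
    return pc & ~page_mask;
}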
static void i386_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    bool orig_cc_op_dirty = dc->cc_op_dirty;
    CCOp orig_cc_op = dc->cc_op;
    target_ulong orig_pc_save = dc->pc_save;

#ifdef TARGET_VSYSCALL_PAGE
    /*
     * Detect entry into the vsyscall page and invoke the syscall.
     */
    if ((dc->base.pc_next & TARGET_PAGE_MASK) == TARGET_VSYSCALL_PAGE) {
        gen_exception(dc, EXCP_VSYSCALL);
        dc->base.pc_next = dc->pc + 1;
        return;
    }
#endif

    switch (sigsetjmp(dc->jmpbuf, 0)) {
    case 0:
        disas_insn(dc);
        break;
    case 1:
        gen_exception_gpf(dc);
        break;
    case 2:
        /* Restore state that may affect the next instruction. */
        dc->pc = dc->base.pc_next;
        assert(dc->cc_op_dirty == orig_cc_op_dirty);
        assert(dc->cc_op == orig_cc_op);
        assert(dc->pc_save == orig_pc_save);
        dc->base.num_insns--;
        tcg_remove_ops_after(dc->prev_insn_end);
        dc->base.insn_start = dc->prev_insn_start;
        dc->base.is_jmp = DISAS_TOO_MANY;
        return;
    default:
        g_assert_not_reached();
    }

    /*
     * Instruction decoding completed (possibly with #GP if the
     * 15-byte boundary was exceeded).
     */
    dc->base.pc_next = dc->pc;
    if (dc->base.is_jmp == DISAS_NEXT) {
        if (dc->flags & (HF_TF_MASK | HF_INHIBIT_IRQ_MASK)) {
            /*
             * If single step mode, we generate only one instruction and
             * generate an exception.
             * If irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
             * the flag and abort the translation to give the irqs a
             * chance to happen.
             */
            dc->base.is_jmp = DISAS_EOB_NEXT;
        } else if (!is_same_page(&dc->base, dc->base.pc_next)) {
            dc->base.is_jmp = DISAS_TOO_MANY;
        }
    }
}
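/*
 * Illustrative sketch (standalone, hypothetical names; sigsetjmp comes
 * from <setjmp.h>, already available via the existing includes): the
 * retry protocol used by i386_tr_translate_insn() above.  Decoding may
 * siglongjmp out mid-instruction, e.g. when a fetch crosses into an
 * unmapped page; code 2 rolls the TB back to the end of the previous
 * instruction so the faulting instruction can start a fresh TB:
 */
static sigjmp_buf retry_env_sketch;          /* hypothetical */

static int translate_one_protected_sketch(void)
{
    switch (sigsetjmp(retry_env_sketch, 0)) {
    case 0:
        /* decode and emit; a faulting fetch siglongjmp()s with code 2 */
        return 0;
    case 2:
        /* drop the ops of the partial insn and end the TB early */
        return 1;
    default:
        return -1;
    }
}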
static void i386_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);

    switch (dc->base.is_jmp) {
    case DISAS_NORETURN:
        /*
         * Most instructions should not use DISAS_NORETURN, as that
         * suppresses the flag handling normally done by gen_eob().
         * We can get here:
         * - for exception and interrupts
         * - for jump optimization (which is disabled by INHIBIT_IRQ/RF/TF)
         * - for VMRUN because RF/TF handling for the host is done after vmexit,
         *   and INHIBIT_IRQ is ignored (VMRUN must exit the TB)
         * - for HLT/PAUSE/MWAIT to exit the main loop with specific EXCP_* values;
         *   the helpers then do themselves the work of gen_eob()
         */
        break;
    case DISAS_TOO_MANY:
        gen_update_cc_op(dc);
        gen_jmp_rel_csize(dc, 0, 0);
        break;
    case DISAS_EOB_NEXT:
        assert(dc->base.pc_next == dc->pc);
        /* fall through */
    case DISAS_EOB_ONLY:
    case DISAS_JUMP:
        gen_eob(dc, dc->base.is_jmp);
        break;
    default:
        g_assert_not_reached();
    }
}