Lines matching refs: s (references to the DisasContext *s argument throughout the x86 TCG translator)

236 static void gen_eob(DisasContext *s);
237 static void gen_jr(DisasContext *s);
238 static void gen_jmp_rel(DisasContext *s, MemOp ot, int diff, int tb_num);
239 static void gen_jmp_rel_csize(DisasContext *s, int diff, int tb_num);
241 static void gen_exception_gpf(DisasContext *s);
323 static void set_cc_op(DisasContext *s, CCOp op) in set_cc_op() argument
327 if (s->cc_op == op) { in set_cc_op()
332 dead = cc_op_live[s->cc_op] & ~cc_op_live[op]; in set_cc_op()
343 tcg_gen_discard_tl(s->cc_srcT); in set_cc_op()
349 s->cc_op_dirty = false; in set_cc_op()
352 if (s->cc_op == CC_OP_DYNAMIC) { in set_cc_op()
355 s->cc_op_dirty = true; in set_cc_op()
357 s->cc_op = op; in set_cc_op()
360 static void gen_update_cc_op(DisasContext *s) in gen_update_cc_op() argument
362 if (s->cc_op_dirty) { in gen_update_cc_op()
363 tcg_gen_movi_i32(cpu_cc_op, s->cc_op); in gen_update_cc_op()
364 s->cc_op_dirty = false; in gen_update_cc_op()
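
The set_cc_op()/gen_update_cc_op() pair above implements lazy flag tracking: the pending CC_OP value lives in the DisasContext and is only stored to the cpu_cc_op global once it is marked dirty. The following stand-alone sketch models just that dirty-tracking pattern; the type and field names are illustrative, not QEMU's API.

    #include <stdbool.h>

    typedef int CCOp;                 /* stands in for the CCOp enum */

    struct LazyCC {
        CCOp cc_op;                   /* value known at translation time */
        bool cc_op_dirty;             /* true if the runtime copy is stale */
        CCOp committed;               /* models the cpu_cc_op global */
    };

    /* mirrors set_cc_op(): remember the new op, defer the store */
    static void set_cc_op_model(struct LazyCC *s, CCOp op)
    {
        if (s->cc_op == op) {
            return;                   /* nothing changed */
        }
        s->cc_op = op;
        s->cc_op_dirty = true;        /* committed copy must be refreshed */
    }

    /* mirrors gen_update_cc_op(): flush only when needed */
    static void update_cc_op_model(struct LazyCC *s)
    {
        if (s->cc_op_dirty) {
            s->committed = s->cc_op;  /* models tcg_gen_movi_i32(cpu_cc_op, ...) */
            s->cc_op_dirty = false;
        }
    }
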
398 static inline bool byte_reg_is_xH(DisasContext *s, int reg) in byte_reg_is_xH() argument
401 if (reg < 4 || REX_PREFIX(s)) { in byte_reg_is_xH()
408 static inline MemOp mo_pushpop(DisasContext *s, MemOp ot) in mo_pushpop() argument
410 if (CODE64(s)) { in mo_pushpop()
418 static inline MemOp mo_stacksize(DisasContext *s) in mo_stacksize() argument
420 return CODE64(s) ? MO_64 : SS32(s) ? MO_32 : MO_16; in mo_stacksize()
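
byte_reg_is_xH() above encodes the legacy rule that, without a REX prefix, byte-register numbers 4 to 7 do not address separate registers but the high bytes AH, CH, DH and BH of registers 0 to 3. A small stand-alone illustration of that mapping; the helper name and flat register array are assumptions:

    #include <stdbool.h>
    #include <stdint.h>

    /* Without REX, byte regs 4..7 are AH, CH, DH, BH: bits 15:8 of regs 0..3. */
    static uint8_t read_byte_reg(const uint32_t gpr[16], int reg, bool has_rex)
    {
        if (reg >= 4 && reg < 8 && !has_rex) {
            return (gpr[reg - 4] >> 8) & 0xff;   /* xH case */
        }
        return gpr[reg] & 0xff;                  /* xL, SPL..DIL, R8B..R15B */
    }
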
455 static TCGv gen_op_deposit_reg_v(DisasContext *s, MemOp ot, int reg, TCGv dest, TCGv t0) in gen_op_deposit_reg_v() argument
459 if (byte_reg_is_xH(s, reg)) { in gen_op_deposit_reg_v()
489 static void gen_op_mov_reg_v(DisasContext *s, MemOp ot, int reg, TCGv t0) in gen_op_mov_reg_v() argument
491 gen_op_deposit_reg_v(s, ot, reg, NULL, t0); in gen_op_mov_reg_v()
495 void gen_op_mov_v_reg(DisasContext *s, MemOp ot, TCGv t0, int reg) in gen_op_mov_v_reg() argument
497 if (ot == MO_8 && byte_reg_is_xH(s, reg)) { in gen_op_mov_v_reg()
504 static void gen_add_A0_im(DisasContext *s, int val) in gen_add_A0_im() argument
506 tcg_gen_addi_tl(s->A0, s->A0, val); in gen_add_A0_im()
507 if (!CODE64(s)) { in gen_add_A0_im()
508 tcg_gen_ext32u_tl(s->A0, s->A0); in gen_add_A0_im()
512 static inline void gen_op_jmp_v(DisasContext *s, TCGv dest) in gen_op_jmp_v() argument
515 s->pc_save = -1; in gen_op_jmp_v()
519 void gen_op_add_reg_im(DisasContext *s, MemOp size, int reg, int32_t val) in gen_op_add_reg_im() argument
521 tcg_gen_addi_tl(s->tmp0, cpu_regs[reg], val); in gen_op_add_reg_im()
522 gen_op_mov_reg_v(s, size, reg, s->tmp0); in gen_op_add_reg_im()
525 static inline void gen_op_add_reg_T0(DisasContext *s, MemOp size, int reg) in gen_op_add_reg_T0() argument
527 tcg_gen_add_tl(s->tmp0, cpu_regs[reg], s->T0); in gen_op_add_reg_T0()
528 gen_op_mov_reg_v(s, size, reg, s->tmp0); in gen_op_add_reg_T0()
531 static inline void gen_op_ld_v(DisasContext *s, int idx, TCGv t0, TCGv a0) in gen_op_ld_v() argument
533 tcg_gen_qemu_ld_tl(t0, a0, s->mem_index, idx | MO_LE); in gen_op_ld_v()
536 static inline void gen_op_st_v(DisasContext *s, int idx, TCGv t0, TCGv a0) in gen_op_st_v() argument
538 tcg_gen_qemu_st_tl(t0, a0, s->mem_index, idx | MO_LE); in gen_op_st_v()
541 static inline void gen_op_st_rm_T0_A0(DisasContext *s, int idx, int d) in gen_op_st_rm_T0_A0() argument
544 gen_op_st_v(s, idx, s->T0, s->A0); in gen_op_st_rm_T0_A0()
546 gen_op_mov_reg_v(s, idx, d, s->T0); in gen_op_st_rm_T0_A0()
550 static void gen_update_eip_cur(DisasContext *s) in gen_update_eip_cur() argument
552 assert(s->pc_save != -1); in gen_update_eip_cur()
553 if (tb_cflags(s->base.tb) & CF_PCREL) { in gen_update_eip_cur()
554 tcg_gen_addi_tl(cpu_eip, cpu_eip, s->base.pc_next - s->pc_save); in gen_update_eip_cur()
555 } else if (CODE64(s)) { in gen_update_eip_cur()
556 tcg_gen_movi_tl(cpu_eip, s->base.pc_next); in gen_update_eip_cur()
558 tcg_gen_movi_tl(cpu_eip, (uint32_t)(s->base.pc_next - s->cs_base)); in gen_update_eip_cur()
560 s->pc_save = s->base.pc_next; in gen_update_eip_cur()
563 static void gen_update_eip_next(DisasContext *s) in gen_update_eip_next() argument
565 assert(s->pc_save != -1); in gen_update_eip_next()
566 if (tb_cflags(s->base.tb) & CF_PCREL) { in gen_update_eip_next()
567 tcg_gen_addi_tl(cpu_eip, cpu_eip, s->pc - s->pc_save); in gen_update_eip_next()
568 } else if (CODE64(s)) { in gen_update_eip_next()
569 tcg_gen_movi_tl(cpu_eip, s->pc); in gen_update_eip_next()
571 tcg_gen_movi_tl(cpu_eip, (uint32_t)(s->pc - s->cs_base)); in gen_update_eip_next()
573 s->pc_save = s->pc; in gen_update_eip_next()
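
gen_update_eip_cur() and gen_update_eip_next() commit EIP in one of three ways: as a delta against the last value written when the block is compiled position-independent (CF_PCREL), as the absolute address in 64-bit code, or as a 32-bit offset from cs_base otherwise. A stand-alone sketch of the non-PCREL value they store; names are illustrative:

    #include <stdbool.h>
    #include <stdint.h>

    /* Value written to cpu_eip for a given target pc (non-CF_PCREL case). */
    static uint64_t committed_eip(uint64_t pc, uint64_t cs_base, bool code64)
    {
        if (code64) {
            return pc;                       /* long mode: store the linear RIP */
        }
        return (uint32_t)(pc - cs_base);     /* 16/32-bit: offset from the CS base */
    }

In the CF_PCREL case the generated code instead adds (pc - pc_save) to whatever cpu_eip currently holds, so the translation block contains no absolute addresses.
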
576 static int cur_insn_len(DisasContext *s) in cur_insn_len() argument
578 return s->pc - s->base.pc_next; in cur_insn_len()
581 static TCGv_i32 cur_insn_len_i32(DisasContext *s) in cur_insn_len_i32() argument
583 return tcg_constant_i32(cur_insn_len(s)); in cur_insn_len_i32()
586 static TCGv_i32 eip_next_i32(DisasContext *s) in eip_next_i32() argument
588 assert(s->pc_save != -1); in eip_next_i32()
597 if (CODE64(s)) { in eip_next_i32()
600 if (tb_cflags(s->base.tb) & CF_PCREL) { in eip_next_i32()
603 tcg_gen_addi_i32(ret, ret, s->pc - s->pc_save); in eip_next_i32()
606 return tcg_constant_i32(s->pc - s->cs_base); in eip_next_i32()
610 static TCGv eip_next_tl(DisasContext *s) in eip_next_tl() argument
612 assert(s->pc_save != -1); in eip_next_tl()
613 if (tb_cflags(s->base.tb) & CF_PCREL) { in eip_next_tl()
615 tcg_gen_addi_tl(ret, cpu_eip, s->pc - s->pc_save); in eip_next_tl()
617 } else if (CODE64(s)) { in eip_next_tl()
618 return tcg_constant_tl(s->pc); in eip_next_tl()
620 return tcg_constant_tl((uint32_t)(s->pc - s->cs_base)); in eip_next_tl()
624 static TCGv eip_cur_tl(DisasContext *s) in eip_cur_tl() argument
626 assert(s->pc_save != -1); in eip_cur_tl()
627 if (tb_cflags(s->base.tb) & CF_PCREL) { in eip_cur_tl()
629 tcg_gen_addi_tl(ret, cpu_eip, s->base.pc_next - s->pc_save); in eip_cur_tl()
631 } else if (CODE64(s)) { in eip_cur_tl()
632 return tcg_constant_tl(s->base.pc_next); in eip_cur_tl()
634 return tcg_constant_tl((uint32_t)(s->base.pc_next - s->cs_base)); in eip_cur_tl()
641 static void gen_lea_v_seg(DisasContext *s, MemOp aflag, TCGv a0, in gen_lea_v_seg() argument
648 tcg_gen_mov_tl(s->A0, a0); in gen_lea_v_seg()
655 if (ovr_seg < 0 && ADDSEG(s)) { in gen_lea_v_seg()
659 tcg_gen_ext32u_tl(s->A0, a0); in gen_lea_v_seg()
665 tcg_gen_ext16u_tl(s->A0, a0); in gen_lea_v_seg()
666 a0 = s->A0; in gen_lea_v_seg()
668 if (ADDSEG(s)) { in gen_lea_v_seg()
683 tcg_gen_add_tl(s->A0, a0, seg); in gen_lea_v_seg()
684 } else if (CODE64(s)) { in gen_lea_v_seg()
685 tcg_gen_ext32u_tl(s->A0, a0); in gen_lea_v_seg()
686 tcg_gen_add_tl(s->A0, s->A0, seg); in gen_lea_v_seg()
688 tcg_gen_add_tl(s->A0, a0, seg); in gen_lea_v_seg()
689 tcg_gen_ext32u_tl(s->A0, s->A0); in gen_lea_v_seg()
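
gen_lea_v_seg() builds the linear address in A0 from an offset and an optional segment base: 64-bit addresses pass through untouched, 32-bit offsets are zero-extended, 16-bit offsets are first masked to 16 bits, and outside 64-bit code the base-plus-offset sum is wrapped to 32 bits again. A simplified stand-alone model of the legacy (non-64-bit) path; the enum and function are illustrative only:

    #include <stdbool.h>
    #include <stdint.h>

    typedef enum { A16, A32 } AddrSize;

    /* Legacy-mode linear address: truncate the offset to the address size,
     * optionally add the segment base, let the sum wrap at 32 bits. */
    static uint32_t linear_addr_legacy(uint32_t offset, uint32_t seg_base,
                                       AddrSize aflag, bool add_seg)
    {
        if (aflag == A16) {
            offset &= 0xffff;
        }
        return add_seg ? offset + seg_base : offset;
    }

(In 64-bit code with a 32-bit address size the offset is zero-extended before the FS/GS base is added and the sum is not truncated, which is the CODE64 branch above.)
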
694 static inline void gen_string_movl_A0_ESI(DisasContext *s) in gen_string_movl_A0_ESI() argument
696 gen_lea_v_seg(s, s->aflag, cpu_regs[R_ESI], R_DS, s->override); in gen_string_movl_A0_ESI()
699 static inline void gen_string_movl_A0_EDI(DisasContext *s) in gen_string_movl_A0_EDI() argument
701 gen_lea_v_seg(s, s->aflag, cpu_regs[R_EDI], R_ES, -1); in gen_string_movl_A0_EDI()
704 static inline void gen_op_movl_T0_Dshift(DisasContext *s, MemOp ot) in gen_op_movl_T0_Dshift() argument
706 tcg_gen_ld32s_tl(s->T0, tcg_env, offsetof(CPUX86State, df)); in gen_op_movl_T0_Dshift()
707 tcg_gen_shli_tl(s->T0, s->T0, ot); in gen_op_movl_T0_Dshift()
729 static void gen_op_j_ecx(DisasContext *s, TCGCond cond, TCGLabel *label1) in gen_op_j_ecx() argument
731 tcg_gen_mov_tl(s->tmp0, cpu_regs[R_ECX]); in gen_op_j_ecx()
732 gen_extu(s->aflag, s->tmp0); in gen_op_j_ecx()
733 tcg_gen_brcondi_tl(cond, s->tmp0, 0, label1); in gen_op_j_ecx()
736 static inline void gen_op_jz_ecx(DisasContext *s, TCGLabel *label1) in gen_op_jz_ecx() argument
738 gen_op_j_ecx(s, TCG_COND_EQ, label1); in gen_op_jz_ecx()
741 static inline void gen_op_jnz_ecx(DisasContext *s, TCGLabel *label1) in gen_op_jnz_ecx() argument
743 gen_op_j_ecx(s, TCG_COND_NE, label1); in gen_op_jnz_ecx()
784 static bool gen_check_io(DisasContext *s, MemOp ot, TCGv_i32 port, in gen_check_io() argument
792 gen_exception_gpf(s); in gen_check_io()
795 if (PE(s) && (CPL(s) > IOPL(s) || VM86(s))) { in gen_check_io()
798 if (GUEST(s)) { in gen_check_io()
799 gen_update_cc_op(s); in gen_check_io()
800 gen_update_eip_cur(s); in gen_check_io()
801 if (s->prefix & (PREFIX_REPZ | PREFIX_REPNZ)) { in gen_check_io()
807 cur_insn_len_i32(s)); in gen_check_io()
813 static void gen_movs(DisasContext *s, MemOp ot) in gen_movs() argument
815 gen_string_movl_A0_ESI(s); in gen_movs()
816 gen_op_ld_v(s, ot, s->T0, s->A0); in gen_movs()
817 gen_string_movl_A0_EDI(s); in gen_movs()
818 gen_op_st_v(s, ot, s->T0, s->A0); in gen_movs()
819 gen_op_movl_T0_Dshift(s, ot); in gen_movs()
820 gen_op_add_reg_T0(s, s->aflag, R_ESI); in gen_movs()
821 gen_op_add_reg_T0(s, s->aflag, R_EDI); in gen_movs()
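
gen_movs() is one MOVS iteration: load from the source segment at ESI, store to ES:EDI, then advance both index registers by the delta that gen_op_movl_T0_Dshift() computes (the direction flag, kept as +1 or -1 in this translator, shifted left by the operand-size log). A stand-alone model of a single step on a flat buffer; the names and the flat-memory assumption are illustrative:

    #include <stdint.h>
    #include <string.h>

    /* One MOVS iteration; ot is log2 of the element size, df is 0 (up) or 1 (down). */
    static void movs_step(uint8_t *mem, uint64_t *esi, uint64_t *edi, int ot, int df)
    {
        int size = 1 << ot;
        int64_t delta = df ? -size : size;   /* models T0 = df << ot with df = +/-1 */
        uint8_t tmp[8];

        memcpy(tmp, mem + *esi, size);       /* load element */
        memcpy(mem + *edi, tmp, size);       /* store element */
        *esi += delta;
        *edi += delta;
    }
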
824 static void gen_op_update1_cc(DisasContext *s) in gen_op_update1_cc() argument
826 tcg_gen_mov_tl(cpu_cc_dst, s->T0); in gen_op_update1_cc()
829 static void gen_op_update2_cc(DisasContext *s) in gen_op_update2_cc() argument
831 tcg_gen_mov_tl(cpu_cc_src, s->T1); in gen_op_update2_cc()
832 tcg_gen_mov_tl(cpu_cc_dst, s->T0); in gen_op_update2_cc()
835 static void gen_op_update3_cc(DisasContext *s, TCGv reg) in gen_op_update3_cc() argument
838 tcg_gen_mov_tl(cpu_cc_src, s->T1); in gen_op_update3_cc()
839 tcg_gen_mov_tl(cpu_cc_dst, s->T0); in gen_op_update3_cc()
842 static inline void gen_op_testl_T0_T1_cc(DisasContext *s) in gen_op_testl_T0_T1_cc() argument
844 tcg_gen_and_tl(cpu_cc_dst, s->T0, s->T1); in gen_op_testl_T0_T1_cc()
847 static void gen_op_update_neg_cc(DisasContext *s) in gen_op_update_neg_cc() argument
849 tcg_gen_mov_tl(cpu_cc_dst, s->T0); in gen_op_update_neg_cc()
850 tcg_gen_neg_tl(cpu_cc_src, s->T0); in gen_op_update_neg_cc()
851 tcg_gen_movi_tl(s->cc_srcT, 0); in gen_op_update_neg_cc()
855 static void gen_compute_eflags(DisasContext *s) in gen_compute_eflags() argument
860 if (s->cc_op == CC_OP_EFLAGS) { in gen_compute_eflags()
863 if (s->cc_op == CC_OP_CLR) { in gen_compute_eflags()
865 set_cc_op(s, CC_OP_EFLAGS); in gen_compute_eflags()
875 live = cc_op_live[s->cc_op] & ~USES_CC_SRCT; in gen_compute_eflags()
890 gen_update_cc_op(s); in gen_compute_eflags()
892 set_cc_op(s, CC_OP_EFLAGS); in gen_compute_eflags()
906 static CCPrepare gen_prepare_eflags_c(DisasContext *s, TCGv reg) in gen_prepare_eflags_c() argument
911 switch (s->cc_op) { in gen_prepare_eflags_c()
914 size = s->cc_op - CC_OP_SUBB; in gen_prepare_eflags_c()
915 t1 = gen_ext_tl(s->tmp0, cpu_cc_src, size, false); in gen_prepare_eflags_c()
917 t0 = t1 == cpu_cc_src ? s->tmp0 : reg; in gen_prepare_eflags_c()
918 tcg_gen_mov_tl(t0, s->cc_srcT); in gen_prepare_eflags_c()
924 size = s->cc_op - CC_OP_ADDB; in gen_prepare_eflags_c()
925 t1 = gen_ext_tl(s->tmp0, cpu_cc_src, size, false); in gen_prepare_eflags_c()
943 size = s->cc_op - CC_OP_SHLB; in gen_prepare_eflags_c()
953 size = s->cc_op - CC_OP_BMILGB; in gen_prepare_eflags_c()
971 gen_update_cc_op(s); in gen_prepare_eflags_c()
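
For the CC_OP_SUB* group, gen_prepare_eflags_c() never materializes EFLAGS: after dst = src1 - src2, with src1 saved in cc_srcT and src2 in cc_src, CF is simply whether src1 < src2 as unsigned values at the operand width, which is what the cc_srcT/cc_src extraction above prepares. A stand-alone exhaustive check of that identity for the byte case:

    #include <assert.h>
    #include <stdint.h>

    /* CF after an 8-bit SUB, computed the lazy-flags way. */
    static int sub8_carry(uint8_t src1, uint8_t src2)
    {
        return src1 < src2;                      /* unsigned borrow */
    }

    int main(void)
    {
        for (int a = 0; a < 256; a++) {
            for (int b = 0; b < 256; b++) {
                int ref = (a - b) < 0;           /* borrow out of bit 7 */
                assert(sub8_carry((uint8_t)a, (uint8_t)b) == ref);
            }
        }
        return 0;
    }
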
980 static CCPrepare gen_prepare_eflags_p(DisasContext *s, TCGv reg) in gen_prepare_eflags_p() argument
982 gen_compute_eflags(s); in gen_prepare_eflags_p()
988 static CCPrepare gen_prepare_eflags_s(DisasContext *s, TCGv reg) in gen_prepare_eflags_s() argument
990 switch (s->cc_op) { in gen_prepare_eflags_s()
992 gen_compute_eflags(s); in gen_prepare_eflags_s()
1005 MemOp size = (s->cc_op - CC_OP_ADDB) & 3; in gen_prepare_eflags_s()
1013 static CCPrepare gen_prepare_eflags_o(DisasContext *s, TCGv reg) in gen_prepare_eflags_o() argument
1015 switch (s->cc_op) { in gen_prepare_eflags_o()
1024 gen_compute_eflags(s); in gen_prepare_eflags_o()
1031 static CCPrepare gen_prepare_eflags_z(DisasContext *s, TCGv reg) in gen_prepare_eflags_z() argument
1033 switch (s->cc_op) { in gen_prepare_eflags_z()
1035 gen_compute_eflags(s); in gen_prepare_eflags_z()
1050 MemOp size = (s->cc_op - CC_OP_ADDB) & 3; in gen_prepare_eflags_z()
1059 static CCPrepare gen_prepare_cc(DisasContext *s, int b, TCGv reg) in gen_prepare_cc() argument
1069 switch (s->cc_op) { in gen_prepare_cc()
1072 size = s->cc_op - CC_OP_SUBB; in gen_prepare_cc()
1075 tcg_gen_mov_tl(s->tmp4, s->cc_srcT); in gen_prepare_cc()
1076 gen_extu(size, s->tmp4); in gen_prepare_cc()
1077 t0 = gen_ext_tl(s->tmp0, cpu_cc_src, size, false); in gen_prepare_cc()
1078 cc = (CCPrepare) { .cond = TCG_COND_LEU, .reg = s->tmp4, in gen_prepare_cc()
1088 tcg_gen_mov_tl(s->tmp4, s->cc_srcT); in gen_prepare_cc()
1089 gen_exts(size, s->tmp4); in gen_prepare_cc()
1090 t0 = gen_ext_tl(s->tmp0, cpu_cc_src, size, true); in gen_prepare_cc()
1091 cc = (CCPrepare) { .cond = cond, .reg = s->tmp4, in gen_prepare_cc()
1105 cc = gen_prepare_eflags_o(s, reg); in gen_prepare_cc()
1108 cc = gen_prepare_eflags_c(s, reg); in gen_prepare_cc()
1111 cc = gen_prepare_eflags_z(s, reg); in gen_prepare_cc()
1114 gen_compute_eflags(s); in gen_prepare_cc()
1119 cc = gen_prepare_eflags_s(s, reg); in gen_prepare_cc()
1122 cc = gen_prepare_eflags_p(s, reg); in gen_prepare_cc()
1125 gen_compute_eflags(s); in gen_prepare_cc()
1127 reg = s->tmp0; in gen_prepare_cc()
1136 gen_compute_eflags(s); in gen_prepare_cc()
1138 reg = s->tmp0; in gen_prepare_cc()
1155 static void gen_setcc1(DisasContext *s, int b, TCGv reg) in gen_setcc1() argument
1157 CCPrepare cc = gen_prepare_cc(s, b, reg); in gen_setcc1()
1185 static inline void gen_compute_eflags_c(DisasContext *s, TCGv reg) in gen_compute_eflags_c() argument
1187 gen_setcc1(s, JCC_B << 1, reg); in gen_compute_eflags_c()
1192 static inline void gen_jcc1_noeob(DisasContext *s, int b, TCGLabel *l1) in gen_jcc1_noeob() argument
1194 CCPrepare cc = gen_prepare_cc(s, b, s->T0); in gen_jcc1_noeob()
1197 tcg_gen_andi_tl(s->T0, cc.reg, cc.mask); in gen_jcc1_noeob()
1198 cc.reg = s->T0; in gen_jcc1_noeob()
1210 static inline void gen_jcc1(DisasContext *s, int b, TCGLabel *l1) in gen_jcc1() argument
1212 CCPrepare cc = gen_prepare_cc(s, b, s->T0); in gen_jcc1()
1214 gen_update_cc_op(s); in gen_jcc1()
1216 tcg_gen_andi_tl(s->T0, cc.reg, cc.mask); in gen_jcc1()
1217 cc.reg = s->T0; in gen_jcc1()
1219 set_cc_op(s, CC_OP_DYNAMIC); in gen_jcc1()
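
gen_prepare_cc() and the JCC_* constants used above follow the x86 condition encoding: bits 3:1 of the condition select one of O, B, Z, BE, S, P, L, LE and bit 0 negates it, which is why callers write things like JCC_B << 1 or (JCC_Z << 1) | (nz ^ 1). A stand-alone model of that nibble; the enum ordering mirrors the architectural tttn encoding and is assumed to match the translator's:

    #include <stdbool.h>

    enum { JCC_O, JCC_B, JCC_Z, JCC_BE, JCC_S, JCC_P, JCC_L, JCC_LE };

    /* Evaluate condition code b (0..15) from individual flags. */
    static bool cond_taken(int b, bool of, bool cf, bool zf, bool sf, bool pf)
    {
        bool r;
        switch (b >> 1) {
        case JCC_O:  r = of;                 break;
        case JCC_B:  r = cf;                 break;
        case JCC_Z:  r = zf;                 break;
        case JCC_BE: r = cf || zf;           break;
        case JCC_S:  r = sf;                 break;
        case JCC_P:  r = pf;                 break;
        case JCC_L:  r = sf != of;           break;
        default:     r = zf || (sf != of);   break;   /* JCC_LE */
        }
        return (b & 1) ? !r : r;
    }
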
1229 static TCGLabel *gen_jz_ecx_string(DisasContext *s) in gen_jz_ecx_string() argument
1233 gen_op_jnz_ecx(s, l1); in gen_jz_ecx_string()
1235 gen_jmp_rel_csize(s, 0, 1); in gen_jz_ecx_string()
1240 static void gen_stos(DisasContext *s, MemOp ot) in gen_stos() argument
1242 gen_op_mov_v_reg(s, MO_32, s->T0, R_EAX); in gen_stos()
1243 gen_string_movl_A0_EDI(s); in gen_stos()
1244 gen_op_st_v(s, ot, s->T0, s->A0); in gen_stos()
1245 gen_op_movl_T0_Dshift(s, ot); in gen_stos()
1246 gen_op_add_reg_T0(s, s->aflag, R_EDI); in gen_stos()
1249 static void gen_lods(DisasContext *s, MemOp ot) in gen_lods() argument
1251 gen_string_movl_A0_ESI(s); in gen_lods()
1252 gen_op_ld_v(s, ot, s->T0, s->A0); in gen_lods()
1253 gen_op_mov_reg_v(s, ot, R_EAX, s->T0); in gen_lods()
1254 gen_op_movl_T0_Dshift(s, ot); in gen_lods()
1255 gen_op_add_reg_T0(s, s->aflag, R_ESI); in gen_lods()
1258 static void gen_scas(DisasContext *s, MemOp ot) in gen_scas() argument
1260 gen_string_movl_A0_EDI(s); in gen_scas()
1261 gen_op_ld_v(s, ot, s->T1, s->A0); in gen_scas()
1262 gen_op(s, OP_CMPL, ot, R_EAX); in gen_scas()
1263 gen_op_movl_T0_Dshift(s, ot); in gen_scas()
1264 gen_op_add_reg_T0(s, s->aflag, R_EDI); in gen_scas()
1267 static void gen_cmps(DisasContext *s, MemOp ot) in gen_cmps() argument
1269 gen_string_movl_A0_EDI(s); in gen_cmps()
1270 gen_op_ld_v(s, ot, s->T1, s->A0); in gen_cmps()
1271 gen_string_movl_A0_ESI(s); in gen_cmps()
1272 gen_op(s, OP_CMPL, ot, OR_TMP0); in gen_cmps()
1273 gen_op_movl_T0_Dshift(s, ot); in gen_cmps()
1274 gen_op_add_reg_T0(s, s->aflag, R_ESI); in gen_cmps()
1275 gen_op_add_reg_T0(s, s->aflag, R_EDI); in gen_cmps()
1278 static void gen_bpt_io(DisasContext *s, TCGv_i32 t_port, int ot) in gen_bpt_io() argument
1280 if (s->flags & HF_IOBPT_MASK) { in gen_bpt_io()
1286 TCGv t_next = eip_next_tl(s); in gen_bpt_io()
1292 static void gen_ins(DisasContext *s, MemOp ot) in gen_ins() argument
1294 gen_string_movl_A0_EDI(s); in gen_ins()
1297 tcg_gen_movi_tl(s->T0, 0); in gen_ins()
1298 gen_op_st_v(s, ot, s->T0, s->A0); in gen_ins()
1299 tcg_gen_trunc_tl_i32(s->tmp2_i32, cpu_regs[R_EDX]); in gen_ins()
1300 tcg_gen_andi_i32(s->tmp2_i32, s->tmp2_i32, 0xffff); in gen_ins()
1301 gen_helper_in_func(ot, s->T0, s->tmp2_i32); in gen_ins()
1302 gen_op_st_v(s, ot, s->T0, s->A0); in gen_ins()
1303 gen_op_movl_T0_Dshift(s, ot); in gen_ins()
1304 gen_op_add_reg_T0(s, s->aflag, R_EDI); in gen_ins()
1305 gen_bpt_io(s, s->tmp2_i32, ot); in gen_ins()
1308 static void gen_outs(DisasContext *s, MemOp ot) in gen_outs() argument
1310 gen_string_movl_A0_ESI(s); in gen_outs()
1311 gen_op_ld_v(s, ot, s->T0, s->A0); in gen_outs()
1313 tcg_gen_trunc_tl_i32(s->tmp2_i32, cpu_regs[R_EDX]); in gen_outs()
1314 tcg_gen_andi_i32(s->tmp2_i32, s->tmp2_i32, 0xffff); in gen_outs()
1315 tcg_gen_trunc_tl_i32(s->tmp3_i32, s->T0); in gen_outs()
1316 gen_helper_out_func(ot, s->tmp2_i32, s->tmp3_i32); in gen_outs()
1317 gen_op_movl_T0_Dshift(s, ot); in gen_outs()
1318 gen_op_add_reg_T0(s, s->aflag, R_ESI); in gen_outs()
1319 gen_bpt_io(s, s->tmp2_i32, ot); in gen_outs()
1323 static void gen_repz(DisasContext *s, MemOp ot, in gen_repz() argument
1324 void (*fn)(DisasContext *s, MemOp ot)) in gen_repz() argument
1327 gen_update_cc_op(s); in gen_repz()
1328 l2 = gen_jz_ecx_string(s); in gen_repz()
1329 fn(s, ot); in gen_repz()
1330 gen_op_add_reg_im(s, s->aflag, R_ECX, -1); in gen_repz()
1335 if (s->repz_opt) { in gen_repz()
1336 gen_op_jz_ecx(s, l2); in gen_repz()
1338 gen_jmp_rel_csize(s, -cur_insn_len(s), 0); in gen_repz()
1342 static inline void gen_repz_ ## op(DisasContext *s, MemOp ot) \
1343 { gen_repz(s, ot, gen_##op); }
1345 static void gen_repz2(DisasContext *s, MemOp ot, int nz, in gen_repz2() argument
1346 void (*fn)(DisasContext *s, MemOp ot)) in gen_repz2() argument
1349 gen_update_cc_op(s); in gen_repz2()
1350 l2 = gen_jz_ecx_string(s); in gen_repz2()
1351 fn(s, ot); in gen_repz2()
1352 gen_op_add_reg_im(s, s->aflag, R_ECX, -1); in gen_repz2()
1353 gen_update_cc_op(s); in gen_repz2()
1354 gen_jcc1(s, (JCC_Z << 1) | (nz ^ 1), l2); in gen_repz2()
1355 if (s->repz_opt) { in gen_repz2()
1356 gen_op_jz_ecx(s, l2); in gen_repz2()
1358 gen_jmp_rel_csize(s, -cur_insn_len(s), 0); in gen_repz2()
1362 static inline void gen_repz_ ## op(DisasContext *s, MemOp ot, int nz) \
1363 { gen_repz2(s, ot, nz, gen_##op); }
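
gen_repz() and gen_repz2() emit exactly one string-op iteration per pass through the translation block: bail out if ECX is already zero, run the body, decrement ECX, for REPZ/REPNZ also test ZF, and otherwise jump back so the same instruction executes again. A compact model of the continuation test, with nz = 1 meaning REPNZ as in gen_repz2():

    #include <stdbool.h>
    #include <stdint.h>

    /* True if a REP-prefixed string op should run another iteration. */
    static bool rep_continue(uint64_t ecx, bool zf, bool has_z_condition, int nz)
    {
        if (ecx == 0) {
            return false;                 /* count exhausted (gen_op_jz_ecx) */
        }
        if (has_z_condition && (int)zf == nz) {
            return false;                 /* REPE stops on ZF=0, REPNE on ZF=1 */
        }
        return true;
    }
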
1429 static void gen_exception(DisasContext *s, int trapno) in gen_exception() argument
1431 gen_update_cc_op(s); in gen_exception()
1432 gen_update_eip_cur(s); in gen_exception()
1434 s->base.is_jmp = DISAS_NORETURN; in gen_exception()
1439 static void gen_illegal_opcode(DisasContext *s) in gen_illegal_opcode() argument
1441 gen_exception(s, EXCP06_ILLOP); in gen_illegal_opcode()
1445 static void gen_exception_gpf(DisasContext *s) in gen_exception_gpf() argument
1447 gen_exception(s, EXCP0D_GPF); in gen_exception_gpf()
1451 static bool check_cpl0(DisasContext *s) in check_cpl0() argument
1453 if (CPL(s) == 0) { in check_cpl0()
1456 gen_exception_gpf(s); in check_cpl0()
1461 static bool check_vm86_iopl(DisasContext *s) in check_vm86_iopl() argument
1463 if (!VM86(s) || IOPL(s) == 3) { in check_vm86_iopl()
1466 gen_exception_gpf(s); in check_vm86_iopl()
1471 static bool check_iopl(DisasContext *s) in check_iopl() argument
1473 if (VM86(s) ? IOPL(s) == 3 : CPL(s) <= IOPL(s)) { in check_iopl()
1476 gen_exception_gpf(s); in check_iopl()
1619 static void gen_shift_flags(DisasContext *s, MemOp ot, TCGv result, in gen_shift_flags() argument
1629 if (cc_op_live[s->cc_op] & USES_CC_DST) { in gen_shift_flags()
1635 if (cc_op_live[s->cc_op] & USES_CC_SRC) { in gen_shift_flags()
1643 tcg_gen_movi_i32(s->tmp2_i32, (is_right ? CC_OP_SARB : CC_OP_SHLB) + ot); in gen_shift_flags()
1644 if (s->cc_op == CC_OP_DYNAMIC) { in gen_shift_flags()
1647 tcg_gen_movi_i32(s->tmp3_i32, s->cc_op); in gen_shift_flags()
1648 oldop = s->tmp3_i32; in gen_shift_flags()
1655 tcg_gen_movcond_i32(TCG_COND_NE, cpu_cc_op, s32, z32, s->tmp2_i32, oldop); in gen_shift_flags()
1658 set_cc_op(s, CC_OP_DYNAMIC); in gen_shift_flags()
1661 static void gen_shift_rm_T1(DisasContext *s, MemOp ot, int op1, in gen_shift_rm_T1() argument
1668 gen_op_ld_v(s, ot, s->T0, s->A0); in gen_shift_rm_T1()
1670 gen_op_mov_v_reg(s, ot, s->T0, op1); in gen_shift_rm_T1()
1673 tcg_gen_andi_tl(s->T1, s->T1, mask); in gen_shift_rm_T1()
1674 tcg_gen_subi_tl(s->tmp0, s->T1, 1); in gen_shift_rm_T1()
1678 gen_exts(ot, s->T0); in gen_shift_rm_T1()
1679 tcg_gen_sar_tl(s->tmp0, s->T0, s->tmp0); in gen_shift_rm_T1()
1680 tcg_gen_sar_tl(s->T0, s->T0, s->T1); in gen_shift_rm_T1()
1682 gen_extu(ot, s->T0); in gen_shift_rm_T1()
1683 tcg_gen_shr_tl(s->tmp0, s->T0, s->tmp0); in gen_shift_rm_T1()
1684 tcg_gen_shr_tl(s->T0, s->T0, s->T1); in gen_shift_rm_T1()
1687 tcg_gen_shl_tl(s->tmp0, s->T0, s->tmp0); in gen_shift_rm_T1()
1688 tcg_gen_shl_tl(s->T0, s->T0, s->T1); in gen_shift_rm_T1()
1692 gen_op_st_rm_T0_A0(s, ot, op1); in gen_shift_rm_T1()
1694 gen_shift_flags(s, ot, s->T0, s->tmp0, s->T1, is_right); in gen_shift_rm_T1()
1697 static void gen_shift_rm_im(DisasContext *s, MemOp ot, int op1, int op2, in gen_shift_rm_im() argument
1704 gen_op_ld_v(s, ot, s->T0, s->A0); in gen_shift_rm_im()
1706 gen_op_mov_v_reg(s, ot, s->T0, op1); in gen_shift_rm_im()
1712 gen_exts(ot, s->T0); in gen_shift_rm_im()
1713 tcg_gen_sari_tl(s->tmp4, s->T0, op2 - 1); in gen_shift_rm_im()
1714 tcg_gen_sari_tl(s->T0, s->T0, op2); in gen_shift_rm_im()
1716 gen_extu(ot, s->T0); in gen_shift_rm_im()
1717 tcg_gen_shri_tl(s->tmp4, s->T0, op2 - 1); in gen_shift_rm_im()
1718 tcg_gen_shri_tl(s->T0, s->T0, op2); in gen_shift_rm_im()
1721 tcg_gen_shli_tl(s->tmp4, s->T0, op2 - 1); in gen_shift_rm_im()
1722 tcg_gen_shli_tl(s->T0, s->T0, op2); in gen_shift_rm_im()
1727 gen_op_st_rm_T0_A0(s, ot, op1); in gen_shift_rm_im()
1731 tcg_gen_mov_tl(cpu_cc_src, s->tmp4); in gen_shift_rm_im()
1732 tcg_gen_mov_tl(cpu_cc_dst, s->T0); in gen_shift_rm_im()
1733 set_cc_op(s, (is_right ? CC_OP_SARB : CC_OP_SHLB) + ot); in gen_shift_rm_im()
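
Both shift helpers above recover CF the same way: shift by count - 1 first, so the last bit that will be shifted out lands in a known position (bit 0 for right shifts, the top bit for left shifts), then perform the full shift. A stand-alone check of that trick for a 32-bit SHR:

    #include <assert.h>
    #include <stdint.h>

    /* CF for SHR r32 by count (1 <= count <= 31): the last bit shifted out. */
    static int shr32_cf(uint32_t val, unsigned count)
    {
        return (val >> (count - 1)) & 1;
    }

    int main(void)
    {
        uint32_t v = 0x80000001u;
        assert(shr32_cf(v, 1) == 1);     /* bit 0 falls out first */
        assert(shr32_cf(v, 2) == 0);     /* then bit 1, which is clear */
        assert(shr32_cf(v, 31) == 0);    /* bit 30 of v is clear */
        return 0;
    }
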
1737 static void gen_rot_rm_T1(DisasContext *s, MemOp ot, int op1, int is_right) in gen_rot_rm_T1() argument
1744 gen_op_ld_v(s, ot, s->T0, s->A0); in gen_rot_rm_T1()
1746 gen_op_mov_v_reg(s, ot, s->T0, op1); in gen_rot_rm_T1()
1749 tcg_gen_andi_tl(s->T1, s->T1, mask); in gen_rot_rm_T1()
1754 tcg_gen_ext8u_tl(s->T0, s->T0); in gen_rot_rm_T1()
1755 tcg_gen_muli_tl(s->T0, s->T0, 0x01010101); in gen_rot_rm_T1()
1759 tcg_gen_deposit_tl(s->T0, s->T0, s->T0, 16, 16); in gen_rot_rm_T1()
1764 tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0); in gen_rot_rm_T1()
1765 tcg_gen_trunc_tl_i32(s->tmp3_i32, s->T1); in gen_rot_rm_T1()
1767 tcg_gen_rotr_i32(s->tmp2_i32, s->tmp2_i32, s->tmp3_i32); in gen_rot_rm_T1()
1769 tcg_gen_rotl_i32(s->tmp2_i32, s->tmp2_i32, s->tmp3_i32); in gen_rot_rm_T1()
1771 tcg_gen_extu_i32_tl(s->T0, s->tmp2_i32); in gen_rot_rm_T1()
1776 tcg_gen_rotr_tl(s->T0, s->T0, s->T1); in gen_rot_rm_T1()
1778 tcg_gen_rotl_tl(s->T0, s->T0, s->T1); in gen_rot_rm_T1()
1784 gen_op_st_rm_T0_A0(s, ot, op1); in gen_rot_rm_T1()
1787 gen_compute_eflags(s); in gen_rot_rm_T1()
1794 tcg_gen_shri_tl(cpu_cc_src2, s->T0, mask - 1); in gen_rot_rm_T1()
1795 tcg_gen_shri_tl(cpu_cc_dst, s->T0, mask); in gen_rot_rm_T1()
1798 tcg_gen_shri_tl(cpu_cc_src2, s->T0, mask); in gen_rot_rm_T1()
1799 tcg_gen_andi_tl(cpu_cc_dst, s->T0, 1); in gen_rot_rm_T1()
1810 tcg_gen_trunc_tl_i32(t1, s->T1); in gen_rot_rm_T1()
1811 tcg_gen_movi_i32(s->tmp2_i32, CC_OP_ADCOX); in gen_rot_rm_T1()
1812 tcg_gen_movi_i32(s->tmp3_i32, CC_OP_EFLAGS); in gen_rot_rm_T1()
1814 s->tmp2_i32, s->tmp3_i32); in gen_rot_rm_T1()
1817 set_cc_op(s, CC_OP_DYNAMIC); in gen_rot_rm_T1()
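
For 8- and 16-bit rotates with a variable count, gen_rot_rm_T1() avoids narrow rotate ops by replicating the value across 32 bits (multiplying a byte by 0x01010101, or depositing a 16-bit value into both halves) so that a single 32-bit rotate yields the correct narrow result in the low bits for any masked count. A stand-alone, exhaustive check of the byte-replication trick:

    #include <assert.h>
    #include <stdint.h>

    static uint8_t rol8_ref(uint8_t x, unsigned c)
    {
        c &= 7;
        return (uint8_t)((x << c) | (x >> ((8 - c) & 7)));
    }

    /* Replicate the byte across 32 bits, rotate by count & 31, keep the low byte. */
    static uint8_t rol8_via_rol32(uint8_t x, unsigned count)
    {
        uint32_t rep = (uint32_t)x * 0x01010101u;
        unsigned c = count & 31;
        uint32_t r = c ? (rep << c) | (rep >> (32 - c)) : rep;
        return (uint8_t)r;
    }

    int main(void)
    {
        for (unsigned x = 0; x < 256; x++) {
            for (unsigned c = 0; c < 32; c++) {
                assert(rol8_via_rol32((uint8_t)x, c) == rol8_ref((uint8_t)x, c));
            }
        }
        return 0;
    }
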
1820 static void gen_rot_rm_im(DisasContext *s, MemOp ot, int op1, int op2, in gen_rot_rm_im() argument
1828 gen_op_ld_v(s, ot, s->T0, s->A0); in gen_rot_rm_im()
1830 gen_op_mov_v_reg(s, ot, s->T0, op1); in gen_rot_rm_im()
1838 tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0); in gen_rot_rm_im()
1840 tcg_gen_rotri_i32(s->tmp2_i32, s->tmp2_i32, op2); in gen_rot_rm_im()
1842 tcg_gen_rotli_i32(s->tmp2_i32, s->tmp2_i32, op2); in gen_rot_rm_im()
1844 tcg_gen_extu_i32_tl(s->T0, s->tmp2_i32); in gen_rot_rm_im()
1849 tcg_gen_rotri_tl(s->T0, s->T0, op2); in gen_rot_rm_im()
1851 tcg_gen_rotli_tl(s->T0, s->T0, op2); in gen_rot_rm_im()
1864 gen_extu(ot, s->T0); in gen_rot_rm_im()
1865 tcg_gen_shli_tl(s->tmp0, s->T0, shift); in gen_rot_rm_im()
1866 tcg_gen_shri_tl(s->T0, s->T0, mask + 1 - shift); in gen_rot_rm_im()
1867 tcg_gen_or_tl(s->T0, s->T0, s->tmp0); in gen_rot_rm_im()
1873 gen_op_st_rm_T0_A0(s, ot, op1); in gen_rot_rm_im()
1877 gen_compute_eflags(s); in gen_rot_rm_im()
1884 tcg_gen_shri_tl(cpu_cc_src2, s->T0, mask - 1); in gen_rot_rm_im()
1885 tcg_gen_shri_tl(cpu_cc_dst, s->T0, mask); in gen_rot_rm_im()
1888 tcg_gen_shri_tl(cpu_cc_src2, s->T0, mask); in gen_rot_rm_im()
1889 tcg_gen_andi_tl(cpu_cc_dst, s->T0, 1); in gen_rot_rm_im()
1893 set_cc_op(s, CC_OP_ADCOX); in gen_rot_rm_im()
1898 static void gen_rotc_rm_T1(DisasContext *s, MemOp ot, int op1, in gen_rotc_rm_T1() argument
1901 gen_compute_eflags(s); in gen_rotc_rm_T1()
1902 assert(s->cc_op == CC_OP_EFLAGS); in gen_rotc_rm_T1()
1906 gen_op_ld_v(s, ot, s->T0, s->A0); in gen_rotc_rm_T1()
1908 gen_op_mov_v_reg(s, ot, s->T0, op1); in gen_rotc_rm_T1()
1913 gen_helper_rcrb(s->T0, tcg_env, s->T0, s->T1); in gen_rotc_rm_T1()
1916 gen_helper_rcrw(s->T0, tcg_env, s->T0, s->T1); in gen_rotc_rm_T1()
1919 gen_helper_rcrl(s->T0, tcg_env, s->T0, s->T1); in gen_rotc_rm_T1()
1923 gen_helper_rcrq(s->T0, tcg_env, s->T0, s->T1); in gen_rotc_rm_T1()
1932 gen_helper_rclb(s->T0, tcg_env, s->T0, s->T1); in gen_rotc_rm_T1()
1935 gen_helper_rclw(s->T0, tcg_env, s->T0, s->T1); in gen_rotc_rm_T1()
1938 gen_helper_rcll(s->T0, tcg_env, s->T0, s->T1); in gen_rotc_rm_T1()
1942 gen_helper_rclq(s->T0, tcg_env, s->T0, s->T1); in gen_rotc_rm_T1()
1950 gen_op_st_rm_T0_A0(s, ot, op1); in gen_rotc_rm_T1()
1954 static void gen_shiftd_rm_T1(DisasContext *s, MemOp ot, int op1, in gen_shiftd_rm_T1() argument
1962 gen_op_ld_v(s, ot, s->T0, s->A0); in gen_shiftd_rm_T1()
1964 gen_op_mov_v_reg(s, ot, s->T0, op1); in gen_shiftd_rm_T1()
1976 tcg_gen_deposit_tl(s->tmp0, s->T0, s->T1, 16, 16); in gen_shiftd_rm_T1()
1977 tcg_gen_mov_tl(s->T1, s->T0); in gen_shiftd_rm_T1()
1978 tcg_gen_mov_tl(s->T0, s->tmp0); in gen_shiftd_rm_T1()
1980 tcg_gen_deposit_tl(s->T1, s->T0, s->T1, 16, 16); in gen_shiftd_rm_T1()
1989 tcg_gen_subi_tl(s->tmp0, count, 1); in gen_shiftd_rm_T1()
1991 tcg_gen_concat_tl_i64(s->T0, s->T0, s->T1); in gen_shiftd_rm_T1()
1992 tcg_gen_shr_i64(s->tmp0, s->T0, s->tmp0); in gen_shiftd_rm_T1()
1993 tcg_gen_shr_i64(s->T0, s->T0, count); in gen_shiftd_rm_T1()
1995 tcg_gen_concat_tl_i64(s->T0, s->T1, s->T0); in gen_shiftd_rm_T1()
1996 tcg_gen_shl_i64(s->tmp0, s->T0, s->tmp0); in gen_shiftd_rm_T1()
1997 tcg_gen_shl_i64(s->T0, s->T0, count); in gen_shiftd_rm_T1()
1998 tcg_gen_shri_i64(s->tmp0, s->tmp0, 32); in gen_shiftd_rm_T1()
1999 tcg_gen_shri_i64(s->T0, s->T0, 32); in gen_shiftd_rm_T1()
2004 tcg_gen_subi_tl(s->tmp0, count, 1); in gen_shiftd_rm_T1()
2006 tcg_gen_shr_tl(s->tmp0, s->T0, s->tmp0); in gen_shiftd_rm_T1()
2008 tcg_gen_subfi_tl(s->tmp4, mask + 1, count); in gen_shiftd_rm_T1()
2009 tcg_gen_shr_tl(s->T0, s->T0, count); in gen_shiftd_rm_T1()
2010 tcg_gen_shl_tl(s->T1, s->T1, s->tmp4); in gen_shiftd_rm_T1()
2012 tcg_gen_shl_tl(s->tmp0, s->T0, s->tmp0); in gen_shiftd_rm_T1()
2015 tcg_gen_subfi_tl(s->tmp4, 33, count); in gen_shiftd_rm_T1()
2016 tcg_gen_shr_tl(s->tmp4, s->T1, s->tmp4); in gen_shiftd_rm_T1()
2017 tcg_gen_or_tl(s->tmp0, s->tmp0, s->tmp4); in gen_shiftd_rm_T1()
2020 tcg_gen_subfi_tl(s->tmp4, mask + 1, count); in gen_shiftd_rm_T1()
2021 tcg_gen_shl_tl(s->T0, s->T0, count); in gen_shiftd_rm_T1()
2022 tcg_gen_shr_tl(s->T1, s->T1, s->tmp4); in gen_shiftd_rm_T1()
2024 tcg_gen_movi_tl(s->tmp4, 0); in gen_shiftd_rm_T1()
2025 tcg_gen_movcond_tl(TCG_COND_EQ, s->T1, count, s->tmp4, in gen_shiftd_rm_T1()
2026 s->tmp4, s->T1); in gen_shiftd_rm_T1()
2027 tcg_gen_or_tl(s->T0, s->T0, s->T1); in gen_shiftd_rm_T1()
2032 gen_op_st_rm_T0_A0(s, ot, op1); in gen_shiftd_rm_T1()
2034 gen_shift_flags(s, ot, s->T0, s->tmp0, count, is_right); in gen_shiftd_rm_T1()
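
gen_shiftd_rm_T1() implements SHLD/SHRD by shifting a double-width view of the two operands: 16-bit operands are concatenated into a 64-bit value, while for 32-bit operands the second source supplies the bits shifted in from the far end. A stand-alone model of 32-bit SHRD for a non-zero masked count (the count == 0 case is handled separately by the movcond above, which leaves the destination unchanged):

    #include <assert.h>
    #include <stdint.h>

    /* SHRD dst, src, count for 32-bit operands, 1 <= count <= 31. */
    static uint32_t shrd32(uint32_t dst, uint32_t src, unsigned count)
    {
        return (dst >> count) | (src << (32 - count));
    }

    int main(void)
    {
        assert(shrd32(0x00000001u, 0x00000001u, 1) == 0x80000000u);
        assert(shrd32(0x12345678u, 0x9ABCDEF0u, 16) == 0xDEF01234u);
        return 0;
    }
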
2037 static void gen_shift(DisasContext *s1, int op, MemOp ot, int d, int s) in gen_shift() argument
2039 if (s != OR_TMP1) in gen_shift()
2040 gen_op_mov_v_reg(s1, ot, s1->T1, s); in gen_shift()
2096 static uint64_t advance_pc(CPUX86State *env, DisasContext *s, int num_bytes) in advance_pc() argument
2098 uint64_t pc = s->pc; in advance_pc()
2101 if (s->base.num_insns > 1 && in advance_pc()
2102 !is_same_page(&s->base, s->pc + num_bytes - 1)) { in advance_pc()
2103 siglongjmp(s->jmpbuf, 2); in advance_pc()
2106 s->pc += num_bytes; in advance_pc()
2107 if (unlikely(cur_insn_len(s) > X86_MAX_INSN_LENGTH)) { in advance_pc()
2113 if (((s->pc - 1) ^ (pc - 1)) & TARGET_PAGE_MASK) { in advance_pc()
2115 cpu_ldub_code(env, (s->pc - 1) & TARGET_PAGE_MASK); in advance_pc()
2118 siglongjmp(s->jmpbuf, 1); in advance_pc()
2124 static inline uint8_t x86_ldub_code(CPUX86State *env, DisasContext *s) in x86_ldub_code() argument
2126 return translator_ldub(env, &s->base, advance_pc(env, s, 1)); in x86_ldub_code()
2129 static inline int16_t x86_ldsw_code(CPUX86State *env, DisasContext *s) in x86_ldsw_code() argument
2131 return translator_lduw(env, &s->base, advance_pc(env, s, 2)); in x86_ldsw_code()
2134 static inline uint16_t x86_lduw_code(CPUX86State *env, DisasContext *s) in x86_lduw_code() argument
2136 return translator_lduw(env, &s->base, advance_pc(env, s, 2)); in x86_lduw_code()
2139 static inline uint32_t x86_ldl_code(CPUX86State *env, DisasContext *s) in x86_ldl_code() argument
2141 return translator_ldl(env, &s->base, advance_pc(env, s, 4)); in x86_ldl_code()
2145 static inline uint64_t x86_ldq_code(CPUX86State *env, DisasContext *s) in x86_ldq_code() argument
2147 return translator_ldq(env, &s->base, advance_pc(env, s, 8)); in x86_ldq_code()
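
advance_pc() performs two fetch-time checks before the x86_ld*_code() helpers return bytes: a mid-TB fetch that would cross into the next page ends the block early (siglongjmp code 2), and an instruction longer than the architectural 15-byte limit raises #GP (siglongjmp code 1, handled at the top of disas_insn()). A stand-alone model of the length check; the helper name is illustrative:

    #include <assert.h>
    #include <stdint.h>

    #define X86_MAX_INSN_LENGTH 15   /* architectural limit, as checked above */

    /* True if consuming num_bytes more would make the instruction too long. */
    static int insn_too_long(uint64_t insn_start, uint64_t pc, int num_bytes)
    {
        return (pc + num_bytes) - insn_start > X86_MAX_INSN_LENGTH;
    }

    int main(void)
    {
        assert(!insn_too_long(0x1000, 0x1000, 15));   /* exactly 15 bytes is legal */
        assert(insn_too_long(0x1000, 0x100f, 1));     /* a 16th byte is not */
        return 0;
    }
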
2161 static AddressParts gen_lea_modrm_0(CPUX86State *env, DisasContext *s, in gen_lea_modrm_0() argument
2175 base = rm | REX_B(s); in gen_lea_modrm_0()
2183 switch (s->aflag) { in gen_lea_modrm_0()
2188 int code = x86_ldub_code(env, s); in gen_lea_modrm_0()
2190 index = ((code >> 3) & 7) | REX_X(s); in gen_lea_modrm_0()
2194 base = (code & 7) | REX_B(s); in gen_lea_modrm_0()
2202 disp = (int32_t)x86_ldl_code(env, s); in gen_lea_modrm_0()
2203 if (CODE64(s) && !havesib) { in gen_lea_modrm_0()
2205 disp += s->pc + s->rip_offset; in gen_lea_modrm_0()
2210 disp = (int8_t)x86_ldub_code(env, s); in gen_lea_modrm_0()
2214 disp = (int32_t)x86_ldl_code(env, s); in gen_lea_modrm_0()
2219 if (base == R_ESP && s->popl_esp_hack) { in gen_lea_modrm_0()
2220 disp += s->popl_esp_hack; in gen_lea_modrm_0()
2231 disp = x86_lduw_code(env, s); in gen_lea_modrm_0()
2235 disp = (int8_t)x86_ldub_code(env, s); in gen_lea_modrm_0()
2237 disp = (int16_t)x86_lduw_code(env, s); in gen_lea_modrm_0()
2285 static TCGv gen_lea_modrm_1(DisasContext *s, AddressParts a, bool is_vsib) in gen_lea_modrm_1() argument
2293 tcg_gen_shli_tl(s->A0, cpu_regs[a.index], a.scale); in gen_lea_modrm_1()
2294 ea = s->A0; in gen_lea_modrm_1()
2297 tcg_gen_add_tl(s->A0, ea, cpu_regs[a.base]); in gen_lea_modrm_1()
2298 ea = s->A0; in gen_lea_modrm_1()
2304 if (tb_cflags(s->base.tb) & CF_PCREL && a.base == -2) { in gen_lea_modrm_1()
2306 tcg_gen_addi_tl(s->A0, cpu_eip, a.disp - s->pc_save); in gen_lea_modrm_1()
2308 tcg_gen_movi_tl(s->A0, a.disp); in gen_lea_modrm_1()
2310 ea = s->A0; in gen_lea_modrm_1()
2312 tcg_gen_addi_tl(s->A0, ea, a.disp); in gen_lea_modrm_1()
2313 ea = s->A0; in gen_lea_modrm_1()
2319 static void gen_lea_modrm(CPUX86State *env, DisasContext *s, int modrm) in gen_lea_modrm() argument
2321 AddressParts a = gen_lea_modrm_0(env, s, modrm); in gen_lea_modrm()
2322 TCGv ea = gen_lea_modrm_1(s, a, false); in gen_lea_modrm()
2323 gen_lea_v_seg(s, s->aflag, ea, a.def_seg, s->override); in gen_lea_modrm()
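
gen_lea_modrm_0() extracts the addressing fields with the shifts visible above: reg is (modrm >> 3) & 7 extended by REX.R, rm/base is the low three bits extended by REX.B, and for a SIB byte the scale, index and base fields, with the index extended by REX.X. A stand-alone decode of those raw fields; here the REX bits are plain 0/1 values, unlike the pre-shifted REX_R/X/B macros in the translator:

    #include <stdint.h>
    #include <stdio.h>

    struct ModRMFields {
        int mod, reg, rm;            /* ModRM: mod[7:6] reg[5:3] rm[2:0] */
        int scale, index, base;      /* SIB:   ss[7:6] index[5:3] base[2:0] */
    };

    static struct ModRMFields decode_modrm_sib(uint8_t modrm, uint8_t sib,
                                               int rex_r, int rex_x, int rex_b)
    {
        struct ModRMFields f;
        f.mod   = modrm >> 6;
        f.reg   = ((modrm >> 3) & 7) | (rex_r << 3);
        f.rm    = (modrm & 7)        | (rex_b << 3);
        f.scale = sib >> 6;
        f.index = ((sib >> 3) & 7)   | (rex_x << 3);
        f.base  = (sib & 7)          | (rex_b << 3);
        return f;
    }

    int main(void)
    {
        /* lea rax, [r8 + r9*4 + disp8]: ModRM = 0x44, SIB = 0x88, REX.X = REX.B = 1.
         * rm = 4 (before REX.B) only signals that a SIB byte follows. */
        struct ModRMFields f = decode_modrm_sib(0x44, 0x88, 0, 1, 1);
        printf("mod=%d reg=%d rm=%d scale=%d index=%d base=%d\n",
               f.mod, f.reg, f.rm, f.scale, f.index, f.base);
        return 0;
    }
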
2326 static void gen_nop_modrm(CPUX86State *env, DisasContext *s, int modrm) in gen_nop_modrm() argument
2328 (void)gen_lea_modrm_0(env, s, modrm); in gen_nop_modrm()
2332 static void gen_bndck(CPUX86State *env, DisasContext *s, int modrm, in gen_bndck() argument
2335 AddressParts a = gen_lea_modrm_0(env, s, modrm); in gen_bndck()
2336 TCGv ea = gen_lea_modrm_1(s, a, false); in gen_bndck()
2338 tcg_gen_extu_tl_i64(s->tmp1_i64, ea); in gen_bndck()
2339 if (!CODE64(s)) { in gen_bndck()
2340 tcg_gen_ext32u_i64(s->tmp1_i64, s->tmp1_i64); in gen_bndck()
2342 tcg_gen_setcond_i64(cond, s->tmp1_i64, s->tmp1_i64, bndv); in gen_bndck()
2343 tcg_gen_extrl_i64_i32(s->tmp2_i32, s->tmp1_i64); in gen_bndck()
2344 gen_helper_bndck(tcg_env, s->tmp2_i32); in gen_bndck()
2348 static void gen_add_A0_ds_seg(DisasContext *s) in gen_add_A0_ds_seg() argument
2350 gen_lea_v_seg(s, s->aflag, s->A0, R_DS, s->override); in gen_add_A0_ds_seg()
2355 static void gen_ldst_modrm(CPUX86State *env, DisasContext *s, int modrm, in gen_ldst_modrm() argument
2361 rm = (modrm & 7) | REX_B(s); in gen_ldst_modrm()
2365 gen_op_mov_v_reg(s, ot, s->T0, reg); in gen_ldst_modrm()
2366 gen_op_mov_reg_v(s, ot, rm, s->T0); in gen_ldst_modrm()
2368 gen_op_mov_v_reg(s, ot, s->T0, rm); in gen_ldst_modrm()
2370 gen_op_mov_reg_v(s, ot, reg, s->T0); in gen_ldst_modrm()
2373 gen_lea_modrm(env, s, modrm); in gen_ldst_modrm()
2376 gen_op_mov_v_reg(s, ot, s->T0, reg); in gen_ldst_modrm()
2377 gen_op_st_v(s, ot, s->T0, s->A0); in gen_ldst_modrm()
2379 gen_op_ld_v(s, ot, s->T0, s->A0); in gen_ldst_modrm()
2381 gen_op_mov_reg_v(s, ot, reg, s->T0); in gen_ldst_modrm()
2386 static target_ulong insn_get_addr(CPUX86State *env, DisasContext *s, MemOp ot) in insn_get_addr() argument
2392 ret = x86_ldub_code(env, s); in insn_get_addr()
2395 ret = x86_lduw_code(env, s); in insn_get_addr()
2398 ret = x86_ldl_code(env, s); in insn_get_addr()
2402 ret = x86_ldq_code(env, s); in insn_get_addr()
2411 static inline uint32_t insn_get(CPUX86State *env, DisasContext *s, MemOp ot) in insn_get() argument
2417 ret = x86_ldub_code(env, s); in insn_get()
2420 ret = x86_lduw_code(env, s); in insn_get()
2426 ret = x86_ldl_code(env, s); in insn_get()
2434 static target_long insn_get_signed(CPUX86State *env, DisasContext *s, MemOp ot) in insn_get_signed() argument
2440 ret = (int8_t) x86_ldub_code(env, s); in insn_get_signed()
2443 ret = (int16_t) x86_lduw_code(env, s); in insn_get_signed()
2446 ret = (int32_t) x86_ldl_code(env, s); in insn_get_signed()
2450 ret = x86_ldq_code(env, s); in insn_get_signed()
2468 static void gen_jcc(DisasContext *s, int b, int diff) in gen_jcc() argument
2472 gen_jcc1(s, b, l1); in gen_jcc()
2473 gen_jmp_rel_csize(s, 0, 1); in gen_jcc()
2475 gen_jmp_rel(s, s->dflag, diff, 0); in gen_jcc()
2478 static void gen_cmovcc1(CPUX86State *env, DisasContext *s, MemOp ot, int b, in gen_cmovcc1() argument
2483 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0); in gen_cmovcc1()
2485 cc = gen_prepare_cc(s, b, s->T1); in gen_cmovcc1()
2495 tcg_gen_movcond_tl(cc.cond, s->T0, cc.reg, cc.reg2, in gen_cmovcc1()
2496 s->T0, cpu_regs[reg]); in gen_cmovcc1()
2497 gen_op_mov_reg_v(s, ot, reg, s->T0); in gen_cmovcc1()
2500 static inline void gen_op_movl_T0_seg(DisasContext *s, X86Seg seg_reg) in gen_op_movl_T0_seg() argument
2502 tcg_gen_ld32u_tl(s->T0, tcg_env, in gen_op_movl_T0_seg()
2506 static inline void gen_op_movl_seg_T0_vm(DisasContext *s, X86Seg seg_reg) in gen_op_movl_seg_T0_vm() argument
2508 tcg_gen_ext16u_tl(s->T0, s->T0); in gen_op_movl_seg_T0_vm()
2509 tcg_gen_st32_tl(s->T0, tcg_env, in gen_op_movl_seg_T0_vm()
2511 tcg_gen_shli_tl(cpu_seg_base[seg_reg], s->T0, 4); in gen_op_movl_seg_T0_vm()
2516 static void gen_movl_seg_T0(DisasContext *s, X86Seg seg_reg) in gen_movl_seg_T0() argument
2518 if (PE(s) && !VM86(s)) { in gen_movl_seg_T0()
2519 tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0); in gen_movl_seg_T0()
2520 gen_helper_load_seg(tcg_env, tcg_constant_i32(seg_reg), s->tmp2_i32); in gen_movl_seg_T0()
2526 s->base.is_jmp = DISAS_EOB_INHIBIT_IRQ; in gen_movl_seg_T0()
2527 } else if (CODE32(s) && seg_reg < R_FS) { in gen_movl_seg_T0()
2528 s->base.is_jmp = DISAS_EOB_NEXT; in gen_movl_seg_T0()
2531 gen_op_movl_seg_T0_vm(s, seg_reg); in gen_movl_seg_T0()
2533 s->base.is_jmp = DISAS_EOB_INHIBIT_IRQ; in gen_movl_seg_T0()
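
gen_movl_seg_T0() splits on CPU mode: protected mode calls the load_seg helper so the descriptor is read and validated, while real and VM86 mode use gen_op_movl_seg_T0_vm(), whose whole job is the classic base = selector << 4 rule seen at line 2511. The arithmetic:

    #include <assert.h>
    #include <stdint.h>

    /* Real/VM86-mode segment base for a 16-bit selector. */
    static uint32_t real_mode_seg_base(uint16_t selector)
    {
        return (uint32_t)selector << 4;
    }

    int main(void)
    {
        assert(real_mode_seg_base(0xB800) == 0xB8000);   /* text-mode VGA segment */
        return 0;
    }
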
2538 static void gen_svm_check_intercept(DisasContext *s, uint32_t type) in gen_svm_check_intercept() argument
2541 if (likely(!GUEST(s))) { in gen_svm_check_intercept()
2547 static inline void gen_stack_update(DisasContext *s, int addend) in gen_stack_update() argument
2549 gen_op_add_reg_im(s, mo_stacksize(s), R_ESP, addend); in gen_stack_update()
2553 static void gen_push_v(DisasContext *s, TCGv val) in gen_push_v() argument
2555 MemOp d_ot = mo_pushpop(s, s->dflag); in gen_push_v()
2556 MemOp a_ot = mo_stacksize(s); in gen_push_v()
2558 TCGv new_esp = s->A0; in gen_push_v()
2560 tcg_gen_subi_tl(s->A0, cpu_regs[R_ESP], size); in gen_push_v()
2562 if (!CODE64(s)) { in gen_push_v()
2563 if (ADDSEG(s)) { in gen_push_v()
2564 new_esp = s->tmp4; in gen_push_v()
2565 tcg_gen_mov_tl(new_esp, s->A0); in gen_push_v()
2567 gen_lea_v_seg(s, a_ot, s->A0, R_SS, -1); in gen_push_v()
2570 gen_op_st_v(s, d_ot, val, s->A0); in gen_push_v()
2571 gen_op_mov_reg_v(s, a_ot, R_ESP, new_esp); in gen_push_v()
2575 static MemOp gen_pop_T0(DisasContext *s) in gen_pop_T0() argument
2577 MemOp d_ot = mo_pushpop(s, s->dflag); in gen_pop_T0()
2579 gen_lea_v_seg(s, mo_stacksize(s), cpu_regs[R_ESP], R_SS, -1); in gen_pop_T0()
2580 gen_op_ld_v(s, d_ot, s->T0, s->A0); in gen_pop_T0()
2585 static inline void gen_pop_update(DisasContext *s, MemOp ot) in gen_pop_update() argument
2587 gen_stack_update(s, 1 << ot); in gen_pop_update()
2590 static inline void gen_stack_A0(DisasContext *s) in gen_stack_A0() argument
2592 gen_lea_v_seg(s, SS32(s) ? MO_32 : MO_16, cpu_regs[R_ESP], R_SS, -1); in gen_stack_A0()
2595 static void gen_pusha(DisasContext *s) in gen_pusha() argument
2597 MemOp s_ot = SS32(s) ? MO_32 : MO_16; in gen_pusha()
2598 MemOp d_ot = s->dflag; in gen_pusha()
2603 tcg_gen_addi_tl(s->A0, cpu_regs[R_ESP], (i - 8) * size); in gen_pusha()
2604 gen_lea_v_seg(s, s_ot, s->A0, R_SS, -1); in gen_pusha()
2605 gen_op_st_v(s, d_ot, cpu_regs[7 - i], s->A0); in gen_pusha()
2608 gen_stack_update(s, -8 * size); in gen_pusha()
2611 static void gen_popa(DisasContext *s) in gen_popa() argument
2613 MemOp s_ot = SS32(s) ? MO_32 : MO_16; in gen_popa()
2614 MemOp d_ot = s->dflag; in gen_popa()
2623 tcg_gen_addi_tl(s->A0, cpu_regs[R_ESP], i * size); in gen_popa()
2624 gen_lea_v_seg(s, s_ot, s->A0, R_SS, -1); in gen_popa()
2625 gen_op_ld_v(s, d_ot, s->T0, s->A0); in gen_popa()
2626 gen_op_mov_reg_v(s, d_ot, 7 - i, s->T0); in gen_popa()
2629 gen_stack_update(s, 8 * size); in gen_popa()
2632 static void gen_enter(DisasContext *s, int esp_addend, int level) in gen_enter() argument
2634 MemOp d_ot = mo_pushpop(s, s->dflag); in gen_enter()
2635 MemOp a_ot = CODE64(s) ? MO_64 : SS32(s) ? MO_32 : MO_16; in gen_enter()
2639 tcg_gen_subi_tl(s->T1, cpu_regs[R_ESP], size); in gen_enter()
2640 gen_lea_v_seg(s, a_ot, s->T1, R_SS, -1); in gen_enter()
2641 gen_op_st_v(s, d_ot, cpu_regs[R_EBP], s->A0); in gen_enter()
2649 tcg_gen_subi_tl(s->A0, cpu_regs[R_EBP], size * i); in gen_enter()
2650 gen_lea_v_seg(s, a_ot, s->A0, R_SS, -1); in gen_enter()
2651 gen_op_ld_v(s, d_ot, s->tmp0, s->A0); in gen_enter()
2653 tcg_gen_subi_tl(s->A0, s->T1, size * i); in gen_enter()
2654 gen_lea_v_seg(s, a_ot, s->A0, R_SS, -1); in gen_enter()
2655 gen_op_st_v(s, d_ot, s->tmp0, s->A0); in gen_enter()
2659 tcg_gen_subi_tl(s->A0, s->T1, size * level); in gen_enter()
2660 gen_lea_v_seg(s, a_ot, s->A0, R_SS, -1); in gen_enter()
2661 gen_op_st_v(s, d_ot, s->T1, s->A0); in gen_enter()
2665 gen_op_mov_reg_v(s, a_ot, R_EBP, s->T1); in gen_enter()
2668 tcg_gen_subi_tl(s->T1, s->T1, esp_addend + size * level); in gen_enter()
2669 gen_op_mov_reg_v(s, a_ot, R_ESP, s->T1); in gen_enter()
2672 static void gen_leave(DisasContext *s) in gen_leave() argument
2674 MemOp d_ot = mo_pushpop(s, s->dflag); in gen_leave()
2675 MemOp a_ot = mo_stacksize(s); in gen_leave()
2677 gen_lea_v_seg(s, a_ot, cpu_regs[R_EBP], R_SS, -1); in gen_leave()
2678 gen_op_ld_v(s, d_ot, s->T0, s->A0); in gen_leave()
2680 tcg_gen_addi_tl(s->T1, cpu_regs[R_EBP], 1 << d_ot); in gen_leave()
2682 gen_op_mov_reg_v(s, d_ot, R_EBP, s->T0); in gen_leave()
2683 gen_op_mov_reg_v(s, a_ot, R_ESP, s->T1); in gen_leave()
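
gen_leave() is the two-step LEAVE: read the saved frame pointer from the slot EBP points at, point ESP just above that slot, then install the saved value as the new EBP. A minimal flat-memory model of the 32-bit case; the names and the flat stack image are assumptions:

    #include <stdint.h>
    #include <string.h>

    /* LEAVE with 32-bit operand size on a flat little-endian memory image. */
    static void leave32(uint8_t *mem, uint32_t *esp, uint32_t *ebp)
    {
        uint32_t saved_ebp;

        memcpy(&saved_ebp, mem + *ebp, sizeof(saved_ebp));  /* load [EBP] */
        *esp = *ebp + 4;                                    /* pop that slot */
        *ebp = saved_ebp;
    }
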
2689 static void gen_unknown_opcode(CPUX86State *env, DisasContext *s) in gen_unknown_opcode() argument
2691 gen_illegal_opcode(s); in gen_unknown_opcode()
2696 target_ulong pc = s->base.pc_next, end = s->pc; in gen_unknown_opcode()
2710 static void gen_interrupt(DisasContext *s, int intno) in gen_interrupt() argument
2712 gen_update_cc_op(s); in gen_interrupt()
2713 gen_update_eip_cur(s); in gen_interrupt()
2715 cur_insn_len_i32(s)); in gen_interrupt()
2716 s->base.is_jmp = DISAS_NORETURN; in gen_interrupt()
2719 static void gen_set_hflag(DisasContext *s, uint32_t mask) in gen_set_hflag() argument
2721 if ((s->flags & mask) == 0) { in gen_set_hflag()
2726 s->flags |= mask; in gen_set_hflag()
2730 static void gen_reset_hflag(DisasContext *s, uint32_t mask) in gen_reset_hflag() argument
2732 if (s->flags & mask) { in gen_reset_hflag()
2737 s->flags &= ~mask; in gen_reset_hflag()
2741 static void gen_set_eflags(DisasContext *s, target_ulong mask) in gen_set_eflags() argument
2750 static void gen_reset_eflags(DisasContext *s, target_ulong mask) in gen_reset_eflags() argument
2760 static void gen_bnd_jmp(DisasContext *s) in gen_bnd_jmp() argument
2765 if ((s->prefix & PREFIX_REPNZ) == 0 in gen_bnd_jmp()
2766 && (s->flags & HF_MPX_EN_MASK) != 0 in gen_bnd_jmp()
2767 && (s->flags & HF_MPX_IU_MASK) != 0) { in gen_bnd_jmp()
2777 do_gen_eob_worker(DisasContext *s, bool inhibit, bool recheck_tf, bool jr) in do_gen_eob_worker() argument
2779 gen_update_cc_op(s); in do_gen_eob_worker()
2782 if (inhibit && !(s->flags & HF_INHIBIT_IRQ_MASK)) { in do_gen_eob_worker()
2783 gen_set_hflag(s, HF_INHIBIT_IRQ_MASK); in do_gen_eob_worker()
2785 gen_reset_hflag(s, HF_INHIBIT_IRQ_MASK); in do_gen_eob_worker()
2788 if (s->base.tb->flags & HF_RF_MASK) { in do_gen_eob_worker()
2789 gen_reset_eflags(s, RF_MASK); in do_gen_eob_worker()
2794 } else if (s->flags & HF_TF_MASK) { in do_gen_eob_worker()
2801 s->base.is_jmp = DISAS_NORETURN; in do_gen_eob_worker()
2805 gen_eob_worker(DisasContext *s, bool inhibit, bool recheck_tf) in gen_eob_worker() argument
2807 do_gen_eob_worker(s, inhibit, recheck_tf, false); in gen_eob_worker()
2812 static void gen_eob_inhibit_irq(DisasContext *s, bool inhibit) in gen_eob_inhibit_irq() argument
2814 gen_eob_worker(s, inhibit, false); in gen_eob_inhibit_irq()
2818 static void gen_eob(DisasContext *s) in gen_eob() argument
2820 gen_eob_worker(s, false, false); in gen_eob()
2824 static void gen_jr(DisasContext *s) in gen_jr() argument
2826 do_gen_eob_worker(s, false, false, true); in gen_jr()
2830 static void gen_jmp_rel(DisasContext *s, MemOp ot, int diff, int tb_num) in gen_jmp_rel() argument
2832 bool use_goto_tb = s->jmp_opt; in gen_jmp_rel()
2834 target_ulong new_pc = s->pc + diff; in gen_jmp_rel()
2835 target_ulong new_eip = new_pc - s->cs_base; in gen_jmp_rel()
2838 if (!CODE64(s)) { in gen_jmp_rel()
2841 if (tb_cflags(s->base.tb) & CF_PCREL && CODE32(s)) { in gen_jmp_rel()
2850 gen_update_cc_op(s); in gen_jmp_rel()
2851 set_cc_op(s, CC_OP_DYNAMIC); in gen_jmp_rel()
2853 if (tb_cflags(s->base.tb) & CF_PCREL) { in gen_jmp_rel()
2854 tcg_gen_addi_tl(cpu_eip, cpu_eip, new_pc - s->pc_save); in gen_jmp_rel()
2860 if (!use_goto_tb || !is_same_page(&s->base, new_pc)) { in gen_jmp_rel()
2864 } else if (!CODE64(s)) { in gen_jmp_rel()
2865 new_pc = (uint32_t)(new_eip + s->cs_base); in gen_jmp_rel()
2868 if (use_goto_tb && translator_use_goto_tb(&s->base, new_pc)) { in gen_jmp_rel()
2871 if (!(tb_cflags(s->base.tb) & CF_PCREL)) { in gen_jmp_rel()
2874 tcg_gen_exit_tb(s->base.tb, tb_num); in gen_jmp_rel()
2875 s->base.is_jmp = DISAS_NORETURN; in gen_jmp_rel()
2877 if (!(tb_cflags(s->base.tb) & CF_PCREL)) { in gen_jmp_rel()
2880 if (s->jmp_opt) { in gen_jmp_rel()
2881 gen_jr(s); /* jump to another page */ in gen_jmp_rel()
2883 gen_eob(s); /* exit to main loop */ in gen_jmp_rel()
2889 static void gen_jmp_rel_csize(DisasContext *s, int diff, int tb_num) in gen_jmp_rel_csize() argument
2892 gen_jmp_rel(s, CODE32(s) ? MO_32 : MO_16, diff, tb_num); in gen_jmp_rel_csize()
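
gen_jmp_rel() computes the target as pc + diff and, outside 64-bit code, wraps the resulting EIP to the operand size before deciding whether a direct goto_tb is possible; gen_jmp_rel_csize() does the same using the code size rather than the operand size. A stand-alone model of the wrap:

    #include <assert.h>
    #include <stdint.h>

    typedef enum { OT16, OT32, OT64 } OpSize;

    static uint64_t jmp_target_eip(uint64_t pc, uint64_t cs_base,
                                   int64_t diff, OpSize ot, int code64)
    {
        uint64_t new_eip = pc + diff - cs_base;

        if (!code64) {
            new_eip &= (ot == OT16) ? 0xffff : 0xffffffffu;  /* wrap IP / EIP */
        }
        return new_eip;
    }

    int main(void)
    {
        /* 16-bit near jump wrapping around the top of the segment. */
        assert(jmp_target_eip(0xfff0, 0, 0x20, OT16, 0) == 0x0010);
        return 0;
    }
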
2895 static inline void gen_ldq_env_A0(DisasContext *s, int offset) in gen_ldq_env_A0() argument
2897 tcg_gen_qemu_ld_i64(s->tmp1_i64, s->A0, s->mem_index, MO_LEUQ); in gen_ldq_env_A0()
2898 tcg_gen_st_i64(s->tmp1_i64, tcg_env, offset); in gen_ldq_env_A0()
2901 static inline void gen_stq_env_A0(DisasContext *s, int offset) in gen_stq_env_A0() argument
2903 tcg_gen_ld_i64(s->tmp1_i64, tcg_env, offset); in gen_stq_env_A0()
2904 tcg_gen_qemu_st_i64(s->tmp1_i64, s->A0, s->mem_index, MO_LEUQ); in gen_stq_env_A0()
2907 static inline void gen_ldo_env_A0(DisasContext *s, int offset, bool align) in gen_ldo_env_A0() argument
2909 MemOp atom = (s->cpuid_ext_features & CPUID_EXT_AVX in gen_ldo_env_A0()
2912 int mem_index = s->mem_index; in gen_ldo_env_A0()
2915 tcg_gen_qemu_ld_i128(t, s->A0, mem_index, mop); in gen_ldo_env_A0()
2919 static inline void gen_sto_env_A0(DisasContext *s, int offset, bool align) in gen_sto_env_A0() argument
2921 MemOp atom = (s->cpuid_ext_features & CPUID_EXT_AVX in gen_sto_env_A0()
2924 int mem_index = s->mem_index; in gen_sto_env_A0()
2928 tcg_gen_qemu_st_i128(t, s->A0, mem_index, mop); in gen_sto_env_A0()
2931 static void gen_ldy_env_A0(DisasContext *s, int offset, bool align) in gen_ldy_env_A0() argument
2934 int mem_index = s->mem_index; in gen_ldy_env_A0()
2938 tcg_gen_qemu_ld_i128(t0, s->A0, mem_index, mop | (align ? MO_ALIGN_32 : 0)); in gen_ldy_env_A0()
2939 tcg_gen_addi_tl(s->tmp0, s->A0, 16); in gen_ldy_env_A0()
2940 tcg_gen_qemu_ld_i128(t1, s->tmp0, mem_index, mop); in gen_ldy_env_A0()
2946 static void gen_sty_env_A0(DisasContext *s, int offset, bool align) in gen_sty_env_A0() argument
2949 int mem_index = s->mem_index; in gen_sty_env_A0()
2953 tcg_gen_qemu_st_i128(t, s->A0, mem_index, mop | (align ? MO_ALIGN_32 : 0)); in gen_sty_env_A0()
2954 tcg_gen_addi_tl(s->tmp0, s->A0, 16); in gen_sty_env_A0()
2956 tcg_gen_qemu_st_i128(t, s->tmp0, mem_index, mop); in gen_sty_env_A0()
2963 static void gen_cmpxchg8b(DisasContext *s, CPUX86State *env, int modrm) in gen_cmpxchg8b() argument
2968 gen_lea_modrm(env, s, modrm); in gen_cmpxchg8b()
2979 if (s->prefix & PREFIX_LOCK) { in gen_cmpxchg8b()
2980 tcg_gen_atomic_cmpxchg_i64(old, s->A0, cmp, val, s->mem_index, MO_TEUQ); in gen_cmpxchg8b()
2982 tcg_gen_nonatomic_cmpxchg_i64(old, s->A0, cmp, val, in gen_cmpxchg8b()
2983 s->mem_index, MO_TEUQ); in gen_cmpxchg8b()
3004 tcg_gen_extr_i64_tl(s->T0, s->T1, old); in gen_cmpxchg8b()
3006 s->T0, cpu_regs[R_EAX]); in gen_cmpxchg8b()
3008 s->T1, cpu_regs[R_EDX]); in gen_cmpxchg8b()
3012 gen_compute_eflags(s); in gen_cmpxchg8b()
3017 static void gen_cmpxchg16b(DisasContext *s, CPUX86State *env, int modrm) in gen_cmpxchg16b() argument
3023 gen_lea_modrm(env, s, modrm); in gen_cmpxchg16b()
3031 if (s->prefix & PREFIX_LOCK) { in gen_cmpxchg16b()
3032 tcg_gen_atomic_cmpxchg_i128(val, s->A0, cmp, val, s->mem_index, mop); in gen_cmpxchg16b()
3034 tcg_gen_nonatomic_cmpxchg_i128(val, s->A0, cmp, val, s->mem_index, mop); in gen_cmpxchg16b()
3037 tcg_gen_extr_i128_i64(s->T0, s->T1, val); in gen_cmpxchg16b()
3042 tcg_gen_xor_i64(t0, s->T0, cpu_regs[R_EAX]); in gen_cmpxchg16b()
3043 tcg_gen_xor_i64(t1, s->T1, cpu_regs[R_EDX]); in gen_cmpxchg16b()
3047 gen_compute_eflags(s); in gen_cmpxchg16b()
3056 tcg_gen_mov_i64(cpu_regs[R_EAX], s->T0); in gen_cmpxchg16b()
3057 tcg_gen_mov_i64(cpu_regs[R_EDX], s->T1); in gen_cmpxchg16b()
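
Both gen_cmpxchg8b() and gen_cmpxchg16b() follow the architectural rule: compare EDX:EAX (RDX:RAX for the 16-byte form) with the memory operand; on a match write ECX:EBX (RCX:RBX) to memory and set ZF, otherwise load the memory value back into EDX:EAX and clear ZF. A stand-alone model of CMPXCHG8B, ignoring the LOCK/atomicity handling done above:

    #include <stdbool.h>
    #include <stdint.h>

    /* CMPXCHG8B m64: returns the resulting ZF. */
    static bool cmpxchg8b(uint64_t *mem, uint32_t *eax, uint32_t *edx,
                          uint32_t ebx, uint32_t ecx)
    {
        uint64_t expected = ((uint64_t)*edx << 32) | *eax;

        if (*mem == expected) {
            *mem = ((uint64_t)ecx << 32) | ebx;
            return true;                       /* ZF = 1 */
        }
        *eax = (uint32_t)*mem;
        *edx = (uint32_t)(*mem >> 32);
        return false;                          /* ZF = 0 */
    }
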
3063 static bool disas_insn(DisasContext *s, CPUState *cpu) in disas_insn() argument
3070 bool orig_cc_op_dirty = s->cc_op_dirty; in disas_insn()
3071 CCOp orig_cc_op = s->cc_op; in disas_insn()
3072 target_ulong orig_pc_save = s->pc_save; in disas_insn()
3074 s->pc = s->base.pc_next; in disas_insn()
3075 s->override = -1; in disas_insn()
3077 s->rex_r = 0; in disas_insn()
3078 s->rex_x = 0; in disas_insn()
3079 s->rex_b = 0; in disas_insn()
3081 s->rip_offset = 0; /* for relative ip address */ in disas_insn()
3082 s->vex_l = 0; in disas_insn()
3083 s->vex_v = 0; in disas_insn()
3084 s->vex_w = false; in disas_insn()
3085 switch (sigsetjmp(s->jmpbuf, 0)) { in disas_insn()
3089 gen_exception_gpf(s); in disas_insn()
3093 s->pc = s->base.pc_next; in disas_insn()
3099 s->cc_op_dirty = orig_cc_op_dirty; in disas_insn()
3100 s->cc_op = orig_cc_op; in disas_insn()
3101 s->pc_save = orig_pc_save; in disas_insn()
3103 s->base.num_insns--; in disas_insn()
3104 tcg_remove_ops_after(s->prev_insn_end); in disas_insn()
3105 s->base.is_jmp = DISAS_TOO_MANY; in disas_insn()
3114 s->prefix = prefixes; in disas_insn()
3115 b = x86_ldub_code(env, s); in disas_insn()
3121 b = x86_ldub_code(env, s) + 0x100; in disas_insn()
3135 s->override = R_CS; in disas_insn()
3138 s->override = R_SS; in disas_insn()
3141 s->override = R_DS; in disas_insn()
3144 s->override = R_ES; in disas_insn()
3147 s->override = R_FS; in disas_insn()
3150 s->override = R_GS; in disas_insn()
3160 if (CODE64(s)) { in disas_insn()
3163 s->vex_w = (b >> 3) & 1; in disas_insn()
3164 s->rex_r = (b & 0x4) << 1; in disas_insn()
3165 s->rex_x = (b & 0x2) << 2; in disas_insn()
3166 s->rex_b = (b & 0x1) << 3; in disas_insn()
3173 if (CODE32(s) && !VM86(s)) { in disas_insn()
3174 int vex2 = x86_ldub_code(env, s); in disas_insn()
3175 s->pc--; /* rewind the advance_pc() x86_ldub_code() did */ in disas_insn()
3177 if (!CODE64(s) && (vex2 & 0xc0) != 0xc0) { in disas_insn()
3182 disas_insn_new(s, cpu, b); in disas_insn()
3183 return s->pc; in disas_insn()
3189 if (CODE64(s)) { in disas_insn()
3193 dflag = (REX_W(s) ? MO_64 : prefixes & PREFIX_DATA ? MO_16 : MO_32); in disas_insn()
3198 if (CODE32(s) ^ ((prefixes & PREFIX_DATA) != 0)) { in disas_insn()
3204 if (CODE32(s) ^ ((prefixes & PREFIX_ADR) != 0)) { in disas_insn()
3211 s->prefix = prefixes; in disas_insn()
3212 s->aflag = aflag; in disas_insn()
3213 s->dflag = dflag; in disas_insn()
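
Lines 3189-3213 resolve the effective operand and address sizes from the collected prefixes: in 64-bit code REX.W forces 64-bit operands and 0x66 otherwise selects 16-bit, while in legacy modes 0x66 and 0x67 simply toggle the code-segment default. A stand-alone sketch of that resolution; the 64-bit address-size branch is not shown in the listing above and is filled in from the architectural behaviour of the 0x67 prefix:

    #include <stdbool.h>

    typedef enum { MO16, MO32, MO64 } Size;

    static Size effective_dflag(bool code64, bool rex_w, bool prefix_data, bool code32)
    {
        if (code64) {
            return rex_w ? MO64 : (prefix_data ? MO16 : MO32);
        }
        return (code32 ^ prefix_data) ? MO32 : MO16;   /* 0x66 flips the CS.D default */
    }

    static Size effective_aflag(bool code64, bool prefix_adr, bool code32)
    {
        if (code64) {
            return prefix_adr ? MO32 : MO64;           /* 0x67 selects 32-bit addressing */
        }
        return (code32 ^ prefix_adr) ? MO32 : MO16;
    }
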
3236 modrm = x86_ldub_code(env, s); in disas_insn()
3237 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
3239 rm = (modrm & 7) | REX_B(s); in disas_insn()
3241 gen_lea_modrm(env, s, modrm); in disas_insn()
3246 set_cc_op(s, CC_OP_CLR); in disas_insn()
3247 tcg_gen_movi_tl(s->T0, 0); in disas_insn()
3248 gen_op_mov_reg_v(s, ot, reg, s->T0); in disas_insn()
3253 gen_op_mov_v_reg(s, ot, s->T1, reg); in disas_insn()
3254 gen_op(s, op, ot, opreg); in disas_insn()
3257 modrm = x86_ldub_code(env, s); in disas_insn()
3259 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
3260 rm = (modrm & 7) | REX_B(s); in disas_insn()
3262 gen_lea_modrm(env, s, modrm); in disas_insn()
3263 gen_op_ld_v(s, ot, s->T1, s->A0); in disas_insn()
3267 gen_op_mov_v_reg(s, ot, s->T1, rm); in disas_insn()
3269 gen_op(s, op, ot, reg); in disas_insn()
3272 val = insn_get(env, s, ot); in disas_insn()
3273 tcg_gen_movi_tl(s->T1, val); in disas_insn()
3274 gen_op(s, op, ot, OR_EAX); in disas_insn()
3281 if (CODE64(s)) in disas_insn()
3290 modrm = x86_ldub_code(env, s); in disas_insn()
3292 rm = (modrm & 7) | REX_B(s); in disas_insn()
3297 s->rip_offset = 1; in disas_insn()
3299 s->rip_offset = insn_const_size(ot); in disas_insn()
3300 gen_lea_modrm(env, s, modrm); in disas_insn()
3311 val = insn_get(env, s, ot); in disas_insn()
3314 val = (int8_t)insn_get(env, s, MO_8); in disas_insn()
3317 tcg_gen_movi_tl(s->T1, val); in disas_insn()
3318 gen_op(s, op, ot, opreg); in disas_insn()
3326 gen_inc(s, ot, OR_EAX + (b & 7), 1); in disas_insn()
3330 gen_inc(s, ot, OR_EAX + (b & 7), -1); in disas_insn()
3336 modrm = x86_ldub_code(env, s); in disas_insn()
3338 rm = (modrm & 7) | REX_B(s); in disas_insn()
3342 s->rip_offset = insn_const_size(ot); in disas_insn()
3344 gen_lea_modrm(env, s, modrm); in disas_insn()
3346 if (!(s->prefix & PREFIX_LOCK) in disas_insn()
3348 gen_op_ld_v(s, ot, s->T0, s->A0); in disas_insn()
3351 gen_op_mov_v_reg(s, ot, s->T0, rm); in disas_insn()
3356 val = insn_get(env, s, ot); in disas_insn()
3357 tcg_gen_movi_tl(s->T1, val); in disas_insn()
3358 gen_op_testl_T0_T1_cc(s); in disas_insn()
3359 set_cc_op(s, CC_OP_LOGICB + ot); in disas_insn()
3362 if (s->prefix & PREFIX_LOCK) { in disas_insn()
3366 tcg_gen_movi_tl(s->T0, ~0); in disas_insn()
3367 tcg_gen_atomic_xor_fetch_tl(s->T0, s->A0, s->T0, in disas_insn()
3368 s->mem_index, ot | MO_LE); in disas_insn()
3370 tcg_gen_not_tl(s->T0, s->T0); in disas_insn()
3372 gen_op_st_v(s, ot, s->T0, s->A0); in disas_insn()
3374 gen_op_mov_reg_v(s, ot, rm, s->T0); in disas_insn()
3379 if (s->prefix & PREFIX_LOCK) { in disas_insn()
3386 a0 = s->A0; in disas_insn()
3387 t0 = s->T0; in disas_insn()
3396 s->mem_index, ot | MO_LE); in disas_insn()
3399 tcg_gen_neg_tl(s->T0, t0); in disas_insn()
3401 tcg_gen_neg_tl(s->T0, s->T0); in disas_insn()
3403 gen_op_st_v(s, ot, s->T0, s->A0); in disas_insn()
3405 gen_op_mov_reg_v(s, ot, rm, s->T0); in disas_insn()
3408 gen_op_update_neg_cc(s); in disas_insn()
3409 set_cc_op(s, CC_OP_SUBB + ot); in disas_insn()
3414 gen_op_mov_v_reg(s, MO_8, s->T1, R_EAX); in disas_insn()
3415 tcg_gen_ext8u_tl(s->T0, s->T0); in disas_insn()
3416 tcg_gen_ext8u_tl(s->T1, s->T1); in disas_insn()
3418 tcg_gen_mul_tl(s->T0, s->T0, s->T1); in disas_insn()
3419 gen_op_mov_reg_v(s, MO_16, R_EAX, s->T0); in disas_insn()
3420 tcg_gen_mov_tl(cpu_cc_dst, s->T0); in disas_insn()
3421 tcg_gen_andi_tl(cpu_cc_src, s->T0, 0xff00); in disas_insn()
3422 set_cc_op(s, CC_OP_MULB); in disas_insn()
3425 gen_op_mov_v_reg(s, MO_16, s->T1, R_EAX); in disas_insn()
3426 tcg_gen_ext16u_tl(s->T0, s->T0); in disas_insn()
3427 tcg_gen_ext16u_tl(s->T1, s->T1); in disas_insn()
3429 tcg_gen_mul_tl(s->T0, s->T0, s->T1); in disas_insn()
3430 gen_op_mov_reg_v(s, MO_16, R_EAX, s->T0); in disas_insn()
3431 tcg_gen_mov_tl(cpu_cc_dst, s->T0); in disas_insn()
3432 tcg_gen_shri_tl(s->T0, s->T0, 16); in disas_insn()
3433 gen_op_mov_reg_v(s, MO_16, R_EDX, s->T0); in disas_insn()
3434 tcg_gen_mov_tl(cpu_cc_src, s->T0); in disas_insn()
3435 set_cc_op(s, CC_OP_MULW); in disas_insn()
3439 tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0); in disas_insn()
3440 tcg_gen_trunc_tl_i32(s->tmp3_i32, cpu_regs[R_EAX]); in disas_insn()
3441 tcg_gen_mulu2_i32(s->tmp2_i32, s->tmp3_i32, in disas_insn()
3442 s->tmp2_i32, s->tmp3_i32); in disas_insn()
3443 tcg_gen_extu_i32_tl(cpu_regs[R_EAX], s->tmp2_i32); in disas_insn()
3444 tcg_gen_extu_i32_tl(cpu_regs[R_EDX], s->tmp3_i32); in disas_insn()
3447 set_cc_op(s, CC_OP_MULL); in disas_insn()
3452 s->T0, cpu_regs[R_EAX]); in disas_insn()
3455 set_cc_op(s, CC_OP_MULQ); in disas_insn()
3463 gen_op_mov_v_reg(s, MO_8, s->T1, R_EAX); in disas_insn()
3464 tcg_gen_ext8s_tl(s->T0, s->T0); in disas_insn()
3465 tcg_gen_ext8s_tl(s->T1, s->T1); in disas_insn()
3467 tcg_gen_mul_tl(s->T0, s->T0, s->T1); in disas_insn()
3468 gen_op_mov_reg_v(s, MO_16, R_EAX, s->T0); in disas_insn()
3469 tcg_gen_mov_tl(cpu_cc_dst, s->T0); in disas_insn()
3470 tcg_gen_ext8s_tl(s->tmp0, s->T0); in disas_insn()
3471 tcg_gen_sub_tl(cpu_cc_src, s->T0, s->tmp0); in disas_insn()
3472 set_cc_op(s, CC_OP_MULB); in disas_insn()
3475 gen_op_mov_v_reg(s, MO_16, s->T1, R_EAX); in disas_insn()
3476 tcg_gen_ext16s_tl(s->T0, s->T0); in disas_insn()
3477 tcg_gen_ext16s_tl(s->T1, s->T1); in disas_insn()
3479 tcg_gen_mul_tl(s->T0, s->T0, s->T1); in disas_insn()
3480 gen_op_mov_reg_v(s, MO_16, R_EAX, s->T0); in disas_insn()
3481 tcg_gen_mov_tl(cpu_cc_dst, s->T0); in disas_insn()
3482 tcg_gen_ext16s_tl(s->tmp0, s->T0); in disas_insn()
3483 tcg_gen_sub_tl(cpu_cc_src, s->T0, s->tmp0); in disas_insn()
3484 tcg_gen_shri_tl(s->T0, s->T0, 16); in disas_insn()
3485 gen_op_mov_reg_v(s, MO_16, R_EDX, s->T0); in disas_insn()
3486 set_cc_op(s, CC_OP_MULW); in disas_insn()
3490 tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0); in disas_insn()
3491 tcg_gen_trunc_tl_i32(s->tmp3_i32, cpu_regs[R_EAX]); in disas_insn()
3492 tcg_gen_muls2_i32(s->tmp2_i32, s->tmp3_i32, in disas_insn()
3493 s->tmp2_i32, s->tmp3_i32); in disas_insn()
3494 tcg_gen_extu_i32_tl(cpu_regs[R_EAX], s->tmp2_i32); in disas_insn()
3495 tcg_gen_extu_i32_tl(cpu_regs[R_EDX], s->tmp3_i32); in disas_insn()
3496 tcg_gen_sari_i32(s->tmp2_i32, s->tmp2_i32, 31); in disas_insn()
3498 tcg_gen_sub_i32(s->tmp2_i32, s->tmp2_i32, s->tmp3_i32); in disas_insn()
3499 tcg_gen_extu_i32_tl(cpu_cc_src, s->tmp2_i32); in disas_insn()
3500 set_cc_op(s, CC_OP_MULL); in disas_insn()
3505 s->T0, cpu_regs[R_EAX]); in disas_insn()
3509 set_cc_op(s, CC_OP_MULQ); in disas_insn()
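The signed one-operand IMUL cases above use a different flag trick: the product is truncated to the destination width, sign-extended back, and subtracted from the full result, so cpu_cc_src is non-zero exactly when the signed product overflowed. A minimal plain-C sketch of that test (illustrative name, not QEMU code):

#include <stdint.h>
#include <stdbool.h>

/* Sketch of the IMUL overflow test: the product fits the destination width
 * iff full == sext(truncate(full)), i.e. full - back == 0.  Non-zero means
 * CF = OF = 1, matching the ext*s / sub pairs above. */
static bool imul8_overflows(int8_t a, int8_t b)
{
    int32_t full = (int32_t)a * (int32_t)b;  /* widened signed product */
    int32_t back = (int8_t)full;             /* truncate, then sign-extend */
    return (full - back) != 0;
}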
3517 gen_helper_divb_AL(tcg_env, s->T0); in disas_insn()
3520 gen_helper_divw_AX(tcg_env, s->T0); in disas_insn()
3524 gen_helper_divl_EAX(tcg_env, s->T0); in disas_insn()
3528 gen_helper_divq_EAX(tcg_env, s->T0); in disas_insn()
3536 gen_helper_idivb_AL(tcg_env, s->T0); in disas_insn()
3539 gen_helper_idivw_AX(tcg_env, s->T0); in disas_insn()
3543 gen_helper_idivl_EAX(tcg_env, s->T0); in disas_insn()
3547 gen_helper_idivq_EAX(tcg_env, s->T0); in disas_insn()
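The DIV/IDIV matches above only forward to per-width helpers, which also raise #DE. Purely as an illustration of the rule those helpers enforce (plain C, hypothetical div16_model name, with a bool standing in for the #DE exception):

#include <stdint.h>
#include <stdbool.h>

/* Sketch of "DIV r/m16": DX:AX / src -> quotient in AX, remainder in DX.
 * #DE is raised for src == 0 or a quotient that does not fit in 16 bits. */
static bool div16_model(uint16_t *ax, uint16_t *dx, uint16_t src)
{
    if (src == 0) {
        return false;                        /* divide by zero */
    }
    uint32_t dividend = ((uint32_t)*dx << 16) | *ax;
    uint32_t q = dividend / src;
    if (q > 0xffff) {
        return false;                        /* quotient overflow */
    }
    *ax = (uint16_t)q;
    *dx = (uint16_t)(dividend % src);
    return true;                             /* no fault */
}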
3561 modrm = x86_ldub_code(env, s); in disas_insn()
3563 rm = (modrm & 7) | REX_B(s); in disas_insn()
3568 if (CODE64(s)) { in disas_insn()
3573 ot = dflag != MO_16 ? MO_32 + REX_W(s) : MO_16; in disas_insn()
3576 ot = mo_pushpop(s, dflag); in disas_insn()
3580 gen_lea_modrm(env, s, modrm); in disas_insn()
3582 gen_op_ld_v(s, ot, s->T0, s->A0); in disas_insn()
3584 gen_op_mov_v_reg(s, ot, s->T0, rm); in disas_insn()
3593 gen_inc(s, ot, opreg, 1); in disas_insn()
3600 gen_inc(s, ot, opreg, -1); in disas_insn()
3605 tcg_gen_ext16u_tl(s->T0, s->T0); in disas_insn()
3607 gen_push_v(s, eip_next_tl(s)); in disas_insn()
3608 gen_op_jmp_v(s, s->T0); in disas_insn()
3609 gen_bnd_jmp(s); in disas_insn()
3610 s->base.is_jmp = DISAS_JUMP; in disas_insn()
3616 gen_op_ld_v(s, ot, s->T1, s->A0); in disas_insn()
3617 gen_add_A0_im(s, 1 << ot); in disas_insn()
3618 gen_op_ld_v(s, MO_16, s->T0, s->A0); in disas_insn()
3620 if (PE(s) && !VM86(s)) { in disas_insn()
3621 tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0); in disas_insn()
3622 gen_helper_lcall_protected(tcg_env, s->tmp2_i32, s->T1, in disas_insn()
3624 eip_next_tl(s)); in disas_insn()
3626 tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0); in disas_insn()
3627 tcg_gen_trunc_tl_i32(s->tmp3_i32, s->T1); in disas_insn()
3628 gen_helper_lcall_real(tcg_env, s->tmp2_i32, s->tmp3_i32, in disas_insn()
3630 eip_next_i32(s)); in disas_insn()
3632 s->base.is_jmp = DISAS_JUMP; in disas_insn()
3636 tcg_gen_ext16u_tl(s->T0, s->T0); in disas_insn()
3638 gen_op_jmp_v(s, s->T0); in disas_insn()
3639 gen_bnd_jmp(s); in disas_insn()
3640 s->base.is_jmp = DISAS_JUMP; in disas_insn()
3646 gen_op_ld_v(s, ot, s->T1, s->A0); in disas_insn()
3647 gen_add_A0_im(s, 1 << ot); in disas_insn()
3648 gen_op_ld_v(s, MO_16, s->T0, s->A0); in disas_insn()
3650 if (PE(s) && !VM86(s)) { in disas_insn()
3651 tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0); in disas_insn()
3652 gen_helper_ljmp_protected(tcg_env, s->tmp2_i32, s->T1, in disas_insn()
3653 eip_next_tl(s)); in disas_insn()
3655 gen_op_movl_seg_T0_vm(s, R_CS); in disas_insn()
3656 gen_op_jmp_v(s, s->T1); in disas_insn()
3658 s->base.is_jmp = DISAS_JUMP; in disas_insn()
3661 gen_push_v(s, s->T0); in disas_insn()
3672 modrm = x86_ldub_code(env, s); in disas_insn()
3673 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
3675 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0); in disas_insn()
3676 gen_op_mov_v_reg(s, ot, s->T1, reg); in disas_insn()
3677 gen_op_testl_T0_T1_cc(s); in disas_insn()
3678 set_cc_op(s, CC_OP_LOGICB + ot); in disas_insn()
3684 val = insn_get(env, s, ot); in disas_insn()
3686 gen_op_mov_v_reg(s, ot, s->T0, OR_EAX); in disas_insn()
3687 tcg_gen_movi_tl(s->T1, val); in disas_insn()
3688 gen_op_testl_T0_T1_cc(s); in disas_insn()
3689 set_cc_op(s, CC_OP_LOGICB + ot); in disas_insn()
3696 gen_op_mov_v_reg(s, MO_32, s->T0, R_EAX); in disas_insn()
3697 tcg_gen_ext32s_tl(s->T0, s->T0); in disas_insn()
3698 gen_op_mov_reg_v(s, MO_64, R_EAX, s->T0); in disas_insn()
3702 gen_op_mov_v_reg(s, MO_16, s->T0, R_EAX); in disas_insn()
3703 tcg_gen_ext16s_tl(s->T0, s->T0); in disas_insn()
3704 gen_op_mov_reg_v(s, MO_32, R_EAX, s->T0); in disas_insn()
3707 gen_op_mov_v_reg(s, MO_8, s->T0, R_EAX); in disas_insn()
3708 tcg_gen_ext8s_tl(s->T0, s->T0); in disas_insn()
3709 gen_op_mov_reg_v(s, MO_16, R_EAX, s->T0); in disas_insn()
3719 gen_op_mov_v_reg(s, MO_64, s->T0, R_EAX); in disas_insn()
3720 tcg_gen_sari_tl(s->T0, s->T0, 63); in disas_insn()
3721 gen_op_mov_reg_v(s, MO_64, R_EDX, s->T0); in disas_insn()
3725 gen_op_mov_v_reg(s, MO_32, s->T0, R_EAX); in disas_insn()
3726 tcg_gen_ext32s_tl(s->T0, s->T0); in disas_insn()
3727 tcg_gen_sari_tl(s->T0, s->T0, 31); in disas_insn()
3728 gen_op_mov_reg_v(s, MO_32, R_EDX, s->T0); in disas_insn()
3731 gen_op_mov_v_reg(s, MO_16, s->T0, R_EAX); in disas_insn()
3732 tcg_gen_ext16s_tl(s->T0, s->T0); in disas_insn()
3733 tcg_gen_sari_tl(s->T0, s->T0, 15); in disas_insn()
3734 gen_op_mov_reg_v(s, MO_16, R_EDX, s->T0); in disas_insn()
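The CBW/CWDE/CDQE and CWD/CDQ/CQO matches above are pure sign extension; the arithmetic shift by 15, 31 or 63 is just a sign fill of the high register. A hedged plain-C rendering of the 32-bit pair (the model names are mine):

#include <stdint.h>

/* CWDE: EAX = sign-extended AX.  CDQ: EDX = every bit a copy of EAX's
 * sign bit, i.e. the arithmetic shift right by 31 generated above. */
static uint32_t cwde_model(uint16_t ax)
{
    return (uint32_t)(int32_t)(int16_t)ax;
}

static uint32_t cdq_model(uint32_t eax)
{
    return (uint32_t)((int32_t)eax >> 31);   /* 0 or 0xffffffff */
}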
3744 modrm = x86_ldub_code(env, s); in disas_insn()
3745 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
3747 s->rip_offset = insn_const_size(ot); in disas_insn()
3749 s->rip_offset = 1; in disas_insn()
3750 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0); in disas_insn()
3752 val = insn_get(env, s, ot); in disas_insn()
3753 tcg_gen_movi_tl(s->T1, val); in disas_insn()
3755 val = (int8_t)insn_get(env, s, MO_8); in disas_insn()
3756 tcg_gen_movi_tl(s->T1, val); in disas_insn()
3758 gen_op_mov_v_reg(s, ot, s->T1, reg); in disas_insn()
3763 tcg_gen_muls2_i64(cpu_regs[reg], s->T1, s->T0, s->T1); in disas_insn()
3766 tcg_gen_sub_tl(cpu_cc_src, cpu_cc_src, s->T1); in disas_insn()
3770 tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0); in disas_insn()
3771 tcg_gen_trunc_tl_i32(s->tmp3_i32, s->T1); in disas_insn()
3772 tcg_gen_muls2_i32(s->tmp2_i32, s->tmp3_i32, in disas_insn()
3773 s->tmp2_i32, s->tmp3_i32); in disas_insn()
3774 tcg_gen_extu_i32_tl(cpu_regs[reg], s->tmp2_i32); in disas_insn()
3775 tcg_gen_sari_i32(s->tmp2_i32, s->tmp2_i32, 31); in disas_insn()
3777 tcg_gen_sub_i32(s->tmp2_i32, s->tmp2_i32, s->tmp3_i32); in disas_insn()
3778 tcg_gen_extu_i32_tl(cpu_cc_src, s->tmp2_i32); in disas_insn()
3781 tcg_gen_ext16s_tl(s->T0, s->T0); in disas_insn()
3782 tcg_gen_ext16s_tl(s->T1, s->T1); in disas_insn()
3784 tcg_gen_mul_tl(s->T0, s->T0, s->T1); in disas_insn()
3785 tcg_gen_mov_tl(cpu_cc_dst, s->T0); in disas_insn()
3786 tcg_gen_ext16s_tl(s->tmp0, s->T0); in disas_insn()
3787 tcg_gen_sub_tl(cpu_cc_src, s->T0, s->tmp0); in disas_insn()
3788 gen_op_mov_reg_v(s, ot, reg, s->T0); in disas_insn()
3791 set_cc_op(s, CC_OP_MULB + ot); in disas_insn()
3796 modrm = x86_ldub_code(env, s); in disas_insn()
3797 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
3799 gen_op_mov_v_reg(s, ot, s->T0, reg); in disas_insn()
3801 rm = (modrm & 7) | REX_B(s); in disas_insn()
3802 gen_op_mov_v_reg(s, ot, s->T1, rm); in disas_insn()
3803 tcg_gen_add_tl(s->T0, s->T0, s->T1); in disas_insn()
3804 gen_op_mov_reg_v(s, ot, reg, s->T1); in disas_insn()
3805 gen_op_mov_reg_v(s, ot, rm, s->T0); in disas_insn()
3807 gen_lea_modrm(env, s, modrm); in disas_insn()
3808 if (s->prefix & PREFIX_LOCK) { in disas_insn()
3809 tcg_gen_atomic_fetch_add_tl(s->T1, s->A0, s->T0, in disas_insn()
3810 s->mem_index, ot | MO_LE); in disas_insn()
3811 tcg_gen_add_tl(s->T0, s->T0, s->T1); in disas_insn()
3813 gen_op_ld_v(s, ot, s->T1, s->A0); in disas_insn()
3814 tcg_gen_add_tl(s->T0, s->T0, s->T1); in disas_insn()
3815 gen_op_st_v(s, ot, s->T0, s->A0); in disas_insn()
3817 gen_op_mov_reg_v(s, ot, reg, s->T1); in disas_insn()
3819 gen_op_update2_cc(s); in disas_insn()
3820 set_cc_op(s, CC_OP_ADDB + ot); in disas_insn()
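The XADD matches above swap and add: the destination takes the sum and the source register takes the old destination, atomically when LOCK is present. A small plain-C sketch of the data movement (xadd32_model is an illustrative name):

#include <stdint.h>

/* Sketch of XADD r/m32, r32. */
static void xadd32_model(uint32_t *dest, uint32_t *src)
{
    uint32_t old = *dest;
    *dest = old + *src;   /* destination = old destination + source */
    *src  = old;          /* source register = old destination */
}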
3828 modrm = x86_ldub_code(env, s); in disas_insn()
3829 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
3834 gen_op_mov_v_reg(s, ot, newv, reg); in disas_insn()
3837 if (s->prefix & PREFIX_LOCK) { in disas_insn()
3841 gen_lea_modrm(env, s, modrm); in disas_insn()
3842 tcg_gen_atomic_cmpxchg_tl(oldv, s->A0, cmpv, newv, in disas_insn()
3843 s->mem_index, ot | MO_LE); in disas_insn()
3846 rm = (modrm & 7) | REX_B(s); in disas_insn()
3847 gen_op_mov_v_reg(s, ot, oldv, rm); in disas_insn()
3859 dest = gen_op_deposit_reg_v(s, ot, rm, newv, newv); in disas_insn()
3862 gen_lea_modrm(env, s, modrm); in disas_insn()
3863 gen_op_ld_v(s, ot, oldv, s->A0); in disas_insn()
3872 gen_op_st_v(s, ot, newv, s->A0); in disas_insn()
3879 dest = gen_op_deposit_reg_v(s, ot, R_EAX, newv, oldv); in disas_insn()
3882 tcg_gen_mov_tl(s->cc_srcT, cmpv); in disas_insn()
3884 set_cc_op(s, CC_OP_SUBB + ot); in disas_insn()
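For CMPXCHG above, the accumulator is compared with the destination; on a match the source is stored, otherwise the accumulator learns the old value, and the flags come from the accumulator-minus-old subtraction (hence CC_OP_SUBB + ot). A plain-C sketch under those assumptions (illustrative name, ZF returned as a bool):

#include <stdint.h>
#include <stdbool.h>

/* Sketch of CMPXCHG r/m32, r32. */
static bool cmpxchg32_model(uint32_t *dest, uint32_t *eax, uint32_t src)
{
    uint32_t old = *dest;
    if (*eax == old) {
        *dest = src;      /* equal: write the new value */
        return true;      /* ZF = 1 */
    }
    *eax = old;           /* not equal: accumulator gets the old value */
    return false;         /* ZF = 0 */
}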
3888 modrm = x86_ldub_code(env, s); in disas_insn()
3897 if (!(s->cpuid_ext_features & CPUID_EXT_CX16)) { in disas_insn()
3900 gen_cmpxchg16b(s, env, modrm); in disas_insn()
3904 if (!(s->cpuid_features & CPUID_CX8)) { in disas_insn()
3907 gen_cmpxchg8b(s, env, modrm); in disas_insn()
3912 (s->prefix & (PREFIX_LOCK | PREFIX_REPNZ))) { in disas_insn()
3915 if (s->prefix & PREFIX_REPZ) { in disas_insn()
3916 if (!(s->cpuid_ext_features & CPUID_7_0_ECX_RDPID)) { in disas_insn()
3919 gen_helper_rdpid(s->T0, tcg_env); in disas_insn()
3920 rm = (modrm & 7) | REX_B(s); in disas_insn()
3921 gen_op_mov_reg_v(s, dflag, rm, s->T0); in disas_insn()
3924 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_RDSEED)) { in disas_insn()
3932 (s->prefix & (PREFIX_LOCK | PREFIX_REPZ | PREFIX_REPNZ)) || in disas_insn()
3933 !(s->cpuid_ext_features & CPUID_EXT_RDRAND)) { in disas_insn()
3937 translator_io_start(&s->base); in disas_insn()
3938 gen_helper_rdrand(s->T0, tcg_env); in disas_insn()
3939 rm = (modrm & 7) | REX_B(s); in disas_insn()
3940 gen_op_mov_reg_v(s, dflag, rm, s->T0); in disas_insn()
3941 set_cc_op(s, CC_OP_EFLAGS); in disas_insn()
3952 gen_op_mov_v_reg(s, MO_32, s->T0, (b & 7) | REX_B(s)); in disas_insn()
3953 gen_push_v(s, s->T0); in disas_insn()
3956 ot = gen_pop_T0(s); in disas_insn()
3958 gen_pop_update(s, ot); in disas_insn()
3959 gen_op_mov_reg_v(s, ot, (b & 7) | REX_B(s), s->T0); in disas_insn()
3962 if (CODE64(s)) in disas_insn()
3964 gen_pusha(s); in disas_insn()
3967 if (CODE64(s)) in disas_insn()
3969 gen_popa(s); in disas_insn()
3973 ot = mo_pushpop(s, dflag); in disas_insn()
3975 val = insn_get(env, s, ot); in disas_insn()
3977 val = (int8_t)insn_get(env, s, MO_8); in disas_insn()
3978 tcg_gen_movi_tl(s->T0, val); in disas_insn()
3979 gen_push_v(s, s->T0); in disas_insn()
3982 modrm = x86_ldub_code(env, s); in disas_insn()
3984 ot = gen_pop_T0(s); in disas_insn()
3987 gen_pop_update(s, ot); in disas_insn()
3988 rm = (modrm & 7) | REX_B(s); in disas_insn()
3989 gen_op_mov_reg_v(s, ot, rm, s->T0); in disas_insn()
3992 s->popl_esp_hack = 1 << ot; in disas_insn()
3993 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 1); in disas_insn()
3994 s->popl_esp_hack = 0; in disas_insn()
3995 gen_pop_update(s, ot); in disas_insn()
4001 val = x86_lduw_code(env, s); in disas_insn()
4002 level = x86_ldub_code(env, s); in disas_insn()
4003 gen_enter(s, val, level); in disas_insn()
4007 gen_leave(s); in disas_insn()
4013 if (CODE64(s)) in disas_insn()
4015 gen_op_movl_T0_seg(s, b >> 3); in disas_insn()
4016 gen_push_v(s, s->T0); in disas_insn()
4020 gen_op_movl_T0_seg(s, (b >> 3) & 7); in disas_insn()
4021 gen_push_v(s, s->T0); in disas_insn()
4026 if (CODE64(s)) in disas_insn()
4029 ot = gen_pop_T0(s); in disas_insn()
4030 gen_movl_seg_T0(s, reg); in disas_insn()
4031 gen_pop_update(s, ot); in disas_insn()
4035 ot = gen_pop_T0(s); in disas_insn()
4036 gen_movl_seg_T0(s, (b >> 3) & 7); in disas_insn()
4037 gen_pop_update(s, ot); in disas_insn()
4045 modrm = x86_ldub_code(env, s); in disas_insn()
4046 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
4049 gen_ldst_modrm(env, s, modrm, ot, reg, 1); in disas_insn()
4054 modrm = x86_ldub_code(env, s); in disas_insn()
4057 s->rip_offset = insn_const_size(ot); in disas_insn()
4058 gen_lea_modrm(env, s, modrm); in disas_insn()
4060 val = insn_get(env, s, ot); in disas_insn()
4061 tcg_gen_movi_tl(s->T0, val); in disas_insn()
4063 gen_op_st_v(s, ot, s->T0, s->A0); in disas_insn()
4065 gen_op_mov_reg_v(s, ot, (modrm & 7) | REX_B(s), s->T0); in disas_insn()
4071 modrm = x86_ldub_code(env, s); in disas_insn()
4072 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
4074 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0); in disas_insn()
4075 gen_op_mov_reg_v(s, ot, reg, s->T0); in disas_insn()
4078 modrm = x86_ldub_code(env, s); in disas_insn()
4082 gen_ldst_modrm(env, s, modrm, MO_16, OR_TMP0, 0); in disas_insn()
4083 gen_movl_seg_T0(s, reg); in disas_insn()
4086 modrm = x86_ldub_code(env, s); in disas_insn()
4091 gen_op_movl_T0_seg(s, reg); in disas_insn()
4093 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 1); in disas_insn()
4111 modrm = x86_ldub_code(env, s); in disas_insn()
4112 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
4114 rm = (modrm & 7) | REX_B(s); in disas_insn()
4117 if (s_ot == MO_SB && byte_reg_is_xH(s, rm)) { in disas_insn()
4118 tcg_gen_sextract_tl(s->T0, cpu_regs[rm - 4], 8, 8); in disas_insn()
4120 gen_op_mov_v_reg(s, ot, s->T0, rm); in disas_insn()
4123 tcg_gen_ext8u_tl(s->T0, s->T0); in disas_insn()
4126 tcg_gen_ext8s_tl(s->T0, s->T0); in disas_insn()
4129 tcg_gen_ext16u_tl(s->T0, s->T0); in disas_insn()
4133 tcg_gen_ext16s_tl(s->T0, s->T0); in disas_insn()
4137 gen_op_mov_reg_v(s, d_ot, reg, s->T0); in disas_insn()
4139 gen_lea_modrm(env, s, modrm); in disas_insn()
4140 gen_op_ld_v(s, s_ot, s->T0, s->A0); in disas_insn()
4141 gen_op_mov_reg_v(s, d_ot, reg, s->T0); in disas_insn()
4147 modrm = x86_ldub_code(env, s); in disas_insn()
4151 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
4153 AddressParts a = gen_lea_modrm_0(env, s, modrm); in disas_insn()
4154 TCGv ea = gen_lea_modrm_1(s, a, false); in disas_insn()
4155 gen_lea_v_seg(s, s->aflag, ea, -1, -1); in disas_insn()
4156 gen_op_mov_reg_v(s, dflag, reg, s->A0); in disas_insn()
4168 offset_addr = insn_get_addr(env, s, s->aflag); in disas_insn()
4169 tcg_gen_movi_tl(s->A0, offset_addr); in disas_insn()
4170 gen_add_A0_ds_seg(s); in disas_insn()
4172 gen_op_ld_v(s, ot, s->T0, s->A0); in disas_insn()
4173 gen_op_mov_reg_v(s, ot, R_EAX, s->T0); in disas_insn()
4175 gen_op_mov_v_reg(s, ot, s->T0, R_EAX); in disas_insn()
4176 gen_op_st_v(s, ot, s->T0, s->A0); in disas_insn()
4181 tcg_gen_mov_tl(s->A0, cpu_regs[R_EBX]); in disas_insn()
4182 tcg_gen_ext8u_tl(s->T0, cpu_regs[R_EAX]); in disas_insn()
4183 tcg_gen_add_tl(s->A0, s->A0, s->T0); in disas_insn()
4184 gen_extu(s->aflag, s->A0); in disas_insn()
4185 gen_add_A0_ds_seg(s); in disas_insn()
4186 gen_op_ld_v(s, MO_8, s->T0, s->A0); in disas_insn()
4187 gen_op_mov_reg_v(s, MO_8, R_EAX, s->T0); in disas_insn()
4190 val = insn_get(env, s, MO_8); in disas_insn()
4191 tcg_gen_movi_tl(s->T0, val); in disas_insn()
4192 gen_op_mov_reg_v(s, MO_8, (b & 7) | REX_B(s), s->T0); in disas_insn()
4199 tmp = x86_ldq_code(env, s); in disas_insn()
4200 reg = (b & 7) | REX_B(s); in disas_insn()
4201 tcg_gen_movi_tl(s->T0, tmp); in disas_insn()
4202 gen_op_mov_reg_v(s, MO_64, reg, s->T0); in disas_insn()
4207 val = insn_get(env, s, ot); in disas_insn()
4208 reg = (b & 7) | REX_B(s); in disas_insn()
4209 tcg_gen_movi_tl(s->T0, val); in disas_insn()
4210 gen_op_mov_reg_v(s, ot, reg, s->T0); in disas_insn()
4217 reg = (b & 7) | REX_B(s); in disas_insn()
4223 modrm = x86_ldub_code(env, s); in disas_insn()
4224 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
4227 rm = (modrm & 7) | REX_B(s); in disas_insn()
4229 gen_op_mov_v_reg(s, ot, s->T0, reg); in disas_insn()
4230 gen_op_mov_v_reg(s, ot, s->T1, rm); in disas_insn()
4231 gen_op_mov_reg_v(s, ot, rm, s->T0); in disas_insn()
4232 gen_op_mov_reg_v(s, ot, reg, s->T1); in disas_insn()
4234 gen_lea_modrm(env, s, modrm); in disas_insn()
4235 gen_op_mov_v_reg(s, ot, s->T0, reg); in disas_insn()
4237 tcg_gen_atomic_xchg_tl(s->T1, s->A0, s->T0, in disas_insn()
4238 s->mem_index, ot | MO_LE); in disas_insn()
4239 gen_op_mov_reg_v(s, ot, reg, s->T1); in disas_insn()
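The memory form of XCHG above always goes through tcg_gen_atomic_xchg_tl because the instruction is locked even without a LOCK prefix. A rough C11 equivalent of that data movement (illustrative helper, not QEMU code):

#include <stdatomic.h>
#include <stdint.h>

/* Sketch of XCHG [mem], r32: returns the old memory value, which the
 * translator then writes back into the register operand. */
static uint32_t xchg32_mem_model(_Atomic uint32_t *mem, uint32_t reg)
{
    return atomic_exchange(mem, reg);
}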
4260 modrm = x86_ldub_code(env, s); in disas_insn()
4261 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
4265 gen_lea_modrm(env, s, modrm); in disas_insn()
4266 gen_op_ld_v(s, ot, s->T1, s->A0); in disas_insn()
4267 gen_add_A0_im(s, 1 << ot); in disas_insn()
4269 gen_op_ld_v(s, MO_16, s->T0, s->A0); in disas_insn()
4270 gen_movl_seg_T0(s, op); in disas_insn()
4272 gen_op_mov_reg_v(s, ot, reg, s->T1); in disas_insn()
4284 modrm = x86_ldub_code(env, s); in disas_insn()
4290 s->rip_offset = 1; in disas_insn()
4292 gen_lea_modrm(env, s, modrm); in disas_insn()
4295 opreg = (modrm & 7) | REX_B(s); in disas_insn()
4300 gen_shift(s, op, ot, opreg, OR_ECX); in disas_insn()
4303 shift = x86_ldub_code(env, s); in disas_insn()
4305 gen_shifti(s, op, ot, opreg, shift); in disas_insn()
4337 modrm = x86_ldub_code(env, s); in disas_insn()
4339 rm = (modrm & 7) | REX_B(s); in disas_insn()
4340 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
4342 gen_lea_modrm(env, s, modrm); in disas_insn()
4347 gen_op_mov_v_reg(s, ot, s->T1, reg); in disas_insn()
4350 TCGv imm = tcg_constant_tl(x86_ldub_code(env, s)); in disas_insn()
4351 gen_shiftd_rm_T1(s, ot, opreg, op, imm); in disas_insn()
4353 gen_shiftd_rm_T1(s, ot, opreg, op, cpu_regs[R_ECX]); in disas_insn()
4363 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) { in disas_insn()
4366 gen_exception(s, EXCP07_PREX); in disas_insn()
4369 modrm = x86_ldub_code(env, s); in disas_insn()
4375 AddressParts a = gen_lea_modrm_0(env, s, modrm); in disas_insn()
4376 TCGv ea = gen_lea_modrm_1(s, a, false); in disas_insn()
4381 gen_lea_v_seg(s, s->aflag, ea, a.def_seg, s->override); in disas_insn()
4394 tcg_gen_qemu_ld_i32(s->tmp2_i32, s->A0, in disas_insn()
4395 s->mem_index, MO_LEUL); in disas_insn()
4396 gen_helper_flds_FT0(tcg_env, s->tmp2_i32); in disas_insn()
4399 tcg_gen_qemu_ld_i32(s->tmp2_i32, s->A0, in disas_insn()
4400 s->mem_index, MO_LEUL); in disas_insn()
4401 gen_helper_fildl_FT0(tcg_env, s->tmp2_i32); in disas_insn()
4404 tcg_gen_qemu_ld_i64(s->tmp1_i64, s->A0, in disas_insn()
4405 s->mem_index, MO_LEUQ); in disas_insn()
4406 gen_helper_fldl_FT0(tcg_env, s->tmp1_i64); in disas_insn()
4410 tcg_gen_qemu_ld_i32(s->tmp2_i32, s->A0, in disas_insn()
4411 s->mem_index, MO_LESW); in disas_insn()
4412 gen_helper_fildl_FT0(tcg_env, s->tmp2_i32); in disas_insn()
4433 tcg_gen_qemu_ld_i32(s->tmp2_i32, s->A0, in disas_insn()
4434 s->mem_index, MO_LEUL); in disas_insn()
4435 gen_helper_flds_ST0(tcg_env, s->tmp2_i32); in disas_insn()
4438 tcg_gen_qemu_ld_i32(s->tmp2_i32, s->A0, in disas_insn()
4439 s->mem_index, MO_LEUL); in disas_insn()
4440 gen_helper_fildl_ST0(tcg_env, s->tmp2_i32); in disas_insn()
4443 tcg_gen_qemu_ld_i64(s->tmp1_i64, s->A0, in disas_insn()
4444 s->mem_index, MO_LEUQ); in disas_insn()
4445 gen_helper_fldl_ST0(tcg_env, s->tmp1_i64); in disas_insn()
4449 tcg_gen_qemu_ld_i32(s->tmp2_i32, s->A0, in disas_insn()
4450 s->mem_index, MO_LESW); in disas_insn()
4451 gen_helper_fildl_ST0(tcg_env, s->tmp2_i32); in disas_insn()
4459 gen_helper_fisttl_ST0(s->tmp2_i32, tcg_env); in disas_insn()
4460 tcg_gen_qemu_st_i32(s->tmp2_i32, s->A0, in disas_insn()
4461 s->mem_index, MO_LEUL); in disas_insn()
4464 gen_helper_fisttll_ST0(s->tmp1_i64, tcg_env); in disas_insn()
4465 tcg_gen_qemu_st_i64(s->tmp1_i64, s->A0, in disas_insn()
4466 s->mem_index, MO_LEUQ); in disas_insn()
4470 gen_helper_fistt_ST0(s->tmp2_i32, tcg_env); in disas_insn()
4471 tcg_gen_qemu_st_i32(s->tmp2_i32, s->A0, in disas_insn()
4472 s->mem_index, MO_LEUW); in disas_insn()
4480 gen_helper_fsts_ST0(s->tmp2_i32, tcg_env); in disas_insn()
4481 tcg_gen_qemu_st_i32(s->tmp2_i32, s->A0, in disas_insn()
4482 s->mem_index, MO_LEUL); in disas_insn()
4485 gen_helper_fistl_ST0(s->tmp2_i32, tcg_env); in disas_insn()
4486 tcg_gen_qemu_st_i32(s->tmp2_i32, s->A0, in disas_insn()
4487 s->mem_index, MO_LEUL); in disas_insn()
4490 gen_helper_fstl_ST0(s->tmp1_i64, tcg_env); in disas_insn()
4491 tcg_gen_qemu_st_i64(s->tmp1_i64, s->A0, in disas_insn()
4492 s->mem_index, MO_LEUQ); in disas_insn()
4496 gen_helper_fist_ST0(s->tmp2_i32, tcg_env); in disas_insn()
4497 tcg_gen_qemu_st_i32(s->tmp2_i32, s->A0, in disas_insn()
4498 s->mem_index, MO_LEUW); in disas_insn()
4508 gen_helper_fldenv(tcg_env, s->A0, in disas_insn()
4513 tcg_gen_qemu_ld_i32(s->tmp2_i32, s->A0, in disas_insn()
4514 s->mem_index, MO_LEUW); in disas_insn()
4515 gen_helper_fldcw(tcg_env, s->tmp2_i32); in disas_insn()
4519 gen_helper_fstenv(tcg_env, s->A0, in disas_insn()
4524 gen_helper_fnstcw(s->tmp2_i32, tcg_env); in disas_insn()
4525 tcg_gen_qemu_st_i32(s->tmp2_i32, s->A0, in disas_insn()
4526 s->mem_index, MO_LEUW); in disas_insn()
4530 gen_helper_fldt_ST0(tcg_env, s->A0); in disas_insn()
4533 gen_helper_fstt_ST0(tcg_env, s->A0); in disas_insn()
4537 gen_helper_frstor(tcg_env, s->A0, in disas_insn()
4542 gen_helper_fsave(tcg_env, s->A0, in disas_insn()
4547 gen_helper_fnstsw(s->tmp2_i32, tcg_env); in disas_insn()
4548 tcg_gen_qemu_st_i32(s->tmp2_i32, s->A0, in disas_insn()
4549 s->mem_index, MO_LEUW); in disas_insn()
4553 gen_helper_fbld_ST0(tcg_env, s->A0); in disas_insn()
4556 gen_helper_fbst_ST0(tcg_env, s->A0); in disas_insn()
4560 tcg_gen_qemu_ld_i64(s->tmp1_i64, s->A0, in disas_insn()
4561 s->mem_index, MO_LEUQ); in disas_insn()
4562 gen_helper_fildll_ST0(tcg_env, s->tmp1_i64); in disas_insn()
4565 gen_helper_fistll_ST0(s->tmp1_i64, tcg_env); in disas_insn()
4566 tcg_gen_qemu_st_i64(s->tmp1_i64, s->A0, in disas_insn()
4567 s->mem_index, MO_LEUQ); in disas_insn()
4575 int last_seg = s->override >= 0 ? s->override : a.def_seg; in disas_insn()
4577 tcg_gen_ld_i32(s->tmp2_i32, tcg_env, in disas_insn()
4580 tcg_gen_st16_i32(s->tmp2_i32, tcg_env, in disas_insn()
4607 translator_io_start(&s->base); in disas_insn()
4792 if (!(s->cpuid_features & CPUID_CMOV)) { in disas_insn()
4795 gen_update_cc_op(s); in disas_insn()
4798 set_cc_op(s, CC_OP_EFLAGS); in disas_insn()
4801 if (!(s->cpuid_features & CPUID_CMOV)) { in disas_insn()
4804 gen_update_cc_op(s); in disas_insn()
4807 set_cc_op(s, CC_OP_EFLAGS); in disas_insn()
4850 gen_helper_fnstsw(s->tmp2_i32, tcg_env); in disas_insn()
4851 tcg_gen_extu_i32_tl(s->T0, s->tmp2_i32); in disas_insn()
4852 gen_op_mov_reg_v(s, MO_16, R_EAX, s->T0); in disas_insn()
4859 if (!(s->cpuid_features & CPUID_CMOV)) { in disas_insn()
4862 gen_update_cc_op(s); in disas_insn()
4866 set_cc_op(s, CC_OP_EFLAGS); in disas_insn()
4869 if (!(s->cpuid_features & CPUID_CMOV)) { in disas_insn()
4872 gen_update_cc_op(s); in disas_insn()
4876 set_cc_op(s, CC_OP_EFLAGS); in disas_insn()
4890 if (!(s->cpuid_features & CPUID_CMOV)) { in disas_insn()
4895 gen_jcc1_noeob(s, op1, l1); in disas_insn()
4907 tcg_gen_ld_i32(s->tmp2_i32, tcg_env, in disas_insn()
4909 tcg_gen_st16_i32(s->tmp2_i32, tcg_env, in disas_insn()
4911 tcg_gen_st_tl(eip_cur_tl(s), in disas_insn()
4923 gen_repz_movs(s, ot); in disas_insn()
4925 gen_movs(s, ot); in disas_insn()
4933 gen_repz_stos(s, ot); in disas_insn()
4935 gen_stos(s, ot); in disas_insn()
4942 gen_repz_lods(s, ot); in disas_insn()
4944 gen_lods(s, ot); in disas_insn()
4951 gen_repz_scas(s, ot, 1); in disas_insn()
4953 gen_repz_scas(s, ot, 0); in disas_insn()
4955 gen_scas(s, ot); in disas_insn()
4963 gen_repz_cmps(s, ot, 1); in disas_insn()
4965 gen_repz_cmps(s, ot, 0); in disas_insn()
4967 gen_cmps(s, ot); in disas_insn()
4973 tcg_gen_trunc_tl_i32(s->tmp2_i32, cpu_regs[R_EDX]); in disas_insn()
4974 tcg_gen_ext16u_i32(s->tmp2_i32, s->tmp2_i32); in disas_insn()
4975 if (!gen_check_io(s, ot, s->tmp2_i32, in disas_insn()
4979 translator_io_start(&s->base); in disas_insn()
4981 gen_repz_ins(s, ot); in disas_insn()
4983 gen_ins(s, ot); in disas_insn()
4989 tcg_gen_trunc_tl_i32(s->tmp2_i32, cpu_regs[R_EDX]); in disas_insn()
4990 tcg_gen_ext16u_i32(s->tmp2_i32, s->tmp2_i32); in disas_insn()
4991 if (!gen_check_io(s, ot, s->tmp2_i32, SVM_IOIO_STR_MASK)) { in disas_insn()
4994 translator_io_start(&s->base); in disas_insn()
4996 gen_repz_outs(s, ot); in disas_insn()
4998 gen_outs(s, ot); in disas_insn()
5008 val = x86_ldub_code(env, s); in disas_insn()
5009 tcg_gen_movi_i32(s->tmp2_i32, val); in disas_insn()
5010 if (!gen_check_io(s, ot, s->tmp2_i32, SVM_IOIO_TYPE_MASK)) { in disas_insn()
5013 translator_io_start(&s->base); in disas_insn()
5014 gen_helper_in_func(ot, s->T1, s->tmp2_i32); in disas_insn()
5015 gen_op_mov_reg_v(s, ot, R_EAX, s->T1); in disas_insn()
5016 gen_bpt_io(s, s->tmp2_i32, ot); in disas_insn()
5021 val = x86_ldub_code(env, s); in disas_insn()
5022 tcg_gen_movi_i32(s->tmp2_i32, val); in disas_insn()
5023 if (!gen_check_io(s, ot, s->tmp2_i32, 0)) { in disas_insn()
5026 translator_io_start(&s->base); in disas_insn()
5027 gen_op_mov_v_reg(s, ot, s->T1, R_EAX); in disas_insn()
5028 tcg_gen_trunc_tl_i32(s->tmp3_i32, s->T1); in disas_insn()
5029 gen_helper_out_func(ot, s->tmp2_i32, s->tmp3_i32); in disas_insn()
5030 gen_bpt_io(s, s->tmp2_i32, ot); in disas_insn()
5035 tcg_gen_trunc_tl_i32(s->tmp2_i32, cpu_regs[R_EDX]); in disas_insn()
5036 tcg_gen_ext16u_i32(s->tmp2_i32, s->tmp2_i32); in disas_insn()
5037 if (!gen_check_io(s, ot, s->tmp2_i32, SVM_IOIO_TYPE_MASK)) { in disas_insn()
5040 translator_io_start(&s->base); in disas_insn()
5041 gen_helper_in_func(ot, s->T1, s->tmp2_i32); in disas_insn()
5042 gen_op_mov_reg_v(s, ot, R_EAX, s->T1); in disas_insn()
5043 gen_bpt_io(s, s->tmp2_i32, ot); in disas_insn()
5048 tcg_gen_trunc_tl_i32(s->tmp2_i32, cpu_regs[R_EDX]); in disas_insn()
5049 tcg_gen_ext16u_i32(s->tmp2_i32, s->tmp2_i32); in disas_insn()
5050 if (!gen_check_io(s, ot, s->tmp2_i32, 0)) { in disas_insn()
5053 translator_io_start(&s->base); in disas_insn()
5054 gen_op_mov_v_reg(s, ot, s->T1, R_EAX); in disas_insn()
5055 tcg_gen_trunc_tl_i32(s->tmp3_i32, s->T1); in disas_insn()
5056 gen_helper_out_func(ot, s->tmp2_i32, s->tmp3_i32); in disas_insn()
5057 gen_bpt_io(s, s->tmp2_i32, ot); in disas_insn()
5063 val = x86_ldsw_code(env, s); in disas_insn()
5064 ot = gen_pop_T0(s); in disas_insn()
5065 gen_stack_update(s, val + (1 << ot)); in disas_insn()
5067 gen_op_jmp_v(s, s->T0); in disas_insn()
5068 gen_bnd_jmp(s); in disas_insn()
5069 s->base.is_jmp = DISAS_JUMP; in disas_insn()
5072 ot = gen_pop_T0(s); in disas_insn()
5073 gen_pop_update(s, ot); in disas_insn()
5075 gen_op_jmp_v(s, s->T0); in disas_insn()
5076 gen_bnd_jmp(s); in disas_insn()
5077 s->base.is_jmp = DISAS_JUMP; in disas_insn()
5080 val = x86_ldsw_code(env, s); in disas_insn()
5082 if (PE(s) && !VM86(s)) { in disas_insn()
5083 gen_update_cc_op(s); in disas_insn()
5084 gen_update_eip_cur(s); in disas_insn()
5088 gen_stack_A0(s); in disas_insn()
5090 gen_op_ld_v(s, dflag, s->T0, s->A0); in disas_insn()
5093 gen_op_jmp_v(s, s->T0); in disas_insn()
5095 gen_add_A0_im(s, 1 << dflag); in disas_insn()
5096 gen_op_ld_v(s, dflag, s->T0, s->A0); in disas_insn()
5097 gen_op_movl_seg_T0_vm(s, R_CS); in disas_insn()
5099 gen_stack_update(s, val + (2 << dflag)); in disas_insn()
5101 s->base.is_jmp = DISAS_EOB_ONLY; in disas_insn()
5107 gen_svm_check_intercept(s, SVM_EXIT_IRET); in disas_insn()
5108 if (!PE(s) || VM86(s)) { in disas_insn()
5110 if (!check_vm86_iopl(s)) { in disas_insn()
5116 eip_next_i32(s)); in disas_insn()
5118 set_cc_op(s, CC_OP_EFLAGS); in disas_insn()
5119 s->base.is_jmp = DISAS_EOB_ONLY; in disas_insn()
5124 ? (int32_t)insn_get(env, s, MO_32) in disas_insn()
5125 : (int16_t)insn_get(env, s, MO_16)); in disas_insn()
5126 gen_push_v(s, eip_next_tl(s)); in disas_insn()
5127 gen_bnd_jmp(s); in disas_insn()
5128 gen_jmp_rel(s, dflag, diff, 0); in disas_insn()
5135 if (CODE64(s)) in disas_insn()
5138 offset = insn_get(env, s, ot); in disas_insn()
5139 selector = insn_get(env, s, MO_16); in disas_insn()
5141 tcg_gen_movi_tl(s->T0, selector); in disas_insn()
5142 tcg_gen_movi_tl(s->T1, offset); in disas_insn()
5148 ? (int32_t)insn_get(env, s, MO_32) in disas_insn()
5149 : (int16_t)insn_get(env, s, MO_16)); in disas_insn()
5150 gen_bnd_jmp(s); in disas_insn()
5151 gen_jmp_rel(s, dflag, diff, 0); in disas_insn()
5158 if (CODE64(s)) in disas_insn()
5161 offset = insn_get(env, s, ot); in disas_insn()
5162 selector = insn_get(env, s, MO_16); in disas_insn()
5164 tcg_gen_movi_tl(s->T0, selector); in disas_insn()
5165 tcg_gen_movi_tl(s->T1, offset); in disas_insn()
5170 int diff = (int8_t)insn_get(env, s, MO_8); in disas_insn()
5171 gen_jmp_rel(s, dflag, diff, 0); in disas_insn()
5176 int diff = (int8_t)insn_get(env, s, MO_8); in disas_insn()
5177 gen_bnd_jmp(s); in disas_insn()
5178 gen_jcc(s, b, diff); in disas_insn()
5184 ? (int32_t)insn_get(env, s, MO_32) in disas_insn()
5185 : (int16_t)insn_get(env, s, MO_16)); in disas_insn()
5186 gen_bnd_jmp(s); in disas_insn()
5187 gen_jcc(s, b, diff); in disas_insn()
5192 modrm = x86_ldub_code(env, s); in disas_insn()
5193 gen_setcc1(s, b, s->T0); in disas_insn()
5194 gen_ldst_modrm(env, s, modrm, MO_8, OR_TMP0, 1); in disas_insn()
5197 if (!(s->cpuid_features & CPUID_CMOV)) { in disas_insn()
5201 modrm = x86_ldub_code(env, s); in disas_insn()
5202 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
5203 gen_cmovcc1(env, s, ot, b, modrm, reg); in disas_insn()
5209 gen_svm_check_intercept(s, SVM_EXIT_PUSHF); in disas_insn()
5210 if (check_vm86_iopl(s)) { in disas_insn()
5211 gen_update_cc_op(s); in disas_insn()
5212 gen_helper_read_eflags(s->T0, tcg_env); in disas_insn()
5213 gen_push_v(s, s->T0); in disas_insn()
5217 gen_svm_check_intercept(s, SVM_EXIT_POPF); in disas_insn()
5218 if (check_vm86_iopl(s)) { in disas_insn()
5221 if (CPL(s) == 0) { in disas_insn()
5223 } else if (CPL(s) <= IOPL(s)) { in disas_insn()
5230 ot = gen_pop_T0(s); in disas_insn()
5231 gen_helper_write_eflags(tcg_env, s->T0, tcg_constant_i32(mask)); in disas_insn()
5232 gen_pop_update(s, ot); in disas_insn()
5233 set_cc_op(s, CC_OP_EFLAGS); in disas_insn()
5235 s->base.is_jmp = DISAS_EOB_NEXT; in disas_insn()
5239 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM)) in disas_insn()
5241 tcg_gen_shri_tl(s->T0, cpu_regs[R_EAX], 8); in disas_insn()
5242 gen_compute_eflags(s); in disas_insn()
5244 tcg_gen_andi_tl(s->T0, s->T0, CC_S | CC_Z | CC_A | CC_P | CC_C); in disas_insn()
5245 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, s->T0); in disas_insn()
5248 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM)) in disas_insn()
5250 gen_compute_eflags(s); in disas_insn()
5252 tcg_gen_ori_tl(s->T0, cpu_cc_src, 0x02); in disas_insn()
5253 tcg_gen_deposit_tl(cpu_regs[R_EAX], cpu_regs[R_EAX], s->T0, 8, 8); in disas_insn()
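SAHF and LAHF above touch only SF, ZF, AF, PF and CF (the CC_S | CC_Z | CC_A | CC_P | CC_C mask), and bit 1 of the flag image always reads as 1, which is the 0x02 ORed in before the deposit into AH. A plain-C sketch of both directions (the model names and the LAHF_MASK constant are mine):

#include <stdint.h>

#define LAHF_MASK 0xD5u   /* SF | ZF | AF | PF | CF = bits 7,6,4,2,0 */

static uint32_t sahf_model(uint32_t flags, uint8_t ah)
{
    return (flags & ~LAHF_MASK) | (ah & LAHF_MASK);
}

static uint8_t lahf_model(uint32_t flags)
{
    return (uint8_t)((flags & LAHF_MASK) | 0x02);  /* bit 1 always set */
}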
5256 gen_compute_eflags(s); in disas_insn()
5260 gen_compute_eflags(s); in disas_insn()
5264 gen_compute_eflags(s); in disas_insn()
5268 tcg_gen_movi_i32(s->tmp2_i32, 1); in disas_insn()
5269 tcg_gen_st_i32(s->tmp2_i32, tcg_env, offsetof(CPUX86State, df)); in disas_insn()
5272 tcg_gen_movi_i32(s->tmp2_i32, -1); in disas_insn()
5273 tcg_gen_st_i32(s->tmp2_i32, tcg_env, offsetof(CPUX86State, df)); in disas_insn()
5280 modrm = x86_ldub_code(env, s); in disas_insn()
5283 rm = (modrm & 7) | REX_B(s); in disas_insn()
5285 s->rip_offset = 1; in disas_insn()
5286 gen_lea_modrm(env, s, modrm); in disas_insn()
5287 if (!(s->prefix & PREFIX_LOCK)) { in disas_insn()
5288 gen_op_ld_v(s, ot, s->T0, s->A0); in disas_insn()
5291 gen_op_mov_v_reg(s, ot, s->T0, rm); in disas_insn()
5294 val = x86_ldub_code(env, s); in disas_insn()
5295 tcg_gen_movi_tl(s->T1, val); in disas_insn()
5313 modrm = x86_ldub_code(env, s); in disas_insn()
5314 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
5316 rm = (modrm & 7) | REX_B(s); in disas_insn()
5317 gen_op_mov_v_reg(s, MO_32, s->T1, reg); in disas_insn()
5319 AddressParts a = gen_lea_modrm_0(env, s, modrm); in disas_insn()
5321 gen_exts(ot, s->T1); in disas_insn()
5322 tcg_gen_sari_tl(s->tmp0, s->T1, 3 + ot); in disas_insn()
5323 tcg_gen_shli_tl(s->tmp0, s->tmp0, ot); in disas_insn()
5324 tcg_gen_add_tl(s->A0, gen_lea_modrm_1(s, a, false), s->tmp0); in disas_insn()
5325 gen_lea_v_seg(s, s->aflag, s->A0, a.def_seg, s->override); in disas_insn()
5326 if (!(s->prefix & PREFIX_LOCK)) { in disas_insn()
5327 gen_op_ld_v(s, ot, s->T0, s->A0); in disas_insn()
5330 gen_op_mov_v_reg(s, ot, s->T0, rm); in disas_insn()
5333 tcg_gen_andi_tl(s->T1, s->T1, (1 << (3 + ot)) - 1); in disas_insn()
5334 tcg_gen_movi_tl(s->tmp0, 1); in disas_insn()
5335 tcg_gen_shl_tl(s->tmp0, s->tmp0, s->T1); in disas_insn()
5336 if (s->prefix & PREFIX_LOCK) { in disas_insn()
5341 gen_op_ld_v(s, ot, s->T0, s->A0); in disas_insn()
5344 tcg_gen_atomic_fetch_or_tl(s->T0, s->A0, s->tmp0, in disas_insn()
5345 s->mem_index, ot | MO_LE); in disas_insn()
5348 tcg_gen_not_tl(s->tmp0, s->tmp0); in disas_insn()
5349 tcg_gen_atomic_fetch_and_tl(s->T0, s->A0, s->tmp0, in disas_insn()
5350 s->mem_index, ot | MO_LE); in disas_insn()
5354 tcg_gen_atomic_fetch_xor_tl(s->T0, s->A0, s->tmp0, in disas_insn()
5355 s->mem_index, ot | MO_LE); in disas_insn()
5358 tcg_gen_shr_tl(s->tmp4, s->T0, s->T1); in disas_insn()
5360 tcg_gen_shr_tl(s->tmp4, s->T0, s->T1); in disas_insn()
5366 tcg_gen_or_tl(s->T0, s->T0, s->tmp0); in disas_insn()
5369 tcg_gen_andc_tl(s->T0, s->T0, s->tmp0); in disas_insn()
5373 tcg_gen_xor_tl(s->T0, s->T0, s->tmp0); in disas_insn()
5378 gen_op_st_v(s, ot, s->T0, s->A0); in disas_insn()
5380 gen_op_mov_reg_v(s, ot, rm, s->T0); in disas_insn()
5388 switch (s->cc_op) { in disas_insn()
5404 tcg_gen_mov_tl(cpu_cc_src, s->tmp4); in disas_insn()
5405 set_cc_op(s, ((s->cc_op - CC_OP_MULB) & 3) + CC_OP_SARB); in disas_insn()
5409 gen_compute_eflags(s); in disas_insn()
5410 tcg_gen_deposit_tl(cpu_cc_src, cpu_cc_src, s->tmp4, in disas_insn()
5418 modrm = x86_ldub_code(env, s); in disas_insn()
5419 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
5420 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0); in disas_insn()
5421 gen_extu(ot, s->T0); in disas_insn()
5426 ? s->cpuid_ext3_features & CPUID_EXT3_ABM in disas_insn()
5427 : s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI1)) { in disas_insn()
5430 tcg_gen_mov_tl(cpu_cc_src, s->T0); in disas_insn()
5434 tcg_gen_clzi_tl(s->T0, s->T0, TARGET_LONG_BITS); in disas_insn()
5435 tcg_gen_subi_tl(s->T0, s->T0, TARGET_LONG_BITS - size); in disas_insn()
5438 tcg_gen_ctzi_tl(s->T0, s->T0, size); in disas_insn()
5441 gen_op_update1_cc(s); in disas_insn()
5442 set_cc_op(s, CC_OP_BMILGB + ot); in disas_insn()
5446 tcg_gen_mov_tl(cpu_cc_dst, s->T0); in disas_insn()
5447 set_cc_op(s, CC_OP_LOGICB + ot); in disas_insn()
5456 tcg_gen_xori_tl(s->T1, cpu_regs[reg], TARGET_LONG_BITS - 1); in disas_insn()
5457 tcg_gen_clz_tl(s->T0, s->T0, s->T1); in disas_insn()
5458 tcg_gen_xori_tl(s->T0, s->T0, TARGET_LONG_BITS - 1); in disas_insn()
5460 tcg_gen_ctz_tl(s->T0, s->T0, cpu_regs[reg]); in disas_insn()
5463 gen_op_mov_reg_v(s, ot, reg, s->T0); in disas_insn()
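The BSF/BSR lines above express both scans through clz/ctz: BSF is a trailing-zero count, and BSR is (width - 1) minus a leading-zero count, which is what the two XORs with TARGET_LONG_BITS - 1 around tcg_gen_clz_tl achieve; a zero source keeps the old destination via the fallback operand. A plain-C sketch for non-zero 32-bit inputs, using GCC/Clang builtins:

#include <stdint.h>

/* Valid only for x != 0, like the architectural bit-scan results. */
static unsigned bsf32_model(uint32_t x)
{
    return (unsigned)__builtin_ctz(x);            /* lowest set bit */
}

static unsigned bsr32_model(uint32_t x)
{
    return 31u - (unsigned)__builtin_clz(x);      /* highest set bit */
}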
5468 if (CODE64(s)) in disas_insn()
5470 gen_update_cc_op(s); in disas_insn()
5472 set_cc_op(s, CC_OP_EFLAGS); in disas_insn()
5475 if (CODE64(s)) in disas_insn()
5477 gen_update_cc_op(s); in disas_insn()
5479 set_cc_op(s, CC_OP_EFLAGS); in disas_insn()
5482 if (CODE64(s)) in disas_insn()
5484 gen_update_cc_op(s); in disas_insn()
5486 set_cc_op(s, CC_OP_EFLAGS); in disas_insn()
5489 if (CODE64(s)) in disas_insn()
5491 gen_update_cc_op(s); in disas_insn()
5493 set_cc_op(s, CC_OP_EFLAGS); in disas_insn()
5496 if (CODE64(s)) in disas_insn()
5498 val = x86_ldub_code(env, s); in disas_insn()
5500 gen_exception(s, EXCP00_DIVZ); in disas_insn()
5503 set_cc_op(s, CC_OP_LOGICB); in disas_insn()
5507 if (CODE64(s)) in disas_insn()
5509 val = x86_ldub_code(env, s); in disas_insn()
5511 set_cc_op(s, CC_OP_LOGICB); in disas_insn()
5521 if (REX_B(s)) { in disas_insn()
5525 gen_update_cc_op(s); in disas_insn()
5526 gen_update_eip_cur(s); in disas_insn()
5527 gen_helper_pause(tcg_env, cur_insn_len_i32(s)); in disas_insn()
5528 s->base.is_jmp = DISAS_NORETURN; in disas_insn()
5532 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) == in disas_insn()
5534 gen_exception(s, EXCP07_PREX); in disas_insn()
5537 translator_io_start(&s->base); in disas_insn()
5542 gen_interrupt(s, EXCP03_INT3); in disas_insn()
5545 val = x86_ldub_code(env, s); in disas_insn()
5546 if (check_vm86_iopl(s)) { in disas_insn()
5547 gen_interrupt(s, val); in disas_insn()
5551 if (CODE64(s)) in disas_insn()
5553 gen_update_cc_op(s); in disas_insn()
5554 gen_update_eip_cur(s); in disas_insn()
5555 gen_helper_into(tcg_env, cur_insn_len_i32(s)); in disas_insn()
5559 gen_svm_check_intercept(s, SVM_EXIT_ICEBP); in disas_insn()
5560 gen_debug(s); in disas_insn()
5564 if (check_iopl(s)) { in disas_insn()
5565 gen_reset_eflags(s, IF_MASK); in disas_insn()
5569 if (check_iopl(s)) { in disas_insn()
5570 gen_set_eflags(s, IF_MASK); in disas_insn()
5572 gen_update_eip_next(s); in disas_insn()
5573 gen_eob_inhibit_irq(s, true); in disas_insn()
5577 if (CODE64(s)) in disas_insn()
5580 modrm = x86_ldub_code(env, s); in disas_insn()
5585 gen_op_mov_v_reg(s, ot, s->T0, reg); in disas_insn()
5586 gen_lea_modrm(env, s, modrm); in disas_insn()
5587 tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0); in disas_insn()
5589 gen_helper_boundw(tcg_env, s->A0, s->tmp2_i32); in disas_insn()
5591 gen_helper_boundl(tcg_env, s->A0, s->tmp2_i32); in disas_insn()
5595 reg = (b & 7) | REX_B(s); in disas_insn()
5605 if (CODE64(s)) in disas_insn()
5607 gen_compute_eflags_c(s, s->T0); in disas_insn()
5608 tcg_gen_neg_tl(s->T0, s->T0); in disas_insn()
5609 gen_op_mov_reg_v(s, MO_8, R_EAX, s->T0); in disas_insn()
5617 int diff = (int8_t)insn_get(env, s, MO_8); in disas_insn()
5621 gen_update_cc_op(s); in disas_insn()
5626 gen_op_add_reg_im(s, s->aflag, R_ECX, -1); in disas_insn()
5627 gen_op_jz_ecx(s, l2); in disas_insn()
5628 gen_jcc1(s, (JCC_Z << 1) | (b ^ 1), l1); in disas_insn()
5631 gen_op_add_reg_im(s, s->aflag, R_ECX, -1); in disas_insn()
5632 gen_op_jnz_ecx(s, l1); in disas_insn()
5636 gen_op_jz_ecx(s, l1); in disas_insn()
5641 gen_jmp_rel_csize(s, 0, 1); in disas_insn()
5644 gen_jmp_rel(s, dflag, diff, 0); in disas_insn()
5649 if (check_cpl0(s)) { in disas_insn()
5650 gen_update_cc_op(s); in disas_insn()
5651 gen_update_eip_cur(s); in disas_insn()
5656 s->base.is_jmp = DISAS_EOB_NEXT; in disas_insn()
5661 gen_update_cc_op(s); in disas_insn()
5662 gen_update_eip_cur(s); in disas_insn()
5663 translator_io_start(&s->base); in disas_insn()
5667 gen_update_cc_op(s); in disas_insn()
5668 gen_update_eip_cur(s); in disas_insn()
5670 s->base.is_jmp = DISAS_NORETURN; in disas_insn()
5674 if (LMA(s) && env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1) { in disas_insn()
5677 if (!PE(s)) { in disas_insn()
5678 gen_exception_gpf(s); in disas_insn()
5681 s->base.is_jmp = DISAS_EOB_ONLY; in disas_insn()
5686 if (LMA(s) && env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1) { in disas_insn()
5689 if (!PE(s) || CPL(s) != 0) { in disas_insn()
5690 gen_exception_gpf(s); in disas_insn()
5693 s->base.is_jmp = DISAS_EOB_ONLY; in disas_insn()
5698 if (!LMA(s) && env->cpuid_vendor1 == CPUID_VENDOR_INTEL_1) { in disas_insn()
5701 gen_update_cc_op(s); in disas_insn()
5702 gen_update_eip_cur(s); in disas_insn()
5703 gen_helper_syscall(tcg_env, cur_insn_len_i32(s)); in disas_insn()
5707 gen_eob_worker(s, false, true); in disas_insn()
5711 if (!LMA(s) && env->cpuid_vendor1 == CPUID_VENDOR_INTEL_1) { in disas_insn()
5714 if (!PE(s) || CPL(s) != 0) { in disas_insn()
5715 gen_exception_gpf(s); in disas_insn()
5719 if (LMA(s)) { in disas_insn()
5720 set_cc_op(s, CC_OP_EFLAGS); in disas_insn()
5726 gen_eob_worker(s, false, true); in disas_insn()
5730 gen_update_cc_op(s); in disas_insn()
5731 gen_update_eip_cur(s); in disas_insn()
5735 if (check_cpl0(s)) { in disas_insn()
5736 gen_update_cc_op(s); in disas_insn()
5737 gen_update_eip_cur(s); in disas_insn()
5738 gen_helper_hlt(tcg_env, cur_insn_len_i32(s)); in disas_insn()
5739 s->base.is_jmp = DISAS_NORETURN; in disas_insn()
5743 modrm = x86_ldub_code(env, s); in disas_insn()
5748 if (!PE(s) || VM86(s)) in disas_insn()
5750 if (s->flags & HF_UMIP_MASK && !check_cpl0(s)) { in disas_insn()
5753 gen_svm_check_intercept(s, SVM_EXIT_LDTR_READ); in disas_insn()
5754 tcg_gen_ld32u_tl(s->T0, tcg_env, in disas_insn()
5757 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 1); in disas_insn()
5760 if (!PE(s) || VM86(s)) in disas_insn()
5762 if (check_cpl0(s)) { in disas_insn()
5763 gen_svm_check_intercept(s, SVM_EXIT_LDTR_WRITE); in disas_insn()
5764 gen_ldst_modrm(env, s, modrm, MO_16, OR_TMP0, 0); in disas_insn()
5765 tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0); in disas_insn()
5766 gen_helper_lldt(tcg_env, s->tmp2_i32); in disas_insn()
5770 if (!PE(s) || VM86(s)) in disas_insn()
5772 if (s->flags & HF_UMIP_MASK && !check_cpl0(s)) { in disas_insn()
5775 gen_svm_check_intercept(s, SVM_EXIT_TR_READ); in disas_insn()
5776 tcg_gen_ld32u_tl(s->T0, tcg_env, in disas_insn()
5779 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 1); in disas_insn()
5782 if (!PE(s) || VM86(s)) in disas_insn()
5784 if (check_cpl0(s)) { in disas_insn()
5785 gen_svm_check_intercept(s, SVM_EXIT_TR_WRITE); in disas_insn()
5786 gen_ldst_modrm(env, s, modrm, MO_16, OR_TMP0, 0); in disas_insn()
5787 tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0); in disas_insn()
5788 gen_helper_ltr(tcg_env, s->tmp2_i32); in disas_insn()
5793 if (!PE(s) || VM86(s)) in disas_insn()
5795 gen_ldst_modrm(env, s, modrm, MO_16, OR_TMP0, 0); in disas_insn()
5796 gen_update_cc_op(s); in disas_insn()
5798 gen_helper_verr(tcg_env, s->T0); in disas_insn()
5800 gen_helper_verw(tcg_env, s->T0); in disas_insn()
5802 set_cc_op(s, CC_OP_EFLAGS); in disas_insn()
5810 modrm = x86_ldub_code(env, s); in disas_insn()
5813 if (s->flags & HF_UMIP_MASK && !check_cpl0(s)) { in disas_insn()
5816 gen_svm_check_intercept(s, SVM_EXIT_GDTR_READ); in disas_insn()
5817 gen_lea_modrm(env, s, modrm); in disas_insn()
5818 tcg_gen_ld32u_tl(s->T0, in disas_insn()
5820 gen_op_st_v(s, MO_16, s->T0, s->A0); in disas_insn()
5821 gen_add_A0_im(s, 2); in disas_insn()
5822 tcg_gen_ld_tl(s->T0, tcg_env, offsetof(CPUX86State, gdt.base)); in disas_insn()
5824 tcg_gen_andi_tl(s->T0, s->T0, 0xffffff); in disas_insn()
5826 gen_op_st_v(s, CODE64(s) + MO_32, s->T0, s->A0); in disas_insn()
5830 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) || CPL(s) != 0) { in disas_insn()
5833 gen_update_cc_op(s); in disas_insn()
5834 gen_update_eip_cur(s); in disas_insn()
5835 tcg_gen_mov_tl(s->A0, cpu_regs[R_EAX]); in disas_insn()
5836 gen_extu(s->aflag, s->A0); in disas_insn()
5837 gen_add_A0_ds_seg(s); in disas_insn()
5838 gen_helper_monitor(tcg_env, s->A0); in disas_insn()
5842 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) || CPL(s) != 0) { in disas_insn()
5845 gen_update_cc_op(s); in disas_insn()
5846 gen_update_eip_cur(s); in disas_insn()
5847 gen_helper_mwait(tcg_env, cur_insn_len_i32(s)); in disas_insn()
5848 s->base.is_jmp = DISAS_NORETURN; in disas_insn()
5852 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_SMAP) in disas_insn()
5853 || CPL(s) != 0) { in disas_insn()
5856 gen_reset_eflags(s, AC_MASK); in disas_insn()
5857 s->base.is_jmp = DISAS_EOB_NEXT; in disas_insn()
5861 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_SMAP) in disas_insn()
5862 || CPL(s) != 0) { in disas_insn()
5865 gen_set_eflags(s, AC_MASK); in disas_insn()
5866 s->base.is_jmp = DISAS_EOB_NEXT; in disas_insn()
5870 if (s->flags & HF_UMIP_MASK && !check_cpl0(s)) { in disas_insn()
5873 gen_svm_check_intercept(s, SVM_EXIT_IDTR_READ); in disas_insn()
5874 gen_lea_modrm(env, s, modrm); in disas_insn()
5875 tcg_gen_ld32u_tl(s->T0, tcg_env, offsetof(CPUX86State, idt.limit)); in disas_insn()
5876 gen_op_st_v(s, MO_16, s->T0, s->A0); in disas_insn()
5877 gen_add_A0_im(s, 2); in disas_insn()
5878 tcg_gen_ld_tl(s->T0, tcg_env, offsetof(CPUX86State, idt.base)); in disas_insn()
5880 tcg_gen_andi_tl(s->T0, s->T0, 0xffffff); in disas_insn()
5882 gen_op_st_v(s, CODE64(s) + MO_32, s->T0, s->A0); in disas_insn()
5886 if ((s->cpuid_ext_features & CPUID_EXT_XSAVE) == 0 in disas_insn()
5887 || (s->prefix & (PREFIX_LOCK | PREFIX_DATA in disas_insn()
5891 tcg_gen_trunc_tl_i32(s->tmp2_i32, cpu_regs[R_ECX]); in disas_insn()
5892 gen_helper_xgetbv(s->tmp1_i64, tcg_env, s->tmp2_i32); in disas_insn()
5893 tcg_gen_extr_i64_tl(cpu_regs[R_EAX], cpu_regs[R_EDX], s->tmp1_i64); in disas_insn()
5897 if ((s->cpuid_ext_features & CPUID_EXT_XSAVE) == 0 in disas_insn()
5898 || (s->prefix & (PREFIX_LOCK | PREFIX_DATA in disas_insn()
5902 gen_svm_check_intercept(s, SVM_EXIT_XSETBV); in disas_insn()
5903 if (!check_cpl0(s)) { in disas_insn()
5906 tcg_gen_concat_tl_i64(s->tmp1_i64, cpu_regs[R_EAX], in disas_insn()
5908 tcg_gen_trunc_tl_i32(s->tmp2_i32, cpu_regs[R_ECX]); in disas_insn()
5909 gen_helper_xsetbv(tcg_env, s->tmp2_i32, s->tmp1_i64); in disas_insn()
5911 s->base.is_jmp = DISAS_EOB_NEXT; in disas_insn()
5915 if (!SVME(s) || !PE(s)) { in disas_insn()
5918 if (!check_cpl0(s)) { in disas_insn()
5921 gen_update_cc_op(s); in disas_insn()
5922 gen_update_eip_cur(s); in disas_insn()
5923 gen_helper_vmrun(tcg_env, tcg_constant_i32(s->aflag - 1), in disas_insn()
5924 cur_insn_len_i32(s)); in disas_insn()
5926 s->base.is_jmp = DISAS_NORETURN; in disas_insn()
5930 if (!SVME(s)) { in disas_insn()
5933 gen_update_cc_op(s); in disas_insn()
5934 gen_update_eip_cur(s); in disas_insn()
5939 if (!SVME(s) || !PE(s)) { in disas_insn()
5942 if (!check_cpl0(s)) { in disas_insn()
5945 gen_update_cc_op(s); in disas_insn()
5946 gen_update_eip_cur(s); in disas_insn()
5947 gen_helper_vmload(tcg_env, tcg_constant_i32(s->aflag - 1)); in disas_insn()
5951 if (!SVME(s) || !PE(s)) { in disas_insn()
5954 if (!check_cpl0(s)) { in disas_insn()
5957 gen_update_cc_op(s); in disas_insn()
5958 gen_update_eip_cur(s); in disas_insn()
5959 gen_helper_vmsave(tcg_env, tcg_constant_i32(s->aflag - 1)); in disas_insn()
5963 if ((!SVME(s) && !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) in disas_insn()
5964 || !PE(s)) { in disas_insn()
5967 if (!check_cpl0(s)) { in disas_insn()
5970 gen_update_cc_op(s); in disas_insn()
5972 s->base.is_jmp = DISAS_EOB_NEXT; in disas_insn()
5976 if (!SVME(s) || !PE(s)) { in disas_insn()
5979 if (!check_cpl0(s)) { in disas_insn()
5982 gen_update_cc_op(s); in disas_insn()
5983 gen_update_eip_cur(s); in disas_insn()
5988 if ((!SVME(s) && !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) in disas_insn()
5989 || !PE(s)) { in disas_insn()
5992 gen_svm_check_intercept(s, SVM_EXIT_SKINIT); in disas_insn()
5997 if (!SVME(s) || !PE(s)) { in disas_insn()
6000 if (!check_cpl0(s)) { in disas_insn()
6003 gen_svm_check_intercept(s, SVM_EXIT_INVLPGA); in disas_insn()
6004 if (s->aflag == MO_64) { in disas_insn()
6005 tcg_gen_mov_tl(s->A0, cpu_regs[R_EAX]); in disas_insn()
6007 tcg_gen_ext32u_tl(s->A0, cpu_regs[R_EAX]); in disas_insn()
6009 gen_helper_flush_page(tcg_env, s->A0); in disas_insn()
6010 s->base.is_jmp = DISAS_EOB_NEXT; in disas_insn()
6014 if (!check_cpl0(s)) { in disas_insn()
6017 gen_svm_check_intercept(s, SVM_EXIT_GDTR_WRITE); in disas_insn()
6018 gen_lea_modrm(env, s, modrm); in disas_insn()
6019 gen_op_ld_v(s, MO_16, s->T1, s->A0); in disas_insn()
6020 gen_add_A0_im(s, 2); in disas_insn()
6021 gen_op_ld_v(s, CODE64(s) + MO_32, s->T0, s->A0); in disas_insn()
6023 tcg_gen_andi_tl(s->T0, s->T0, 0xffffff); in disas_insn()
6025 tcg_gen_st_tl(s->T0, tcg_env, offsetof(CPUX86State, gdt.base)); in disas_insn()
6026 tcg_gen_st32_tl(s->T1, tcg_env, offsetof(CPUX86State, gdt.limit)); in disas_insn()
6030 if (!check_cpl0(s)) { in disas_insn()
6033 gen_svm_check_intercept(s, SVM_EXIT_IDTR_WRITE); in disas_insn()
6034 gen_lea_modrm(env, s, modrm); in disas_insn()
6035 gen_op_ld_v(s, MO_16, s->T1, s->A0); in disas_insn()
6036 gen_add_A0_im(s, 2); in disas_insn()
6037 gen_op_ld_v(s, CODE64(s) + MO_32, s->T0, s->A0); in disas_insn()
6039 tcg_gen_andi_tl(s->T0, s->T0, 0xffffff); in disas_insn()
6041 tcg_gen_st_tl(s->T0, tcg_env, offsetof(CPUX86State, idt.base)); in disas_insn()
6042 tcg_gen_st32_tl(s->T1, tcg_env, offsetof(CPUX86State, idt.limit)); in disas_insn()
6046 if (s->flags & HF_UMIP_MASK && !check_cpl0(s)) { in disas_insn()
6049 gen_svm_check_intercept(s, SVM_EXIT_READ_CR0); in disas_insn()
6050 tcg_gen_ld_tl(s->T0, tcg_env, offsetof(CPUX86State, cr[0])); in disas_insn()
6057 ot = (mod != 3 ? MO_16 : s->dflag); in disas_insn()
6058 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 1); in disas_insn()
6064 tcg_gen_trunc_tl_i32(s->tmp2_i32, cpu_regs[R_ECX]); in disas_insn()
6065 gen_helper_rdpkru(s->tmp1_i64, tcg_env, s->tmp2_i32); in disas_insn()
6066 tcg_gen_extr_i64_tl(cpu_regs[R_EAX], cpu_regs[R_EDX], s->tmp1_i64); in disas_insn()
6072 tcg_gen_concat_tl_i64(s->tmp1_i64, cpu_regs[R_EAX], in disas_insn()
6074 tcg_gen_trunc_tl_i32(s->tmp2_i32, cpu_regs[R_ECX]); in disas_insn()
6075 gen_helper_wrpkru(tcg_env, s->tmp2_i32, s->tmp1_i64); in disas_insn()
6079 if (!check_cpl0(s)) { in disas_insn()
6082 gen_svm_check_intercept(s, SVM_EXIT_WRITE_CR0); in disas_insn()
6083 gen_ldst_modrm(env, s, modrm, MO_16, OR_TMP0, 0); in disas_insn()
6088 tcg_gen_ld_tl(s->T1, tcg_env, offsetof(CPUX86State, cr[0])); in disas_insn()
6089 tcg_gen_andi_tl(s->T0, s->T0, 0xf); in disas_insn()
6090 tcg_gen_andi_tl(s->T1, s->T1, ~0xe); in disas_insn()
6091 tcg_gen_or_tl(s->T0, s->T0, s->T1); in disas_insn()
6092 gen_helper_write_crN(tcg_env, tcg_constant_i32(0), s->T0); in disas_insn()
6093 s->base.is_jmp = DISAS_EOB_NEXT; in disas_insn()
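The LMSW masking above (source & 0xf, old CR0 & ~0xe, then OR) means only PE, MP, EM and TS can be written, and PE can be set but never cleared because the old bit 0 is preserved. A one-line plain-C sketch of that update (lmsw_model is an illustrative name):

#include <stdint.h>

/* Sketch of LMSW: new CR0 keeps everything above bit 3 plus the old PE,
 * and takes MP/EM/TS (and possibly a newly-set PE) from the source. */
static uint64_t lmsw_model(uint64_t cr0, uint16_t src)
{
    return (cr0 & ~(uint64_t)0xe) | (src & 0xfu);
}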
6097 if (!check_cpl0(s)) { in disas_insn()
6100 gen_svm_check_intercept(s, SVM_EXIT_INVLPG); in disas_insn()
6101 gen_lea_modrm(env, s, modrm); in disas_insn()
6102 gen_helper_flush_page(tcg_env, s->A0); in disas_insn()
6103 s->base.is_jmp = DISAS_EOB_NEXT; in disas_insn()
6108 if (CODE64(s)) { in disas_insn()
6109 if (check_cpl0(s)) { in disas_insn()
6110 tcg_gen_mov_tl(s->T0, cpu_seg_base[R_GS]); in disas_insn()
6113 tcg_gen_st_tl(s->T0, tcg_env, in disas_insn()
6122 if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP)) { in disas_insn()
6125 gen_update_cc_op(s); in disas_insn()
6126 gen_update_eip_cur(s); in disas_insn()
6127 translator_io_start(&s->base); in disas_insn()
6129 gen_helper_rdpid(s->T0, tcg_env); in disas_insn()
6130 gen_op_mov_reg_v(s, dflag, R_ECX, s->T0); in disas_insn()
6140 if (check_cpl0(s)) { in disas_insn()
6141 gen_svm_check_intercept(s, (b & 1) ? SVM_EXIT_WBINVD : SVM_EXIT_INVD); in disas_insn()
6147 if (CODE64(s)) { in disas_insn()
6152 modrm = x86_ldub_code(env, s); in disas_insn()
6153 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
6155 rm = (modrm & 7) | REX_B(s); in disas_insn()
6158 gen_op_mov_v_reg(s, MO_32, s->T0, rm); in disas_insn()
6161 tcg_gen_ext32s_tl(s->T0, s->T0); in disas_insn()
6163 gen_op_mov_reg_v(s, d_ot, reg, s->T0); in disas_insn()
6165 gen_lea_modrm(env, s, modrm); in disas_insn()
6166 gen_op_ld_v(s, MO_32 | MO_SIGN, s->T0, s->A0); in disas_insn()
6167 gen_op_mov_reg_v(s, d_ot, reg, s->T0); in disas_insn()
6175 if (!PE(s) || VM86(s)) in disas_insn()
6181 modrm = x86_ldub_code(env, s); in disas_insn()
6186 gen_lea_modrm(env, s, modrm); in disas_insn()
6187 gen_op_ld_v(s, ot, t0, s->A0); in disas_insn()
6189 gen_op_mov_v_reg(s, ot, t0, rm); in disas_insn()
6191 gen_op_mov_v_reg(s, ot, t1, reg); in disas_insn()
6192 tcg_gen_andi_tl(s->tmp0, t0, 3); in disas_insn()
6196 tcg_gen_brcond_tl(TCG_COND_GE, s->tmp0, t1, label1); in disas_insn()
6202 gen_op_st_v(s, ot, t0, s->A0); in disas_insn()
6204 gen_op_mov_reg_v(s, ot, rm, t0); in disas_insn()
6206 gen_compute_eflags(s); in disas_insn()
6216 if (!PE(s) || VM86(s)) in disas_insn()
6219 modrm = x86_ldub_code(env, s); in disas_insn()
6220 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
6221 gen_ldst_modrm(env, s, modrm, MO_16, OR_TMP0, 0); in disas_insn()
6223 gen_update_cc_op(s); in disas_insn()
6225 gen_helper_lar(t0, tcg_env, s->T0); in disas_insn()
6227 gen_helper_lsl(t0, tcg_env, s->T0); in disas_insn()
6229 tcg_gen_andi_tl(s->tmp0, cpu_cc_src, CC_Z); in disas_insn()
6231 tcg_gen_brcondi_tl(TCG_COND_EQ, s->tmp0, 0, label1); in disas_insn()
6232 gen_op_mov_reg_v(s, ot, reg, t0); in disas_insn()
6234 set_cc_op(s, CC_OP_EFLAGS); in disas_insn()
6238 modrm = x86_ldub_code(env, s); in disas_insn()
6248 gen_nop_modrm(env, s, modrm); in disas_insn()
6252 gen_nop_modrm(env, s, modrm); in disas_insn()
6257 modrm = x86_ldub_code(env, s); in disas_insn()
6258 if (s->flags & HF_MPX_EN_MASK) { in disas_insn()
6260 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
6265 || s->aflag == MO_16) { in disas_insn()
6268 gen_bndck(env, s, modrm, TCG_COND_LTU, cpu_bndl[reg]); in disas_insn()
6273 || s->aflag == MO_16) { in disas_insn()
6278 gen_bndck(env, s, modrm, TCG_COND_GTU, notu); in disas_insn()
6281 if (reg >= 4 || s->aflag == MO_16) { in disas_insn()
6285 int reg2 = (modrm & 7) | REX_B(s); in disas_insn()
6289 if (s->flags & HF_MPX_IU_MASK) { in disas_insn()
6294 gen_lea_modrm(env, s, modrm); in disas_insn()
6295 if (CODE64(s)) { in disas_insn()
6296 tcg_gen_qemu_ld_i64(cpu_bndl[reg], s->A0, in disas_insn()
6297 s->mem_index, MO_LEUQ); in disas_insn()
6298 tcg_gen_addi_tl(s->A0, s->A0, 8); in disas_insn()
6299 tcg_gen_qemu_ld_i64(cpu_bndu[reg], s->A0, in disas_insn()
6300 s->mem_index, MO_LEUQ); in disas_insn()
6302 tcg_gen_qemu_ld_i64(cpu_bndl[reg], s->A0, in disas_insn()
6303 s->mem_index, MO_LEUL); in disas_insn()
6304 tcg_gen_addi_tl(s->A0, s->A0, 4); in disas_insn()
6305 tcg_gen_qemu_ld_i64(cpu_bndu[reg], s->A0, in disas_insn()
6306 s->mem_index, MO_LEUL); in disas_insn()
6309 gen_set_hflag(s, HF_MPX_IU_MASK); in disas_insn()
6313 AddressParts a = gen_lea_modrm_0(env, s, modrm); in disas_insn()
6316 || s->aflag == MO_16 in disas_insn()
6321 tcg_gen_addi_tl(s->A0, cpu_regs[a.base], a.disp); in disas_insn()
6323 tcg_gen_movi_tl(s->A0, 0); in disas_insn()
6325 gen_lea_v_seg(s, s->aflag, s->A0, a.def_seg, s->override); in disas_insn()
6327 tcg_gen_mov_tl(s->T0, cpu_regs[a.index]); in disas_insn()
6329 tcg_gen_movi_tl(s->T0, 0); in disas_insn()
6331 if (CODE64(s)) { in disas_insn()
6332 gen_helper_bndldx64(cpu_bndl[reg], tcg_env, s->A0, s->T0); in disas_insn()
6336 gen_helper_bndldx32(cpu_bndu[reg], tcg_env, s->A0, s->T0); in disas_insn()
6340 gen_set_hflag(s, HF_MPX_IU_MASK); in disas_insn()
6343 gen_nop_modrm(env, s, modrm); in disas_insn()
6346 modrm = x86_ldub_code(env, s); in disas_insn()
6347 if (s->flags & HF_MPX_EN_MASK) { in disas_insn()
6349 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
6354 || s->aflag == MO_16) { in disas_insn()
6357 AddressParts a = gen_lea_modrm_0(env, s, modrm); in disas_insn()
6360 if (!CODE64(s)) { in disas_insn()
6370 tcg_gen_not_tl(s->A0, gen_lea_modrm_1(s, a, false)); in disas_insn()
6371 if (!CODE64(s)) { in disas_insn()
6372 tcg_gen_ext32u_tl(s->A0, s->A0); in disas_insn()
6374 tcg_gen_extu_tl_i64(cpu_bndu[reg], s->A0); in disas_insn()
6376 gen_set_hflag(s, HF_MPX_IU_MASK); in disas_insn()
6382 || s->aflag == MO_16) { in disas_insn()
6385 gen_bndck(env, s, modrm, TCG_COND_GTU, cpu_bndu[reg]); in disas_insn()
6388 if (reg >= 4 || s->aflag == MO_16) { in disas_insn()
6392 int reg2 = (modrm & 7) | REX_B(s); in disas_insn()
6396 if (s->flags & HF_MPX_IU_MASK) { in disas_insn()
6401 gen_lea_modrm(env, s, modrm); in disas_insn()
6402 if (CODE64(s)) { in disas_insn()
6403 tcg_gen_qemu_st_i64(cpu_bndl[reg], s->A0, in disas_insn()
6404 s->mem_index, MO_LEUQ); in disas_insn()
6405 tcg_gen_addi_tl(s->A0, s->A0, 8); in disas_insn()
6406 tcg_gen_qemu_st_i64(cpu_bndu[reg], s->A0, in disas_insn()
6407 s->mem_index, MO_LEUQ); in disas_insn()
6409 tcg_gen_qemu_st_i64(cpu_bndl[reg], s->A0, in disas_insn()
6410 s->mem_index, MO_LEUL); in disas_insn()
6411 tcg_gen_addi_tl(s->A0, s->A0, 4); in disas_insn()
6412 tcg_gen_qemu_st_i64(cpu_bndu[reg], s->A0, in disas_insn()
6413 s->mem_index, MO_LEUL); in disas_insn()
6418 AddressParts a = gen_lea_modrm_0(env, s, modrm); in disas_insn()
6421 || s->aflag == MO_16 in disas_insn()
6426 tcg_gen_addi_tl(s->A0, cpu_regs[a.base], a.disp); in disas_insn()
6428 tcg_gen_movi_tl(s->A0, 0); in disas_insn()
6430 gen_lea_v_seg(s, s->aflag, s->A0, a.def_seg, s->override); in disas_insn()
6432 tcg_gen_mov_tl(s->T0, cpu_regs[a.index]); in disas_insn()
6434 tcg_gen_movi_tl(s->T0, 0); in disas_insn()
6436 if (CODE64(s)) { in disas_insn()
6437 gen_helper_bndstx64(tcg_env, s->A0, s->T0, in disas_insn()
6440 gen_helper_bndstx32(tcg_env, s->A0, s->T0, in disas_insn()
6445 gen_nop_modrm(env, s, modrm); in disas_insn()
6448 modrm = x86_ldub_code(env, s); in disas_insn()
6449 gen_nop_modrm(env, s, modrm); in disas_insn()
6454 if (!check_cpl0(s)) { in disas_insn()
6457 modrm = x86_ldub_code(env, s); in disas_insn()
6464 rm = (modrm & 7) | REX_B(s); in disas_insn()
6465 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
6469 (s->cpuid_ext3_features & CPUID_EXT3_CR8LEG)) { in disas_insn()
6481 ot = (CODE64(s) ? MO_64 : MO_32); in disas_insn()
6483 translator_io_start(&s->base); in disas_insn()
6485 gen_svm_check_intercept(s, SVM_EXIT_WRITE_CR0 + reg); in disas_insn()
6486 gen_op_mov_v_reg(s, ot, s->T0, rm); in disas_insn()
6487 gen_helper_write_crN(tcg_env, tcg_constant_i32(reg), s->T0); in disas_insn()
6488 s->base.is_jmp = DISAS_EOB_NEXT; in disas_insn()
6490 gen_svm_check_intercept(s, SVM_EXIT_READ_CR0 + reg); in disas_insn()
6491 gen_helper_read_crN(s->T0, tcg_env, tcg_constant_i32(reg)); in disas_insn()
6492 gen_op_mov_reg_v(s, ot, rm, s->T0); in disas_insn()
6498 if (check_cpl0(s)) { in disas_insn()
6499 modrm = x86_ldub_code(env, s); in disas_insn()
6505 rm = (modrm & 7) | REX_B(s); in disas_insn()
6506 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
6507 if (CODE64(s)) in disas_insn()
6515 gen_svm_check_intercept(s, SVM_EXIT_WRITE_DR0 + reg); in disas_insn()
6516 gen_op_mov_v_reg(s, ot, s->T0, rm); in disas_insn()
6517 tcg_gen_movi_i32(s->tmp2_i32, reg); in disas_insn()
6518 gen_helper_set_dr(tcg_env, s->tmp2_i32, s->T0); in disas_insn()
6519 s->base.is_jmp = DISAS_EOB_NEXT; in disas_insn()
6521 gen_svm_check_intercept(s, SVM_EXIT_READ_DR0 + reg); in disas_insn()
6522 tcg_gen_movi_i32(s->tmp2_i32, reg); in disas_insn()
6523 gen_helper_get_dr(s->T0, tcg_env, s->tmp2_i32); in disas_insn()
6524 gen_op_mov_reg_v(s, ot, rm, s->T0); in disas_insn()
6529 if (check_cpl0(s)) { in disas_insn()
6530 gen_svm_check_intercept(s, SVM_EXIT_WRITE_CR0); in disas_insn()
6533 s->base.is_jmp = DISAS_EOB_NEXT; in disas_insn()
6538 if (!(s->cpuid_features & CPUID_SSE2)) in disas_insn()
6541 modrm = x86_ldub_code(env, s); in disas_insn()
6545 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
6547 gen_ldst_modrm(env, s, modrm, ot, reg, 1); in disas_insn()
6550 modrm = x86_ldub_code(env, s); in disas_insn()
6553 if (!(s->cpuid_features & CPUID_FXSR) in disas_insn()
6557 if ((s->flags & HF_EM_MASK) || (s->flags & HF_TS_MASK)) { in disas_insn()
6558 gen_exception(s, EXCP07_PREX); in disas_insn()
6561 gen_lea_modrm(env, s, modrm); in disas_insn()
6562 gen_helper_fxsave(tcg_env, s->A0); in disas_insn()
6566 if (!(s->cpuid_features & CPUID_FXSR) in disas_insn()
6570 if ((s->flags & HF_EM_MASK) || (s->flags & HF_TS_MASK)) { in disas_insn()
6571 gen_exception(s, EXCP07_PREX); in disas_insn()
6574 gen_lea_modrm(env, s, modrm); in disas_insn()
6575 gen_helper_fxrstor(tcg_env, s->A0); in disas_insn()
6579 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK)) { in disas_insn()
6582 if (s->flags & HF_TS_MASK) { in disas_insn()
6583 gen_exception(s, EXCP07_PREX); in disas_insn()
6586 gen_lea_modrm(env, s, modrm); in disas_insn()
6587 tcg_gen_qemu_ld_i32(s->tmp2_i32, s->A0, s->mem_index, MO_LEUL); in disas_insn()
6588 gen_helper_ldmxcsr(tcg_env, s->tmp2_i32); in disas_insn()
6592 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK)) { in disas_insn()
6595 if (s->flags & HF_TS_MASK) { in disas_insn()
6596 gen_exception(s, EXCP07_PREX); in disas_insn()
6600 gen_lea_modrm(env, s, modrm); in disas_insn()
6601 tcg_gen_ld32u_tl(s->T0, tcg_env, offsetof(CPUX86State, mxcsr)); in disas_insn()
6602 gen_op_st_v(s, MO_32, s->T0, s->A0); in disas_insn()
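The LDMXCSR/STMXCSR arms above gate the access in two stages: an illegal-op path when CR0.EM is set or CR4.OSFXSR is clear (the HF_EM/HF_OSFXSR hflag bits in the listing), and EXCP07_PREX, i.e. #NM, when CR0.TS is set so lazy FPU/SSE state switching can run first. A boolean sketch of just that gate, with hypothetical FAULT_* codes standing in for the goto-illegal-op and gen_exception paths:

    /* hypothetical model of the LDMXCSR/STMXCSR gate shown above
     * (cr0_em = CR0.EM, cr4_osfxsr = CR4.OSFXSR, cr0_ts = CR0.TS) */
    enum { FAULT_NONE, FAULT_UD, FAULT_NM };

    static int mxcsr_access_fault(int cr0_em, int cr4_osfxsr, int cr0_ts)
    {
        if (cr0_em || !cr4_osfxsr) {
            return FAULT_UD;        /* decoded as an illegal opcode            */
        }
        if (cr0_ts) {
            return FAULT_NM;        /* EXCP07_PREX: device-not-available fault */
        }
        return FAULT_NONE;
    }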
6606 if ((s->cpuid_ext_features & CPUID_EXT_XSAVE) == 0 in disas_insn()
6611 gen_lea_modrm(env, s, modrm); in disas_insn()
6612 tcg_gen_concat_tl_i64(s->tmp1_i64, cpu_regs[R_EAX], in disas_insn()
6614 gen_helper_xsave(tcg_env, s->A0, s->tmp1_i64); in disas_insn()
6618 if ((s->cpuid_ext_features & CPUID_EXT_XSAVE) == 0 in disas_insn()
6623 gen_lea_modrm(env, s, modrm); in disas_insn()
6624 tcg_gen_concat_tl_i64(s->tmp1_i64, cpu_regs[R_EAX], in disas_insn()
6626 gen_helper_xrstor(tcg_env, s->A0, s->tmp1_i64); in disas_insn()
6629 s->base.is_jmp = DISAS_EOB_NEXT; in disas_insn()
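XSAVE, XRSTOR and (further down) XSAVEOPT all build their instruction mask the same way: EDX:EAX are concatenated into a single 64-bit value via tcg_gen_concat_tl_i64 and handed to the helper. In plain C the mask is simply:

    #include <stdint.h>

    /* the 64-bit requested-feature bitmap the xsave/xrstor helpers receive */
    static uint64_t xsave_mask(uint32_t eax, uint32_t edx)
    {
        return ((uint64_t)edx << 32) | eax;   /* EDX:EAX, low half from EAX */
    }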
6638 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_CLWB)) { in disas_insn()
6641 gen_nop_modrm(env, s, modrm); in disas_insn()
6644 if ((s->cpuid_ext_features & CPUID_EXT_XSAVE) == 0 in disas_insn()
6645 || (s->cpuid_xsave_features & CPUID_XSAVE_XSAVEOPT) == 0 in disas_insn()
6649 gen_lea_modrm(env, s, modrm); in disas_insn()
6650 tcg_gen_concat_tl_i64(s->tmp1_i64, cpu_regs[R_EAX], in disas_insn()
6652 gen_helper_xsaveopt(tcg_env, s->A0, s->tmp1_i64); in disas_insn()
6662 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_CLFLUSHOPT)) { in disas_insn()
6667 if ((s->prefix & (PREFIX_REPZ | PREFIX_REPNZ)) in disas_insn()
6668 || !(s->cpuid_features & CPUID_CLFLUSH)) { in disas_insn()
6672 gen_nop_modrm(env, s, modrm); in disas_insn()
6679 if (CODE64(s) in disas_insn()
6682 && (s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_FSGSBASE)) { in disas_insn()
6686 tcg_gen_movi_i32(s->tmp2_i32, CR4_FSGSBASE_MASK); in disas_insn()
6687 gen_helper_cr4_testbit(tcg_env, s->tmp2_i32); in disas_insn()
6690 treg = cpu_regs[(modrm & 7) | REX_B(s)]; in disas_insn()
6700 if (s->dflag == MO_32) { in disas_insn()
6712 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_PCOMMIT) in disas_insn()
6720 if (!(s->cpuid_features & CPUID_SSE) in disas_insn()
6727 if (!(s->cpuid_features & CPUID_SSE) in disas_insn()
6734 if (!(s->cpuid_features & CPUID_SSE2) in disas_insn()
6747 modrm = x86_ldub_code(env, s); in disas_insn()
6751 gen_nop_modrm(env, s, modrm); in disas_insn()
6754 gen_svm_check_intercept(s, SVM_EXIT_RSM); in disas_insn()
6755 if (!(s->flags & HF_SMM_MASK)) in disas_insn()
6761 gen_update_cc_op(s); in disas_insn()
6762 gen_update_eip_next(s); in disas_insn()
6765 s->base.is_jmp = DISAS_EOB_ONLY; in disas_insn()
6771 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT)) in disas_insn()
6774 modrm = x86_ldub_code(env, s); in disas_insn()
6775 reg = ((modrm >> 3) & 7) | REX_R(s); in disas_insn()
6777 if (s->prefix & PREFIX_DATA) { in disas_insn()
6783 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0); in disas_insn()
6784 gen_extu(ot, s->T0); in disas_insn()
6785 tcg_gen_mov_tl(cpu_cc_src, s->T0); in disas_insn()
6786 tcg_gen_ctpop_tl(s->T0, s->T0); in disas_insn()
6787 gen_op_mov_reg_v(s, ot, reg, s->T0); in disas_insn()
6789 set_cc_op(s, CC_OP_POPCNT); in disas_insn()
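The POPCNT arm keeps the zero-extended source in cc_src, writes the population count to the destination register, and defers flag materialisation by tagging the state CC_OP_POPCNT. Architecturally POPCNT sets ZF exactly when the source is zero and clears CF, OF, SF, AF and PF; an eager reference model of the ZF side (a sketch only, not QEMU's lazy-flags scheme):

    #include <stdint.h>

    /* hypothetical eager model: returns the population count and sets *zf as
     * POPCNT would; the other arithmetic flags, cleared by the instruction,
     * are not modelled here */
    static uint64_t popcnt_model(uint64_t src, int *zf)
    {
        uint64_t count = 0;
        while (src) {
            src &= src - 1;   /* clear the lowest set bit (Kernighan) */
            count++;
        }
        *zf = (count == 0);   /* ZF <- (source == 0) */
        return count;
    }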
6799 disas_insn_new(s, cpu, b); in disas_insn()
6806 gen_illegal_opcode(s); in disas_insn()
6809 gen_unknown_opcode(env, s); in disas_insn()