Lines Matching +full:ext +full:- +full:gen

2  * RISC-V emulation for qemu: main translation routines.
4 * Copyright (c) 2016-2017 Sagar Karandikar, sagark@eecs.berkeley.edu
22 #include "tcg/tcg-op.h"
23 #include "exec/exec-all.h"
24 #include "exec/helper-proto.h"
25 #include "exec/helper-gen.h"
34 #include "exec/helper-info.c.inc"
37 #include "tcg/tcg-cpu.h"
50 * it may require the inputs to be sign- or zero-extended; which will
75 * which we have already installed into env->fp_status. Or -1 for
94  *     -       100      -
95  *    1/8      101     -3
96  *    1/4      110     -2
97  *    1/2      111     -1
122 /* zicfiss extension, if shadow stack was enabled during TB gen */
126 static inline bool has_ext(DisasContext *ctx, uint32_t ext) in has_ext() argument
128 return ctx->misa_ext & ext; in has_ext()
136 #define get_xl(ctx) ((ctx)->xl)
144 #define get_address_xl(ctx) ((ctx)->address_xl)
159 #define get_ol(ctx) ((ctx)->ol)
171 #define get_xl_max(ctx) ((ctx)->misa_mxl_max)
175 * RISC-V requires NaN-boxing of narrower width floating point values.
176 * This applies when a 32-bit value is assigned to a 64-bit FP register.
191 * A narrow n-bit operation, where n < FLEN, checks that input operands
192 * are correctly NaN-boxed, i.e., all upper FLEN - n bits are 1.
193 * If so, the least-significant bits of the input are used, otherwise the
194 * input value is treated as an n-bit canonical NaN (v2.2 section 9.2).
196 * Here, the result is always NaN-boxed, even the canonical NaN.
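
Lines 175-196 spell out the NaN-boxing rule for narrow FP values: a 32-bit single kept in a 64-bit register must have its upper 32 bits all ones, and an operand that fails the check is replaced by the 32-bit canonical NaN. The matched lines do not include the emitters themselves, but the rule translates into two short TCG sequences along these lines (the names follow the file's gen_* convention and are meant as an illustrative sketch, not the verbatim source):

    /* Box a 32-bit single into a 64-bit FP register: force the upper 32 bits to 1. */
    static void gen_nanbox_s(TCGv_i64 out, TCGv_i64 in)
    {
        tcg_gen_ori_i64(out, in, MAKE_64BIT_MASK(32, 32));
    }

    /*
     * Check the box: if the upper 32 bits are not all ones, substitute the
     * nan-boxed 32-bit canonical NaN (0x7fc00000), per v2.2 section 9.2.
     */
    static void gen_check_nanbox_s(TCGv_i64 out, TCGv_i64 in)
    {
        TCGv_i64 t_max = tcg_constant_i64(0xffffffff00000000ull);
        TCGv_i64 t_nan = tcg_constant_i64(0xffffffff7fc00000ull);

        /* in >= 0xffffffff00000000 (unsigned) iff the upper half is all ones. */
        tcg_gen_movcond_i64(TCG_COND_GEU, out, in, t_max, in, t_nan);
    }
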
216 assert(!ctx->insn_start_updated); in decode_save_opc()
217 ctx->insn_start_updated = true; in decode_save_opc()
218 tcg_set_insn_start_param(ctx->base.insn_start, 1, ctx->opcode); in decode_save_opc()
219 tcg_set_insn_start_param(ctx->base.insn_start, 2, excp_uw2); in decode_save_opc()
225 target_ulong dest = ctx->base.pc_next + diff; in gen_pc_plus_diff()
227 assert(ctx->pc_save != -1); in gen_pc_plus_diff()
228 if (tb_cflags(ctx->base.tb) & CF_PCREL) { in gen_pc_plus_diff()
229 tcg_gen_addi_tl(target, cpu_pc, dest - ctx->pc_save); in gen_pc_plus_diff()
244 ctx->pc_save = ctx->base.pc_next + diff; in gen_update_pc()
251 ctx->base.is_jmp = DISAS_NORETURN; in generate_exception()
256 tcg_gen_st_i32(tcg_constant_i32(ctx->opcode), tcg_env, in gen_exception_illegal()
258 if (ctx->virt_inst_excp) { in gen_exception_illegal()
274 if (ctx->itrigger) { in lookup_and_goto_ptr()
284 if (ctx->itrigger) { in exit_tb()
293 target_ulong dest = ctx->base.pc_next + diff; in gen_goto_tb()
299 if (translator_use_goto_tb(&ctx->base, dest) && !ctx->itrigger) { in gen_goto_tb()
301 * For pcrel, the pc must always be up-to-date on entry to in gen_goto_tb()
308 if (tb_cflags(ctx->base.tb) & CF_PCREL) { in gen_goto_tb()
315 tcg_gen_exit_tb(ctx->base.tb, n); in gen_goto_tb()
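
The gen_goto_tb fragments above (lines 293-315) carry the interesting ordering constraint: with CF_PCREL the program counter must be written before tcg_gen_goto_tb so the linked TB can rely on it, whereas for absolute PCs it is cheaper to update after. A sketch of how those pieces plausibly fit together, reconstructed around the matched lines rather than quoted:

    static void gen_goto_tb(DisasContext *ctx, int n, target_long diff)
    {
        target_ulong dest = ctx->base.pc_next + diff;

        /* itrigger forces single-instruction TBs, so direct chaining buys nothing. */
        if (translator_use_goto_tb(&ctx->base, dest) && !ctx->itrigger) {
            /*
             * For pcrel, the pc must always be up-to-date on entry to the
             * linked TB; for absolute addresses, update it after goto_tb.
             */
            if (tb_cflags(ctx->base.tb) & CF_PCREL) {
                gen_update_pc(ctx, diff);
                tcg_gen_goto_tb(n);
            } else {
                tcg_gen_goto_tb(n);
                gen_update_pc(ctx, diff);
            }
            tcg_gen_exit_tb(ctx->base.tb, n);
        } else {
            gen_update_pc(ctx, diff);
            lookup_and_goto_ptr(ctx);
        }
    }
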
325 * The $zero register does not have cpu_gpr[0] allocated -- we supply the
330 static TCGv get_gpr(DisasContext *ctx, int reg_num, DisasExtend ext) in get_gpr() argument
335 return ctx->zero; in get_gpr()
340 switch (ext) { in get_gpr()
368 return ctx->zero; in get_gprh()
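
The matched lines from 325 onward cover the GPR accessors: x0 has no allocated TCG global, so reads of register 0 return the constant-zero value cached in ctx->zero, and the DisasExtend argument decides whether a 32-bit quantity is sign- or zero-extended before use. A condensed sketch of that pattern, assuming the cpu_gpr[] globals and the EXT_* enumerators used elsewhere in this file:

    static TCGv get_gpr(DisasContext *ctx, int reg_num, DisasExtend ext)
    {
        TCGv t;

        if (reg_num == 0) {
            return ctx->zero;              /* x0 always reads as zero */
        }

        if (get_ol(ctx) == MXL_RV32) {
            switch (ext) {
            case EXT_NONE:
                break;
            case EXT_SIGN:
                t = tcg_temp_new();
                tcg_gen_ext32s_tl(t, cpu_gpr[reg_num]);
                return t;
            case EXT_ZERO:
                t = tcg_temp_new();
                tcg_gen_ext32u_tl(t, cpu_gpr[reg_num]);
                return t;
            default:
                g_assert_not_reached();
            }
        }
        return cpu_gpr[reg_num];
    }
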
426 tcg_gen_movi_tl(cpu_gprh[reg_num], -(imm < 0)); in gen_set_gpri()
442 if (!ctx->cfg_ptr->ext_zfinx) { in get_fpr_hs()
469 if (!ctx->cfg_ptr->ext_zfinx) { in get_fpr_d()
494 if (!ctx->cfg_ptr->ext_zfinx) { in dest_fpr()
514 /* assume it is NaN-boxed (for normal) or sign-extended (for zfinx) */
517 if (!ctx->cfg_ptr->ext_zfinx) { in gen_set_fpr_hs()
541 if (!ctx->cfg_ptr->ext_zfinx) { in gen_set_fpr_d()
571 if (!has_ext(ctx, RVC) && !ctx->cfg_ptr->ext_zca) { in gen_jal()
580 gen_pc_plus_diff(succ_pc, ctx, ctx->cur_insn_len); in gen_jal()
584 ctx->base.is_jmp = DISAS_NORETURN; in gen_jal()
594 if (ctx->pm_mask_enabled) { in get_address()
599 if (ctx->pm_base_enabled) { in get_address()
613 if (ctx->pm_mask_enabled) { in get_address_indexed()
618 if (ctx->pm_base_enabled) { in get_address_indexed()
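
get_address and get_address_indexed (lines 594-618) apply RISC-V pointer masking to every computed data address when the mask/base bits are enabled for the current mode. A sketch of the shape of that computation; pm_mask and pm_base are assumed TCG globals holding the active mask and base, and the names are illustrative only:

    /* Sketch: effective address = rs1 + imm, then masked/rebased if enabled. */
    static TCGv get_address(DisasContext *ctx, int rs1, int imm)
    {
        TCGv addr = tcg_temp_new();
        TCGv src1 = get_gpr(ctx, rs1, EXT_NONE);

        tcg_gen_addi_tl(addr, src1, imm);
        if (ctx->pm_mask_enabled) {
            tcg_gen_andc_tl(addr, addr, pm_mask);    /* clear masked-out bits */
        } else if (get_address_xl(ctx) == MXL_RV32) {
            tcg_gen_ext32u_tl(addr, addr);           /* 32-bit address space */
        }
        if (ctx->pm_base_enabled) {
            tcg_gen_or_tl(addr, addr, pm_base);      /* re-insert the base bits */
        }
        return addr;
    }
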
637 if (ctx->mstatus_fs != EXT_STATUS_DIRTY) { in mark_fs_dirty()
639 ctx->mstatus_fs = EXT_STATUS_DIRTY; in mark_fs_dirty()
646 if (ctx->virt_enabled) { in mark_fs_dirty()
666 if (ctx->mstatus_vs != EXT_STATUS_DIRTY) { in mark_vs_dirty()
668 ctx->mstatus_vs = EXT_STATUS_DIRTY; in mark_vs_dirty()
675 if (ctx->virt_enabled) { in mark_vs_dirty()
689 ctx->vstart_eq_zero = true; in finalize_rvv_inst()
694 if (ctx->frm == rm) { in gen_set_rm()
697 ctx->frm = rm; in gen_set_rm()
701 ctx->frm_valid = true; in gen_set_rm()
704 /* The helper may raise ILLEGAL_INSN -- record binv for unwind. */ in gen_set_rm()
711 if (ctx->frm == rm && ctx->frm_valid) { in gen_set_rm_chkfrm()
714 ctx->frm = rm; in gen_set_rm_chkfrm()
715 ctx->frm_valid = true; in gen_set_rm_chkfrm()
717 /* The helper may raise ILLEGAL_INSN -- record binv for unwind. */ in gen_set_rm_chkfrm()
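
gen_set_rm and gen_set_rm_chkfrm (lines 694-717) cache the rounding mode in the DisasContext so the set_rounding_mode helper is only called when the mode actually changes within a TB; frm_valid additionally remembers that a dynamic frm has already been validated. A trimmed sketch of the caching half of that logic (the helper call and the opcode save are reconstructed, not quoted):

    static void gen_set_rm(DisasContext *ctx, int rm)
    {
        if (ctx->frm == rm) {
            return;                        /* already installed for this TB */
        }
        ctx->frm = rm;

        if (rm == RISCV_FRM_DYN) {
            /* The helper rejects an invalid dynamic frm, so it is valid afterwards. */
            ctx->frm_valid = true;
        }

        /* The helper may raise ILLEGAL_INSN; save the opcode for the unwinder. */
        decode_save_opc(ctx, 0);
        gen_helper_set_rounding_mode(tcg_env, tcg_constant_i32(rm));
    }
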
738 #define REQUIRE_EXT(ctx, ext) do { \ argument
739 if (!has_ext(ctx, ext)) { \
769 if (!ctx->cfg_ptr->ext_##A && \
770 !ctx->cfg_ptr->ext_##B) { \
798 * shifts, the shamt is sign-extended. in ex_rvc_shiftri()
807 /* Include the auto-generated decoder for 32 bit insn */
808 #include "decode-insn32.c.inc"
813 TCGv dest = dest_gpr(ctx, a->rd); in gen_logic_imm_fn()
814 TCGv src1 = get_gpr(ctx, a->rs1, EXT_NONE); in gen_logic_imm_fn()
816 func(dest, src1, a->imm); in gen_logic_imm_fn()
819 TCGv src1h = get_gprh(ctx, a->rs1); in gen_logic_imm_fn()
820 TCGv desth = dest_gprh(ctx, a->rd); in gen_logic_imm_fn()
822 func(desth, src1h, -(a->imm < 0)); in gen_logic_imm_fn()
823 gen_set_gpr128(ctx, a->rd, dest, desth); in gen_logic_imm_fn()
825 gen_set_gpr(ctx, a->rd, dest); in gen_logic_imm_fn()
834 TCGv dest = dest_gpr(ctx, a->rd); in gen_logic()
835 TCGv src1 = get_gpr(ctx, a->rs1, EXT_NONE); in gen_logic()
836 TCGv src2 = get_gpr(ctx, a->rs2, EXT_NONE); in gen_logic()
841 TCGv src1h = get_gprh(ctx, a->rs1); in gen_logic()
842 TCGv src2h = get_gprh(ctx, a->rs2); in gen_logic()
843 TCGv desth = dest_gprh(ctx, a->rd); in gen_logic()
846 gen_set_gpr128(ctx, a->rd, dest, desth); in gen_logic()
848 gen_set_gpr(ctx, a->rd, dest); in gen_logic()
854 static bool gen_arith_imm_fn(DisasContext *ctx, arg_i *a, DisasExtend ext, in gen_arith_imm_fn() argument
858 TCGv dest = dest_gpr(ctx, a->rd); in gen_arith_imm_fn()
859 TCGv src1 = get_gpr(ctx, a->rs1, ext); in gen_arith_imm_fn()
862 func(dest, src1, a->imm); in gen_arith_imm_fn()
863 gen_set_gpr(ctx, a->rd, dest); in gen_arith_imm_fn()
869 TCGv src1h = get_gprh(ctx, a->rs1); in gen_arith_imm_fn()
870 TCGv desth = dest_gprh(ctx, a->rd); in gen_arith_imm_fn()
872 f128(dest, desth, src1, src1h, a->imm); in gen_arith_imm_fn()
873 gen_set_gpr128(ctx, a->rd, dest, desth); in gen_arith_imm_fn()
878 static bool gen_arith_imm_tl(DisasContext *ctx, arg_i *a, DisasExtend ext, in gen_arith_imm_tl() argument
882 TCGv dest = dest_gpr(ctx, a->rd); in gen_arith_imm_tl()
883 TCGv src1 = get_gpr(ctx, a->rs1, ext); in gen_arith_imm_tl()
884 TCGv src2 = tcg_constant_tl(a->imm); in gen_arith_imm_tl()
888 gen_set_gpr(ctx, a->rd, dest); in gen_arith_imm_tl()
894 TCGv src1h = get_gprh(ctx, a->rs1); in gen_arith_imm_tl()
895 TCGv src2h = tcg_constant_tl(-(a->imm < 0)); in gen_arith_imm_tl()
896 TCGv desth = dest_gprh(ctx, a->rd); in gen_arith_imm_tl()
899 gen_set_gpr128(ctx, a->rd, dest, desth); in gen_arith_imm_tl()
904 static bool gen_arith(DisasContext *ctx, arg_r *a, DisasExtend ext, in gen_arith() argument
908 TCGv dest = dest_gpr(ctx, a->rd); in gen_arith()
909 TCGv src1 = get_gpr(ctx, a->rs1, ext); in gen_arith()
910 TCGv src2 = get_gpr(ctx, a->rs2, ext); in gen_arith()
914 gen_set_gpr(ctx, a->rd, dest); in gen_arith()
920 TCGv src1h = get_gprh(ctx, a->rs1); in gen_arith()
921 TCGv src2h = get_gprh(ctx, a->rs2); in gen_arith()
922 TCGv desth = dest_gprh(ctx, a->rd); in gen_arith()
925 gen_set_gpr128(ctx, a->rd, dest, desth); in gen_arith()
930 static bool gen_arith_per_ol(DisasContext *ctx, arg_r *a, DisasExtend ext, in gen_arith_per_ol() argument
944 return gen_arith(ctx, a, ext, f_tl, f_128); in gen_arith_per_ol()
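
gen_arith_per_ol (lines 930-944), like the shift and unary *_per_ol helpers further down, selects the TCG emitter according to the current operand length before delegating to the generic wrapper. A sketch of that dispatch; the f_32/f_128 parameter names are assumed from the pattern of the surrounding helpers:

    static bool gen_arith_per_ol(DisasContext *ctx, arg_r *a, DisasExtend ext,
                                 void (*f_tl)(TCGv, TCGv, TCGv),
                                 void (*f_32)(TCGv, TCGv, TCGv),
                                 void (*f_128)(TCGv, TCGv, TCGv, TCGv, TCGv, TCGv))
    {
        int olen = get_olen(ctx);

        if (olen != TARGET_LONG_BITS) {
            if (olen == 32) {
                f_tl = f_32;               /* narrow operand length: 32-bit emitter */
            } else if (olen != 128) {
                g_assert_not_reached();
            }
        }
        return gen_arith(ctx, a, ext, f_tl, f_128);
    }
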
947 static bool gen_shift_imm_fn(DisasContext *ctx, arg_shift *a, DisasExtend ext, in gen_shift_imm_fn() argument
954 if (a->shamt >= max_len) { in gen_shift_imm_fn()
958 dest = dest_gpr(ctx, a->rd); in gen_shift_imm_fn()
959 src1 = get_gpr(ctx, a->rs1, ext); in gen_shift_imm_fn()
962 func(dest, src1, a->shamt); in gen_shift_imm_fn()
963 gen_set_gpr(ctx, a->rd, dest); in gen_shift_imm_fn()
965 TCGv src1h = get_gprh(ctx, a->rs1); in gen_shift_imm_fn()
966 TCGv desth = dest_gprh(ctx, a->rd); in gen_shift_imm_fn()
971 f128(dest, desth, src1, src1h, a->shamt); in gen_shift_imm_fn()
972 gen_set_gpr128(ctx, a->rd, dest, desth); in gen_shift_imm_fn()
978 DisasExtend ext, in gen_shift_imm_fn_per_ol() argument
992 return gen_shift_imm_fn(ctx, a, ext, f_tl, f_128); in gen_shift_imm_fn_per_ol()
995 static bool gen_shift_imm_tl(DisasContext *ctx, arg_shift *a, DisasExtend ext, in gen_shift_imm_tl() argument
1001 if (a->shamt >= max_len) { in gen_shift_imm_tl()
1005 dest = dest_gpr(ctx, a->rd); in gen_shift_imm_tl()
1006 src1 = get_gpr(ctx, a->rs1, ext); in gen_shift_imm_tl()
1007 src2 = tcg_constant_tl(a->shamt); in gen_shift_imm_tl()
1011 gen_set_gpr(ctx, a->rd, dest); in gen_shift_imm_tl()
1015 static bool gen_shift(DisasContext *ctx, arg_r *a, DisasExtend ext, in gen_shift() argument
1019 TCGv src2 = get_gpr(ctx, a->rs2, EXT_NONE); in gen_shift()
1023 tcg_gen_andi_tl(ext2, src2, max_len - 1); in gen_shift()
1025 TCGv dest = dest_gpr(ctx, a->rd); in gen_shift()
1026 TCGv src1 = get_gpr(ctx, a->rs1, ext); in gen_shift()
1030 gen_set_gpr(ctx, a->rd, dest); in gen_shift()
1032 TCGv src1h = get_gprh(ctx, a->rs1); in gen_shift()
1033 TCGv desth = dest_gprh(ctx, a->rd); in gen_shift()
1039 gen_set_gpr128(ctx, a->rd, dest, desth); in gen_shift()
1044 static bool gen_shift_per_ol(DisasContext *ctx, arg_r *a, DisasExtend ext, in gen_shift_per_ol() argument
1057 return gen_shift(ctx, a, ext, f_tl, f_128); in gen_shift_per_ol()
1060 static bool gen_unary(DisasContext *ctx, arg_r2 *a, DisasExtend ext, in gen_unary() argument
1063 TCGv dest = dest_gpr(ctx, a->rd); in gen_unary()
1064 TCGv src1 = get_gpr(ctx, a->rs1, ext); in gen_unary()
1068 gen_set_gpr(ctx, a->rd, dest); in gen_unary()
1072 static bool gen_unary_per_ol(DisasContext *ctx, arg_r2 *a, DisasExtend ext, in gen_unary_per_ol() argument
1085 return gen_unary(ctx, a, ext, f_tl); in gen_unary_per_ol()
1092 TCGv dest = dest_gpr(ctx, a->rd); in gen_amo()
1093 TCGv src1, src2 = get_gpr(ctx, a->rs2, EXT_NONE); in gen_amo()
1096 if (ctx->cfg_ptr->ext_zama16b && size >= MO_32) { in gen_amo()
1103 src1 = get_address(ctx, a->rs1, 0); in gen_amo()
1104 func(dest, src1, src2, ctx->mem_idx, mop); in gen_amo()
1106 gen_set_gpr(ctx, a->rd, dest); in gen_amo()
1112 TCGv dest = get_gpr(ctx, a->rd, EXT_NONE); in gen_cmpxchg()
1113 TCGv src1 = get_address(ctx, a->rs1, 0); in gen_cmpxchg()
1114 TCGv src2 = get_gpr(ctx, a->rs2, EXT_NONE); in gen_cmpxchg()
1117 tcg_gen_atomic_cmpxchg_tl(dest, src1, dest, src2, ctx->mem_idx, mop); in gen_cmpxchg()
1119 gen_set_gpr(ctx, a->rd, dest); in gen_cmpxchg()
1126 CPUState *cpu = ctx->cs; in opcode_at()
1129 return translator_ldl(env, &ctx->base, pc); in opcode_at()
1132 #define SS_MMU_INDEX(ctx) (ctx->mem_idx | MMU_IDX_SS_WRITE)
1156 #include "decode-xthead.c.inc"
1160 /* Include the auto-generated decoder for 16 bit insn */
1161 #include "decode-insn16.c.inc"
1166 /* Include decoders for factored-out extensions */
1167 #include "decode-XVentanaCondOps.c.inc"
1187 ctx->virt_inst_excp = false; in decode_opc()
1188 ctx->cur_insn_len = insn_len(opcode); in decode_opc()
1190 if (ctx->cur_insn_len == 2) { in decode_opc()
1191 ctx->opcode = opcode; in decode_opc()
1194 * extension that do not include the floating-point loads and stores in decode_opc()
1196 if ((has_ext(ctx, RVC) || ctx->cfg_ptr->ext_zca) && in decode_opc()
1203 translator_lduw(env, &ctx->base, in decode_opc()
1204 ctx->base.pc_next + 2)); in decode_opc()
1205 ctx->opcode = opcode32; in decode_opc()
1207 for (guint i = 0; i < ctx->decoders->len; ++i) { in decode_opc()
1208 riscv_cpu_decode_fn func = g_ptr_array_index(ctx->decoders, i); in decode_opc()
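
decode_opc (lines 1187-1208) first handles 16-bit encodings via the RVC/Zca decoder, otherwise it reassembles the full 32-bit opcode and walks the per-CPU list of decoders until one claims the instruction. A sketch of how that loop presumably finishes, falling back to the illegal-instruction exception shown around lines 256-258 above:

    /* Try each registered decoder in turn; none matching means an illegal insn. */
    for (guint i = 0; i < ctx->decoders->len; ++i) {
        riscv_cpu_decode_fn func = g_ptr_array_index(ctx->decoders, i);

        if (func(ctx, opcode32)) {
            return;
        }
    }
    gen_exception_illegal(ctx);
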
1224 uint32_t tb_flags = ctx->base.tb->flags; in riscv_tr_init_disas_context()
1226 ctx->pc_save = ctx->base.pc_first; in riscv_tr_init_disas_context()
1227 ctx->priv = FIELD_EX32(tb_flags, TB_FLAGS, PRIV); in riscv_tr_init_disas_context()
1228 ctx->mem_idx = FIELD_EX32(tb_flags, TB_FLAGS, MEM_IDX); in riscv_tr_init_disas_context()
1229 ctx->mstatus_fs = FIELD_EX32(tb_flags, TB_FLAGS, FS); in riscv_tr_init_disas_context()
1230 ctx->mstatus_vs = FIELD_EX32(tb_flags, TB_FLAGS, VS); in riscv_tr_init_disas_context()
1231 ctx->priv_ver = env->priv_ver; in riscv_tr_init_disas_context()
1232 ctx->virt_enabled = FIELD_EX32(tb_flags, TB_FLAGS, VIRT_ENABLED); in riscv_tr_init_disas_context()
1233 ctx->misa_ext = env->misa_ext; in riscv_tr_init_disas_context()
1234 ctx->frm = -1; /* unknown rounding mode */ in riscv_tr_init_disas_context()
1235 ctx->cfg_ptr = &(cpu->cfg); in riscv_tr_init_disas_context()
1236 ctx->vill = FIELD_EX32(tb_flags, TB_FLAGS, VILL); in riscv_tr_init_disas_context()
1237 ctx->sew = FIELD_EX32(tb_flags, TB_FLAGS, SEW); in riscv_tr_init_disas_context()
1238 ctx->lmul = sextract32(FIELD_EX32(tb_flags, TB_FLAGS, LMUL), 0, 3); in riscv_tr_init_disas_context()
1239 ctx->vta = FIELD_EX32(tb_flags, TB_FLAGS, VTA) && cpu->cfg.rvv_ta_all_1s; in riscv_tr_init_disas_context()
1240 ctx->vma = FIELD_EX32(tb_flags, TB_FLAGS, VMA) && cpu->cfg.rvv_ma_all_1s; in riscv_tr_init_disas_context()
1241 ctx->cfg_vta_all_1s = cpu->cfg.rvv_ta_all_1s; in riscv_tr_init_disas_context()
1242 ctx->vstart_eq_zero = FIELD_EX32(tb_flags, TB_FLAGS, VSTART_EQ_ZERO); in riscv_tr_init_disas_context()
1243 ctx->vl_eq_vlmax = FIELD_EX32(tb_flags, TB_FLAGS, VL_EQ_VLMAX); in riscv_tr_init_disas_context()
1244 ctx->misa_mxl_max = mcc->misa_mxl_max; in riscv_tr_init_disas_context()
1245 ctx->xl = FIELD_EX32(tb_flags, TB_FLAGS, XL); in riscv_tr_init_disas_context()
1246 ctx->address_xl = FIELD_EX32(tb_flags, TB_FLAGS, AXL); in riscv_tr_init_disas_context()
1247 ctx->cs = cs; in riscv_tr_init_disas_context()
1248 ctx->pm_mask_enabled = FIELD_EX32(tb_flags, TB_FLAGS, PM_MASK_ENABLED); in riscv_tr_init_disas_context()
1249 ctx->pm_base_enabled = FIELD_EX32(tb_flags, TB_FLAGS, PM_BASE_ENABLED); in riscv_tr_init_disas_context()
1250 ctx->ztso = cpu->cfg.ext_ztso; in riscv_tr_init_disas_context()
1251 ctx->itrigger = FIELD_EX32(tb_flags, TB_FLAGS, ITRIGGER); in riscv_tr_init_disas_context()
1252 ctx->bcfi_enabled = FIELD_EX32(tb_flags, TB_FLAGS, BCFI_ENABLED); in riscv_tr_init_disas_context()
1253 ctx->fcfi_lp_expected = FIELD_EX32(tb_flags, TB_FLAGS, FCFI_LP_EXPECTED); in riscv_tr_init_disas_context()
1254 ctx->fcfi_enabled = FIELD_EX32(tb_flags, TB_FLAGS, FCFI_ENABLED); in riscv_tr_init_disas_context()
1255 ctx->zero = tcg_constant_tl(0); in riscv_tr_init_disas_context()
1256 ctx->virt_inst_excp = false; in riscv_tr_init_disas_context()
1257 ctx->decoders = cpu->decoders; in riscv_tr_init_disas_context()
1267 target_ulong pc_next = ctx->base.pc_next; in riscv_tr_insn_start()
1269 if (tb_cflags(dcbase->tb) & CF_PCREL) { in riscv_tr_insn_start()
1274 ctx->insn_start_updated = false; in riscv_tr_insn_start()
1281 uint16_t opcode16 = translator_lduw(env, &ctx->base, ctx->base.pc_next); in riscv_tr_translate_insn()
1283 ctx->ol = ctx->xl; in riscv_tr_translate_insn()
1285 ctx->base.pc_next += ctx->cur_insn_len; in riscv_tr_translate_insn()
1294 if (ctx->fcfi_lp_expected) { in riscv_tr_translate_insn()
1296 tcg_ctx->emit_before_op = QTAILQ_NEXT(ctx->base.insn_start, link); in riscv_tr_translate_insn()
1301 tcg_ctx->emit_before_op = NULL; in riscv_tr_translate_insn()
1302 ctx->base.is_jmp = DISAS_NORETURN; in riscv_tr_translate_insn()
1306 if (ctx->base.is_jmp == DISAS_NEXT) { in riscv_tr_translate_insn()
1307 if (ctx->itrigger || !is_same_page(&ctx->base, ctx->base.pc_next)) { in riscv_tr_translate_insn()
1308 ctx->base.is_jmp = DISAS_TOO_MANY; in riscv_tr_translate_insn()
1310 unsigned page_ofs = ctx->base.pc_next & ~TARGET_PAGE_MASK; in riscv_tr_translate_insn()
1312 if (page_ofs > TARGET_PAGE_SIZE - MAX_INSN_LEN) { in riscv_tr_translate_insn()
1314 translator_lduw(env, &ctx->base, ctx->base.pc_next); in riscv_tr_translate_insn()
1317 if (!is_same_page(&ctx->base, ctx->base.pc_next + len - 1)) { in riscv_tr_translate_insn()
1318 ctx->base.is_jmp = DISAS_TOO_MANY; in riscv_tr_translate_insn()
1329 switch (ctx->base.is_jmp) { in riscv_tr_tb_stop()