/*
 * SH4 translation
 *
 * Copyright (c) 2005 Samuel Tardieu
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#define DEBUG_DISAS

#include "qemu/osdep.h"
#include "cpu.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "tcg-op.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"

#include "trace-tcg.h"
#include "exec/log.h"


typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc;
    uint16_t opcode;
    uint32_t tbflags;    /* should stay unmodified during the TB translation */
    uint32_t envflags;   /* should stay in sync with env->flags using TCG ops */
    int bstate;
    int memidx;
    int gbank;
    uint32_t delayed_pc;
    int singlestep_enabled;
    uint32_t features;
    int has_movcal;
} DisasContext;

#if defined(CONFIG_USER_ONLY)
#define IS_USER(ctx) 1
#else
#define IS_USER(ctx) (!(ctx->tbflags & (1u << SR_MD)))
#endif

enum {
    BS_NONE     = 0, /* We go out of the TB without reaching a branch or an
                      * exception condition
                      */
    BS_STOP     = 1, /* We want to stop translation for any reason */
    BS_BRANCH   = 2, /* We reached a branch condition */
    BS_EXCP     = 3, /* We reached an exception condition */
};

/* global register indexes */
static TCGv_env cpu_env;
static TCGv cpu_gregs[32];
static TCGv cpu_sr, cpu_sr_m, cpu_sr_q, cpu_sr_t;
static TCGv cpu_pc, cpu_ssr, cpu_spc, cpu_gbr;
static TCGv cpu_vbr, cpu_sgr, cpu_dbr, cpu_mach, cpu_macl;
static TCGv cpu_pr, cpu_fpscr, cpu_fpul, cpu_ldst;
static TCGv cpu_fregs[32];

/* internal register indexes */
static TCGv cpu_flags, cpu_delayed_pc, cpu_delayed_cond;

#include "exec/gen-icount.h"

void sh4_translate_init(void)
{
    int i;
    static int done_init = 0;
    static const char * const gregnames[24] = {
        "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0",
        "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0",
        "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15",
        "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1",
        "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1"
    };
    static const char * const fregnames[32] = {
        "FPR0_BANK0", "FPR1_BANK0", "FPR2_BANK0", "FPR3_BANK0",
        "FPR4_BANK0", "FPR5_BANK0", "FPR6_BANK0", "FPR7_BANK0",
        "FPR8_BANK0", "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0",
        "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0",
        "FPR0_BANK1", "FPR1_BANK1", "FPR2_BANK1", "FPR3_BANK1",
        "FPR4_BANK1", "FPR5_BANK1", "FPR6_BANK1", "FPR7_BANK1",
        "FPR8_BANK1", "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1",
        "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1",
    };

    if (done_init) {
        return;
    }

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    tcg_ctx.tcg_env = cpu_env;

    for (i = 0; i < 24; i++) {
        cpu_gregs[i] = tcg_global_mem_new_i32(cpu_env,
                                              offsetof(CPUSH4State, gregs[i]),
                                              gregnames[i]);
    }
    memcpy(cpu_gregs + 24, cpu_gregs + 8, 8 * sizeof(TCGv));

    cpu_pc = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUSH4State, pc), "PC");
    cpu_sr = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUSH4State, sr), "SR");
    cpu_sr_m = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUSH4State, sr_m), "SR_M");
    cpu_sr_q = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUSH4State, sr_q), "SR_Q");
    cpu_sr_t = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUSH4State, sr_t), "SR_T");
    cpu_ssr = tcg_global_mem_new_i32(cpu_env,
                                     offsetof(CPUSH4State, ssr), "SSR");
    cpu_spc = tcg_global_mem_new_i32(cpu_env,
                                     offsetof(CPUSH4State, spc), "SPC");
    cpu_gbr = tcg_global_mem_new_i32(cpu_env,
                                     offsetof(CPUSH4State, gbr), "GBR");
    cpu_vbr = tcg_global_mem_new_i32(cpu_env,
                                     offsetof(CPUSH4State, vbr), "VBR");
    cpu_sgr = tcg_global_mem_new_i32(cpu_env,
                                     offsetof(CPUSH4State, sgr), "SGR");
    cpu_dbr = tcg_global_mem_new_i32(cpu_env,
                                     offsetof(CPUSH4State, dbr), "DBR");
    cpu_mach = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUSH4State, mach), "MACH");
    cpu_macl = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUSH4State, macl), "MACL");
    cpu_pr = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUSH4State, pr), "PR");
    cpu_fpscr = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUSH4State, fpscr), "FPSCR");
    cpu_fpul = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUSH4State, fpul), "FPUL");

    cpu_flags = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUSH4State, flags), "_flags_");
    cpu_delayed_pc = tcg_global_mem_new_i32(cpu_env,
                                            offsetof(CPUSH4State, delayed_pc),
                                            "_delayed_pc_");
    cpu_delayed_cond = tcg_global_mem_new_i32(cpu_env,
                                              offsetof(CPUSH4State,
                                                       delayed_cond),
                                              "_delayed_cond_");
    cpu_ldst = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUSH4State, ldst), "_ldst_");

    for (i = 0; i < 32; i++)
        cpu_fregs[i] = tcg_global_mem_new_i32(cpu_env,
                                              offsetof(CPUSH4State, fregs[i]),
                                              fregnames[i]);

    done_init = 1;
}

void superh_cpu_dump_state(CPUState *cs, FILE *f,
                           fprintf_function cpu_fprintf, int flags)
{
    SuperHCPU *cpu = SUPERH_CPU(cs);
    CPUSH4State *env = &cpu->env;
    int i;
    cpu_fprintf(f, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n",
                env->pc, cpu_read_sr(env), env->pr, env->fpscr);
    cpu_fprintf(f, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n",
                env->spc, env->ssr, env->gbr, env->vbr);
    cpu_fprintf(f, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n",
                env->sgr, env->dbr, env->delayed_pc, env->fpul);
    for (i = 0; i < 24; i += 4) {
        cpu_fprintf(f, "r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n",
                    i, env->gregs[i], i + 1, env->gregs[i + 1],
                    i + 2, env->gregs[i + 2], i + 3, env->gregs[i + 3]);
    }
    if (env->flags & DELAY_SLOT) {
        cpu_fprintf(f, "in delay slot (delayed_pc=0x%08x)\n",
                    env->delayed_pc);
    } else if (env->flags & DELAY_SLOT_CONDITIONAL) {
        cpu_fprintf(f, "in conditional delay slot (delayed_pc=0x%08x)\n",
                    env->delayed_pc);
    } else if (env->flags & DELAY_SLOT_RTE) {
        cpu_fprintf(f, "in rte delay slot (delayed_pc=0x%08x)\n",
                    env->delayed_pc);
    }
}

static void gen_read_sr(TCGv dst)
{
    TCGv t0 = tcg_temp_new();
    /* Reassemble SR from the split Q/M/T parts on top of cpu_sr. */
    tcg_gen_shli_i32(t0, cpu_sr_q, SR_Q);
    tcg_gen_or_i32(dst, cpu_sr, t0);
    tcg_gen_shli_i32(t0, cpu_sr_m, SR_M);
    tcg_gen_or_i32(dst, dst, t0);
    tcg_gen_shli_i32(t0, cpu_sr_t, SR_T);
    tcg_gen_or_i32(dst, dst, t0);
    tcg_temp_free_i32(t0);
}

static void gen_write_sr(TCGv src)
{
    tcg_gen_andi_i32(cpu_sr, src,
                     ~((1u << SR_Q) | (1u << SR_M) | (1u << SR_T)));
    tcg_gen_extract_i32(cpu_sr_q, src, SR_Q, 1);
    tcg_gen_extract_i32(cpu_sr_m, src, SR_M, 1);
    tcg_gen_extract_i32(cpu_sr_t, src, SR_T, 1);
}

static inline void gen_save_cpu_state(DisasContext *ctx, bool save_pc)
{
    if (save_pc) {
        tcg_gen_movi_i32(cpu_pc, ctx->pc);
    }
    if (ctx->delayed_pc != (uint32_t) -1) {
        tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
    }
    if ((ctx->tbflags & TB_FLAG_ENVFLAGS_MASK) != ctx->envflags) {
        tcg_gen_movi_i32(cpu_flags, ctx->envflags);
    }
}

static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
{
    if (unlikely(ctx->singlestep_enabled)) {
        return false;
    }
    if (ctx->tbflags & GUSA_EXCLUSIVE) {
        return false;
    }
#ifndef CONFIG_USER_ONLY
    return (ctx->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
#else
    return true;
#endif
}

static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    if (use_goto_tb(ctx, dest)) {
        /* Use a direct jump if in same page and singlestep not enabled */
        tcg_gen_goto_tb(n);
        tcg_gen_movi_i32(cpu_pc, dest);
        tcg_gen_exit_tb((uintptr_t)ctx->tb + n);
    } else {
        tcg_gen_movi_i32(cpu_pc, dest);
        if (ctx->singlestep_enabled)
            gen_helper_debug(cpu_env);
        tcg_gen_exit_tb(0);
    }
}

static void gen_jump(DisasContext * ctx)
{
    if (ctx->delayed_pc == (uint32_t) - 1) {
        /* Target is not statically known; it comes necessarily from a
           delayed jump as immediate jumps are conditional jumps */
        tcg_gen_mov_i32(cpu_pc, cpu_delayed_pc);
        tcg_gen_discard_i32(cpu_delayed_pc);
        if (ctx->singlestep_enabled)
            gen_helper_debug(cpu_env);
        tcg_gen_exit_tb(0);
    } else {
        gen_goto_tb(ctx, 0, ctx->delayed_pc);
    }
}

/* Immediate conditional jump (bt or bf) */
static void gen_conditional_jump(DisasContext *ctx, target_ulong dest,
                                 bool jump_if_true)
{
    TCGLabel *l1 = gen_new_label();
    TCGCond cond_not_taken = jump_if_true ? TCG_COND_EQ : TCG_COND_NE;

    if (ctx->tbflags & GUSA_EXCLUSIVE) {
        /* When in an exclusive region, we must continue to the end.
           Therefore, exit the region on a taken branch, but otherwise
           fall through to the next instruction. */
        tcg_gen_brcondi_i32(cond_not_taken, cpu_sr_t, 0, l1);
        tcg_gen_movi_i32(cpu_flags, ctx->envflags & ~GUSA_MASK);
        /* Note that this won't actually use a goto_tb opcode because we
           disallow it in use_goto_tb, but it handles exit + singlestep. */
        gen_goto_tb(ctx, 0, dest);
        gen_set_label(l1);
        return;
    }

    gen_save_cpu_state(ctx, false);
    tcg_gen_brcondi_i32(cond_not_taken, cpu_sr_t, 0, l1);
    gen_goto_tb(ctx, 0, dest);
    gen_set_label(l1);
    gen_goto_tb(ctx, 1, ctx->pc + 2);
    ctx->bstate = BS_BRANCH;
}

/* Delayed conditional jump (bt or bf) */
static void gen_delayed_conditional_jump(DisasContext * ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGv ds = tcg_temp_new();

    tcg_gen_mov_i32(ds, cpu_delayed_cond);
    tcg_gen_discard_i32(cpu_delayed_cond);

    if (ctx->tbflags & GUSA_EXCLUSIVE) {
        /* When in an exclusive region, we must continue to the end.
           Therefore, exit the region on a taken branch, but otherwise
           fall through to the next instruction. */
        tcg_gen_brcondi_i32(TCG_COND_EQ, ds, 0, l1);

        /* Leave the gUSA region. */
        tcg_gen_movi_i32(cpu_flags, ctx->envflags & ~GUSA_MASK);
        gen_jump(ctx);

        gen_set_label(l1);
        return;
    }

    tcg_gen_brcondi_i32(TCG_COND_NE, ds, 0, l1);
    gen_goto_tb(ctx, 1, ctx->pc + 2);
    gen_set_label(l1);
    gen_jump(ctx);
}

static inline void gen_load_fpr64(TCGv_i64 t, int reg)
{
    tcg_gen_concat_i32_i64(t, cpu_fregs[reg + 1], cpu_fregs[reg]);
}

static inline void gen_store_fpr64 (TCGv_i64 t, int reg)
{
    tcg_gen_extr_i64_i32(cpu_fregs[reg + 1], cpu_fregs[reg], t);
}

#define B3_0 (ctx->opcode & 0xf)
#define B6_4 ((ctx->opcode >> 4) & 0x7)
#define B7_4 ((ctx->opcode >> 4) & 0xf)
#define B7_0 (ctx->opcode & 0xff)
#define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff))
#define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \
                (ctx->opcode & 0xfff))
#define B11_8 ((ctx->opcode >> 8) & 0xf)
#define B15_12 ((ctx->opcode >> 12) & 0xf)

#define REG(x)     cpu_gregs[(x) ^ ctx->gbank]
#define ALTREG(x)  cpu_gregs[(x) ^ ctx->gbank ^ 0x10]

#define FREG(x) (ctx->tbflags & FPSCR_FR ? (x) ^ 0x10 : (x))
#define XHACK(x) ((((x) & 1 ) << 4) | ((x) & 0xe))
#define XREG(x) (ctx->tbflags & FPSCR_FR ? XHACK(x) ^ 0x10 : XHACK(x))
#define DREG(x) FREG(x) /* Assumes lsb of (x) is always 0 */

#define CHECK_NOT_DELAY_SLOT \
    if (ctx->envflags & DELAY_SLOT_MASK) { \
        gen_save_cpu_state(ctx, true); \
        gen_helper_raise_slot_illegal_instruction(cpu_env); \
        ctx->bstate = BS_EXCP; \
        return; \
    }

#define CHECK_PRIVILEGED \
    if (IS_USER(ctx)) { \
        gen_save_cpu_state(ctx, true); \
        if (ctx->envflags & DELAY_SLOT_MASK) { \
            gen_helper_raise_slot_illegal_instruction(cpu_env); \
        } else { \
            gen_helper_raise_illegal_instruction(cpu_env); \
        } \
        ctx->bstate = BS_EXCP; \
        return; \
    }

#define CHECK_FPU_ENABLED \
    if (ctx->tbflags & (1u << SR_FD)) { \
        gen_save_cpu_state(ctx, true); \
        if (ctx->envflags & DELAY_SLOT_MASK) { \
            gen_helper_raise_slot_fpu_disable(cpu_env); \
        } else { \
            gen_helper_raise_fpu_disable(cpu_env); \
        } \
        ctx->bstate = BS_EXCP; \
        return; \
    }

static void _decode_opc(DisasContext * ctx)
{
    /* This code tries to make movcal emulation sufficiently
       accurate for Linux purposes.  This instruction writes
       memory, and prior to that, always allocates a cache line.
       It is used in two contexts:
       - in memcpy, where data is copied in blocks, the first write
       to a block uses movca.l for performance.
       - in arch/sh/mm/cache-sh4.c, movcal.l + ocbi combination is used
       to flush the cache. Here, the data written by movcal.l is never
       written to memory, and the data written is just bogus.

       To simulate this, we simulate movcal.l, we store the value to memory,
       but we also remember the previous content. If we see ocbi, we check
       if movcal.l for that address was done previously. If so, the write should
       not have hit the memory, so we restore the previous content.
       When we see an instruction that is neither movca.l
       nor ocbi, the previous content is discarded.

       To optimize, we only try to flush stores when we're at the start of
       TB, or if we already saw movca.l in this TB and did not flush stores
       yet. */
    if (ctx->has_movcal)
    {
        int opcode = ctx->opcode & 0xf0ff;
        if (opcode != 0x0093 /* ocbi */
            && opcode != 0x00c3 /* movca.l */)
        {
            gen_helper_discard_movcal_backup(cpu_env);
            ctx->has_movcal = 0;
        }
    }

#if 0
    fprintf(stderr, "Translating opcode 0x%04x\n", ctx->opcode);
#endif

    switch (ctx->opcode) {
    case 0x0019: /* div0u */
        tcg_gen_movi_i32(cpu_sr_m, 0);
        tcg_gen_movi_i32(cpu_sr_q, 0);
        tcg_gen_movi_i32(cpu_sr_t, 0);
        return;
    case 0x000b: /* rts */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_mov_i32(cpu_delayed_pc, cpu_pr);
        ctx->envflags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
        return;
    case 0x0028: /* clrmac */
        tcg_gen_movi_i32(cpu_mach, 0);
        tcg_gen_movi_i32(cpu_macl, 0);
        return;
    case 0x0048: /* clrs */
        tcg_gen_andi_i32(cpu_sr, cpu_sr, ~(1u << SR_S));
        return;
    case 0x0008: /* clrt */
        tcg_gen_movi_i32(cpu_sr_t, 0);
        return;
    case 0x0038: /* ldtlb */
        CHECK_PRIVILEGED
        gen_helper_ldtlb(cpu_env);
        return;
    case 0x002b: /* rte */
        CHECK_PRIVILEGED
        CHECK_NOT_DELAY_SLOT
        gen_write_sr(cpu_ssr);
        tcg_gen_mov_i32(cpu_delayed_pc, cpu_spc);
        ctx->envflags |= DELAY_SLOT_RTE;
        ctx->delayed_pc = (uint32_t) - 1;
        ctx->bstate = BS_STOP;
        return;
    case 0x0058: /* sets */
        tcg_gen_ori_i32(cpu_sr, cpu_sr, (1u << SR_S));
        return;
    case 0x0018: /* sett */
        tcg_gen_movi_i32(cpu_sr_t, 1);
        return;
    case 0xfbfd: /* frchg */
        tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_FR);
        ctx->bstate = BS_STOP;
        return;
    case 0xf3fd: /* fschg */
        tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_SZ);
        ctx->bstate = BS_STOP;
        return;
    case 0x0009: /* nop */
        return;
    case 0x001b: /* sleep */
        CHECK_PRIVILEGED
        tcg_gen_movi_i32(cpu_pc, ctx->pc + 2);
        gen_helper_sleep(cpu_env);
        return;
    }

    switch (ctx->opcode & 0xf000) {
    case 0x1000: /* mov.l Rm,@(disp,Rn) */
        {
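            /* The 4-bit displacement is zero-extended and scaled by the
               access size (4 bytes for mov.l), then added to the base
               register Rn. */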
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, REG(B11_8), B3_0 * 4);
            tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_TEUL);
            tcg_temp_free(addr);
        }
        return;
    case 0x5000: /* mov.l @(disp,Rm),Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 4);
            tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESL);
            tcg_temp_free(addr);
        }
        return;
    case 0xe000: /* mov #imm,Rn */
#ifdef CONFIG_USER_ONLY
        /* Detect the start of a gUSA region.  If so, update envflags
           and end the TB.  This will allow us to see the end of the
           region (stored in R0) in the next TB.  */
        if (B11_8 == 15 && B7_0s < 0 && parallel_cpus) {
            ctx->envflags = deposit32(ctx->envflags, GUSA_SHIFT, 8, B7_0s);
            ctx->bstate = BS_STOP;
        }
#endif
        tcg_gen_movi_i32(REG(B11_8), B7_0s);
        return;
    case 0x9000: /* mov.w @(disp,PC),Rn */
        {
            TCGv addr = tcg_const_i32(ctx->pc + 4 + B7_0 * 2);
            tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESW);
            tcg_temp_free(addr);
        }
        return;
    case 0xd000: /* mov.l @(disp,PC),Rn */
        {
            TCGv addr = tcg_const_i32((ctx->pc + 4 + B7_0 * 4) & ~3);
            tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESL);
            tcg_temp_free(addr);
        }
        return;
    case 0x7000: /* add #imm,Rn */
        tcg_gen_addi_i32(REG(B11_8), REG(B11_8), B7_0s);
        return;
    case 0xa000: /* bra disp */
        CHECK_NOT_DELAY_SLOT
        ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
        ctx->envflags |= DELAY_SLOT;
        return;
    case 0xb000: /* bsr disp */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
        ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
        ctx->envflags |= DELAY_SLOT;
        return;
    }

    switch (ctx->opcode & 0xf00f) {
    case 0x6003: /* mov Rm,Rn */
        tcg_gen_mov_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x2000: /* mov.b Rm,@Rn */
        tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, MO_UB);
        return;
    case 0x2001: /* mov.w Rm,@Rn */
        tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, MO_TEUW);
        return;
    case 0x2002: /* mov.l Rm,@Rn */
        tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, MO_TEUL);
        return;
    case 0x6000: /* mov.b @Rm,Rn */
        tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_SB);
        return;
    case 0x6001: /* mov.w @Rm,Rn */
        tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_TESW);
        return;
    case 0x6002: /* mov.l @Rm,Rn */
        tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_TESL);
        return;
    case 0x2004: /* mov.b Rm,@-Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 1);
            /* might cause re-execution */
            tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_UB);
            tcg_gen_mov_i32(REG(B11_8), addr); /* modify register status */
            tcg_temp_free(addr);
        }
        return;
    case 0x2005: /* mov.w Rm,@-Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 2);
            tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_TEUW);
            tcg_gen_mov_i32(REG(B11_8), addr);
            tcg_temp_free(addr);
        }
        return;
    case 0x2006: /* mov.l Rm,@-Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 4);
            tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_TEUL);
            tcg_gen_mov_i32(REG(B11_8), addr);
        }
        return;
    case 0x6004: /* mov.b @Rm+,Rn */
        tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_SB);
        if ( B11_8 != B7_4 )
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 1);
        return;
    case 0x6005: /* mov.w @Rm+,Rn */
        tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_TESW);
        if ( B11_8 != B7_4 )
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
        return;
    case 0x6006: /* mov.l @Rm+,Rn */
        tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_TESL);
        if ( B11_8 != B7_4 )
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
        return;
    case 0x0004: /* mov.b Rm,@(R0,Rn) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B11_8), REG(0));
            tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_UB);
            tcg_temp_free(addr);
        }
        return;
    case 0x0005: /* mov.w Rm,@(R0,Rn) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B11_8), REG(0));
            tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_TEUW);
            tcg_temp_free(addr);
        }
        return;
    case 0x0006: /* mov.l Rm,@(R0,Rn) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B11_8), REG(0));
            tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_TEUL);
            tcg_temp_free(addr);
        }
        return;
    case 0x000c: /* mov.b @(R0,Rm),Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B7_4), REG(0));
            tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_SB);
            tcg_temp_free(addr);
        }
        return;
    case 0x000d: /* mov.w @(R0,Rm),Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B7_4), REG(0));
            tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESW);
            tcg_temp_free(addr);
        }
        return;
    case 0x000e: /* mov.l @(R0,Rm),Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B7_4), REG(0));
            tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESL);
            tcg_temp_free(addr);
        }
        return;
    case 0x6008: /* swap.b Rm,Rn */
        {
            TCGv low = tcg_temp_new();
            tcg_gen_ext16u_i32(low, REG(B7_4));
            tcg_gen_bswap16_i32(low, low);
            tcg_gen_deposit_i32(REG(B11_8), REG(B7_4), low, 0, 16);
            tcg_temp_free(low);
        }
        return;
    case 0x6009: /* swap.w Rm,Rn */
        tcg_gen_rotli_i32(REG(B11_8), REG(B7_4), 16);
        return;
    case 0x200d: /* xtrct Rm,Rn */
        {
            TCGv high, low;
            high = tcg_temp_new();
            tcg_gen_shli_i32(high, REG(B7_4), 16);
            low = tcg_temp_new();
            tcg_gen_shri_i32(low, REG(B11_8), 16);
            tcg_gen_or_i32(REG(B11_8), high, low);
            tcg_temp_free(low);
            tcg_temp_free(high);
        }
        return;
    case 0x300c: /* add Rm,Rn */
        tcg_gen_add_i32(REG(B11_8), REG(B11_8), REG(B7_4));
        return;
    case 0x300e: /* addc Rm,Rn */
        {
            TCGv t0, t1;
            t0 = tcg_const_tl(0);
            t1 = tcg_temp_new();
            tcg_gen_add2_i32(t1, cpu_sr_t, cpu_sr_t, t0, REG(B7_4), t0);
            tcg_gen_add2_i32(REG(B11_8), cpu_sr_t,
                             REG(B11_8), t0, t1, cpu_sr_t);
            tcg_temp_free(t0);
            tcg_temp_free(t1);
        }
        return;
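    /* addv computes Rn + Rm and sets T on signed overflow: overflow occurs
       iff both operands have the same sign and the sign of the result
       differs, i.e. bit 31 of (result ^ Rn) & ~(Rm ^ Rn). */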
    case 0x300f: /* addv Rm,Rn */
        {
            TCGv t0, t1, t2;
            t0 = tcg_temp_new();
            tcg_gen_add_i32(t0, REG(B7_4), REG(B11_8));
            t1 = tcg_temp_new();
            tcg_gen_xor_i32(t1, t0, REG(B11_8));
            t2 = tcg_temp_new();
            tcg_gen_xor_i32(t2, REG(B7_4), REG(B11_8));
            tcg_gen_andc_i32(cpu_sr_t, t1, t2);
            tcg_temp_free(t2);
            tcg_gen_shri_i32(cpu_sr_t, cpu_sr_t, 31);
            tcg_temp_free(t1);
            tcg_gen_mov_i32(REG(B11_8), t0);
            tcg_temp_free(t0);
        }
        return;
    case 0x2009: /* and Rm,Rn */
        tcg_gen_and_i32(REG(B11_8), REG(B11_8), REG(B7_4));
        return;
    case 0x3000: /* cmp/eq Rm,Rn */
        tcg_gen_setcond_i32(TCG_COND_EQ, cpu_sr_t, REG(B11_8), REG(B7_4));
        return;
    case 0x3003: /* cmp/ge Rm,Rn */
        tcg_gen_setcond_i32(TCG_COND_GE, cpu_sr_t, REG(B11_8), REG(B7_4));
        return;
    case 0x3007: /* cmp/gt Rm,Rn */
        tcg_gen_setcond_i32(TCG_COND_GT, cpu_sr_t, REG(B11_8), REG(B7_4));
        return;
    case 0x3006: /* cmp/hi Rm,Rn */
        tcg_gen_setcond_i32(TCG_COND_GTU, cpu_sr_t, REG(B11_8), REG(B7_4));
        return;
    case 0x3002: /* cmp/hs Rm,Rn */
        tcg_gen_setcond_i32(TCG_COND_GEU, cpu_sr_t, REG(B11_8), REG(B7_4));
        return;
    case 0x200c: /* cmp/str Rm,Rn */
        {
            TCGv cmp1 = tcg_temp_new();
            TCGv cmp2 = tcg_temp_new();
            tcg_gen_xor_i32(cmp2, REG(B7_4), REG(B11_8));
            tcg_gen_subi_i32(cmp1, cmp2, 0x01010101);
            tcg_gen_andc_i32(cmp1, cmp1, cmp2);
            tcg_gen_andi_i32(cmp1, cmp1, 0x80808080);
            tcg_gen_setcondi_i32(TCG_COND_NE, cpu_sr_t, cmp1, 0);
            tcg_temp_free(cmp2);
            tcg_temp_free(cmp1);
        }
        return;
    case 0x2007: /* div0s Rm,Rn */
        tcg_gen_shri_i32(cpu_sr_q, REG(B11_8), 31); /* SR_Q */
        tcg_gen_shri_i32(cpu_sr_m, REG(B7_4), 31);  /* SR_M */
        tcg_gen_xor_i32(cpu_sr_t, cpu_sr_q, cpu_sr_m); /* SR_T */
        return;
    case 0x3004: /* div1 Rm,Rn */
        {
            TCGv t0 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            TCGv zero = tcg_const_i32(0);

            /* shift left arg1, saving the bit being pushed out and inserting
               T on the right */
            tcg_gen_shri_i32(t0, REG(B11_8), 31);
            tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
            tcg_gen_or_i32(REG(B11_8), REG(B11_8), cpu_sr_t);

            /* Add or subtract arg0 from arg1 depending if Q == M. To avoid
               using 64-bit temps, we compute arg0's high part from q ^ m, so
               that it is 0x00000000 when adding the value or 0xffffffff when
               subtracting it. */
            tcg_gen_xor_i32(t1, cpu_sr_q, cpu_sr_m);
            tcg_gen_subi_i32(t1, t1, 1);
            tcg_gen_neg_i32(t2, REG(B7_4));
            tcg_gen_movcond_i32(TCG_COND_EQ, t2, t1, zero, REG(B7_4), t2);
            tcg_gen_add2_i32(REG(B11_8), t1, REG(B11_8), zero, t2, t1);

            /* compute T and Q depending on carry */
            tcg_gen_andi_i32(t1, t1, 1);
            tcg_gen_xor_i32(t1, t1, t0);
            tcg_gen_xori_i32(cpu_sr_t, t1, 1);
            tcg_gen_xor_i32(cpu_sr_q, cpu_sr_m, t1);

            tcg_temp_free(zero);
            tcg_temp_free(t2);
            tcg_temp_free(t1);
            tcg_temp_free(t0);
        }
        return;
    case 0x300d: /* dmuls.l Rm,Rn */
        tcg_gen_muls2_i32(cpu_macl, cpu_mach, REG(B7_4), REG(B11_8));
        return;
    case 0x3005: /* dmulu.l Rm,Rn */
        tcg_gen_mulu2_i32(cpu_macl, cpu_mach, REG(B7_4), REG(B11_8));
        return;
    case 0x600e: /* exts.b Rm,Rn */
        tcg_gen_ext8s_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x600f: /* exts.w Rm,Rn */
        tcg_gen_ext16s_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x600c: /* extu.b Rm,Rn */
        tcg_gen_ext8u_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x600d: /* extu.w Rm,Rn */
        tcg_gen_ext16u_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x000f: /* mac.l @Rm+,@Rn+ */
        {
            TCGv arg0, arg1;
            arg0 = tcg_temp_new();
            tcg_gen_qemu_ld_i32(arg0, REG(B7_4), ctx->memidx, MO_TESL);
            arg1 = tcg_temp_new();
            tcg_gen_qemu_ld_i32(arg1, REG(B11_8), ctx->memidx, MO_TESL);
            gen_helper_macl(cpu_env, arg0, arg1);
            tcg_temp_free(arg1);
            tcg_temp_free(arg0);
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
            tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
        }
        return;
    case 0x400f: /* mac.w @Rm+,@Rn+ */
        {
            TCGv arg0, arg1;
            arg0 = tcg_temp_new();
            tcg_gen_qemu_ld_i32(arg0, REG(B7_4), ctx->memidx, MO_TESL);
            arg1 = tcg_temp_new();
            tcg_gen_qemu_ld_i32(arg1, REG(B11_8), ctx->memidx, MO_TESL);
            gen_helper_macw(cpu_env, arg0, arg1);
            tcg_temp_free(arg1);
            tcg_temp_free(arg0);
            tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 2);
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
        }
        return;
    case 0x0007: /* mul.l Rm,Rn */
        tcg_gen_mul_i32(cpu_macl, REG(B7_4), REG(B11_8));
        return;
    case 0x200f: /* muls.w Rm,Rn */
        {
            TCGv arg0, arg1;
            arg0 = tcg_temp_new();
            tcg_gen_ext16s_i32(arg0, REG(B7_4));
            arg1 = tcg_temp_new();
            tcg_gen_ext16s_i32(arg1, REG(B11_8));
            tcg_gen_mul_i32(cpu_macl, arg0, arg1);
            tcg_temp_free(arg1);
            tcg_temp_free(arg0);
        }
        return;
    case 0x200e: /* mulu.w Rm,Rn */
        {
            TCGv arg0, arg1;
            arg0 = tcg_temp_new();
            tcg_gen_ext16u_i32(arg0, REG(B7_4));
            arg1 = tcg_temp_new();
            tcg_gen_ext16u_i32(arg1, REG(B11_8));
            tcg_gen_mul_i32(cpu_macl, arg0, arg1);
            tcg_temp_free(arg1);
            tcg_temp_free(arg0);
        }
        return;
    case 0x600b: /* neg Rm,Rn */
        tcg_gen_neg_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x600a: /* negc Rm,Rn */
        {
            TCGv t0 = tcg_const_i32(0);
            tcg_gen_add2_i32(REG(B11_8), cpu_sr_t,
                             REG(B7_4), t0, cpu_sr_t, t0);
            tcg_gen_sub2_i32(REG(B11_8), cpu_sr_t,
                             t0, t0, REG(B11_8), cpu_sr_t);
            tcg_gen_andi_i32(cpu_sr_t, cpu_sr_t, 1);
            tcg_temp_free(t0);
        }
        return;
    case 0x6007: /* not Rm,Rn */
        tcg_gen_not_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x200b: /* or Rm,Rn */
        tcg_gen_or_i32(REG(B11_8), REG(B11_8), REG(B7_4));
        return;
    case 0x400c: /* shad Rm,Rn */
        {
            TCGv t0 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();

            tcg_gen_andi_i32(t0, REG(B7_4), 0x1f);

            /* positive case: shift to the left */
            tcg_gen_shl_i32(t1, REG(B11_8), t0);

            /* negative case: shift to the right in two steps to
               correctly handle the -32 case */
            tcg_gen_xori_i32(t0, t0, 0x1f);
            tcg_gen_sar_i32(t2, REG(B11_8), t0);
            tcg_gen_sari_i32(t2, t2, 1);

            /* select between the two cases */
            tcg_gen_movi_i32(t0, 0);
            tcg_gen_movcond_i32(TCG_COND_GE, REG(B11_8), REG(B7_4), t0, t1, t2);

            tcg_temp_free(t0);
            tcg_temp_free(t1);
            tcg_temp_free(t2);
        }
        return;
    case 0x400d: /* shld Rm,Rn */
        {
            TCGv t0 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();

            tcg_gen_andi_i32(t0, REG(B7_4), 0x1f);

            /* positive case: shift to the left */
            tcg_gen_shl_i32(t1, REG(B11_8), t0);

            /* negative case: shift to the right in two steps to
               correctly handle the -32 case */
            tcg_gen_xori_i32(t0, t0, 0x1f);
            tcg_gen_shr_i32(t2, REG(B11_8), t0);
            tcg_gen_shri_i32(t2, t2, 1);

            /* select between the two cases */
            tcg_gen_movi_i32(t0, 0);
            tcg_gen_movcond_i32(TCG_COND_GE, REG(B11_8), REG(B7_4), t0, t1, t2);

            tcg_temp_free(t0);
            tcg_temp_free(t1);
            tcg_temp_free(t2);
        }
        return;
    case 0x3008: /* sub Rm,Rn */
        tcg_gen_sub_i32(REG(B11_8), REG(B11_8), REG(B7_4));
        return;
    case 0x300a: /* subc Rm,Rn */
        {
            TCGv t0, t1;
            t0 = tcg_const_tl(0);
            t1 = tcg_temp_new();
            tcg_gen_add2_i32(t1, cpu_sr_t, cpu_sr_t, t0, REG(B7_4), t0);
            tcg_gen_sub2_i32(REG(B11_8), cpu_sr_t,
                             REG(B11_8), t0, t1, cpu_sr_t);
            tcg_gen_andi_i32(cpu_sr_t, cpu_sr_t, 1);
            tcg_temp_free(t0);
            tcg_temp_free(t1);
        }
        return;
    case 0x300b: /* subv Rm,Rn */
        {
            TCGv t0, t1, t2;
            t0 = tcg_temp_new();
            tcg_gen_sub_i32(t0, REG(B11_8), REG(B7_4));
            t1 = tcg_temp_new();
            tcg_gen_xor_i32(t1, t0, REG(B11_8));
            t2 = tcg_temp_new();
            tcg_gen_xor_i32(t2, REG(B11_8), REG(B7_4));
            tcg_gen_and_i32(t1, t1, t2);
            tcg_temp_free(t2);
            tcg_gen_shri_i32(cpu_sr_t, t1, 31);
            tcg_temp_free(t1);
            tcg_gen_mov_i32(REG(B11_8), t0);
            tcg_temp_free(t0);
        }
        return;
    case 0x2008: /* tst Rm,Rn */
        {
            TCGv val = tcg_temp_new();
            tcg_gen_and_i32(val, REG(B7_4), REG(B11_8));
            tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0);
            tcg_temp_free(val);
        }
        return;
    case 0x200a: /* xor Rm,Rn */
        tcg_gen_xor_i32(REG(B11_8), REG(B11_8), REG(B7_4));
        return;
    case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */
        CHECK_FPU_ENABLED
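        /* With FPSCR.SZ set, fmov transfers a 64-bit register pair; XREG()
           maps the operand encoding onto the flat cpu_fregs index, selecting
           the opposite bank (XD) when bit 0 of the field is set. */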
        if (ctx->tbflags & FPSCR_SZ) {
            TCGv_i64 fp = tcg_temp_new_i64();
            gen_load_fpr64(fp, XREG(B7_4));
            gen_store_fpr64(fp, XREG(B11_8));
            tcg_temp_free_i64(fp);
        } else {
            tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
        }
        return;
    case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */
        CHECK_FPU_ENABLED
        if (ctx->tbflags & FPSCR_SZ) {
            TCGv addr_hi = tcg_temp_new();
            int fr = XREG(B7_4);
            tcg_gen_addi_i32(addr_hi, REG(B11_8), 4);
            tcg_gen_qemu_st_i32(cpu_fregs[fr], REG(B11_8),
                                ctx->memidx, MO_TEUL);
            tcg_gen_qemu_st_i32(cpu_fregs[fr+1], addr_hi,
                                ctx->memidx, MO_TEUL);
            tcg_temp_free(addr_hi);
        } else {
            tcg_gen_qemu_st_i32(cpu_fregs[FREG(B7_4)], REG(B11_8),
                                ctx->memidx, MO_TEUL);
        }
        return;
    case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */
        CHECK_FPU_ENABLED
        if (ctx->tbflags & FPSCR_SZ) {
            TCGv addr_hi = tcg_temp_new();
            int fr = XREG(B11_8);
            tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
            tcg_gen_qemu_ld_i32(cpu_fregs[fr], REG(B7_4), ctx->memidx, MO_TEUL);
            tcg_gen_qemu_ld_i32(cpu_fregs[fr+1], addr_hi, ctx->memidx, MO_TEUL);
            tcg_temp_free(addr_hi);
        } else {
            tcg_gen_qemu_ld_i32(cpu_fregs[FREG(B11_8)], REG(B7_4),
                                ctx->memidx, MO_TEUL);
        }
        return;
    case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */
        CHECK_FPU_ENABLED
        if (ctx->tbflags & FPSCR_SZ) {
            TCGv addr_hi = tcg_temp_new();
            int fr = XREG(B11_8);
            tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
            tcg_gen_qemu_ld_i32(cpu_fregs[fr], REG(B7_4), ctx->memidx, MO_TEUL);
            tcg_gen_qemu_ld_i32(cpu_fregs[fr+1], addr_hi, ctx->memidx, MO_TEUL);
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 8);
            tcg_temp_free(addr_hi);
        } else {
            tcg_gen_qemu_ld_i32(cpu_fregs[FREG(B11_8)], REG(B7_4),
                                ctx->memidx, MO_TEUL);
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
        }
        return;
    case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */
        CHECK_FPU_ENABLED
        TCGv addr = tcg_temp_new_i32();
        tcg_gen_subi_i32(addr, REG(B11_8), 4);
        if (ctx->tbflags & FPSCR_SZ) {
            int fr = XREG(B7_4);
            tcg_gen_qemu_st_i32(cpu_fregs[fr+1], addr, ctx->memidx, MO_TEUL);
            tcg_gen_subi_i32(addr, addr, 4);
            tcg_gen_qemu_st_i32(cpu_fregs[fr], addr, ctx->memidx, MO_TEUL);
        } else {
            tcg_gen_qemu_st_i32(cpu_fregs[FREG(B7_4)], addr,
                                ctx->memidx, MO_TEUL);
        }
        tcg_gen_mov_i32(REG(B11_8), addr);
        tcg_temp_free(addr);
        return;
    case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rm - FPSCR: Nothing */
        CHECK_FPU_ENABLED
        {
            TCGv addr = tcg_temp_new_i32();
            tcg_gen_add_i32(addr, REG(B7_4), REG(0));
            if (ctx->tbflags & FPSCR_SZ) {
                int fr = XREG(B11_8);
                tcg_gen_qemu_ld_i32(cpu_fregs[fr], addr,
                                    ctx->memidx, MO_TEUL);
                tcg_gen_addi_i32(addr, addr, 4);
                tcg_gen_qemu_ld_i32(cpu_fregs[fr+1], addr,
                                    ctx->memidx, MO_TEUL);
            } else {
                tcg_gen_qemu_ld_i32(cpu_fregs[FREG(B11_8)], addr,
                                    ctx->memidx, MO_TEUL);
            }
            tcg_temp_free(addr);
        }
        return;
    case 0xf007: /* fmov {F,D,X}Rn,@(R0,Rn) - FPSCR: Nothing */
        CHECK_FPU_ENABLED
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B11_8), REG(0));
            if (ctx->tbflags & FPSCR_SZ) {
                int fr = XREG(B7_4);
                tcg_gen_qemu_st_i32(cpu_fregs[fr], addr,
                                    ctx->memidx, MO_TEUL);
                tcg_gen_addi_i32(addr, addr, 4);
                tcg_gen_qemu_st_i32(cpu_fregs[fr+1], addr,
                                    ctx->memidx, MO_TEUL);
            } else {
                tcg_gen_qemu_st_i32(cpu_fregs[FREG(B7_4)], addr,
                                    ctx->memidx, MO_TEUL);
            }
            tcg_temp_free(addr);
        }
        return;
    case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
    case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
    case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
    case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
    case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
    case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
        {
            CHECK_FPU_ENABLED
            if (ctx->tbflags & FPSCR_PR) {
                TCGv_i64 fp0, fp1;

                if (ctx->opcode & 0x0110)
                    break; /* illegal instruction */
                fp0 = tcg_temp_new_i64();
                fp1 = tcg_temp_new_i64();
                gen_load_fpr64(fp0, DREG(B11_8));
                gen_load_fpr64(fp1, DREG(B7_4));
                switch (ctx->opcode & 0xf00f) {
                case 0xf000: /* fadd Rm,Rn */
Rm,Rn */ 1106fcf5ef2aSThomas Huth gen_helper_fadd_DT(fp0, cpu_env, fp0, fp1); 1107fcf5ef2aSThomas Huth break; 1108fcf5ef2aSThomas Huth case 0xf001: /* fsub Rm,Rn */ 1109fcf5ef2aSThomas Huth gen_helper_fsub_DT(fp0, cpu_env, fp0, fp1); 1110fcf5ef2aSThomas Huth break; 1111fcf5ef2aSThomas Huth case 0xf002: /* fmul Rm,Rn */ 1112fcf5ef2aSThomas Huth gen_helper_fmul_DT(fp0, cpu_env, fp0, fp1); 1113fcf5ef2aSThomas Huth break; 1114fcf5ef2aSThomas Huth case 0xf003: /* fdiv Rm,Rn */ 1115fcf5ef2aSThomas Huth gen_helper_fdiv_DT(fp0, cpu_env, fp0, fp1); 1116fcf5ef2aSThomas Huth break; 1117fcf5ef2aSThomas Huth case 0xf004: /* fcmp/eq Rm,Rn */ 111892f1f83eSAurelien Jarno gen_helper_fcmp_eq_DT(cpu_sr_t, cpu_env, fp0, fp1); 1119fcf5ef2aSThomas Huth return; 1120fcf5ef2aSThomas Huth case 0xf005: /* fcmp/gt Rm,Rn */ 112192f1f83eSAurelien Jarno gen_helper_fcmp_gt_DT(cpu_sr_t, cpu_env, fp0, fp1); 1122fcf5ef2aSThomas Huth return; 1123fcf5ef2aSThomas Huth } 1124fcf5ef2aSThomas Huth gen_store_fpr64(fp0, DREG(B11_8)); 1125fcf5ef2aSThomas Huth tcg_temp_free_i64(fp0); 1126fcf5ef2aSThomas Huth tcg_temp_free_i64(fp1); 1127fcf5ef2aSThomas Huth } else { 1128fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf00f) { 1129fcf5ef2aSThomas Huth case 0xf000: /* fadd Rm,Rn */ 1130fcf5ef2aSThomas Huth gen_helper_fadd_FT(cpu_fregs[FREG(B11_8)], cpu_env, 1131fcf5ef2aSThomas Huth cpu_fregs[FREG(B11_8)], 1132fcf5ef2aSThomas Huth cpu_fregs[FREG(B7_4)]); 1133fcf5ef2aSThomas Huth break; 1134fcf5ef2aSThomas Huth case 0xf001: /* fsub Rm,Rn */ 1135fcf5ef2aSThomas Huth gen_helper_fsub_FT(cpu_fregs[FREG(B11_8)], cpu_env, 1136fcf5ef2aSThomas Huth cpu_fregs[FREG(B11_8)], 1137fcf5ef2aSThomas Huth cpu_fregs[FREG(B7_4)]); 1138fcf5ef2aSThomas Huth break; 1139fcf5ef2aSThomas Huth case 0xf002: /* fmul Rm,Rn */ 1140fcf5ef2aSThomas Huth gen_helper_fmul_FT(cpu_fregs[FREG(B11_8)], cpu_env, 1141fcf5ef2aSThomas Huth cpu_fregs[FREG(B11_8)], 1142fcf5ef2aSThomas Huth cpu_fregs[FREG(B7_4)]); 1143fcf5ef2aSThomas Huth break; 1144fcf5ef2aSThomas Huth case 0xf003: /* fdiv Rm,Rn */ 1145fcf5ef2aSThomas Huth gen_helper_fdiv_FT(cpu_fregs[FREG(B11_8)], cpu_env, 1146fcf5ef2aSThomas Huth cpu_fregs[FREG(B11_8)], 1147fcf5ef2aSThomas Huth cpu_fregs[FREG(B7_4)]); 1148fcf5ef2aSThomas Huth break; 1149fcf5ef2aSThomas Huth case 0xf004: /* fcmp/eq Rm,Rn */ 115092f1f83eSAurelien Jarno gen_helper_fcmp_eq_FT(cpu_sr_t, cpu_env, 115192f1f83eSAurelien Jarno cpu_fregs[FREG(B11_8)], 1152fcf5ef2aSThomas Huth cpu_fregs[FREG(B7_4)]); 1153fcf5ef2aSThomas Huth return; 1154fcf5ef2aSThomas Huth case 0xf005: /* fcmp/gt Rm,Rn */ 115592f1f83eSAurelien Jarno gen_helper_fcmp_gt_FT(cpu_sr_t, cpu_env, 115692f1f83eSAurelien Jarno cpu_fregs[FREG(B11_8)], 1157fcf5ef2aSThomas Huth cpu_fregs[FREG(B7_4)]); 1158fcf5ef2aSThomas Huth return; 1159fcf5ef2aSThomas Huth } 1160fcf5ef2aSThomas Huth } 1161fcf5ef2aSThomas Huth } 1162fcf5ef2aSThomas Huth return; 1163fcf5ef2aSThomas Huth case 0xf00e: /* fmac FR0,RM,Rn */ 1164fcf5ef2aSThomas Huth { 1165fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1166a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 1167fcf5ef2aSThomas Huth break; /* illegal instruction */ 1168fcf5ef2aSThomas Huth } else { 1169fcf5ef2aSThomas Huth gen_helper_fmac_FT(cpu_fregs[FREG(B11_8)], cpu_env, 1170fcf5ef2aSThomas Huth cpu_fregs[FREG(0)], cpu_fregs[FREG(B7_4)], 1171fcf5ef2aSThomas Huth cpu_fregs[FREG(B11_8)]); 1172fcf5ef2aSThomas Huth return; 1173fcf5ef2aSThomas Huth } 1174fcf5ef2aSThomas Huth } 1175fcf5ef2aSThomas Huth } 1176fcf5ef2aSThomas Huth 1177fcf5ef2aSThomas Huth switch (ctx->opcode & 0xff00) { 
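    /* Formats with the operation encoded in the high byte and the operand
       fields (immediates, displacements) in the low byte. */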
1178fcf5ef2aSThomas Huth case 0xc900: /* and #imm,R0 */ 1179fcf5ef2aSThomas Huth tcg_gen_andi_i32(REG(0), REG(0), B7_0); 1180fcf5ef2aSThomas Huth return; 1181fcf5ef2aSThomas Huth case 0xcd00: /* and.b #imm,@(R0,GBR) */ 1182fcf5ef2aSThomas Huth { 1183fcf5ef2aSThomas Huth TCGv addr, val; 1184fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1185fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(0), cpu_gbr); 1186fcf5ef2aSThomas Huth val = tcg_temp_new(); 1187fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB); 1188fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, val, B7_0); 1189fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB); 1190fcf5ef2aSThomas Huth tcg_temp_free(val); 1191fcf5ef2aSThomas Huth tcg_temp_free(addr); 1192fcf5ef2aSThomas Huth } 1193fcf5ef2aSThomas Huth return; 1194fcf5ef2aSThomas Huth case 0x8b00: /* bf label */ 1195fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 11964bfa602bSRichard Henderson gen_conditional_jump(ctx, ctx->pc + 4 + B7_0s * 2, false); 1197fcf5ef2aSThomas Huth return; 1198fcf5ef2aSThomas Huth case 0x8f00: /* bf/s label */ 1199fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1200ac9707eaSAurelien Jarno tcg_gen_xori_i32(cpu_delayed_cond, cpu_sr_t, 1); 1201ac9707eaSAurelien Jarno ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2; 1202a6215749SAurelien Jarno ctx->envflags |= DELAY_SLOT_CONDITIONAL; 1203fcf5ef2aSThomas Huth return; 1204fcf5ef2aSThomas Huth case 0x8900: /* bt label */ 1205fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 12064bfa602bSRichard Henderson gen_conditional_jump(ctx, ctx->pc + 4 + B7_0s * 2, true); 1207fcf5ef2aSThomas Huth return; 1208fcf5ef2aSThomas Huth case 0x8d00: /* bt/s label */ 1209fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1210ac9707eaSAurelien Jarno tcg_gen_mov_i32(cpu_delayed_cond, cpu_sr_t); 1211ac9707eaSAurelien Jarno ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2; 1212a6215749SAurelien Jarno ctx->envflags |= DELAY_SLOT_CONDITIONAL; 1213fcf5ef2aSThomas Huth return; 1214fcf5ef2aSThomas Huth case 0x8800: /* cmp/eq #imm,R0 */ 1215fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, REG(0), B7_0s); 1216fcf5ef2aSThomas Huth return; 1217fcf5ef2aSThomas Huth case 0xc400: /* mov.b @(disp,GBR),R0 */ 1218fcf5ef2aSThomas Huth { 1219fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1220fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0); 1221fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_SB); 1222fcf5ef2aSThomas Huth tcg_temp_free(addr); 1223fcf5ef2aSThomas Huth } 1224fcf5ef2aSThomas Huth return; 1225fcf5ef2aSThomas Huth case 0xc500: /* mov.w @(disp,GBR),R0 */ 1226fcf5ef2aSThomas Huth { 1227fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1228fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2); 1229fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_TESW); 1230fcf5ef2aSThomas Huth tcg_temp_free(addr); 1231fcf5ef2aSThomas Huth } 1232fcf5ef2aSThomas Huth return; 1233fcf5ef2aSThomas Huth case 0xc600: /* mov.l @(disp,GBR),R0 */ 1234fcf5ef2aSThomas Huth { 1235fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1236fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4); 1237fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_TESL); 1238fcf5ef2aSThomas Huth tcg_temp_free(addr); 1239fcf5ef2aSThomas Huth } 1240fcf5ef2aSThomas Huth return; 1241fcf5ef2aSThomas Huth case 0xc000: /* mov.b R0,@(disp,GBR) */ 1242fcf5ef2aSThomas Huth { 1243fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1244fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0); 1245fcf5ef2aSThomas 
Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_UB); 1246fcf5ef2aSThomas Huth tcg_temp_free(addr); 1247fcf5ef2aSThomas Huth } 1248fcf5ef2aSThomas Huth return; 1249fcf5ef2aSThomas Huth case 0xc100: /* mov.w R0,@(disp,GBR) */ 1250fcf5ef2aSThomas Huth { 1251fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1252fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2); 1253fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_TEUW); 1254fcf5ef2aSThomas Huth tcg_temp_free(addr); 1255fcf5ef2aSThomas Huth } 1256fcf5ef2aSThomas Huth return; 1257fcf5ef2aSThomas Huth case 0xc200: /* mov.l R0,@(disp,GBR) */ 1258fcf5ef2aSThomas Huth { 1259fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1260fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4); 1261fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_TEUL); 1262fcf5ef2aSThomas Huth tcg_temp_free(addr); 1263fcf5ef2aSThomas Huth } 1264fcf5ef2aSThomas Huth return; 1265fcf5ef2aSThomas Huth case 0x8000: /* mov.b R0,@(disp,Rn) */ 1266fcf5ef2aSThomas Huth { 1267fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1268fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0); 1269fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_UB); 1270fcf5ef2aSThomas Huth tcg_temp_free(addr); 1271fcf5ef2aSThomas Huth } 1272fcf5ef2aSThomas Huth return; 1273fcf5ef2aSThomas Huth case 0x8100: /* mov.w R0,@(disp,Rn) */ 1274fcf5ef2aSThomas Huth { 1275fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1276fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2); 1277fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_TEUW); 1278fcf5ef2aSThomas Huth tcg_temp_free(addr); 1279fcf5ef2aSThomas Huth } 1280fcf5ef2aSThomas Huth return; 1281fcf5ef2aSThomas Huth case 0x8400: /* mov.b @(disp,Rn),R0 */ 1282fcf5ef2aSThomas Huth { 1283fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1284fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0); 1285fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_SB); 1286fcf5ef2aSThomas Huth tcg_temp_free(addr); 1287fcf5ef2aSThomas Huth } 1288fcf5ef2aSThomas Huth return; 1289fcf5ef2aSThomas Huth case 0x8500: /* mov.w @(disp,Rn),R0 */ 1290fcf5ef2aSThomas Huth { 1291fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1292fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2); 1293fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_TESW); 1294fcf5ef2aSThomas Huth tcg_temp_free(addr); 1295fcf5ef2aSThomas Huth } 1296fcf5ef2aSThomas Huth return; 1297fcf5ef2aSThomas Huth case 0xc700: /* mova @(disp,PC),R0 */ 1298fcf5ef2aSThomas Huth tcg_gen_movi_i32(REG(0), ((ctx->pc & 0xfffffffc) + 4 + B7_0 * 4) & ~3); 1299fcf5ef2aSThomas Huth return; 1300fcf5ef2aSThomas Huth case 0xcb00: /* or #imm,R0 */ 1301fcf5ef2aSThomas Huth tcg_gen_ori_i32(REG(0), REG(0), B7_0); 1302fcf5ef2aSThomas Huth return; 1303fcf5ef2aSThomas Huth case 0xcf00: /* or.b #imm,@(R0,GBR) */ 1304fcf5ef2aSThomas Huth { 1305fcf5ef2aSThomas Huth TCGv addr, val; 1306fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1307fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(0), cpu_gbr); 1308fcf5ef2aSThomas Huth val = tcg_temp_new(); 1309fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB); 1310fcf5ef2aSThomas Huth tcg_gen_ori_i32(val, val, B7_0); 1311fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB); 1312fcf5ef2aSThomas Huth tcg_temp_free(val); 1313fcf5ef2aSThomas Huth tcg_temp_free(addr); 1314fcf5ef2aSThomas Huth } 1315fcf5ef2aSThomas 
Huth return; 1316fcf5ef2aSThomas Huth case 0xc300: /* trapa #imm */ 1317fcf5ef2aSThomas Huth { 1318fcf5ef2aSThomas Huth TCGv imm; 1319fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1320ac9707eaSAurelien Jarno gen_save_cpu_state(ctx, true); 1321fcf5ef2aSThomas Huth imm = tcg_const_i32(B7_0); 1322fcf5ef2aSThomas Huth gen_helper_trapa(cpu_env, imm); 1323fcf5ef2aSThomas Huth tcg_temp_free(imm); 132463205665SAurelien Jarno ctx->bstate = BS_EXCP; 1325fcf5ef2aSThomas Huth } 1326fcf5ef2aSThomas Huth return; 1327fcf5ef2aSThomas Huth case 0xc800: /* tst #imm,R0 */ 1328fcf5ef2aSThomas Huth { 1329fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1330fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, REG(0), B7_0); 1331fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0); 1332fcf5ef2aSThomas Huth tcg_temp_free(val); 1333fcf5ef2aSThomas Huth } 1334fcf5ef2aSThomas Huth return; 1335fcf5ef2aSThomas Huth case 0xcc00: /* tst.b #imm,@(R0,GBR) */ 1336fcf5ef2aSThomas Huth { 1337fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1338fcf5ef2aSThomas Huth tcg_gen_add_i32(val, REG(0), cpu_gbr); 1339fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, val, ctx->memidx, MO_UB); 1340fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, val, B7_0); 1341fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0); 1342fcf5ef2aSThomas Huth tcg_temp_free(val); 1343fcf5ef2aSThomas Huth } 1344fcf5ef2aSThomas Huth return; 1345fcf5ef2aSThomas Huth case 0xca00: /* xor #imm,R0 */ 1346fcf5ef2aSThomas Huth tcg_gen_xori_i32(REG(0), REG(0), B7_0); 1347fcf5ef2aSThomas Huth return; 1348fcf5ef2aSThomas Huth case 0xce00: /* xor.b #imm,@(R0,GBR) */ 1349fcf5ef2aSThomas Huth { 1350fcf5ef2aSThomas Huth TCGv addr, val; 1351fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1352fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(0), cpu_gbr); 1353fcf5ef2aSThomas Huth val = tcg_temp_new(); 1354fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB); 1355fcf5ef2aSThomas Huth tcg_gen_xori_i32(val, val, B7_0); 1356fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB); 1357fcf5ef2aSThomas Huth tcg_temp_free(val); 1358fcf5ef2aSThomas Huth tcg_temp_free(addr); 1359fcf5ef2aSThomas Huth } 1360fcf5ef2aSThomas Huth return; 1361fcf5ef2aSThomas Huth } 1362fcf5ef2aSThomas Huth 1363fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf08f) { 1364fcf5ef2aSThomas Huth case 0x408e: /* ldc Rm,Rn_BANK */ 1365fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1366fcf5ef2aSThomas Huth tcg_gen_mov_i32(ALTREG(B6_4), REG(B11_8)); 1367fcf5ef2aSThomas Huth return; 1368fcf5ef2aSThomas Huth case 0x4087: /* ldc.l @Rm+,Rn_BANK */ 1369fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1370fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(ALTREG(B6_4), REG(B11_8), ctx->memidx, MO_TESL); 1371fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 1372fcf5ef2aSThomas Huth return; 1373fcf5ef2aSThomas Huth case 0x0082: /* stc Rm_BANK,Rn */ 1374fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1375fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), ALTREG(B6_4)); 1376fcf5ef2aSThomas Huth return; 1377fcf5ef2aSThomas Huth case 0x4083: /* stc.l Rm_BANK,@-Rn */ 1378fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1379fcf5ef2aSThomas Huth { 1380fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1381fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 1382fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(ALTREG(B6_4), addr, ctx->memidx, MO_TEUL); 1383fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 1384fcf5ef2aSThomas Huth tcg_temp_free(addr); 1385fcf5ef2aSThomas Huth } 1386fcf5ef2aSThomas Huth return; 1387fcf5ef2aSThomas 
Huth } 1388fcf5ef2aSThomas Huth 1389fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf0ff) { 1390fcf5ef2aSThomas Huth case 0x0023: /* braf Rn */ 1391fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1392fcf5ef2aSThomas Huth tcg_gen_addi_i32(cpu_delayed_pc, REG(B11_8), ctx->pc + 4); 1393a6215749SAurelien Jarno ctx->envflags |= DELAY_SLOT; 1394fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1395fcf5ef2aSThomas Huth return; 1396fcf5ef2aSThomas Huth case 0x0003: /* bsrf Rn */ 1397fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1398fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_pr, ctx->pc + 4); 1399fcf5ef2aSThomas Huth tcg_gen_add_i32(cpu_delayed_pc, REG(B11_8), cpu_pr); 1400a6215749SAurelien Jarno ctx->envflags |= DELAY_SLOT; 1401fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1402fcf5ef2aSThomas Huth return; 1403fcf5ef2aSThomas Huth case 0x4015: /* cmp/pl Rn */ 1404fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_GT, cpu_sr_t, REG(B11_8), 0); 1405fcf5ef2aSThomas Huth return; 1406fcf5ef2aSThomas Huth case 0x4011: /* cmp/pz Rn */ 1407fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_GE, cpu_sr_t, REG(B11_8), 0); 1408fcf5ef2aSThomas Huth return; 1409fcf5ef2aSThomas Huth case 0x4010: /* dt Rn */ 1410fcf5ef2aSThomas Huth tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 1); 1411fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, REG(B11_8), 0); 1412fcf5ef2aSThomas Huth return; 1413fcf5ef2aSThomas Huth case 0x402b: /* jmp @Rn */ 1414fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1415fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8)); 1416a6215749SAurelien Jarno ctx->envflags |= DELAY_SLOT; 1417fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1418fcf5ef2aSThomas Huth return; 1419fcf5ef2aSThomas Huth case 0x400b: /* jsr @Rn */ 1420fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1421fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_pr, ctx->pc + 4); 1422fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8)); 1423a6215749SAurelien Jarno ctx->envflags |= DELAY_SLOT; 1424fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1425fcf5ef2aSThomas Huth return; 1426fcf5ef2aSThomas Huth case 0x400e: /* ldc Rm,SR */ 1427fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1428fcf5ef2aSThomas Huth { 1429fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1430fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, REG(B11_8), 0x700083f3); 1431fcf5ef2aSThomas Huth gen_write_sr(val); 1432fcf5ef2aSThomas Huth tcg_temp_free(val); 1433fcf5ef2aSThomas Huth ctx->bstate = BS_STOP; 1434fcf5ef2aSThomas Huth } 1435fcf5ef2aSThomas Huth return; 1436fcf5ef2aSThomas Huth case 0x4007: /* ldc.l @Rm+,SR */ 1437fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1438fcf5ef2aSThomas Huth { 1439fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1440fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, REG(B11_8), ctx->memidx, MO_TESL); 1441fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, val, 0x700083f3); 1442fcf5ef2aSThomas Huth gen_write_sr(val); 1443fcf5ef2aSThomas Huth tcg_temp_free(val); 1444fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 1445fcf5ef2aSThomas Huth ctx->bstate = BS_STOP; 1446fcf5ef2aSThomas Huth } 1447fcf5ef2aSThomas Huth return; 1448fcf5ef2aSThomas Huth case 0x0002: /* stc SR,Rn */ 1449fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1450fcf5ef2aSThomas Huth gen_read_sr(REG(B11_8)); 1451fcf5ef2aSThomas Huth return; 1452fcf5ef2aSThomas Huth case 0x4003: /* stc SR,@-Rn */ 1453fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1454fcf5ef2aSThomas Huth { 1455fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1456fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 
1457fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 1458fcf5ef2aSThomas Huth gen_read_sr(val); 1459fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_TEUL); 1460fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 1461fcf5ef2aSThomas Huth tcg_temp_free(val); 1462fcf5ef2aSThomas Huth tcg_temp_free(addr); 1463fcf5ef2aSThomas Huth } 1464fcf5ef2aSThomas Huth return; 1465fcf5ef2aSThomas Huth #define LD(reg,ldnum,ldpnum,prechk) \ 1466fcf5ef2aSThomas Huth case ldnum: \ 1467fcf5ef2aSThomas Huth prechk \ 1468fcf5ef2aSThomas Huth tcg_gen_mov_i32 (cpu_##reg, REG(B11_8)); \ 1469fcf5ef2aSThomas Huth return; \ 1470fcf5ef2aSThomas Huth case ldpnum: \ 1471fcf5ef2aSThomas Huth prechk \ 1472fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(cpu_##reg, REG(B11_8), ctx->memidx, MO_TESL); \ 1473fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); \ 1474fcf5ef2aSThomas Huth return; 1475fcf5ef2aSThomas Huth #define ST(reg,stnum,stpnum,prechk) \ 1476fcf5ef2aSThomas Huth case stnum: \ 1477fcf5ef2aSThomas Huth prechk \ 1478fcf5ef2aSThomas Huth tcg_gen_mov_i32 (REG(B11_8), cpu_##reg); \ 1479fcf5ef2aSThomas Huth return; \ 1480fcf5ef2aSThomas Huth case stpnum: \ 1481fcf5ef2aSThomas Huth prechk \ 1482fcf5ef2aSThomas Huth { \ 1483fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); \ 1484fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); \ 1485fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(cpu_##reg, addr, ctx->memidx, MO_TEUL); \ 1486fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); \ 1487fcf5ef2aSThomas Huth tcg_temp_free(addr); \ 1488fcf5ef2aSThomas Huth } \ 1489fcf5ef2aSThomas Huth return; 1490fcf5ef2aSThomas Huth #define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk) \ 1491fcf5ef2aSThomas Huth LD(reg,ldnum,ldpnum,prechk) \ 1492fcf5ef2aSThomas Huth ST(reg,stnum,stpnum,prechk) 1493fcf5ef2aSThomas Huth LDST(gbr, 0x401e, 0x4017, 0x0012, 0x4013, {}) 1494fcf5ef2aSThomas Huth LDST(vbr, 0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED) 1495fcf5ef2aSThomas Huth LDST(ssr, 0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED) 1496fcf5ef2aSThomas Huth LDST(spc, 0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED) 1497fcf5ef2aSThomas Huth ST(sgr, 0x003a, 0x4032, CHECK_PRIVILEGED) 1498fcf5ef2aSThomas Huth LD(sgr, 0x403a, 0x4036, CHECK_PRIVILEGED if (!(ctx->features & SH_FEATURE_SH4A)) break;) 1499fcf5ef2aSThomas Huth LDST(dbr, 0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED) 1500fcf5ef2aSThomas Huth LDST(mach, 0x400a, 0x4006, 0x000a, 0x4002, {}) 1501fcf5ef2aSThomas Huth LDST(macl, 0x401a, 0x4016, 0x001a, 0x4012, {}) 1502fcf5ef2aSThomas Huth LDST(pr, 0x402a, 0x4026, 0x002a, 0x4022, {}) 1503fcf5ef2aSThomas Huth LDST(fpul, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED}) 1504fcf5ef2aSThomas Huth case 0x406a: /* lds Rm,FPSCR */ 1505fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1506fcf5ef2aSThomas Huth gen_helper_ld_fpscr(cpu_env, REG(B11_8)); 1507fcf5ef2aSThomas Huth ctx->bstate = BS_STOP; 1508fcf5ef2aSThomas Huth return; 1509fcf5ef2aSThomas Huth case 0x4066: /* lds.l @Rm+,FPSCR */ 1510fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1511fcf5ef2aSThomas Huth { 1512fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1513fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(addr, REG(B11_8), ctx->memidx, MO_TESL); 1514fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 1515fcf5ef2aSThomas Huth gen_helper_ld_fpscr(cpu_env, addr); 1516fcf5ef2aSThomas Huth tcg_temp_free(addr); 1517fcf5ef2aSThomas Huth ctx->bstate = BS_STOP; 1518fcf5ef2aSThomas Huth } 1519fcf5ef2aSThomas Huth return; 1520fcf5ef2aSThomas Huth case 0x006a: /* 
sts FPSCR,Rn */ 1521fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1522fcf5ef2aSThomas Huth tcg_gen_andi_i32(REG(B11_8), cpu_fpscr, 0x003fffff); 1523fcf5ef2aSThomas Huth return; 1524fcf5ef2aSThomas Huth case 0x4062: /* sts FPSCR,@-Rn */ 1525fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1526fcf5ef2aSThomas Huth { 1527fcf5ef2aSThomas Huth TCGv addr, val; 1528fcf5ef2aSThomas Huth val = tcg_temp_new(); 1529fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, cpu_fpscr, 0x003fffff); 1530fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1531fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 1532fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_TEUL); 1533fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 1534fcf5ef2aSThomas Huth tcg_temp_free(addr); 1535fcf5ef2aSThomas Huth tcg_temp_free(val); 1536fcf5ef2aSThomas Huth } 1537fcf5ef2aSThomas Huth return; 1538fcf5ef2aSThomas Huth case 0x00c3: /* movca.l R0,@Rm */ 1539fcf5ef2aSThomas Huth { 1540fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1541fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, REG(B11_8), ctx->memidx, MO_TEUL); 1542fcf5ef2aSThomas Huth gen_helper_movcal(cpu_env, REG(B11_8), val); 1543fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), REG(B11_8), ctx->memidx, MO_TEUL); 1544fcf5ef2aSThomas Huth } 1545fcf5ef2aSThomas Huth ctx->has_movcal = 1; 1546fcf5ef2aSThomas Huth return; 1547143021b2SAurelien Jarno case 0x40a9: /* movua.l @Rm,R0 */ 1548143021b2SAurelien Jarno /* Load non-boundary-aligned data */ 1549143021b2SAurelien Jarno if (ctx->features & SH_FEATURE_SH4A) { 155034257c21SAurelien Jarno tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, 155134257c21SAurelien Jarno MO_TEUL | MO_UNALN); 1552fcf5ef2aSThomas Huth return; 1553143021b2SAurelien Jarno } 1554143021b2SAurelien Jarno break; 1555143021b2SAurelien Jarno case 0x40e9: /* movua.l @Rm+,R0 */ 1556143021b2SAurelien Jarno /* Load non-boundary-aligned data */ 1557143021b2SAurelien Jarno if (ctx->features & SH_FEATURE_SH4A) { 155834257c21SAurelien Jarno tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, 155934257c21SAurelien Jarno MO_TEUL | MO_UNALN); 1560fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 1561fcf5ef2aSThomas Huth return; 1562143021b2SAurelien Jarno } 1563143021b2SAurelien Jarno break; 1564fcf5ef2aSThomas Huth case 0x0029: /* movt Rn */ 1565fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), cpu_sr_t); 1566fcf5ef2aSThomas Huth return; 1567fcf5ef2aSThomas Huth case 0x0073: 1568fcf5ef2aSThomas Huth /* MOVCO.L 1569fcf5ef2aSThomas Huth LDST -> T 1570fcf5ef2aSThomas Huth If (T == 1) R0 -> (Rn) 1571fcf5ef2aSThomas Huth 0 -> LDST 1572fcf5ef2aSThomas Huth */ 1573fcf5ef2aSThomas Huth if (ctx->features & SH_FEATURE_SH4A) { 1574fcf5ef2aSThomas Huth TCGLabel *label = gen_new_label(); 1575fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_sr_t, cpu_ldst); 1576fcf5ef2aSThomas Huth tcg_gen_brcondi_i32(TCG_COND_EQ, cpu_ldst, 0, label); 1577fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), REG(B11_8), ctx->memidx, MO_TEUL); 1578fcf5ef2aSThomas Huth gen_set_label(label); 1579fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_ldst, 0); 1580fcf5ef2aSThomas Huth return; 1581fcf5ef2aSThomas Huth } else 1582fcf5ef2aSThomas Huth break; 1583fcf5ef2aSThomas Huth case 0x0063: 1584fcf5ef2aSThomas Huth /* MOVLI.L @Rm,R0 1585fcf5ef2aSThomas Huth 1 -> LDST 1586fcf5ef2aSThomas Huth (Rm) -> R0 1587fcf5ef2aSThomas Huth When interrupt/exception 1588fcf5ef2aSThomas Huth occurred 0 -> LDST 1589fcf5ef2aSThomas Huth */ 1590fcf5ef2aSThomas Huth if (ctx->features & SH_FEATURE_SH4A) { 1591fcf5ef2aSThomas Huth 
tcg_gen_movi_i32(cpu_ldst, 0); 1592fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, MO_TESL); 1593fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_ldst, 1); 1594fcf5ef2aSThomas Huth return; 1595fcf5ef2aSThomas Huth } else 1596fcf5ef2aSThomas Huth break; 1597fcf5ef2aSThomas Huth case 0x0093: /* ocbi @Rn */ 1598fcf5ef2aSThomas Huth { 1599fcf5ef2aSThomas Huth gen_helper_ocbi(cpu_env, REG(B11_8)); 1600fcf5ef2aSThomas Huth } 1601fcf5ef2aSThomas Huth return; 1602fcf5ef2aSThomas Huth case 0x00a3: /* ocbp @Rn */ 1603fcf5ef2aSThomas Huth case 0x00b3: /* ocbwb @Rn */ 1604fcf5ef2aSThomas Huth /* These instructions are supposed to do nothing in case of 1605fcf5ef2aSThomas Huth a cache miss. Given that we only partially emulate caches 1606fcf5ef2aSThomas Huth it is safe to simply ignore them. */ 1607fcf5ef2aSThomas Huth return; 1608fcf5ef2aSThomas Huth case 0x0083: /* pref @Rn */ 1609fcf5ef2aSThomas Huth return; 1610fcf5ef2aSThomas Huth case 0x00d3: /* prefi @Rn */ 1611fcf5ef2aSThomas Huth if (ctx->features & SH_FEATURE_SH4A) 1612fcf5ef2aSThomas Huth return; 1613fcf5ef2aSThomas Huth else 1614fcf5ef2aSThomas Huth break; 1615fcf5ef2aSThomas Huth case 0x00e3: /* icbi @Rn */ 1616fcf5ef2aSThomas Huth if (ctx->features & SH_FEATURE_SH4A) 1617fcf5ef2aSThomas Huth return; 1618fcf5ef2aSThomas Huth else 1619fcf5ef2aSThomas Huth break; 1620fcf5ef2aSThomas Huth case 0x00ab: /* synco */ 1621aa351317SAurelien Jarno if (ctx->features & SH_FEATURE_SH4A) { 1622aa351317SAurelien Jarno tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 1623fcf5ef2aSThomas Huth return; 1624aa351317SAurelien Jarno } 1625fcf5ef2aSThomas Huth break; 1626fcf5ef2aSThomas Huth case 0x4024: /* rotcl Rn */ 1627fcf5ef2aSThomas Huth { 1628fcf5ef2aSThomas Huth TCGv tmp = tcg_temp_new(); 1629fcf5ef2aSThomas Huth tcg_gen_mov_i32(tmp, cpu_sr_t); 1630fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_t, REG(B11_8), 31); 1631fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1); 1632fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), REG(B11_8), tmp); 1633fcf5ef2aSThomas Huth tcg_temp_free(tmp); 1634fcf5ef2aSThomas Huth } 1635fcf5ef2aSThomas Huth return; 1636fcf5ef2aSThomas Huth case 0x4025: /* rotcr Rn */ 1637fcf5ef2aSThomas Huth { 1638fcf5ef2aSThomas Huth TCGv tmp = tcg_temp_new(); 1639fcf5ef2aSThomas Huth tcg_gen_shli_i32(tmp, cpu_sr_t, 31); 1640fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1641fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1); 1642fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), REG(B11_8), tmp); 1643fcf5ef2aSThomas Huth tcg_temp_free(tmp); 1644fcf5ef2aSThomas Huth } 1645fcf5ef2aSThomas Huth return; 1646fcf5ef2aSThomas Huth case 0x4004: /* rotl Rn */ 1647fcf5ef2aSThomas Huth tcg_gen_rotli_i32(REG(B11_8), REG(B11_8), 1); 1648fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 0); 1649fcf5ef2aSThomas Huth return; 1650fcf5ef2aSThomas Huth case 0x4005: /* rotr Rn */ 1651fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 0); 1652fcf5ef2aSThomas Huth tcg_gen_rotri_i32(REG(B11_8), REG(B11_8), 1); 1653fcf5ef2aSThomas Huth return; 1654fcf5ef2aSThomas Huth case 0x4000: /* shll Rn */ 1655fcf5ef2aSThomas Huth case 0x4020: /* shal Rn */ 1656fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_t, REG(B11_8), 31); 1657fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1); 1658fcf5ef2aSThomas Huth return; 1659fcf5ef2aSThomas Huth case 0x4021: /* shar Rn */ 1660fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1661fcf5ef2aSThomas Huth tcg_gen_sari_i32(REG(B11_8), REG(B11_8), 1); 
1662fcf5ef2aSThomas Huth return; 1663fcf5ef2aSThomas Huth case 0x4001: /* shlr Rn */ 1664fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1665fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1); 1666fcf5ef2aSThomas Huth return; 1667fcf5ef2aSThomas Huth case 0x4008: /* shll2 Rn */ 1668fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 2); 1669fcf5ef2aSThomas Huth return; 1670fcf5ef2aSThomas Huth case 0x4018: /* shll8 Rn */ 1671fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 8); 1672fcf5ef2aSThomas Huth return; 1673fcf5ef2aSThomas Huth case 0x4028: /* shll16 Rn */ 1674fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 16); 1675fcf5ef2aSThomas Huth return; 1676fcf5ef2aSThomas Huth case 0x4009: /* shlr2 Rn */ 1677fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 2); 1678fcf5ef2aSThomas Huth return; 1679fcf5ef2aSThomas Huth case 0x4019: /* shlr8 Rn */ 1680fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 8); 1681fcf5ef2aSThomas Huth return; 1682fcf5ef2aSThomas Huth case 0x4029: /* shlr16 Rn */ 1683fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 16); 1684fcf5ef2aSThomas Huth return; 1685fcf5ef2aSThomas Huth case 0x401b: /* tas.b @Rn */ 1686fcf5ef2aSThomas Huth { 1687cb32f179SAurelien Jarno TCGv val = tcg_const_i32(0x80); 1688cb32f179SAurelien Jarno tcg_gen_atomic_fetch_or_i32(val, REG(B11_8), val, 1689cb32f179SAurelien Jarno ctx->memidx, MO_UB); 1690fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0); 1691fcf5ef2aSThomas Huth tcg_temp_free(val); 1692fcf5ef2aSThomas Huth } 1693fcf5ef2aSThomas Huth return; 1694fcf5ef2aSThomas Huth case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */ 1695fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1696fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fpul); 1697fcf5ef2aSThomas Huth return; 1698fcf5ef2aSThomas Huth case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */ 1699fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1700fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_fpul, cpu_fregs[FREG(B11_8)]); 1701fcf5ef2aSThomas Huth return; 1702fcf5ef2aSThomas Huth case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */ 1703fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1704a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 1705fcf5ef2aSThomas Huth TCGv_i64 fp; 1706fcf5ef2aSThomas Huth if (ctx->opcode & 0x0100) 1707fcf5ef2aSThomas Huth break; /* illegal instruction */ 1708fcf5ef2aSThomas Huth fp = tcg_temp_new_i64(); 1709fcf5ef2aSThomas Huth gen_helper_float_DT(fp, cpu_env, cpu_fpul); 1710fcf5ef2aSThomas Huth gen_store_fpr64(fp, DREG(B11_8)); 1711fcf5ef2aSThomas Huth tcg_temp_free_i64(fp); 1712fcf5ef2aSThomas Huth } 1713fcf5ef2aSThomas Huth else { 1714fcf5ef2aSThomas Huth gen_helper_float_FT(cpu_fregs[FREG(B11_8)], cpu_env, cpu_fpul); 1715fcf5ef2aSThomas Huth } 1716fcf5ef2aSThomas Huth return; 1717fcf5ef2aSThomas Huth case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */ 1718fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1719a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 1720fcf5ef2aSThomas Huth TCGv_i64 fp; 1721fcf5ef2aSThomas Huth if (ctx->opcode & 0x0100) 1722fcf5ef2aSThomas Huth break; /* illegal instruction */ 1723fcf5ef2aSThomas Huth fp = tcg_temp_new_i64(); 1724fcf5ef2aSThomas Huth gen_load_fpr64(fp, DREG(B11_8)); 1725fcf5ef2aSThomas Huth gen_helper_ftrc_DT(cpu_fpul, cpu_env, fp); 1726fcf5ef2aSThomas Huth tcg_temp_free_i64(fp); 1727fcf5ef2aSThomas Huth } 1728fcf5ef2aSThomas Huth else { 1729fcf5ef2aSThomas Huth 
gen_helper_ftrc_FT(cpu_fpul, cpu_env, cpu_fregs[FREG(B11_8)]); 1730fcf5ef2aSThomas Huth } 1731fcf5ef2aSThomas Huth return; 1732fcf5ef2aSThomas Huth case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */ 1733fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 173482e82513SAurelien Jarno tcg_gen_xori_i32(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], 173582e82513SAurelien Jarno 0x80000000); 1736fcf5ef2aSThomas Huth return; 173757f5c1b0SAurelien Jarno case 0xf05d: /* fabs FRn/DRn - FPCSR: Nothing */ 1738fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 173957f5c1b0SAurelien Jarno tcg_gen_andi_i32(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], 174057f5c1b0SAurelien Jarno 0x7fffffff); 1741fcf5ef2aSThomas Huth return; 1742fcf5ef2aSThomas Huth case 0xf06d: /* fsqrt FRn */ 1743fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1744a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 1745fcf5ef2aSThomas Huth if (ctx->opcode & 0x0100) 1746fcf5ef2aSThomas Huth break; /* illegal instruction */ 1747fcf5ef2aSThomas Huth TCGv_i64 fp = tcg_temp_new_i64(); 1748fcf5ef2aSThomas Huth gen_load_fpr64(fp, DREG(B11_8)); 1749fcf5ef2aSThomas Huth gen_helper_fsqrt_DT(fp, cpu_env, fp); 1750fcf5ef2aSThomas Huth gen_store_fpr64(fp, DREG(B11_8)); 1751fcf5ef2aSThomas Huth tcg_temp_free_i64(fp); 1752fcf5ef2aSThomas Huth } else { 1753fcf5ef2aSThomas Huth gen_helper_fsqrt_FT(cpu_fregs[FREG(B11_8)], cpu_env, 1754fcf5ef2aSThomas Huth cpu_fregs[FREG(B11_8)]); 1755fcf5ef2aSThomas Huth } 1756fcf5ef2aSThomas Huth return; 1757fcf5ef2aSThomas Huth case 0xf07d: /* fsrra FRn */ 1758fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1759fcf5ef2aSThomas Huth break; 1760fcf5ef2aSThomas Huth case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */ 1761fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1762a6215749SAurelien Jarno if (!(ctx->tbflags & FPSCR_PR)) { 1763fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0); 1764fcf5ef2aSThomas Huth } 1765fcf5ef2aSThomas Huth return; 1766fcf5ef2aSThomas Huth case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */ 1767fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1768a6215749SAurelien Jarno if (!(ctx->tbflags & FPSCR_PR)) { 1769fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0x3f800000); 1770fcf5ef2aSThomas Huth } 1771fcf5ef2aSThomas Huth return; 1772fcf5ef2aSThomas Huth case 0xf0ad: /* fcnvsd FPUL,DRn */ 1773fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1774fcf5ef2aSThomas Huth { 1775fcf5ef2aSThomas Huth TCGv_i64 fp = tcg_temp_new_i64(); 1776fcf5ef2aSThomas Huth gen_helper_fcnvsd_FT_DT(fp, cpu_env, cpu_fpul); 1777fcf5ef2aSThomas Huth gen_store_fpr64(fp, DREG(B11_8)); 1778fcf5ef2aSThomas Huth tcg_temp_free_i64(fp); 1779fcf5ef2aSThomas Huth } 1780fcf5ef2aSThomas Huth return; 1781fcf5ef2aSThomas Huth case 0xf0bd: /* fcnvds DRn,FPUL */ 1782fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1783fcf5ef2aSThomas Huth { 1784fcf5ef2aSThomas Huth TCGv_i64 fp = tcg_temp_new_i64(); 1785fcf5ef2aSThomas Huth gen_load_fpr64(fp, DREG(B11_8)); 1786fcf5ef2aSThomas Huth gen_helper_fcnvds_DT_FT(cpu_fpul, cpu_env, fp); 1787fcf5ef2aSThomas Huth tcg_temp_free_i64(fp); 1788fcf5ef2aSThomas Huth } 1789fcf5ef2aSThomas Huth return; 1790fcf5ef2aSThomas Huth case 0xf0ed: /* fipr FVm,FVn */ 1791fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1792a6215749SAurelien Jarno if ((ctx->tbflags & FPSCR_PR) == 0) { 1793fcf5ef2aSThomas Huth TCGv m, n; 1794fcf5ef2aSThomas Huth m = tcg_const_i32((ctx->opcode >> 8) & 3); 1795fcf5ef2aSThomas Huth n = tcg_const_i32((ctx->opcode >> 10) & 3); 1796fcf5ef2aSThomas Huth gen_helper_fipr(cpu_env, m, n); 1797fcf5ef2aSThomas Huth tcg_temp_free(m); 1798fcf5ef2aSThomas Huth tcg_temp_free(n); 
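            /* The helper computes the four-element single-precision inner
               product of vectors FVm and FVn. */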
1799fcf5ef2aSThomas Huth return; 1800fcf5ef2aSThomas Huth } 1801fcf5ef2aSThomas Huth break; 1802fcf5ef2aSThomas Huth case 0xf0fd: /* ftrv XMTRX,FVn */ 1803fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1804fcf5ef2aSThomas Huth if ((ctx->opcode & 0x0300) == 0x0100 && 1805a6215749SAurelien Jarno (ctx->tbflags & FPSCR_PR) == 0) { 1806fcf5ef2aSThomas Huth TCGv n; 1807fcf5ef2aSThomas Huth n = tcg_const_i32((ctx->opcode >> 10) & 3); 1808fcf5ef2aSThomas Huth gen_helper_ftrv(cpu_env, n); 1809fcf5ef2aSThomas Huth tcg_temp_free(n); 1810fcf5ef2aSThomas Huth return; 1811fcf5ef2aSThomas Huth } 1812fcf5ef2aSThomas Huth break; 1813fcf5ef2aSThomas Huth } 1814fcf5ef2aSThomas Huth #if 0 1815fcf5ef2aSThomas Huth fprintf(stderr, "unknown instruction 0x%04x at pc 0x%08x\n", 1816fcf5ef2aSThomas Huth ctx->opcode, ctx->pc); 1817fcf5ef2aSThomas Huth fflush(stderr); 1818fcf5ef2aSThomas Huth #endif 1819ac9707eaSAurelien Jarno gen_save_cpu_state(ctx, true); 18209a562ae7SAurelien Jarno if (ctx->envflags & DELAY_SLOT_MASK) { 1821fcf5ef2aSThomas Huth gen_helper_raise_slot_illegal_instruction(cpu_env); 1822fcf5ef2aSThomas Huth } else { 1823fcf5ef2aSThomas Huth gen_helper_raise_illegal_instruction(cpu_env); 1824fcf5ef2aSThomas Huth } 182563205665SAurelien Jarno ctx->bstate = BS_EXCP; 1826fcf5ef2aSThomas Huth } 1827fcf5ef2aSThomas Huth 1828fcf5ef2aSThomas Huth static void decode_opc(DisasContext * ctx) 1829fcf5ef2aSThomas Huth { 1830a6215749SAurelien Jarno uint32_t old_flags = ctx->envflags; 1831fcf5ef2aSThomas Huth 1832fcf5ef2aSThomas Huth _decode_opc(ctx); 1833fcf5ef2aSThomas Huth 18349a562ae7SAurelien Jarno if (old_flags & DELAY_SLOT_MASK) { 1835fcf5ef2aSThomas Huth /* go out of the delay slot */ 18369a562ae7SAurelien Jarno ctx->envflags &= ~DELAY_SLOT_MASK; 18374bfa602bSRichard Henderson 18384bfa602bSRichard Henderson /* When in an exclusive region, we must continue to the end 18394bfa602bSRichard Henderson for conditional branches. */ 18404bfa602bSRichard Henderson if (ctx->tbflags & GUSA_EXCLUSIVE 18414bfa602bSRichard Henderson && old_flags & DELAY_SLOT_CONDITIONAL) { 18424bfa602bSRichard Henderson gen_delayed_conditional_jump(ctx); 18434bfa602bSRichard Henderson return; 18444bfa602bSRichard Henderson } 18454bfa602bSRichard Henderson /* Otherwise this is probably an invalid gUSA region. 18464bfa602bSRichard Henderson Drop the GUSA bits so the next TB doesn't see them. */ 18474bfa602bSRichard Henderson ctx->envflags &= ~GUSA_MASK; 18484bfa602bSRichard Henderson 1849ac9707eaSAurelien Jarno tcg_gen_movi_i32(cpu_flags, ctx->envflags); 1850fcf5ef2aSThomas Huth ctx->bstate = BS_BRANCH; 1851fcf5ef2aSThomas Huth if (old_flags & DELAY_SLOT_CONDITIONAL) { 1852fcf5ef2aSThomas Huth gen_delayed_conditional_jump(ctx); 1853be53081aSAurelien Jarno } else { 1854fcf5ef2aSThomas Huth gen_jump(ctx); 1855fcf5ef2aSThomas Huth } 18564bfa602bSRichard Henderson } 18574bfa602bSRichard Henderson } 1858fcf5ef2aSThomas Huth 18594bfa602bSRichard Henderson #ifdef CONFIG_USER_ONLY 18604bfa602bSRichard Henderson /* For uniprocessors, SH4 uses optimistic restartable atomic sequences. 18614bfa602bSRichard Henderson Upon an interrupt, a real kernel would simply notice magic values in 18624bfa602bSRichard Henderson the registers and reset the PC to the start of the sequence. 18634bfa602bSRichard Henderson 18644bfa602bSRichard Henderson For QEMU, we cannot do this in quite the same way. Instead, we notice 18654bfa602bSRichard Henderson the normal start of such a sequence (mov #-x,r15). 
While we can handle 18664bfa602bSRichard Henderson any sequence via cpu_exec_step_atomic, we can recognize the "normal" 18674bfa602bSRichard Henderson sequences and transform them into atomic operations as seen by the host. 18684bfa602bSRichard Henderson */ 18694bfa602bSRichard Henderson static int decode_gusa(DisasContext *ctx, CPUSH4State *env, int *pmax_insns) 18704bfa602bSRichard Henderson { 1871d6a6cffdSRichard Henderson uint16_t insns[5]; 1872d6a6cffdSRichard Henderson int ld_adr, ld_dst, ld_mop; 1873d6a6cffdSRichard Henderson int op_dst, op_src, op_opc; 1874d6a6cffdSRichard Henderson int mv_src, mt_dst, st_src, st_mop; 1875d6a6cffdSRichard Henderson TCGv op_arg; 1876d6a6cffdSRichard Henderson 18774bfa602bSRichard Henderson uint32_t pc = ctx->pc; 18784bfa602bSRichard Henderson uint32_t pc_end = ctx->tb->cs_base; 18794bfa602bSRichard Henderson int backup = sextract32(ctx->tbflags, GUSA_SHIFT, 8); 18804bfa602bSRichard Henderson int max_insns = (pc_end - pc) / 2; 1881d6a6cffdSRichard Henderson int i; 18824bfa602bSRichard Henderson 18834bfa602bSRichard Henderson if (pc != pc_end + backup || max_insns < 2) { 18844bfa602bSRichard Henderson /* This is a malformed gUSA region. Don't do anything special, 18854bfa602bSRichard Henderson since the interpreter is likely to get confused. */ 18864bfa602bSRichard Henderson ctx->envflags &= ~GUSA_MASK; 18874bfa602bSRichard Henderson return 0; 1888fcf5ef2aSThomas Huth } 18894bfa602bSRichard Henderson 18904bfa602bSRichard Henderson if (ctx->tbflags & GUSA_EXCLUSIVE) { 18914bfa602bSRichard Henderson /* Regardless of single-stepping or the end of the page, 18924bfa602bSRichard Henderson we must complete execution of the gUSA region while 18934bfa602bSRichard Henderson holding the exclusive lock. */ 18944bfa602bSRichard Henderson *pmax_insns = max_insns; 18954bfa602bSRichard Henderson return 0; 1896fcf5ef2aSThomas Huth } 1897fcf5ef2aSThomas Huth 1898d6a6cffdSRichard Henderson /* The state machine below will consume only a few insns. 1899d6a6cffdSRichard Henderson If there are more than that in a region, fail now. */ 1900d6a6cffdSRichard Henderson if (max_insns > ARRAY_SIZE(insns)) { 1901d6a6cffdSRichard Henderson goto fail; 1902d6a6cffdSRichard Henderson } 1903d6a6cffdSRichard Henderson 1904d6a6cffdSRichard Henderson /* Read all of the insns for the region. */ 1905d6a6cffdSRichard Henderson for (i = 0; i < max_insns; ++i) { 1906d6a6cffdSRichard Henderson insns[i] = cpu_lduw_code(env, pc + i * 2); 1907d6a6cffdSRichard Henderson } 1908d6a6cffdSRichard Henderson 1909d6a6cffdSRichard Henderson ld_adr = ld_dst = ld_mop = -1; 1910d6a6cffdSRichard Henderson mv_src = -1; 1911d6a6cffdSRichard Henderson op_dst = op_src = op_opc = -1; 1912d6a6cffdSRichard Henderson mt_dst = -1; 1913d6a6cffdSRichard Henderson st_src = st_mop = -1; 1914d6a6cffdSRichard Henderson TCGV_UNUSED(op_arg); 1915d6a6cffdSRichard Henderson i = 0; 1916d6a6cffdSRichard Henderson 1917d6a6cffdSRichard Henderson #define NEXT_INSN \ 1918d6a6cffdSRichard Henderson do { if (i >= max_insns) goto fail; ctx->opcode = insns[i++]; } while (0) 1919d6a6cffdSRichard Henderson 1920d6a6cffdSRichard Henderson /* 1921d6a6cffdSRichard Henderson * Expect a load to begin the region. 
1922d6a6cffdSRichard Henderson */ 1923d6a6cffdSRichard Henderson NEXT_INSN; 1924d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) { 1925d6a6cffdSRichard Henderson case 0x6000: /* mov.b @Rm,Rn */ 1926d6a6cffdSRichard Henderson ld_mop = MO_SB; 1927d6a6cffdSRichard Henderson break; 1928d6a6cffdSRichard Henderson case 0x6001: /* mov.w @Rm,Rn */ 1929d6a6cffdSRichard Henderson ld_mop = MO_TESW; 1930d6a6cffdSRichard Henderson break; 1931d6a6cffdSRichard Henderson case 0x6002: /* mov.l @Rm,Rn */ 1932d6a6cffdSRichard Henderson ld_mop = MO_TESL; 1933d6a6cffdSRichard Henderson break; 1934d6a6cffdSRichard Henderson default: 1935d6a6cffdSRichard Henderson goto fail; 1936d6a6cffdSRichard Henderson } 1937d6a6cffdSRichard Henderson ld_adr = B7_4; 1938d6a6cffdSRichard Henderson ld_dst = B11_8; 1939d6a6cffdSRichard Henderson if (ld_adr == ld_dst) { 1940d6a6cffdSRichard Henderson goto fail; 1941d6a6cffdSRichard Henderson } 1942d6a6cffdSRichard Henderson /* Unless we see a mov, any two-operand operation must use ld_dst. */ 1943d6a6cffdSRichard Henderson op_dst = ld_dst; 1944d6a6cffdSRichard Henderson 1945d6a6cffdSRichard Henderson /* 1946d6a6cffdSRichard Henderson * Expect an optional register move. 1947d6a6cffdSRichard Henderson */ 1948d6a6cffdSRichard Henderson NEXT_INSN; 1949d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) { 1950d6a6cffdSRichard Henderson case 0x6003: /* mov Rm,Rn */ 1951d6a6cffdSRichard Henderson /* Here we want to recognize ld_dst being saved for later consumtion, 1952d6a6cffdSRichard Henderson or for another input register being copied so that ld_dst need not 1953d6a6cffdSRichard Henderson be clobbered during the operation. */ 1954d6a6cffdSRichard Henderson op_dst = B11_8; 1955d6a6cffdSRichard Henderson mv_src = B7_4; 1956d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 1957d6a6cffdSRichard Henderson /* Overwriting the load output. */ 1958d6a6cffdSRichard Henderson goto fail; 1959d6a6cffdSRichard Henderson } 1960d6a6cffdSRichard Henderson if (mv_src != ld_dst) { 1961d6a6cffdSRichard Henderson /* Copying a new input; constrain op_src to match the load. */ 1962d6a6cffdSRichard Henderson op_src = ld_dst; 1963d6a6cffdSRichard Henderson } 1964d6a6cffdSRichard Henderson break; 1965d6a6cffdSRichard Henderson 1966d6a6cffdSRichard Henderson default: 1967d6a6cffdSRichard Henderson /* Put back and re-examine as operation. */ 1968d6a6cffdSRichard Henderson --i; 1969d6a6cffdSRichard Henderson } 1970d6a6cffdSRichard Henderson 1971d6a6cffdSRichard Henderson /* 1972d6a6cffdSRichard Henderson * Expect the operation. 1973d6a6cffdSRichard Henderson */ 1974d6a6cffdSRichard Henderson NEXT_INSN; 1975d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) { 1976d6a6cffdSRichard Henderson case 0x300c: /* add Rm,Rn */ 1977d6a6cffdSRichard Henderson op_opc = INDEX_op_add_i32; 1978d6a6cffdSRichard Henderson goto do_reg_op; 1979d6a6cffdSRichard Henderson case 0x2009: /* and Rm,Rn */ 1980d6a6cffdSRichard Henderson op_opc = INDEX_op_and_i32; 1981d6a6cffdSRichard Henderson goto do_reg_op; 1982d6a6cffdSRichard Henderson case 0x200a: /* xor Rm,Rn */ 1983d6a6cffdSRichard Henderson op_opc = INDEX_op_xor_i32; 1984d6a6cffdSRichard Henderson goto do_reg_op; 1985d6a6cffdSRichard Henderson case 0x200b: /* or Rm,Rn */ 1986d6a6cffdSRichard Henderson op_opc = INDEX_op_or_i32; 1987d6a6cffdSRichard Henderson do_reg_op: 1988d6a6cffdSRichard Henderson /* The operation register should be as expected, and the 1989d6a6cffdSRichard Henderson other input cannot depend on the load. 
*/ 1990d6a6cffdSRichard Henderson if (op_dst != B11_8) { 1991d6a6cffdSRichard Henderson goto fail; 1992d6a6cffdSRichard Henderson } 1993d6a6cffdSRichard Henderson if (op_src < 0) { 1994d6a6cffdSRichard Henderson /* Unconstrainted input. */ 1995d6a6cffdSRichard Henderson op_src = B7_4; 1996d6a6cffdSRichard Henderson } else if (op_src == B7_4) { 1997d6a6cffdSRichard Henderson /* Constrained input matched load. All operations are 1998d6a6cffdSRichard Henderson commutative; "swap" them by "moving" the load output 1999d6a6cffdSRichard Henderson to the (implicit) first argument and the move source 2000d6a6cffdSRichard Henderson to the (explicit) second argument. */ 2001d6a6cffdSRichard Henderson op_src = mv_src; 2002d6a6cffdSRichard Henderson } else { 2003d6a6cffdSRichard Henderson goto fail; 2004d6a6cffdSRichard Henderson } 2005d6a6cffdSRichard Henderson op_arg = REG(op_src); 2006d6a6cffdSRichard Henderson break; 2007d6a6cffdSRichard Henderson 2008d6a6cffdSRichard Henderson case 0x6007: /* not Rm,Rn */ 2009d6a6cffdSRichard Henderson if (ld_dst != B7_4 || mv_src >= 0) { 2010d6a6cffdSRichard Henderson goto fail; 2011d6a6cffdSRichard Henderson } 2012d6a6cffdSRichard Henderson op_dst = B11_8; 2013d6a6cffdSRichard Henderson op_opc = INDEX_op_xor_i32; 2014d6a6cffdSRichard Henderson op_arg = tcg_const_i32(-1); 2015d6a6cffdSRichard Henderson break; 2016d6a6cffdSRichard Henderson 2017d6a6cffdSRichard Henderson case 0x7000 ... 0x700f: /* add #imm,Rn */ 2018d6a6cffdSRichard Henderson if (op_dst != B11_8 || mv_src >= 0) { 2019d6a6cffdSRichard Henderson goto fail; 2020d6a6cffdSRichard Henderson } 2021d6a6cffdSRichard Henderson op_opc = INDEX_op_add_i32; 2022d6a6cffdSRichard Henderson op_arg = tcg_const_i32(B7_0s); 2023d6a6cffdSRichard Henderson break; 2024d6a6cffdSRichard Henderson 2025d6a6cffdSRichard Henderson case 0x3000: /* cmp/eq Rm,Rn */ 2026d6a6cffdSRichard Henderson /* Looking for the middle of a compare-and-swap sequence, 2027d6a6cffdSRichard Henderson beginning with the compare. Operands can be either order, 2028d6a6cffdSRichard Henderson but with only one overlapping the load. */ 2029d6a6cffdSRichard Henderson if ((ld_dst == B11_8) + (ld_dst == B7_4) != 1 || mv_src >= 0) { 2030d6a6cffdSRichard Henderson goto fail; 2031d6a6cffdSRichard Henderson } 2032d6a6cffdSRichard Henderson op_opc = INDEX_op_setcond_i32; /* placeholder */ 2033d6a6cffdSRichard Henderson op_src = (ld_dst == B11_8 ? B7_4 : B11_8); 2034d6a6cffdSRichard Henderson op_arg = REG(op_src); 2035d6a6cffdSRichard Henderson 2036d6a6cffdSRichard Henderson NEXT_INSN; 2037d6a6cffdSRichard Henderson switch (ctx->opcode & 0xff00) { 2038d6a6cffdSRichard Henderson case 0x8b00: /* bf label */ 2039d6a6cffdSRichard Henderson case 0x8f00: /* bf/s label */ 2040d6a6cffdSRichard Henderson if (pc + (i + 1 + B7_0s) * 2 != pc_end) { 2041d6a6cffdSRichard Henderson goto fail; 2042d6a6cffdSRichard Henderson } 2043d6a6cffdSRichard Henderson if ((ctx->opcode & 0xff00) == 0x8b00) { /* bf label */ 2044d6a6cffdSRichard Henderson break; 2045d6a6cffdSRichard Henderson } 2046d6a6cffdSRichard Henderson /* We're looking to unconditionally modify Rn with the 2047d6a6cffdSRichard Henderson result of the comparison, within the delay slot of 2048d6a6cffdSRichard Henderson the branch. This is used by older gcc. 
*/ 2049d6a6cffdSRichard Henderson NEXT_INSN; 2050d6a6cffdSRichard Henderson if ((ctx->opcode & 0xf0ff) == 0x0029) { /* movt Rn */ 2051d6a6cffdSRichard Henderson mt_dst = B11_8; 2052d6a6cffdSRichard Henderson } else { 2053d6a6cffdSRichard Henderson goto fail; 2054d6a6cffdSRichard Henderson } 2055d6a6cffdSRichard Henderson break; 2056d6a6cffdSRichard Henderson 2057d6a6cffdSRichard Henderson default: 2058d6a6cffdSRichard Henderson goto fail; 2059d6a6cffdSRichard Henderson } 2060d6a6cffdSRichard Henderson break; 2061d6a6cffdSRichard Henderson 2062d6a6cffdSRichard Henderson case 0x2008: /* tst Rm,Rn */ 2063d6a6cffdSRichard Henderson /* Looking for a compare-and-swap against zero. */ 2064d6a6cffdSRichard Henderson if (ld_dst != B11_8 || ld_dst != B7_4 || mv_src >= 0) { 2065d6a6cffdSRichard Henderson goto fail; 2066d6a6cffdSRichard Henderson } 2067d6a6cffdSRichard Henderson op_opc = INDEX_op_setcond_i32; 2068d6a6cffdSRichard Henderson op_arg = tcg_const_i32(0); 2069d6a6cffdSRichard Henderson 2070d6a6cffdSRichard Henderson NEXT_INSN; 2071d6a6cffdSRichard Henderson if ((ctx->opcode & 0xff00) != 0x8900 /* bt label */ 2072d6a6cffdSRichard Henderson || pc + (i + 1 + B7_0s) * 2 != pc_end) { 2073d6a6cffdSRichard Henderson goto fail; 2074d6a6cffdSRichard Henderson } 2075d6a6cffdSRichard Henderson break; 2076d6a6cffdSRichard Henderson 2077d6a6cffdSRichard Henderson default: 2078d6a6cffdSRichard Henderson /* Put back and re-examine as store. */ 2079d6a6cffdSRichard Henderson --i; 2080d6a6cffdSRichard Henderson } 2081d6a6cffdSRichard Henderson 2082d6a6cffdSRichard Henderson /* 2083d6a6cffdSRichard Henderson * Expect the store. 2084d6a6cffdSRichard Henderson */ 2085d6a6cffdSRichard Henderson /* The store must be the last insn. */ 2086d6a6cffdSRichard Henderson if (i != max_insns - 1) { 2087d6a6cffdSRichard Henderson goto fail; 2088d6a6cffdSRichard Henderson } 2089d6a6cffdSRichard Henderson NEXT_INSN; 2090d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) { 2091d6a6cffdSRichard Henderson case 0x2000: /* mov.b Rm,@Rn */ 2092d6a6cffdSRichard Henderson st_mop = MO_UB; 2093d6a6cffdSRichard Henderson break; 2094d6a6cffdSRichard Henderson case 0x2001: /* mov.w Rm,@Rn */ 2095d6a6cffdSRichard Henderson st_mop = MO_UW; 2096d6a6cffdSRichard Henderson break; 2097d6a6cffdSRichard Henderson case 0x2002: /* mov.l Rm,@Rn */ 2098d6a6cffdSRichard Henderson st_mop = MO_UL; 2099d6a6cffdSRichard Henderson break; 2100d6a6cffdSRichard Henderson default: 2101d6a6cffdSRichard Henderson goto fail; 2102d6a6cffdSRichard Henderson } 2103d6a6cffdSRichard Henderson /* The store must match the load. */ 2104d6a6cffdSRichard Henderson if (ld_adr != B11_8 || st_mop != (ld_mop & MO_SIZE)) { 2105d6a6cffdSRichard Henderson goto fail; 2106d6a6cffdSRichard Henderson } 2107d6a6cffdSRichard Henderson st_src = B7_4; 2108d6a6cffdSRichard Henderson 2109d6a6cffdSRichard Henderson #undef NEXT_INSN 2110d6a6cffdSRichard Henderson 2111d6a6cffdSRichard Henderson /* 2112d6a6cffdSRichard Henderson * Emit the operation. 2113d6a6cffdSRichard Henderson */ 2114d6a6cffdSRichard Henderson tcg_gen_insn_start(pc, ctx->envflags); 2115d6a6cffdSRichard Henderson switch (op_opc) { 2116d6a6cffdSRichard Henderson case -1: 2117d6a6cffdSRichard Henderson /* No operation found. Look for exchange pattern. 
*/ 2118d6a6cffdSRichard Henderson if (st_src == ld_dst || mv_src >= 0) { 2119d6a6cffdSRichard Henderson goto fail; 2120d6a6cffdSRichard Henderson } 2121d6a6cffdSRichard Henderson tcg_gen_atomic_xchg_i32(REG(ld_dst), REG(ld_adr), REG(st_src), 2122d6a6cffdSRichard Henderson ctx->memidx, ld_mop); 2123d6a6cffdSRichard Henderson break; 2124d6a6cffdSRichard Henderson 2125d6a6cffdSRichard Henderson case INDEX_op_add_i32: 2126d6a6cffdSRichard Henderson if (op_dst != st_src) { 2127d6a6cffdSRichard Henderson goto fail; 2128d6a6cffdSRichard Henderson } 2129d6a6cffdSRichard Henderson if (op_dst == ld_dst && st_mop == MO_UL) { 2130d6a6cffdSRichard Henderson tcg_gen_atomic_add_fetch_i32(REG(ld_dst), REG(ld_adr), 2131d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2132d6a6cffdSRichard Henderson } else { 2133d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_add_i32(REG(ld_dst), REG(ld_adr), 2134d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2135d6a6cffdSRichard Henderson if (op_dst != ld_dst) { 2136d6a6cffdSRichard Henderson /* Note that mop sizes < 4 cannot use add_fetch 2137d6a6cffdSRichard Henderson because it won't carry into the higher bits. */ 2138d6a6cffdSRichard Henderson tcg_gen_add_i32(REG(op_dst), REG(ld_dst), op_arg); 2139d6a6cffdSRichard Henderson } 2140d6a6cffdSRichard Henderson } 2141d6a6cffdSRichard Henderson break; 2142d6a6cffdSRichard Henderson 2143d6a6cffdSRichard Henderson case INDEX_op_and_i32: 2144d6a6cffdSRichard Henderson if (op_dst != st_src) { 2145d6a6cffdSRichard Henderson goto fail; 2146d6a6cffdSRichard Henderson } 2147d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 2148d6a6cffdSRichard Henderson tcg_gen_atomic_and_fetch_i32(REG(ld_dst), REG(ld_adr), 2149d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2150d6a6cffdSRichard Henderson } else { 2151d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_and_i32(REG(ld_dst), REG(ld_adr), 2152d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2153d6a6cffdSRichard Henderson tcg_gen_and_i32(REG(op_dst), REG(ld_dst), op_arg); 2154d6a6cffdSRichard Henderson } 2155d6a6cffdSRichard Henderson break; 2156d6a6cffdSRichard Henderson 2157d6a6cffdSRichard Henderson case INDEX_op_or_i32: 2158d6a6cffdSRichard Henderson if (op_dst != st_src) { 2159d6a6cffdSRichard Henderson goto fail; 2160d6a6cffdSRichard Henderson } 2161d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 2162d6a6cffdSRichard Henderson tcg_gen_atomic_or_fetch_i32(REG(ld_dst), REG(ld_adr), 2163d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2164d6a6cffdSRichard Henderson } else { 2165d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_or_i32(REG(ld_dst), REG(ld_adr), 2166d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2167d6a6cffdSRichard Henderson tcg_gen_or_i32(REG(op_dst), REG(ld_dst), op_arg); 2168d6a6cffdSRichard Henderson } 2169d6a6cffdSRichard Henderson break; 2170d6a6cffdSRichard Henderson 2171d6a6cffdSRichard Henderson case INDEX_op_xor_i32: 2172d6a6cffdSRichard Henderson if (op_dst != st_src) { 2173d6a6cffdSRichard Henderson goto fail; 2174d6a6cffdSRichard Henderson } 2175d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 2176d6a6cffdSRichard Henderson tcg_gen_atomic_xor_fetch_i32(REG(ld_dst), REG(ld_adr), 2177d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2178d6a6cffdSRichard Henderson } else { 2179d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_xor_i32(REG(ld_dst), REG(ld_adr), 2180d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2181d6a6cffdSRichard Henderson tcg_gen_xor_i32(REG(op_dst), REG(ld_dst), op_arg); 
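            /* As in the and/or cases above: ld_dst receives the value fetched
               from memory, and the xor result is recomputed into op_dst. */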
2182d6a6cffdSRichard Henderson } 2183d6a6cffdSRichard Henderson break; 2184d6a6cffdSRichard Henderson 2185d6a6cffdSRichard Henderson case INDEX_op_setcond_i32: 2186d6a6cffdSRichard Henderson if (st_src == ld_dst) { 2187d6a6cffdSRichard Henderson goto fail; 2188d6a6cffdSRichard Henderson } 2189d6a6cffdSRichard Henderson tcg_gen_atomic_cmpxchg_i32(REG(ld_dst), REG(ld_adr), op_arg, 2190d6a6cffdSRichard Henderson REG(st_src), ctx->memidx, ld_mop); 2191d6a6cffdSRichard Henderson tcg_gen_setcond_i32(TCG_COND_EQ, cpu_sr_t, REG(ld_dst), op_arg); 2192d6a6cffdSRichard Henderson if (mt_dst >= 0) { 2193d6a6cffdSRichard Henderson tcg_gen_mov_i32(REG(mt_dst), cpu_sr_t); 2194d6a6cffdSRichard Henderson } 2195d6a6cffdSRichard Henderson break; 2196d6a6cffdSRichard Henderson 2197d6a6cffdSRichard Henderson default: 2198d6a6cffdSRichard Henderson g_assert_not_reached(); 2199d6a6cffdSRichard Henderson } 2200d6a6cffdSRichard Henderson 2201d6a6cffdSRichard Henderson /* If op_src is not a valid register, then op_arg was a constant. */ 2202d6a6cffdSRichard Henderson if (op_src < 0) { 2203d6a6cffdSRichard Henderson tcg_temp_free_i32(op_arg); 2204d6a6cffdSRichard Henderson } 2205d6a6cffdSRichard Henderson 2206d6a6cffdSRichard Henderson /* The entire region has been translated. */ 2207d6a6cffdSRichard Henderson ctx->envflags &= ~GUSA_MASK; 2208d6a6cffdSRichard Henderson ctx->pc = pc_end; 2209d6a6cffdSRichard Henderson return max_insns; 2210d6a6cffdSRichard Henderson 2211d6a6cffdSRichard Henderson fail: 22124bfa602bSRichard Henderson qemu_log_mask(LOG_UNIMP, "Unrecognized gUSA sequence %08x-%08x\n", 22134bfa602bSRichard Henderson pc, pc_end); 22144bfa602bSRichard Henderson 22154bfa602bSRichard Henderson /* Restart with the EXCLUSIVE bit set, within a TB run via 22164bfa602bSRichard Henderson cpu_exec_step_atomic holding the exclusive lock. */ 22174bfa602bSRichard Henderson tcg_gen_insn_start(pc, ctx->envflags); 22184bfa602bSRichard Henderson ctx->envflags |= GUSA_EXCLUSIVE; 22194bfa602bSRichard Henderson gen_save_cpu_state(ctx, false); 22204bfa602bSRichard Henderson gen_helper_exclusive(cpu_env); 22214bfa602bSRichard Henderson ctx->bstate = BS_EXCP; 22224bfa602bSRichard Henderson 22234bfa602bSRichard Henderson /* We're not executing an instruction, but we must report one for the 22244bfa602bSRichard Henderson purposes of accounting within the TB. We might as well report the 22254bfa602bSRichard Henderson entire region consumed via ctx->pc so that it's immediately available 22264bfa602bSRichard Henderson in the disassembly dump. */ 22274bfa602bSRichard Henderson ctx->pc = pc_end; 22284bfa602bSRichard Henderson return 1; 22294bfa602bSRichard Henderson } 22304bfa602bSRichard Henderson #endif 22314bfa602bSRichard Henderson 2232fcf5ef2aSThomas Huth void gen_intermediate_code(CPUSH4State * env, struct TranslationBlock *tb) 2233fcf5ef2aSThomas Huth { 2234fcf5ef2aSThomas Huth SuperHCPU *cpu = sh_env_get_cpu(env); 2235fcf5ef2aSThomas Huth CPUState *cs = CPU(cpu); 2236fcf5ef2aSThomas Huth DisasContext ctx; 2237fcf5ef2aSThomas Huth target_ulong pc_start; 2238fcf5ef2aSThomas Huth int num_insns; 2239fcf5ef2aSThomas Huth int max_insns; 2240fcf5ef2aSThomas Huth 2241fcf5ef2aSThomas Huth pc_start = tb->pc; 2242fcf5ef2aSThomas Huth ctx.pc = pc_start; 2243a6215749SAurelien Jarno ctx.tbflags = (uint32_t)tb->flags; 2244e1933d14SRichard Henderson ctx.envflags = tb->flags & TB_FLAG_ENVFLAGS_MASK; 2245fcf5ef2aSThomas Huth ctx.bstate = BS_NONE; 2246a6215749SAurelien Jarno ctx.memidx = (ctx.tbflags & (1u << SR_MD)) == 0 ? 
2247fcf5ef2aSThomas Huth /* We don't know if the delayed pc came from a dynamic or static branch,
2248fcf5ef2aSThomas Huth so assume it is a dynamic branch. */
2249fcf5ef2aSThomas Huth ctx.delayed_pc = -1; /* use delayed pc from env pointer */
2250fcf5ef2aSThomas Huth ctx.tb = tb;
2251fcf5ef2aSThomas Huth ctx.singlestep_enabled = cs->singlestep_enabled;
2252fcf5ef2aSThomas Huth ctx.features = env->features;
2253a6215749SAurelien Jarno ctx.has_movcal = (ctx.tbflags & TB_FLAG_PENDING_MOVCA);
2254*3a3bb8d2SRichard Henderson ctx.gbank = ((ctx.tbflags & (1 << SR_MD)) &&
2255*3a3bb8d2SRichard Henderson (ctx.tbflags & (1 << SR_RB))) * 0x10;
2256fcf5ef2aSThomas Huth
2257fcf5ef2aSThomas Huth max_insns = tb->cflags & CF_COUNT_MASK;
2258fcf5ef2aSThomas Huth if (max_insns == 0) {
2259fcf5ef2aSThomas Huth max_insns = CF_COUNT_MASK;
2260fcf5ef2aSThomas Huth }
22614448a836SRichard Henderson max_insns = MIN(max_insns, TCG_MAX_INSNS);
22624448a836SRichard Henderson
22634448a836SRichard Henderson /* Since the ISA is fixed-width, we can bound by the number
22644448a836SRichard Henderson of instructions remaining on the page. */
22654448a836SRichard Henderson num_insns = -(ctx.pc | TARGET_PAGE_MASK) / 2;
22664448a836SRichard Henderson max_insns = MIN(max_insns, num_insns);
22674448a836SRichard Henderson
22684448a836SRichard Henderson /* Single stepping means just that. */
22694448a836SRichard Henderson if (ctx.singlestep_enabled || singlestep) {
22704448a836SRichard Henderson max_insns = 1;
2271fcf5ef2aSThomas Huth }
2272fcf5ef2aSThomas Huth
2273fcf5ef2aSThomas Huth gen_tb_start(tb);
22744448a836SRichard Henderson num_insns = 0;
22754448a836SRichard Henderson
22764bfa602bSRichard Henderson #ifdef CONFIG_USER_ONLY
22774bfa602bSRichard Henderson if (ctx.tbflags & GUSA_MASK) {
22784bfa602bSRichard Henderson num_insns = decode_gusa(&ctx, env, &max_insns);
22794bfa602bSRichard Henderson }
22804bfa602bSRichard Henderson #endif
22814bfa602bSRichard Henderson
22824448a836SRichard Henderson while (ctx.bstate == BS_NONE
22834448a836SRichard Henderson && num_insns < max_insns
22844448a836SRichard Henderson && !tcg_op_buf_full()) {
2285a6215749SAurelien Jarno tcg_gen_insn_start(ctx.pc, ctx.envflags);
2286fcf5ef2aSThomas Huth num_insns++;
2287fcf5ef2aSThomas Huth
2288fcf5ef2aSThomas Huth if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
2289fcf5ef2aSThomas Huth /* We have hit a breakpoint - make sure PC is up-to-date */
2290ac9707eaSAurelien Jarno gen_save_cpu_state(&ctx, true);
2291fcf5ef2aSThomas Huth gen_helper_debug(cpu_env);
229263205665SAurelien Jarno ctx.bstate = BS_EXCP;
2293fcf5ef2aSThomas Huth /* The address covered by the breakpoint must be included in
2294fcf5ef2aSThomas Huth [tb->pc, tb->pc + tb->size) in order for it to be
2295fcf5ef2aSThomas Huth properly cleared -- thus we increment the PC here so that
2296fcf5ef2aSThomas Huth the logic setting tb->size below does the right thing. */
2297fcf5ef2aSThomas Huth ctx.pc += 2;
2298fcf5ef2aSThomas Huth break;
2299fcf5ef2aSThomas Huth }
2300fcf5ef2aSThomas Huth
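/* With icount, only the last instruction of a TB may perform I/O; when
   CF_LAST_IO is set, its translation is bracketed by the gen_io_start()
   below and the gen_io_end() after the loop. */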
2301fcf5ef2aSThomas Huth if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
2302fcf5ef2aSThomas Huth gen_io_start();
2303fcf5ef2aSThomas Huth }
2304fcf5ef2aSThomas Huth
2305fcf5ef2aSThomas Huth ctx.opcode = cpu_lduw_code(env, ctx.pc);
2306fcf5ef2aSThomas Huth decode_opc(&ctx);
2307fcf5ef2aSThomas Huth ctx.pc += 2;
2308fcf5ef2aSThomas Huth }
23094448a836SRichard Henderson if (tb->cflags & CF_LAST_IO) {
2310fcf5ef2aSThomas Huth gen_io_end();
23114448a836SRichard Henderson }
23124bfa602bSRichard Henderson
23134bfa602bSRichard Henderson if (ctx.tbflags & GUSA_EXCLUSIVE) {
23144bfa602bSRichard Henderson /* Ending the region of exclusivity. Clear the bits. */
23154bfa602bSRichard Henderson ctx.envflags &= ~GUSA_MASK;
23164bfa602bSRichard Henderson }
23174bfa602bSRichard Henderson
2318fcf5ef2aSThomas Huth if (cs->singlestep_enabled) {
2319ac9707eaSAurelien Jarno gen_save_cpu_state(&ctx, true);
2320fcf5ef2aSThomas Huth gen_helper_debug(cpu_env);
2321fcf5ef2aSThomas Huth } else {
2322fcf5ef2aSThomas Huth switch (ctx.bstate) {
2323fcf5ef2aSThomas Huth case BS_STOP:
2324ac9707eaSAurelien Jarno gen_save_cpu_state(&ctx, true);
23250fc37a8bSAurelien Jarno tcg_gen_exit_tb(0);
23260fc37a8bSAurelien Jarno break;
2327fcf5ef2aSThomas Huth case BS_NONE:
2328ac9707eaSAurelien Jarno gen_save_cpu_state(&ctx, false);
2329fcf5ef2aSThomas Huth gen_goto_tb(&ctx, 0, ctx.pc);
2330fcf5ef2aSThomas Huth break;
2331fcf5ef2aSThomas Huth case BS_EXCP:
233263205665SAurelien Jarno /* fall through */
2333fcf5ef2aSThomas Huth case BS_BRANCH:
2334fcf5ef2aSThomas Huth default:
2335fcf5ef2aSThomas Huth break;
2336fcf5ef2aSThomas Huth }
2337fcf5ef2aSThomas Huth }
2338fcf5ef2aSThomas Huth
2339fcf5ef2aSThomas Huth gen_tb_end(tb, num_insns);
2340fcf5ef2aSThomas Huth
2341fcf5ef2aSThomas Huth tb->size = ctx.pc - pc_start;
2342fcf5ef2aSThomas Huth tb->icount = num_insns;
2343fcf5ef2aSThomas Huth
2344fcf5ef2aSThomas Huth #ifdef DEBUG_DISAS
2345fcf5ef2aSThomas Huth if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
2346fcf5ef2aSThomas Huth && qemu_log_in_addr_range(pc_start)) {
2347fcf5ef2aSThomas Huth qemu_log_lock();
2348fcf5ef2aSThomas Huth qemu_log("IN:\n"); /* , lookup_symbol(pc_start)); */
2349fcf5ef2aSThomas Huth log_target_disas(cs, pc_start, ctx.pc - pc_start, 0);
2350fcf5ef2aSThomas Huth qemu_log("\n");
2351fcf5ef2aSThomas Huth qemu_log_unlock();
2352fcf5ef2aSThomas Huth }
2353fcf5ef2aSThomas Huth #endif
2354fcf5ef2aSThomas Huth }
2355fcf5ef2aSThomas Huth
2356fcf5ef2aSThomas Huth void restore_state_to_opc(CPUSH4State *env, TranslationBlock *tb,
2357fcf5ef2aSThomas Huth target_ulong *data)
2358fcf5ef2aSThomas Huth {
2359fcf5ef2aSThomas Huth env->pc = data[0];
2360fcf5ef2aSThomas Huth env->flags = data[1];
2361ac9707eaSAurelien Jarno /* Theoretically delayed_pc should also be restored. In practice the
2362ac9707eaSAurelien Jarno branch instruction is re-executed after an exception, so the delayed
2363ac9707eaSAurelien Jarno branch target will be recomputed. */
2364fcf5ef2aSThomas Huth }
2365
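/* Note: the data[] words above are the values recorded by
   tcg_gen_insn_start(ctx.pc, ctx.envflags) in gen_intermediate_code, so
   data[0] is the guest PC and data[1] the environment flags at the start
   of the instruction being restored. */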