1fcf5ef2aSThomas Huth /* 2fcf5ef2aSThomas Huth * SH4 translation 3fcf5ef2aSThomas Huth * 4fcf5ef2aSThomas Huth * Copyright (c) 2005 Samuel Tardieu 5fcf5ef2aSThomas Huth * 6fcf5ef2aSThomas Huth * This library is free software; you can redistribute it and/or 7fcf5ef2aSThomas Huth * modify it under the terms of the GNU Lesser General Public 8fcf5ef2aSThomas Huth * License as published by the Free Software Foundation; either 9fcf5ef2aSThomas Huth * version 2 of the License, or (at your option) any later version. 10fcf5ef2aSThomas Huth * 11fcf5ef2aSThomas Huth * This library is distributed in the hope that it will be useful, 12fcf5ef2aSThomas Huth * but WITHOUT ANY WARRANTY; without even the implied warranty of 13fcf5ef2aSThomas Huth * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 14fcf5ef2aSThomas Huth * Lesser General Public License for more details. 15fcf5ef2aSThomas Huth * 16fcf5ef2aSThomas Huth * You should have received a copy of the GNU Lesser General Public 17fcf5ef2aSThomas Huth * License along with this library; if not, see <http://www.gnu.org/licenses/>. 18fcf5ef2aSThomas Huth */ 19fcf5ef2aSThomas Huth 20fcf5ef2aSThomas Huth #define DEBUG_DISAS 21fcf5ef2aSThomas Huth 22fcf5ef2aSThomas Huth #include "qemu/osdep.h" 23fcf5ef2aSThomas Huth #include "cpu.h" 24fcf5ef2aSThomas Huth #include "disas/disas.h" 25fcf5ef2aSThomas Huth #include "exec/exec-all.h" 26fcf5ef2aSThomas Huth #include "tcg-op.h" 27fcf5ef2aSThomas Huth #include "exec/cpu_ldst.h" 28fcf5ef2aSThomas Huth #include "exec/helper-proto.h" 29fcf5ef2aSThomas Huth #include "exec/helper-gen.h" 30*4834871bSRichard Henderson #include "exec/translator.h" 31fcf5ef2aSThomas Huth #include "trace-tcg.h" 32fcf5ef2aSThomas Huth #include "exec/log.h" 33fcf5ef2aSThomas Huth 34fcf5ef2aSThomas Huth 35fcf5ef2aSThomas Huth typedef struct DisasContext { 36fcf5ef2aSThomas Huth struct TranslationBlock *tb; 37fcf5ef2aSThomas Huth target_ulong pc; 38fcf5ef2aSThomas Huth uint16_t opcode; 39a6215749SAurelien Jarno uint32_t tbflags; /* should stay unmodified during the TB translation */ 40a6215749SAurelien Jarno uint32_t envflags; /* should stay in sync with env->flags using TCG ops */ 41*4834871bSRichard Henderson DisasJumpType bstate; 42fcf5ef2aSThomas Huth int memidx; 433a3bb8d2SRichard Henderson int gbank; 445c13bad9SRichard Henderson int fbank; 45fcf5ef2aSThomas Huth uint32_t delayed_pc; 46fcf5ef2aSThomas Huth int singlestep_enabled; 47fcf5ef2aSThomas Huth uint32_t features; 48fcf5ef2aSThomas Huth int has_movcal; 49fcf5ef2aSThomas Huth } DisasContext; 50fcf5ef2aSThomas Huth 51fcf5ef2aSThomas Huth #if defined(CONFIG_USER_ONLY) 52fcf5ef2aSThomas Huth #define IS_USER(ctx) 1 53fcf5ef2aSThomas Huth #else 54a6215749SAurelien Jarno #define IS_USER(ctx) (!(ctx->tbflags & (1u << SR_MD))) 55fcf5ef2aSThomas Huth #endif 56fcf5ef2aSThomas Huth 57*4834871bSRichard Henderson /* Target-specific values for ctx->bstate. */ 58*4834871bSRichard Henderson /* We want to exit back to the cpu loop for some reason. 59*4834871bSRichard Henderson Usually this is to recognize interrupts immediately. 
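   Added note: in this file that happens, for example, after rte and frchg/fschg/fpchg, which change SR or FPSCR bits cached in the TB flags, and at the start of a gUSA region.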
*/ 60*4834871bSRichard Henderson #define DISAS_STOP DISAS_TARGET_0 61fcf5ef2aSThomas Huth 62fcf5ef2aSThomas Huth /* global register indexes */ 633a3bb8d2SRichard Henderson static TCGv cpu_gregs[32]; 64fcf5ef2aSThomas Huth static TCGv cpu_sr, cpu_sr_m, cpu_sr_q, cpu_sr_t; 65fcf5ef2aSThomas Huth static TCGv cpu_pc, cpu_ssr, cpu_spc, cpu_gbr; 66fcf5ef2aSThomas Huth static TCGv cpu_vbr, cpu_sgr, cpu_dbr, cpu_mach, cpu_macl; 67f85da308SRichard Henderson static TCGv cpu_pr, cpu_fpscr, cpu_fpul; 68f85da308SRichard Henderson static TCGv cpu_lock_addr, cpu_lock_value; 69fcf5ef2aSThomas Huth static TCGv cpu_fregs[32]; 70fcf5ef2aSThomas Huth 71fcf5ef2aSThomas Huth /* internal register indexes */ 7247b9f4d5SAurelien Jarno static TCGv cpu_flags, cpu_delayed_pc, cpu_delayed_cond; 73fcf5ef2aSThomas Huth 74fcf5ef2aSThomas Huth #include "exec/gen-icount.h" 75fcf5ef2aSThomas Huth 76fcf5ef2aSThomas Huth void sh4_translate_init(void) 77fcf5ef2aSThomas Huth { 78fcf5ef2aSThomas Huth int i; 79fcf5ef2aSThomas Huth static const char * const gregnames[24] = { 80fcf5ef2aSThomas Huth "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0", 81fcf5ef2aSThomas Huth "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0", 82fcf5ef2aSThomas Huth "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15", 83fcf5ef2aSThomas Huth "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1", 84fcf5ef2aSThomas Huth "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1" 85fcf5ef2aSThomas Huth }; 86fcf5ef2aSThomas Huth static const char * const fregnames[32] = { 87fcf5ef2aSThomas Huth "FPR0_BANK0", "FPR1_BANK0", "FPR2_BANK0", "FPR3_BANK0", 88fcf5ef2aSThomas Huth "FPR4_BANK0", "FPR5_BANK0", "FPR6_BANK0", "FPR7_BANK0", 89fcf5ef2aSThomas Huth "FPR8_BANK0", "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0", 90fcf5ef2aSThomas Huth "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0", 91fcf5ef2aSThomas Huth "FPR0_BANK1", "FPR1_BANK1", "FPR2_BANK1", "FPR3_BANK1", 92fcf5ef2aSThomas Huth "FPR4_BANK1", "FPR5_BANK1", "FPR6_BANK1", "FPR7_BANK1", 93fcf5ef2aSThomas Huth "FPR8_BANK1", "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1", 94fcf5ef2aSThomas Huth "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1", 95fcf5ef2aSThomas Huth }; 96fcf5ef2aSThomas Huth 973a3bb8d2SRichard Henderson for (i = 0; i < 24; i++) { 98fcf5ef2aSThomas Huth cpu_gregs[i] = tcg_global_mem_new_i32(cpu_env, 99fcf5ef2aSThomas Huth offsetof(CPUSH4State, gregs[i]), 100fcf5ef2aSThomas Huth gregnames[i]); 1013a3bb8d2SRichard Henderson } 1023a3bb8d2SRichard Henderson memcpy(cpu_gregs + 24, cpu_gregs + 8, 8 * sizeof(TCGv)); 103fcf5ef2aSThomas Huth 104fcf5ef2aSThomas Huth cpu_pc = tcg_global_mem_new_i32(cpu_env, 105fcf5ef2aSThomas Huth offsetof(CPUSH4State, pc), "PC"); 106fcf5ef2aSThomas Huth cpu_sr = tcg_global_mem_new_i32(cpu_env, 107fcf5ef2aSThomas Huth offsetof(CPUSH4State, sr), "SR"); 108fcf5ef2aSThomas Huth cpu_sr_m = tcg_global_mem_new_i32(cpu_env, 109fcf5ef2aSThomas Huth offsetof(CPUSH4State, sr_m), "SR_M"); 110fcf5ef2aSThomas Huth cpu_sr_q = tcg_global_mem_new_i32(cpu_env, 111fcf5ef2aSThomas Huth offsetof(CPUSH4State, sr_q), "SR_Q"); 112fcf5ef2aSThomas Huth cpu_sr_t = tcg_global_mem_new_i32(cpu_env, 113fcf5ef2aSThomas Huth offsetof(CPUSH4State, sr_t), "SR_T"); 114fcf5ef2aSThomas Huth cpu_ssr = tcg_global_mem_new_i32(cpu_env, 115fcf5ef2aSThomas Huth offsetof(CPUSH4State, ssr), "SSR"); 116fcf5ef2aSThomas Huth cpu_spc = tcg_global_mem_new_i32(cpu_env, 117fcf5ef2aSThomas Huth offsetof(CPUSH4State, spc), "SPC"); 118fcf5ef2aSThomas Huth cpu_gbr = tcg_global_mem_new_i32(cpu_env, 119fcf5ef2aSThomas Huth 
offsetof(CPUSH4State, gbr), "GBR"); 120fcf5ef2aSThomas Huth cpu_vbr = tcg_global_mem_new_i32(cpu_env, 121fcf5ef2aSThomas Huth offsetof(CPUSH4State, vbr), "VBR"); 122fcf5ef2aSThomas Huth cpu_sgr = tcg_global_mem_new_i32(cpu_env, 123fcf5ef2aSThomas Huth offsetof(CPUSH4State, sgr), "SGR"); 124fcf5ef2aSThomas Huth cpu_dbr = tcg_global_mem_new_i32(cpu_env, 125fcf5ef2aSThomas Huth offsetof(CPUSH4State, dbr), "DBR"); 126fcf5ef2aSThomas Huth cpu_mach = tcg_global_mem_new_i32(cpu_env, 127fcf5ef2aSThomas Huth offsetof(CPUSH4State, mach), "MACH"); 128fcf5ef2aSThomas Huth cpu_macl = tcg_global_mem_new_i32(cpu_env, 129fcf5ef2aSThomas Huth offsetof(CPUSH4State, macl), "MACL"); 130fcf5ef2aSThomas Huth cpu_pr = tcg_global_mem_new_i32(cpu_env, 131fcf5ef2aSThomas Huth offsetof(CPUSH4State, pr), "PR"); 132fcf5ef2aSThomas Huth cpu_fpscr = tcg_global_mem_new_i32(cpu_env, 133fcf5ef2aSThomas Huth offsetof(CPUSH4State, fpscr), "FPSCR"); 134fcf5ef2aSThomas Huth cpu_fpul = tcg_global_mem_new_i32(cpu_env, 135fcf5ef2aSThomas Huth offsetof(CPUSH4State, fpul), "FPUL"); 136fcf5ef2aSThomas Huth 137fcf5ef2aSThomas Huth cpu_flags = tcg_global_mem_new_i32(cpu_env, 138fcf5ef2aSThomas Huth offsetof(CPUSH4State, flags), "_flags_"); 139fcf5ef2aSThomas Huth cpu_delayed_pc = tcg_global_mem_new_i32(cpu_env, 140fcf5ef2aSThomas Huth offsetof(CPUSH4State, delayed_pc), 141fcf5ef2aSThomas Huth "_delayed_pc_"); 14247b9f4d5SAurelien Jarno cpu_delayed_cond = tcg_global_mem_new_i32(cpu_env, 14347b9f4d5SAurelien Jarno offsetof(CPUSH4State, 14447b9f4d5SAurelien Jarno delayed_cond), 14547b9f4d5SAurelien Jarno "_delayed_cond_"); 146f85da308SRichard Henderson cpu_lock_addr = tcg_global_mem_new_i32(cpu_env, 147f85da308SRichard Henderson offsetof(CPUSH4State, lock_addr), 148f85da308SRichard Henderson "_lock_addr_"); 149f85da308SRichard Henderson cpu_lock_value = tcg_global_mem_new_i32(cpu_env, 150f85da308SRichard Henderson offsetof(CPUSH4State, lock_value), 151f85da308SRichard Henderson "_lock_value_"); 152fcf5ef2aSThomas Huth 153fcf5ef2aSThomas Huth for (i = 0; i < 32; i++) 154fcf5ef2aSThomas Huth cpu_fregs[i] = tcg_global_mem_new_i32(cpu_env, 155fcf5ef2aSThomas Huth offsetof(CPUSH4State, fregs[i]), 156fcf5ef2aSThomas Huth fregnames[i]); 157fcf5ef2aSThomas Huth } 158fcf5ef2aSThomas Huth 159fcf5ef2aSThomas Huth void superh_cpu_dump_state(CPUState *cs, FILE *f, 160fcf5ef2aSThomas Huth fprintf_function cpu_fprintf, int flags) 161fcf5ef2aSThomas Huth { 162fcf5ef2aSThomas Huth SuperHCPU *cpu = SUPERH_CPU(cs); 163fcf5ef2aSThomas Huth CPUSH4State *env = &cpu->env; 164fcf5ef2aSThomas Huth int i; 165fcf5ef2aSThomas Huth cpu_fprintf(f, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n", 166fcf5ef2aSThomas Huth env->pc, cpu_read_sr(env), env->pr, env->fpscr); 167fcf5ef2aSThomas Huth cpu_fprintf(f, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n", 168fcf5ef2aSThomas Huth env->spc, env->ssr, env->gbr, env->vbr); 169fcf5ef2aSThomas Huth cpu_fprintf(f, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n", 170fcf5ef2aSThomas Huth env->sgr, env->dbr, env->delayed_pc, env->fpul); 171fcf5ef2aSThomas Huth for (i = 0; i < 24; i += 4) { 172fcf5ef2aSThomas Huth cpu_fprintf(f, "r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n", 173fcf5ef2aSThomas Huth i, env->gregs[i], i + 1, env->gregs[i + 1], 174fcf5ef2aSThomas Huth i + 2, env->gregs[i + 2], i + 3, env->gregs[i + 3]); 175fcf5ef2aSThomas Huth } 176fcf5ef2aSThomas Huth if (env->flags & DELAY_SLOT) { 177fcf5ef2aSThomas Huth cpu_fprintf(f, "in delay slot (delayed_pc=0x%08x)\n", 178fcf5ef2aSThomas Huth env->delayed_pc); 
179fcf5ef2aSThomas Huth } else if (env->flags & DELAY_SLOT_CONDITIONAL) { 180fcf5ef2aSThomas Huth cpu_fprintf(f, "in conditional delay slot (delayed_pc=0x%08x)\n", 181fcf5ef2aSThomas Huth env->delayed_pc); 182be53081aSAurelien Jarno } else if (env->flags & DELAY_SLOT_RTE) { 183be53081aSAurelien Jarno cpu_fprintf(f, "in rte delay slot (delayed_pc=0x%08x)\n", 184be53081aSAurelien Jarno env->delayed_pc); 185fcf5ef2aSThomas Huth } 186fcf5ef2aSThomas Huth } 187fcf5ef2aSThomas Huth 188fcf5ef2aSThomas Huth static void gen_read_sr(TCGv dst) 189fcf5ef2aSThomas Huth { 190fcf5ef2aSThomas Huth TCGv t0 = tcg_temp_new(); 191fcf5ef2aSThomas Huth tcg_gen_shli_i32(t0, cpu_sr_q, SR_Q); 192fcf5ef2aSThomas Huth tcg_gen_or_i32(dst, dst, t0); 193fcf5ef2aSThomas Huth tcg_gen_shli_i32(t0, cpu_sr_m, SR_M); 194fcf5ef2aSThomas Huth tcg_gen_or_i32(dst, dst, t0); 195fcf5ef2aSThomas Huth tcg_gen_shli_i32(t0, cpu_sr_t, SR_T); 196fcf5ef2aSThomas Huth tcg_gen_or_i32(dst, cpu_sr, t0); 197fcf5ef2aSThomas Huth tcg_temp_free_i32(t0); 198fcf5ef2aSThomas Huth } 199fcf5ef2aSThomas Huth 200fcf5ef2aSThomas Huth static void gen_write_sr(TCGv src) 201fcf5ef2aSThomas Huth { 202fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr, src, 203fcf5ef2aSThomas Huth ~((1u << SR_Q) | (1u << SR_M) | (1u << SR_T))); 204a380f9dbSAurelien Jarno tcg_gen_extract_i32(cpu_sr_q, src, SR_Q, 1); 205a380f9dbSAurelien Jarno tcg_gen_extract_i32(cpu_sr_m, src, SR_M, 1); 206a380f9dbSAurelien Jarno tcg_gen_extract_i32(cpu_sr_t, src, SR_T, 1); 207fcf5ef2aSThomas Huth } 208fcf5ef2aSThomas Huth 209ac9707eaSAurelien Jarno static inline void gen_save_cpu_state(DisasContext *ctx, bool save_pc) 210ac9707eaSAurelien Jarno { 211ac9707eaSAurelien Jarno if (save_pc) { 212ac9707eaSAurelien Jarno tcg_gen_movi_i32(cpu_pc, ctx->pc); 213ac9707eaSAurelien Jarno } 214ac9707eaSAurelien Jarno if (ctx->delayed_pc != (uint32_t) -1) { 215ac9707eaSAurelien Jarno tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc); 216ac9707eaSAurelien Jarno } 217e1933d14SRichard Henderson if ((ctx->tbflags & TB_FLAG_ENVFLAGS_MASK) != ctx->envflags) { 218ac9707eaSAurelien Jarno tcg_gen_movi_i32(cpu_flags, ctx->envflags); 219ac9707eaSAurelien Jarno } 220ac9707eaSAurelien Jarno } 221ac9707eaSAurelien Jarno 222ec2eb22eSRichard Henderson static inline bool use_exit_tb(DisasContext *ctx) 223ec2eb22eSRichard Henderson { 224ec2eb22eSRichard Henderson return (ctx->tbflags & GUSA_EXCLUSIVE) != 0; 225ec2eb22eSRichard Henderson } 226ec2eb22eSRichard Henderson 227fcf5ef2aSThomas Huth static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest) 228fcf5ef2aSThomas Huth { 229ec2eb22eSRichard Henderson /* Use a direct jump if in same page and singlestep not enabled */ 230ec2eb22eSRichard Henderson if (unlikely(ctx->singlestep_enabled || use_exit_tb(ctx))) { 2314bfa602bSRichard Henderson return false; 2324bfa602bSRichard Henderson } 233fcf5ef2aSThomas Huth #ifndef CONFIG_USER_ONLY 234fcf5ef2aSThomas Huth return (ctx->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK); 235fcf5ef2aSThomas Huth #else 236fcf5ef2aSThomas Huth return true; 237fcf5ef2aSThomas Huth #endif 238fcf5ef2aSThomas Huth } 239fcf5ef2aSThomas Huth 240fcf5ef2aSThomas Huth static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest) 241fcf5ef2aSThomas Huth { 242fcf5ef2aSThomas Huth if (use_goto_tb(ctx, dest)) { 243fcf5ef2aSThomas Huth tcg_gen_goto_tb(n); 244fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_pc, dest); 245fcf5ef2aSThomas Huth tcg_gen_exit_tb((uintptr_t)ctx->tb + n); 246fcf5ef2aSThomas Huth } else { 247fcf5ef2aSThomas Huth 
tcg_gen_movi_i32(cpu_pc, dest); 248ec2eb22eSRichard Henderson if (ctx->singlestep_enabled) { 249fcf5ef2aSThomas Huth gen_helper_debug(cpu_env); 250ec2eb22eSRichard Henderson } else if (use_exit_tb(ctx)) { 251fcf5ef2aSThomas Huth tcg_gen_exit_tb(0); 252ec2eb22eSRichard Henderson } else { 2537f11636dSEmilio G. Cota tcg_gen_lookup_and_goto_ptr(); 254ec2eb22eSRichard Henderson } 255fcf5ef2aSThomas Huth } 256*4834871bSRichard Henderson ctx->bstate = DISAS_NORETURN; 257fcf5ef2aSThomas Huth } 258fcf5ef2aSThomas Huth 259fcf5ef2aSThomas Huth static void gen_jump(DisasContext * ctx) 260fcf5ef2aSThomas Huth { 261ec2eb22eSRichard Henderson if (ctx->delayed_pc == -1) { 262fcf5ef2aSThomas Huth /* Target is not statically known; it necessarily comes from a 263fcf5ef2aSThomas Huth delayed jump, since immediate jumps are conditional jumps */ 264fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_pc, cpu_delayed_pc); 265ac9707eaSAurelien Jarno tcg_gen_discard_i32(cpu_delayed_pc); 266ec2eb22eSRichard Henderson if (ctx->singlestep_enabled) { 267fcf5ef2aSThomas Huth gen_helper_debug(cpu_env); 268ec2eb22eSRichard Henderson } else if (use_exit_tb(ctx)) { 269fcf5ef2aSThomas Huth tcg_gen_exit_tb(0); 270fcf5ef2aSThomas Huth } else { 2717f11636dSEmilio G. Cota tcg_gen_lookup_and_goto_ptr(); 272ec2eb22eSRichard Henderson } 273ec2eb22eSRichard Henderson } else { 274fcf5ef2aSThomas Huth gen_goto_tb(ctx, 0, ctx->delayed_pc); 275fcf5ef2aSThomas Huth } 276fcf5ef2aSThomas Huth } 277fcf5ef2aSThomas Huth 278fcf5ef2aSThomas Huth /* Immediate conditional jump (bt or bf) */ 2794bfa602bSRichard Henderson static void gen_conditional_jump(DisasContext *ctx, target_ulong dest, 2804bfa602bSRichard Henderson bool jump_if_true) 281fcf5ef2aSThomas Huth { 282fcf5ef2aSThomas Huth TCGLabel *l1 = gen_new_label(); 2834bfa602bSRichard Henderson TCGCond cond_not_taken = jump_if_true ? TCG_COND_EQ : TCG_COND_NE; 2844bfa602bSRichard Henderson 2854bfa602bSRichard Henderson if (ctx->tbflags & GUSA_EXCLUSIVE) { 2864bfa602bSRichard Henderson /* When in an exclusive region, we must continue to the end. 2874bfa602bSRichard Henderson Therefore, exit the region on a taken branch, but otherwise 2884bfa602bSRichard Henderson fall through to the next instruction. */ 2894bfa602bSRichard Henderson tcg_gen_brcondi_i32(cond_not_taken, cpu_sr_t, 0, l1); 2904bfa602bSRichard Henderson tcg_gen_movi_i32(cpu_flags, ctx->envflags & ~GUSA_MASK); 2914bfa602bSRichard Henderson /* Note that this won't actually use a goto_tb opcode because we 2924bfa602bSRichard Henderson disallow it in use_goto_tb, but it handles exit + singlestep. 
*/ 2934bfa602bSRichard Henderson gen_goto_tb(ctx, 0, dest); 294fcf5ef2aSThomas Huth gen_set_label(l1); 2954bfa602bSRichard Henderson return; 2964bfa602bSRichard Henderson } 2974bfa602bSRichard Henderson 2984bfa602bSRichard Henderson gen_save_cpu_state(ctx, false); 2994bfa602bSRichard Henderson tcg_gen_brcondi_i32(cond_not_taken, cpu_sr_t, 0, l1); 3004bfa602bSRichard Henderson gen_goto_tb(ctx, 0, dest); 3014bfa602bSRichard Henderson gen_set_label(l1); 3024bfa602bSRichard Henderson gen_goto_tb(ctx, 1, ctx->pc + 2); 303*4834871bSRichard Henderson ctx->bstate = DISAS_NORETURN; 304fcf5ef2aSThomas Huth } 305fcf5ef2aSThomas Huth 306fcf5ef2aSThomas Huth /* Delayed conditional jump (bt or bf) */ 307fcf5ef2aSThomas Huth static void gen_delayed_conditional_jump(DisasContext * ctx) 308fcf5ef2aSThomas Huth { 3094bfa602bSRichard Henderson TCGLabel *l1 = gen_new_label(); 3104bfa602bSRichard Henderson TCGv ds = tcg_temp_new(); 311fcf5ef2aSThomas Huth 31247b9f4d5SAurelien Jarno tcg_gen_mov_i32(ds, cpu_delayed_cond); 31347b9f4d5SAurelien Jarno tcg_gen_discard_i32(cpu_delayed_cond); 3144bfa602bSRichard Henderson 3154bfa602bSRichard Henderson if (ctx->tbflags & GUSA_EXCLUSIVE) { 3164bfa602bSRichard Henderson /* When in an exclusive region, we must continue to the end. 3174bfa602bSRichard Henderson Therefore, exit the region on a taken branch, but otherwise 3184bfa602bSRichard Henderson fall through to the next instruction. */ 3194bfa602bSRichard Henderson tcg_gen_brcondi_i32(TCG_COND_EQ, ds, 0, l1); 3204bfa602bSRichard Henderson 3214bfa602bSRichard Henderson /* Leave the gUSA region. */ 3224bfa602bSRichard Henderson tcg_gen_movi_i32(cpu_flags, ctx->envflags & ~GUSA_MASK); 3234bfa602bSRichard Henderson gen_jump(ctx); 3244bfa602bSRichard Henderson 3254bfa602bSRichard Henderson gen_set_label(l1); 326*4834871bSRichard Henderson ctx->bstate = DISAS_NEXT; 3274bfa602bSRichard Henderson return; 3284bfa602bSRichard Henderson } 3294bfa602bSRichard Henderson 330fcf5ef2aSThomas Huth tcg_gen_brcondi_i32(TCG_COND_NE, ds, 0, l1); 331fcf5ef2aSThomas Huth gen_goto_tb(ctx, 1, ctx->pc + 2); 332fcf5ef2aSThomas Huth gen_set_label(l1); 333fcf5ef2aSThomas Huth gen_jump(ctx); 334fcf5ef2aSThomas Huth } 335fcf5ef2aSThomas Huth 336e5d8053eSRichard Henderson static inline void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg) 337fcf5ef2aSThomas Huth { 3381e0b21d8SRichard Henderson /* We have already signaled illegal instruction for odd Dr. */ 3391e0b21d8SRichard Henderson tcg_debug_assert((reg & 1) == 0); 3401e0b21d8SRichard Henderson reg ^= ctx->fbank; 341fcf5ef2aSThomas Huth tcg_gen_concat_i32_i64(t, cpu_fregs[reg + 1], cpu_fregs[reg]); 342fcf5ef2aSThomas Huth } 343fcf5ef2aSThomas Huth 344e5d8053eSRichard Henderson static inline void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg) 345fcf5ef2aSThomas Huth { 3461e0b21d8SRichard Henderson /* We have already signaled illegal instruction for odd Dr. */ 3471e0b21d8SRichard Henderson tcg_debug_assert((reg & 1) == 0); 3481e0b21d8SRichard Henderson reg ^= ctx->fbank; 34958d2a9aeSAurelien Jarno tcg_gen_extr_i64_i32(cpu_fregs[reg + 1], cpu_fregs[reg], t); 350fcf5ef2aSThomas Huth } 351fcf5ef2aSThomas Huth 352fcf5ef2aSThomas Huth #define B3_0 (ctx->opcode & 0xf) 353fcf5ef2aSThomas Huth #define B6_4 ((ctx->opcode >> 4) & 0x7) 354fcf5ef2aSThomas Huth #define B7_4 ((ctx->opcode >> 4) & 0xf) 355fcf5ef2aSThomas Huth #define B7_0 (ctx->opcode & 0xff) 356fcf5ef2aSThomas Huth #define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff)) 357fcf5ef2aSThomas Huth #define B11_0s (ctx->opcode & 0x800 ? 
0xfffff000 | (ctx->opcode & 0xfff) : \ 358fcf5ef2aSThomas Huth (ctx->opcode & 0xfff)) 359fcf5ef2aSThomas Huth #define B11_8 ((ctx->opcode >> 8) & 0xf) 360fcf5ef2aSThomas Huth #define B15_12 ((ctx->opcode >> 12) & 0xf) 361fcf5ef2aSThomas Huth 3623a3bb8d2SRichard Henderson #define REG(x) cpu_gregs[(x) ^ ctx->gbank] 3633a3bb8d2SRichard Henderson #define ALTREG(x) cpu_gregs[(x) ^ ctx->gbank ^ 0x10] 3645c13bad9SRichard Henderson #define FREG(x) cpu_fregs[(x) ^ ctx->fbank] 365fcf5ef2aSThomas Huth 366fcf5ef2aSThomas Huth #define XHACK(x) ((((x) & 1 ) << 4) | ((x) & 0xe)) 367fcf5ef2aSThomas Huth 368fcf5ef2aSThomas Huth #define CHECK_NOT_DELAY_SLOT \ 3699a562ae7SAurelien Jarno if (ctx->envflags & DELAY_SLOT_MASK) { \ 370dec16c6eSRichard Henderson goto do_illegal_slot; \ 371fcf5ef2aSThomas Huth } 372fcf5ef2aSThomas Huth 373fcf5ef2aSThomas Huth #define CHECK_PRIVILEGED \ 374fcf5ef2aSThomas Huth if (IS_USER(ctx)) { \ 3756b98213dSRichard Henderson goto do_illegal; \ 376fcf5ef2aSThomas Huth } 377fcf5ef2aSThomas Huth 378fcf5ef2aSThomas Huth #define CHECK_FPU_ENABLED \ 379a6215749SAurelien Jarno if (ctx->tbflags & (1u << SR_FD)) { \ 380dec4f042SRichard Henderson goto do_fpu_disabled; \ 381fcf5ef2aSThomas Huth } 382fcf5ef2aSThomas Huth 3837e9f7ca8SRichard Henderson #define CHECK_FPSCR_PR_0 \ 3847e9f7ca8SRichard Henderson if (ctx->tbflags & FPSCR_PR) { \ 3857e9f7ca8SRichard Henderson goto do_illegal; \ 3867e9f7ca8SRichard Henderson } 3877e9f7ca8SRichard Henderson 3887e9f7ca8SRichard Henderson #define CHECK_FPSCR_PR_1 \ 3897e9f7ca8SRichard Henderson if (!(ctx->tbflags & FPSCR_PR)) { \ 3907e9f7ca8SRichard Henderson goto do_illegal; \ 3917e9f7ca8SRichard Henderson } 3927e9f7ca8SRichard Henderson 393ccae24d4SRichard Henderson #define CHECK_SH4A \ 394ccae24d4SRichard Henderson if (!(ctx->features & SH_FEATURE_SH4A)) { \ 395ccae24d4SRichard Henderson goto do_illegal; \ 396ccae24d4SRichard Henderson } 397ccae24d4SRichard Henderson 398fcf5ef2aSThomas Huth static void _decode_opc(DisasContext * ctx) 399fcf5ef2aSThomas Huth { 400fcf5ef2aSThomas Huth /* This code tries to make movcal emulation sufficiently 401fcf5ef2aSThomas Huth accurate for Linux purposes. This instruction writes 402fcf5ef2aSThomas Huth memory, and prior to that, always allocates a cache line. 403fcf5ef2aSThomas Huth It is used in two contexts: 404fcf5ef2aSThomas Huth - in memcpy, where data is copied in blocks, the first write 405fcf5ef2aSThomas Huth to a block uses movca.l for performance. 406fcf5ef2aSThomas Huth - in arch/sh/mm/cache-sh4.c, movcal.l + ocbi combination is used 407fcf5ef2aSThomas Huth to flush the cache. Here, the data written by movcal.l is never 408fcf5ef2aSThomas Huth written to memory, and the data written is just bogus. 409fcf5ef2aSThomas Huth 410fcf5ef2aSThomas Huth To simulate this, when we simulate movcal.l we store the value to memory, 411fcf5ef2aSThomas Huth but we also remember the previous content. If we see ocbi, we check 412fcf5ef2aSThomas Huth if movcal.l for that address was done previously. If so, the write should 413fcf5ef2aSThomas Huth not have hit the memory, so we restore the previous content. 414fcf5ef2aSThomas Huth When we see an instruction that is neither movca.l 415fcf5ef2aSThomas Huth nor ocbi, the previous content is discarded. 416fcf5ef2aSThomas Huth 417fcf5ef2aSThomas Huth To optimize, we only try to flush stores when we're at the start of 418fcf5ef2aSThomas Huth the TB, or if we already saw movca.l in this TB and did not flush stores 419fcf5ef2aSThomas Huth yet. 
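       Added note: the "neither movca.l nor ocbi" case corresponds to the opcode check at the top of _decode_opc below, which calls gen_helper_discard_movcal_backup and clears has_movcal.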
*/ 420fcf5ef2aSThomas Huth if (ctx->has_movcal) 421fcf5ef2aSThomas Huth { 422fcf5ef2aSThomas Huth int opcode = ctx->opcode & 0xf0ff; 423fcf5ef2aSThomas Huth if (opcode != 0x0093 /* ocbi */ 424fcf5ef2aSThomas Huth && opcode != 0x00c3 /* movca.l */) 425fcf5ef2aSThomas Huth { 426fcf5ef2aSThomas Huth gen_helper_discard_movcal_backup(cpu_env); 427fcf5ef2aSThomas Huth ctx->has_movcal = 0; 428fcf5ef2aSThomas Huth } 429fcf5ef2aSThomas Huth } 430fcf5ef2aSThomas Huth 431fcf5ef2aSThomas Huth #if 0 432fcf5ef2aSThomas Huth fprintf(stderr, "Translating opcode 0x%04x\n", ctx->opcode); 433fcf5ef2aSThomas Huth #endif 434fcf5ef2aSThomas Huth 435fcf5ef2aSThomas Huth switch (ctx->opcode) { 436fcf5ef2aSThomas Huth case 0x0019: /* div0u */ 437fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_sr_m, 0); 438fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_sr_q, 0); 439fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_sr_t, 0); 440fcf5ef2aSThomas Huth return; 441fcf5ef2aSThomas Huth case 0x000b: /* rts */ 442fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 443fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_delayed_pc, cpu_pr); 444a6215749SAurelien Jarno ctx->envflags |= DELAY_SLOT; 445fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 446fcf5ef2aSThomas Huth return; 447fcf5ef2aSThomas Huth case 0x0028: /* clrmac */ 448fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_mach, 0); 449fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_macl, 0); 450fcf5ef2aSThomas Huth return; 451fcf5ef2aSThomas Huth case 0x0048: /* clrs */ 452fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr, cpu_sr, ~(1u << SR_S)); 453fcf5ef2aSThomas Huth return; 454fcf5ef2aSThomas Huth case 0x0008: /* clrt */ 455fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_sr_t, 0); 456fcf5ef2aSThomas Huth return; 457fcf5ef2aSThomas Huth case 0x0038: /* ldtlb */ 458fcf5ef2aSThomas Huth CHECK_PRIVILEGED 459fcf5ef2aSThomas Huth gen_helper_ldtlb(cpu_env); 460fcf5ef2aSThomas Huth return; 461fcf5ef2aSThomas Huth case 0x002b: /* rte */ 462fcf5ef2aSThomas Huth CHECK_PRIVILEGED 463fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 464fcf5ef2aSThomas Huth gen_write_sr(cpu_ssr); 465fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_delayed_pc, cpu_spc); 466be53081aSAurelien Jarno ctx->envflags |= DELAY_SLOT_RTE; 467fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 468*4834871bSRichard Henderson ctx->bstate = DISAS_STOP; 469fcf5ef2aSThomas Huth return; 470fcf5ef2aSThomas Huth case 0x0058: /* sets */ 471fcf5ef2aSThomas Huth tcg_gen_ori_i32(cpu_sr, cpu_sr, (1u << SR_S)); 472fcf5ef2aSThomas Huth return; 473fcf5ef2aSThomas Huth case 0x0018: /* sett */ 474fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_sr_t, 1); 475fcf5ef2aSThomas Huth return; 476fcf5ef2aSThomas Huth case 0xfbfd: /* frchg */ 47761dedf2aSRichard Henderson CHECK_FPSCR_PR_0 478fcf5ef2aSThomas Huth tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_FR); 479*4834871bSRichard Henderson ctx->bstate = DISAS_STOP; 480fcf5ef2aSThomas Huth return; 481fcf5ef2aSThomas Huth case 0xf3fd: /* fschg */ 48261dedf2aSRichard Henderson CHECK_FPSCR_PR_0 483fcf5ef2aSThomas Huth tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_SZ); 484*4834871bSRichard Henderson ctx->bstate = DISAS_STOP; 485fcf5ef2aSThomas Huth return; 486907759f9SRichard Henderson case 0xf7fd: /* fpchg */ 487907759f9SRichard Henderson CHECK_SH4A 488907759f9SRichard Henderson tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_PR); 489*4834871bSRichard Henderson ctx->bstate = DISAS_STOP; 490907759f9SRichard Henderson return; 491fcf5ef2aSThomas Huth case 0x0009: /* nop */ 492fcf5ef2aSThomas Huth return; 493fcf5ef2aSThomas Huth case 0x001b: /* sleep */ 
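/* Added note: env->pc is set to ctx->pc + 2 (the next instruction) before the sleep helper runs, so execution resumes after the sleep once the CPU is woken. */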
494fcf5ef2aSThomas Huth CHECK_PRIVILEGED 495fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_pc, ctx->pc + 2); 496fcf5ef2aSThomas Huth gen_helper_sleep(cpu_env); 497fcf5ef2aSThomas Huth return; 498fcf5ef2aSThomas Huth } 499fcf5ef2aSThomas Huth 500fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf000) { 501fcf5ef2aSThomas Huth case 0x1000: /* mov.l Rm,@(disp,Rn) */ 502fcf5ef2aSThomas Huth { 503fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 504fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B11_8), B3_0 * 4); 505fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_TEUL); 506fcf5ef2aSThomas Huth tcg_temp_free(addr); 507fcf5ef2aSThomas Huth } 508fcf5ef2aSThomas Huth return; 509fcf5ef2aSThomas Huth case 0x5000: /* mov.l @(disp,Rm),Rn */ 510fcf5ef2aSThomas Huth { 511fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 512fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 4); 513fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESL); 514fcf5ef2aSThomas Huth tcg_temp_free(addr); 515fcf5ef2aSThomas Huth } 516fcf5ef2aSThomas Huth return; 517fcf5ef2aSThomas Huth case 0xe000: /* mov #imm,Rn */ 5184bfa602bSRichard Henderson #ifdef CONFIG_USER_ONLY 5194bfa602bSRichard Henderson /* Detect the start of a gUSA region. If so, update envflags 5204bfa602bSRichard Henderson and end the TB. This will allow us to see the end of the 5214bfa602bSRichard Henderson region (stored in R0) in the next TB. */ 522671f9a85SEmilio G. Cota if (B11_8 == 15 && B7_0s < 0 && (tb_cflags(ctx->tb) & CF_PARALLEL)) { 5234bfa602bSRichard Henderson ctx->envflags = deposit32(ctx->envflags, GUSA_SHIFT, 8, B7_0s); 524*4834871bSRichard Henderson ctx->bstate = DISAS_STOP; 5254bfa602bSRichard Henderson } 5264bfa602bSRichard Henderson #endif 527fcf5ef2aSThomas Huth tcg_gen_movi_i32(REG(B11_8), B7_0s); 528fcf5ef2aSThomas Huth return; 529fcf5ef2aSThomas Huth case 0x9000: /* mov.w @(disp,PC),Rn */ 530fcf5ef2aSThomas Huth { 531fcf5ef2aSThomas Huth TCGv addr = tcg_const_i32(ctx->pc + 4 + B7_0 * 2); 532fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESW); 533fcf5ef2aSThomas Huth tcg_temp_free(addr); 534fcf5ef2aSThomas Huth } 535fcf5ef2aSThomas Huth return; 536fcf5ef2aSThomas Huth case 0xd000: /* mov.l @(disp,PC),Rn */ 537fcf5ef2aSThomas Huth { 538fcf5ef2aSThomas Huth TCGv addr = tcg_const_i32((ctx->pc + 4 + B7_0 * 4) & ~3); 539fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESL); 540fcf5ef2aSThomas Huth tcg_temp_free(addr); 541fcf5ef2aSThomas Huth } 542fcf5ef2aSThomas Huth return; 543fcf5ef2aSThomas Huth case 0x7000: /* add #imm,Rn */ 544fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), B7_0s); 545fcf5ef2aSThomas Huth return; 546fcf5ef2aSThomas Huth case 0xa000: /* bra disp */ 547fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 548fcf5ef2aSThomas Huth ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2; 549a6215749SAurelien Jarno ctx->envflags |= DELAY_SLOT; 550fcf5ef2aSThomas Huth return; 551fcf5ef2aSThomas Huth case 0xb000: /* bsr disp */ 552fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 553fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_pr, ctx->pc + 4); 554fcf5ef2aSThomas Huth ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2; 555a6215749SAurelien Jarno ctx->envflags |= DELAY_SLOT; 556fcf5ef2aSThomas Huth return; 557fcf5ef2aSThomas Huth } 558fcf5ef2aSThomas Huth 559fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf00f) { 560fcf5ef2aSThomas Huth case 0x6003: /* mov Rm,Rn */ 561fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), REG(B7_4)); 562fcf5ef2aSThomas Huth return; 
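/* Added decode example for this 0xf00f-masked group: opcode 0x6503 matches case 0x6003 above, with B11_8 = 5 (Rn = r5) and B7_4 = 0 (Rm = r0), i.e. "mov r0,r5" copies r0 into r5. */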
563fcf5ef2aSThomas Huth case 0x2000: /* mov.b Rm,@Rn */ 564fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, MO_UB); 565fcf5ef2aSThomas Huth return; 566fcf5ef2aSThomas Huth case 0x2001: /* mov.w Rm,@Rn */ 567fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, MO_TEUW); 568fcf5ef2aSThomas Huth return; 569fcf5ef2aSThomas Huth case 0x2002: /* mov.l Rm,@Rn */ 570fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, MO_TEUL); 571fcf5ef2aSThomas Huth return; 572fcf5ef2aSThomas Huth case 0x6000: /* mov.b @Rm,Rn */ 573fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_SB); 574fcf5ef2aSThomas Huth return; 575fcf5ef2aSThomas Huth case 0x6001: /* mov.w @Rm,Rn */ 576fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_TESW); 577fcf5ef2aSThomas Huth return; 578fcf5ef2aSThomas Huth case 0x6002: /* mov.l @Rm,Rn */ 579fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_TESL); 580fcf5ef2aSThomas Huth return; 581fcf5ef2aSThomas Huth case 0x2004: /* mov.b Rm,@-Rn */ 582fcf5ef2aSThomas Huth { 583fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 584fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 1); 585fcf5ef2aSThomas Huth /* might cause re-execution */ 586fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_UB); 587fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); /* modify register status */ 588fcf5ef2aSThomas Huth tcg_temp_free(addr); 589fcf5ef2aSThomas Huth } 590fcf5ef2aSThomas Huth return; 591fcf5ef2aSThomas Huth case 0x2005: /* mov.w Rm,@-Rn */ 592fcf5ef2aSThomas Huth { 593fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 594fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 2); 595fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_TEUW); 596fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 597fcf5ef2aSThomas Huth tcg_temp_free(addr); 598fcf5ef2aSThomas Huth } 599fcf5ef2aSThomas Huth return; 600fcf5ef2aSThomas Huth case 0x2006: /* mov.l Rm,@-Rn */ 601fcf5ef2aSThomas Huth { 602fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 603fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 604fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_TEUL); 605fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 606e691e0edSPhilippe Mathieu-Daudé tcg_temp_free(addr); 607fcf5ef2aSThomas Huth } 608fcf5ef2aSThomas Huth return; 609fcf5ef2aSThomas Huth case 0x6004: /* mov.b @Rm+,Rn */ 610fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_SB); 611fcf5ef2aSThomas Huth if ( B11_8 != B7_4 ) 612fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 1); 613fcf5ef2aSThomas Huth return; 614fcf5ef2aSThomas Huth case 0x6005: /* mov.w @Rm+,Rn */ 615fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_TESW); 616fcf5ef2aSThomas Huth if ( B11_8 != B7_4 ) 617fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2); 618fcf5ef2aSThomas Huth return; 619fcf5ef2aSThomas Huth case 0x6006: /* mov.l @Rm+,Rn */ 620fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_TESL); 621fcf5ef2aSThomas Huth if ( B11_8 != B7_4 ) 622fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4); 623fcf5ef2aSThomas Huth return; 624fcf5ef2aSThomas Huth case 0x0004: /* mov.b Rm,@(R0,Rn) */ 625fcf5ef2aSThomas Huth { 626fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 627fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, 
REG(B11_8), REG(0)); 628fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_UB); 629fcf5ef2aSThomas Huth tcg_temp_free(addr); 630fcf5ef2aSThomas Huth } 631fcf5ef2aSThomas Huth return; 632fcf5ef2aSThomas Huth case 0x0005: /* mov.w Rm,@(R0,Rn) */ 633fcf5ef2aSThomas Huth { 634fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 635fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B11_8), REG(0)); 636fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_TEUW); 637fcf5ef2aSThomas Huth tcg_temp_free(addr); 638fcf5ef2aSThomas Huth } 639fcf5ef2aSThomas Huth return; 640fcf5ef2aSThomas Huth case 0x0006: /* mov.l Rm,@(R0,Rn) */ 641fcf5ef2aSThomas Huth { 642fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 643fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B11_8), REG(0)); 644fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_TEUL); 645fcf5ef2aSThomas Huth tcg_temp_free(addr); 646fcf5ef2aSThomas Huth } 647fcf5ef2aSThomas Huth return; 648fcf5ef2aSThomas Huth case 0x000c: /* mov.b @(R0,Rm),Rn */ 649fcf5ef2aSThomas Huth { 650fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 651fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B7_4), REG(0)); 652fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_SB); 653fcf5ef2aSThomas Huth tcg_temp_free(addr); 654fcf5ef2aSThomas Huth } 655fcf5ef2aSThomas Huth return; 656fcf5ef2aSThomas Huth case 0x000d: /* mov.w @(R0,Rm),Rn */ 657fcf5ef2aSThomas Huth { 658fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 659fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B7_4), REG(0)); 660fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESW); 661fcf5ef2aSThomas Huth tcg_temp_free(addr); 662fcf5ef2aSThomas Huth } 663fcf5ef2aSThomas Huth return; 664fcf5ef2aSThomas Huth case 0x000e: /* mov.l @(R0,Rm),Rn */ 665fcf5ef2aSThomas Huth { 666fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 667fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B7_4), REG(0)); 668fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESL); 669fcf5ef2aSThomas Huth tcg_temp_free(addr); 670fcf5ef2aSThomas Huth } 671fcf5ef2aSThomas Huth return; 672fcf5ef2aSThomas Huth case 0x6008: /* swap.b Rm,Rn */ 673fcf5ef2aSThomas Huth { 6743c254ab8SLadi Prosek TCGv low = tcg_temp_new(); 675fcf5ef2aSThomas Huth tcg_gen_ext16u_i32(low, REG(B7_4)); 676fcf5ef2aSThomas Huth tcg_gen_bswap16_i32(low, low); 677fcf5ef2aSThomas Huth tcg_gen_deposit_i32(REG(B11_8), REG(B7_4), low, 0, 16); 678fcf5ef2aSThomas Huth tcg_temp_free(low); 679fcf5ef2aSThomas Huth } 680fcf5ef2aSThomas Huth return; 681fcf5ef2aSThomas Huth case 0x6009: /* swap.w Rm,Rn */ 682fcf5ef2aSThomas Huth tcg_gen_rotli_i32(REG(B11_8), REG(B7_4), 16); 683fcf5ef2aSThomas Huth return; 684fcf5ef2aSThomas Huth case 0x200d: /* xtrct Rm,Rn */ 685fcf5ef2aSThomas Huth { 686fcf5ef2aSThomas Huth TCGv high, low; 687fcf5ef2aSThomas Huth high = tcg_temp_new(); 688fcf5ef2aSThomas Huth tcg_gen_shli_i32(high, REG(B7_4), 16); 689fcf5ef2aSThomas Huth low = tcg_temp_new(); 690fcf5ef2aSThomas Huth tcg_gen_shri_i32(low, REG(B11_8), 16); 691fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), high, low); 692fcf5ef2aSThomas Huth tcg_temp_free(low); 693fcf5ef2aSThomas Huth tcg_temp_free(high); 694fcf5ef2aSThomas Huth } 695fcf5ef2aSThomas Huth return; 696fcf5ef2aSThomas Huth case 0x300c: /* add Rm,Rn */ 697fcf5ef2aSThomas Huth tcg_gen_add_i32(REG(B11_8), REG(B11_8), REG(B7_4)); 698fcf5ef2aSThomas Huth return; 699fcf5ef2aSThomas Huth case 0x300e: /* addc Rm,Rn */ 700fcf5ef2aSThomas Huth { 
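/* Added note on the two add2 ops below: addc computes Rn + Rm + T with the carry out in T. The first add2 forms t1 = Rm + T and leaves its carry in cpu_sr_t; the second adds t1 to Rn and folds that carry back in, so cpu_sr_t ends up holding the overall carry-out. */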
701fcf5ef2aSThomas Huth TCGv t0, t1; 702fcf5ef2aSThomas Huth t0 = tcg_const_tl(0); 703fcf5ef2aSThomas Huth t1 = tcg_temp_new(); 704fcf5ef2aSThomas Huth tcg_gen_add2_i32(t1, cpu_sr_t, cpu_sr_t, t0, REG(B7_4), t0); 705fcf5ef2aSThomas Huth tcg_gen_add2_i32(REG(B11_8), cpu_sr_t, 706fcf5ef2aSThomas Huth REG(B11_8), t0, t1, cpu_sr_t); 707fcf5ef2aSThomas Huth tcg_temp_free(t0); 708fcf5ef2aSThomas Huth tcg_temp_free(t1); 709fcf5ef2aSThomas Huth } 710fcf5ef2aSThomas Huth return; 711fcf5ef2aSThomas Huth case 0x300f: /* addv Rm,Rn */ 712fcf5ef2aSThomas Huth { 713fcf5ef2aSThomas Huth TCGv t0, t1, t2; 714fcf5ef2aSThomas Huth t0 = tcg_temp_new(); 715fcf5ef2aSThomas Huth tcg_gen_add_i32(t0, REG(B7_4), REG(B11_8)); 716fcf5ef2aSThomas Huth t1 = tcg_temp_new(); 717fcf5ef2aSThomas Huth tcg_gen_xor_i32(t1, t0, REG(B11_8)); 718fcf5ef2aSThomas Huth t2 = tcg_temp_new(); 719fcf5ef2aSThomas Huth tcg_gen_xor_i32(t2, REG(B7_4), REG(B11_8)); 720fcf5ef2aSThomas Huth tcg_gen_andc_i32(cpu_sr_t, t1, t2); 721fcf5ef2aSThomas Huth tcg_temp_free(t2); 722fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_t, cpu_sr_t, 31); 723fcf5ef2aSThomas Huth tcg_temp_free(t1); 724fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B7_4), t0); 725fcf5ef2aSThomas Huth tcg_temp_free(t0); 726fcf5ef2aSThomas Huth } 727fcf5ef2aSThomas Huth return; 728fcf5ef2aSThomas Huth case 0x2009: /* and Rm,Rn */ 729fcf5ef2aSThomas Huth tcg_gen_and_i32(REG(B11_8), REG(B11_8), REG(B7_4)); 730fcf5ef2aSThomas Huth return; 731fcf5ef2aSThomas Huth case 0x3000: /* cmp/eq Rm,Rn */ 732fcf5ef2aSThomas Huth tcg_gen_setcond_i32(TCG_COND_EQ, cpu_sr_t, REG(B11_8), REG(B7_4)); 733fcf5ef2aSThomas Huth return; 734fcf5ef2aSThomas Huth case 0x3003: /* cmp/ge Rm,Rn */ 735fcf5ef2aSThomas Huth tcg_gen_setcond_i32(TCG_COND_GE, cpu_sr_t, REG(B11_8), REG(B7_4)); 736fcf5ef2aSThomas Huth return; 737fcf5ef2aSThomas Huth case 0x3007: /* cmp/gt Rm,Rn */ 738fcf5ef2aSThomas Huth tcg_gen_setcond_i32(TCG_COND_GT, cpu_sr_t, REG(B11_8), REG(B7_4)); 739fcf5ef2aSThomas Huth return; 740fcf5ef2aSThomas Huth case 0x3006: /* cmp/hi Rm,Rn */ 741fcf5ef2aSThomas Huth tcg_gen_setcond_i32(TCG_COND_GTU, cpu_sr_t, REG(B11_8), REG(B7_4)); 742fcf5ef2aSThomas Huth return; 743fcf5ef2aSThomas Huth case 0x3002: /* cmp/hs Rm,Rn */ 744fcf5ef2aSThomas Huth tcg_gen_setcond_i32(TCG_COND_GEU, cpu_sr_t, REG(B11_8), REG(B7_4)); 745fcf5ef2aSThomas Huth return; 746fcf5ef2aSThomas Huth case 0x200c: /* cmp/str Rm,Rn */ 747fcf5ef2aSThomas Huth { 748fcf5ef2aSThomas Huth TCGv cmp1 = tcg_temp_new(); 749fcf5ef2aSThomas Huth TCGv cmp2 = tcg_temp_new(); 750fcf5ef2aSThomas Huth tcg_gen_xor_i32(cmp2, REG(B7_4), REG(B11_8)); 751fcf5ef2aSThomas Huth tcg_gen_subi_i32(cmp1, cmp2, 0x01010101); 752fcf5ef2aSThomas Huth tcg_gen_andc_i32(cmp1, cmp1, cmp2); 753fcf5ef2aSThomas Huth tcg_gen_andi_i32(cmp1, cmp1, 0x80808080); 754fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_NE, cpu_sr_t, cmp1, 0); 755fcf5ef2aSThomas Huth tcg_temp_free(cmp2); 756fcf5ef2aSThomas Huth tcg_temp_free(cmp1); 757fcf5ef2aSThomas Huth } 758fcf5ef2aSThomas Huth return; 759fcf5ef2aSThomas Huth case 0x2007: /* div0s Rm,Rn */ 760fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_q, REG(B11_8), 31); /* SR_Q */ 761fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_m, REG(B7_4), 31); /* SR_M */ 762fcf5ef2aSThomas Huth tcg_gen_xor_i32(cpu_sr_t, cpu_sr_q, cpu_sr_m); /* SR_T */ 763fcf5ef2aSThomas Huth return; 764fcf5ef2aSThomas Huth case 0x3004: /* div1 Rm,Rn */ 765fcf5ef2aSThomas Huth { 766fcf5ef2aSThomas Huth TCGv t0 = tcg_temp_new(); 767fcf5ef2aSThomas Huth TCGv t1 = tcg_temp_new(); 768fcf5ef2aSThomas 
Huth TCGv t2 = tcg_temp_new(); 769fcf5ef2aSThomas Huth TCGv zero = tcg_const_i32(0); 770fcf5ef2aSThomas Huth 771fcf5ef2aSThomas Huth /* shift left arg1, saving the bit being pushed out and inserting 772fcf5ef2aSThomas Huth T on the right */ 773fcf5ef2aSThomas Huth tcg_gen_shri_i32(t0, REG(B11_8), 31); 774fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1); 775fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), REG(B11_8), cpu_sr_t); 776fcf5ef2aSThomas Huth 777fcf5ef2aSThomas Huth /* Add or subtract arg0 from arg1 depending if Q == M. To avoid 778fcf5ef2aSThomas Huth using 64-bit temps, we compute arg0's high part from q ^ m, so 779fcf5ef2aSThomas Huth that it is 0x00000000 when adding the value or 0xffffffff when 780fcf5ef2aSThomas Huth subtracting it. */ 781fcf5ef2aSThomas Huth tcg_gen_xor_i32(t1, cpu_sr_q, cpu_sr_m); 782fcf5ef2aSThomas Huth tcg_gen_subi_i32(t1, t1, 1); 783fcf5ef2aSThomas Huth tcg_gen_neg_i32(t2, REG(B7_4)); 784fcf5ef2aSThomas Huth tcg_gen_movcond_i32(TCG_COND_EQ, t2, t1, zero, REG(B7_4), t2); 785fcf5ef2aSThomas Huth tcg_gen_add2_i32(REG(B11_8), t1, REG(B11_8), zero, t2, t1); 786fcf5ef2aSThomas Huth 787fcf5ef2aSThomas Huth /* compute T and Q depending on carry */ 788fcf5ef2aSThomas Huth tcg_gen_andi_i32(t1, t1, 1); 789fcf5ef2aSThomas Huth tcg_gen_xor_i32(t1, t1, t0); 790fcf5ef2aSThomas Huth tcg_gen_xori_i32(cpu_sr_t, t1, 1); 791fcf5ef2aSThomas Huth tcg_gen_xor_i32(cpu_sr_q, cpu_sr_m, t1); 792fcf5ef2aSThomas Huth 793fcf5ef2aSThomas Huth tcg_temp_free(zero); 794fcf5ef2aSThomas Huth tcg_temp_free(t2); 795fcf5ef2aSThomas Huth tcg_temp_free(t1); 796fcf5ef2aSThomas Huth tcg_temp_free(t0); 797fcf5ef2aSThomas Huth } 798fcf5ef2aSThomas Huth return; 799fcf5ef2aSThomas Huth case 0x300d: /* dmuls.l Rm,Rn */ 800fcf5ef2aSThomas Huth tcg_gen_muls2_i32(cpu_macl, cpu_mach, REG(B7_4), REG(B11_8)); 801fcf5ef2aSThomas Huth return; 802fcf5ef2aSThomas Huth case 0x3005: /* dmulu.l Rm,Rn */ 803fcf5ef2aSThomas Huth tcg_gen_mulu2_i32(cpu_macl, cpu_mach, REG(B7_4), REG(B11_8)); 804fcf5ef2aSThomas Huth return; 805fcf5ef2aSThomas Huth case 0x600e: /* exts.b Rm,Rn */ 806fcf5ef2aSThomas Huth tcg_gen_ext8s_i32(REG(B11_8), REG(B7_4)); 807fcf5ef2aSThomas Huth return; 808fcf5ef2aSThomas Huth case 0x600f: /* exts.w Rm,Rn */ 809fcf5ef2aSThomas Huth tcg_gen_ext16s_i32(REG(B11_8), REG(B7_4)); 810fcf5ef2aSThomas Huth return; 811fcf5ef2aSThomas Huth case 0x600c: /* extu.b Rm,Rn */ 812fcf5ef2aSThomas Huth tcg_gen_ext8u_i32(REG(B11_8), REG(B7_4)); 813fcf5ef2aSThomas Huth return; 814fcf5ef2aSThomas Huth case 0x600d: /* extu.w Rm,Rn */ 815fcf5ef2aSThomas Huth tcg_gen_ext16u_i32(REG(B11_8), REG(B7_4)); 816fcf5ef2aSThomas Huth return; 817fcf5ef2aSThomas Huth case 0x000f: /* mac.l @Rm+,@Rn+ */ 818fcf5ef2aSThomas Huth { 819fcf5ef2aSThomas Huth TCGv arg0, arg1; 820fcf5ef2aSThomas Huth arg0 = tcg_temp_new(); 821fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(arg0, REG(B7_4), ctx->memidx, MO_TESL); 822fcf5ef2aSThomas Huth arg1 = tcg_temp_new(); 823fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(arg1, REG(B11_8), ctx->memidx, MO_TESL); 824fcf5ef2aSThomas Huth gen_helper_macl(cpu_env, arg0, arg1); 825fcf5ef2aSThomas Huth tcg_temp_free(arg1); 826fcf5ef2aSThomas Huth tcg_temp_free(arg0); 827fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4); 828fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 829fcf5ef2aSThomas Huth } 830fcf5ef2aSThomas Huth return; 831fcf5ef2aSThomas Huth case 0x400f: /* mac.w @Rm+,@Rn+ */ 832fcf5ef2aSThomas Huth { 833fcf5ef2aSThomas Huth TCGv arg0, arg1; 834fcf5ef2aSThomas Huth arg0 = 
tcg_temp_new(); 835fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(arg0, REG(B7_4), ctx->memidx, MO_TESL); 836fcf5ef2aSThomas Huth arg1 = tcg_temp_new(); 837fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(arg1, REG(B11_8), ctx->memidx, MO_TESL); 838fcf5ef2aSThomas Huth gen_helper_macw(cpu_env, arg0, arg1); 839fcf5ef2aSThomas Huth tcg_temp_free(arg1); 840fcf5ef2aSThomas Huth tcg_temp_free(arg0); 841fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 2); 842fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2); 843fcf5ef2aSThomas Huth } 844fcf5ef2aSThomas Huth return; 845fcf5ef2aSThomas Huth case 0x0007: /* mul.l Rm,Rn */ 846fcf5ef2aSThomas Huth tcg_gen_mul_i32(cpu_macl, REG(B7_4), REG(B11_8)); 847fcf5ef2aSThomas Huth return; 848fcf5ef2aSThomas Huth case 0x200f: /* muls.w Rm,Rn */ 849fcf5ef2aSThomas Huth { 850fcf5ef2aSThomas Huth TCGv arg0, arg1; 851fcf5ef2aSThomas Huth arg0 = tcg_temp_new(); 852fcf5ef2aSThomas Huth tcg_gen_ext16s_i32(arg0, REG(B7_4)); 853fcf5ef2aSThomas Huth arg1 = tcg_temp_new(); 854fcf5ef2aSThomas Huth tcg_gen_ext16s_i32(arg1, REG(B11_8)); 855fcf5ef2aSThomas Huth tcg_gen_mul_i32(cpu_macl, arg0, arg1); 856fcf5ef2aSThomas Huth tcg_temp_free(arg1); 857fcf5ef2aSThomas Huth tcg_temp_free(arg0); 858fcf5ef2aSThomas Huth } 859fcf5ef2aSThomas Huth return; 860fcf5ef2aSThomas Huth case 0x200e: /* mulu.w Rm,Rn */ 861fcf5ef2aSThomas Huth { 862fcf5ef2aSThomas Huth TCGv arg0, arg1; 863fcf5ef2aSThomas Huth arg0 = tcg_temp_new(); 864fcf5ef2aSThomas Huth tcg_gen_ext16u_i32(arg0, REG(B7_4)); 865fcf5ef2aSThomas Huth arg1 = tcg_temp_new(); 866fcf5ef2aSThomas Huth tcg_gen_ext16u_i32(arg1, REG(B11_8)); 867fcf5ef2aSThomas Huth tcg_gen_mul_i32(cpu_macl, arg0, arg1); 868fcf5ef2aSThomas Huth tcg_temp_free(arg1); 869fcf5ef2aSThomas Huth tcg_temp_free(arg0); 870fcf5ef2aSThomas Huth } 871fcf5ef2aSThomas Huth return; 872fcf5ef2aSThomas Huth case 0x600b: /* neg Rm,Rn */ 873fcf5ef2aSThomas Huth tcg_gen_neg_i32(REG(B11_8), REG(B7_4)); 874fcf5ef2aSThomas Huth return; 875fcf5ef2aSThomas Huth case 0x600a: /* negc Rm,Rn */ 876fcf5ef2aSThomas Huth { 877fcf5ef2aSThomas Huth TCGv t0 = tcg_const_i32(0); 878fcf5ef2aSThomas Huth tcg_gen_add2_i32(REG(B11_8), cpu_sr_t, 879fcf5ef2aSThomas Huth REG(B7_4), t0, cpu_sr_t, t0); 880fcf5ef2aSThomas Huth tcg_gen_sub2_i32(REG(B11_8), cpu_sr_t, 881fcf5ef2aSThomas Huth t0, t0, REG(B11_8), cpu_sr_t); 882fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, cpu_sr_t, 1); 883fcf5ef2aSThomas Huth tcg_temp_free(t0); 884fcf5ef2aSThomas Huth } 885fcf5ef2aSThomas Huth return; 886fcf5ef2aSThomas Huth case 0x6007: /* not Rm,Rn */ 887fcf5ef2aSThomas Huth tcg_gen_not_i32(REG(B11_8), REG(B7_4)); 888fcf5ef2aSThomas Huth return; 889fcf5ef2aSThomas Huth case 0x200b: /* or Rm,Rn */ 890fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), REG(B11_8), REG(B7_4)); 891fcf5ef2aSThomas Huth return; 892fcf5ef2aSThomas Huth case 0x400c: /* shad Rm,Rn */ 893fcf5ef2aSThomas Huth { 894fcf5ef2aSThomas Huth TCGv t0 = tcg_temp_new(); 895fcf5ef2aSThomas Huth TCGv t1 = tcg_temp_new(); 896fcf5ef2aSThomas Huth TCGv t2 = tcg_temp_new(); 897fcf5ef2aSThomas Huth 898fcf5ef2aSThomas Huth tcg_gen_andi_i32(t0, REG(B7_4), 0x1f); 899fcf5ef2aSThomas Huth 900fcf5ef2aSThomas Huth /* positive case: shift to the left */ 901fcf5ef2aSThomas Huth tcg_gen_shl_i32(t1, REG(B11_8), t0); 902fcf5ef2aSThomas Huth 903fcf5ef2aSThomas Huth /* negative case: shift to the right in two steps to 904fcf5ef2aSThomas Huth correctly handle the -32 case */ 905fcf5ef2aSThomas Huth tcg_gen_xori_i32(t0, t0, 0x1f); 906fcf5ef2aSThomas Huth tcg_gen_sar_i32(t2, 
REG(B11_8), t0); 907fcf5ef2aSThomas Huth tcg_gen_sari_i32(t2, t2, 1); 908fcf5ef2aSThomas Huth 909fcf5ef2aSThomas Huth /* select between the two cases */ 910fcf5ef2aSThomas Huth tcg_gen_movi_i32(t0, 0); 911fcf5ef2aSThomas Huth tcg_gen_movcond_i32(TCG_COND_GE, REG(B11_8), REG(B7_4), t0, t1, t2); 912fcf5ef2aSThomas Huth 913fcf5ef2aSThomas Huth tcg_temp_free(t0); 914fcf5ef2aSThomas Huth tcg_temp_free(t1); 915fcf5ef2aSThomas Huth tcg_temp_free(t2); 916fcf5ef2aSThomas Huth } 917fcf5ef2aSThomas Huth return; 918fcf5ef2aSThomas Huth case 0x400d: /* shld Rm,Rn */ 919fcf5ef2aSThomas Huth { 920fcf5ef2aSThomas Huth TCGv t0 = tcg_temp_new(); 921fcf5ef2aSThomas Huth TCGv t1 = tcg_temp_new(); 922fcf5ef2aSThomas Huth TCGv t2 = tcg_temp_new(); 923fcf5ef2aSThomas Huth 924fcf5ef2aSThomas Huth tcg_gen_andi_i32(t0, REG(B7_4), 0x1f); 925fcf5ef2aSThomas Huth 926fcf5ef2aSThomas Huth /* positive case: shift to the left */ 927fcf5ef2aSThomas Huth tcg_gen_shl_i32(t1, REG(B11_8), t0); 928fcf5ef2aSThomas Huth 929fcf5ef2aSThomas Huth /* negative case: shift to the right in two steps to 930fcf5ef2aSThomas Huth correctly handle the -32 case */ 931fcf5ef2aSThomas Huth tcg_gen_xori_i32(t0, t0, 0x1f); 932fcf5ef2aSThomas Huth tcg_gen_shr_i32(t2, REG(B11_8), t0); 933fcf5ef2aSThomas Huth tcg_gen_shri_i32(t2, t2, 1); 934fcf5ef2aSThomas Huth 935fcf5ef2aSThomas Huth /* select between the two cases */ 936fcf5ef2aSThomas Huth tcg_gen_movi_i32(t0, 0); 937fcf5ef2aSThomas Huth tcg_gen_movcond_i32(TCG_COND_GE, REG(B11_8), REG(B7_4), t0, t1, t2); 938fcf5ef2aSThomas Huth 939fcf5ef2aSThomas Huth tcg_temp_free(t0); 940fcf5ef2aSThomas Huth tcg_temp_free(t1); 941fcf5ef2aSThomas Huth tcg_temp_free(t2); 942fcf5ef2aSThomas Huth } 943fcf5ef2aSThomas Huth return; 944fcf5ef2aSThomas Huth case 0x3008: /* sub Rm,Rn */ 945fcf5ef2aSThomas Huth tcg_gen_sub_i32(REG(B11_8), REG(B11_8), REG(B7_4)); 946fcf5ef2aSThomas Huth return; 947fcf5ef2aSThomas Huth case 0x300a: /* subc Rm,Rn */ 948fcf5ef2aSThomas Huth { 949fcf5ef2aSThomas Huth TCGv t0, t1; 950fcf5ef2aSThomas Huth t0 = tcg_const_tl(0); 951fcf5ef2aSThomas Huth t1 = tcg_temp_new(); 952fcf5ef2aSThomas Huth tcg_gen_add2_i32(t1, cpu_sr_t, cpu_sr_t, t0, REG(B7_4), t0); 953fcf5ef2aSThomas Huth tcg_gen_sub2_i32(REG(B11_8), cpu_sr_t, 954fcf5ef2aSThomas Huth REG(B11_8), t0, t1, cpu_sr_t); 955fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, cpu_sr_t, 1); 956fcf5ef2aSThomas Huth tcg_temp_free(t0); 957fcf5ef2aSThomas Huth tcg_temp_free(t1); 958fcf5ef2aSThomas Huth } 959fcf5ef2aSThomas Huth return; 960fcf5ef2aSThomas Huth case 0x300b: /* subv Rm,Rn */ 961fcf5ef2aSThomas Huth { 962fcf5ef2aSThomas Huth TCGv t0, t1, t2; 963fcf5ef2aSThomas Huth t0 = tcg_temp_new(); 964fcf5ef2aSThomas Huth tcg_gen_sub_i32(t0, REG(B11_8), REG(B7_4)); 965fcf5ef2aSThomas Huth t1 = tcg_temp_new(); 966fcf5ef2aSThomas Huth tcg_gen_xor_i32(t1, t0, REG(B7_4)); 967fcf5ef2aSThomas Huth t2 = tcg_temp_new(); 968fcf5ef2aSThomas Huth tcg_gen_xor_i32(t2, REG(B11_8), REG(B7_4)); 969fcf5ef2aSThomas Huth tcg_gen_and_i32(t1, t1, t2); 970fcf5ef2aSThomas Huth tcg_temp_free(t2); 971fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_t, t1, 31); 972fcf5ef2aSThomas Huth tcg_temp_free(t1); 973fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), t0); 974fcf5ef2aSThomas Huth tcg_temp_free(t0); 975fcf5ef2aSThomas Huth } 976fcf5ef2aSThomas Huth return; 977fcf5ef2aSThomas Huth case 0x2008: /* tst Rm,Rn */ 978fcf5ef2aSThomas Huth { 979fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 980fcf5ef2aSThomas Huth tcg_gen_and_i32(val, REG(B7_4), REG(B11_8)); 981fcf5ef2aSThomas Huth 
tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0); 982fcf5ef2aSThomas Huth tcg_temp_free(val); 983fcf5ef2aSThomas Huth } 984fcf5ef2aSThomas Huth return; 985fcf5ef2aSThomas Huth case 0x200a: /* xor Rm,Rn */ 986fcf5ef2aSThomas Huth tcg_gen_xor_i32(REG(B11_8), REG(B11_8), REG(B7_4)); 987fcf5ef2aSThomas Huth return; 988fcf5ef2aSThomas Huth case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */ 989fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 990a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 991bdcb3739SRichard Henderson int xsrc = XHACK(B7_4); 992bdcb3739SRichard Henderson int xdst = XHACK(B11_8); 993bdcb3739SRichard Henderson tcg_gen_mov_i32(FREG(xdst), FREG(xsrc)); 994bdcb3739SRichard Henderson tcg_gen_mov_i32(FREG(xdst + 1), FREG(xsrc + 1)); 995fcf5ef2aSThomas Huth } else { 9967c9f7038SRichard Henderson tcg_gen_mov_i32(FREG(B11_8), FREG(B7_4)); 997fcf5ef2aSThomas Huth } 998fcf5ef2aSThomas Huth return; 999fcf5ef2aSThomas Huth case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */ 1000fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1001a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 10024d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 10034d57fa50SRichard Henderson gen_load_fpr64(ctx, fp, XHACK(B7_4)); 10044d57fa50SRichard Henderson tcg_gen_qemu_st_i64(fp, REG(B11_8), ctx->memidx, MO_TEQ); 10054d57fa50SRichard Henderson tcg_temp_free_i64(fp); 1006fcf5ef2aSThomas Huth } else { 10077c9f7038SRichard Henderson tcg_gen_qemu_st_i32(FREG(B7_4), REG(B11_8), ctx->memidx, MO_TEUL); 1008fcf5ef2aSThomas Huth } 1009fcf5ef2aSThomas Huth return; 1010fcf5ef2aSThomas Huth case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */ 1011fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1012a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 10134d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 10144d57fa50SRichard Henderson tcg_gen_qemu_ld_i64(fp, REG(B7_4), ctx->memidx, MO_TEQ); 10154d57fa50SRichard Henderson gen_store_fpr64(ctx, fp, XHACK(B11_8)); 10164d57fa50SRichard Henderson tcg_temp_free_i64(fp); 1017fcf5ef2aSThomas Huth } else { 10187c9f7038SRichard Henderson tcg_gen_qemu_ld_i32(FREG(B11_8), REG(B7_4), ctx->memidx, MO_TEUL); 1019fcf5ef2aSThomas Huth } 1020fcf5ef2aSThomas Huth return; 1021fcf5ef2aSThomas Huth case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */ 1022fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1023a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 10244d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 10254d57fa50SRichard Henderson tcg_gen_qemu_ld_i64(fp, REG(B7_4), ctx->memidx, MO_TEQ); 10264d57fa50SRichard Henderson gen_store_fpr64(ctx, fp, XHACK(B11_8)); 10274d57fa50SRichard Henderson tcg_temp_free_i64(fp); 1028fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 8); 1029fcf5ef2aSThomas Huth } else { 10307c9f7038SRichard Henderson tcg_gen_qemu_ld_i32(FREG(B11_8), REG(B7_4), ctx->memidx, MO_TEUL); 1031fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4); 1032fcf5ef2aSThomas Huth } 1033fcf5ef2aSThomas Huth return; 1034fcf5ef2aSThomas Huth case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */ 1035fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 10364d57fa50SRichard Henderson { 1037fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new_i32(); 1038a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 10394d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 10404d57fa50SRichard Henderson gen_load_fpr64(ctx, fp, XHACK(B7_4)); 10414d57fa50SRichard Henderson tcg_gen_subi_i32(addr, REG(B11_8), 8); 10424d57fa50SRichard Henderson tcg_gen_qemu_st_i64(fp, addr, 
ctx->memidx, MO_TEQ); 10434d57fa50SRichard Henderson tcg_temp_free_i64(fp); 1044fcf5ef2aSThomas Huth } else { 10454d57fa50SRichard Henderson tcg_gen_subi_i32(addr, REG(B11_8), 4); 10467c9f7038SRichard Henderson tcg_gen_qemu_st_i32(FREG(B7_4), addr, ctx->memidx, MO_TEUL); 1047fcf5ef2aSThomas Huth } 1048fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 1049fcf5ef2aSThomas Huth tcg_temp_free(addr); 10504d57fa50SRichard Henderson } 1051fcf5ef2aSThomas Huth return; 1052fcf5ef2aSThomas Huth case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rm - FPSCR: Nothing */ 1053fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1054fcf5ef2aSThomas Huth { 1055fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new_i32(); 1056fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B7_4), REG(0)); 1057a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 10584d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 10594d57fa50SRichard Henderson tcg_gen_qemu_ld_i64(fp, addr, ctx->memidx, MO_TEQ); 10604d57fa50SRichard Henderson gen_store_fpr64(ctx, fp, XHACK(B11_8)); 10614d57fa50SRichard Henderson tcg_temp_free_i64(fp); 1062fcf5ef2aSThomas Huth } else { 10637c9f7038SRichard Henderson tcg_gen_qemu_ld_i32(FREG(B11_8), addr, ctx->memidx, MO_TEUL); 1064fcf5ef2aSThomas Huth } 1065fcf5ef2aSThomas Huth tcg_temp_free(addr); 1066fcf5ef2aSThomas Huth } 1067fcf5ef2aSThomas Huth return; 1068fcf5ef2aSThomas Huth case 0xf007: /* fmov {F,D,X}Rn,@(R0,Rn) - FPSCR: Nothing */ 1069fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1070fcf5ef2aSThomas Huth { 1071fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1072fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B11_8), REG(0)); 1073a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 10744d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 10754d57fa50SRichard Henderson gen_load_fpr64(ctx, fp, XHACK(B7_4)); 10764d57fa50SRichard Henderson tcg_gen_qemu_st_i64(fp, addr, ctx->memidx, MO_TEQ); 10774d57fa50SRichard Henderson tcg_temp_free_i64(fp); 1078fcf5ef2aSThomas Huth } else { 10797c9f7038SRichard Henderson tcg_gen_qemu_st_i32(FREG(B7_4), addr, ctx->memidx, MO_TEUL); 1080fcf5ef2aSThomas Huth } 1081fcf5ef2aSThomas Huth tcg_temp_free(addr); 1082fcf5ef2aSThomas Huth } 1083fcf5ef2aSThomas Huth return; 1084fcf5ef2aSThomas Huth case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */ 1085fcf5ef2aSThomas Huth case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */ 1086fcf5ef2aSThomas Huth case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */ 1087fcf5ef2aSThomas Huth case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */ 1088fcf5ef2aSThomas Huth case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */ 1089fcf5ef2aSThomas Huth case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */ 1090fcf5ef2aSThomas Huth { 1091fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1092a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 1093fcf5ef2aSThomas Huth TCGv_i64 fp0, fp1; 1094fcf5ef2aSThomas Huth 109593dc9c89SRichard Henderson if (ctx->opcode & 0x0110) { 109693dc9c89SRichard Henderson goto do_illegal; 109793dc9c89SRichard Henderson } 1098fcf5ef2aSThomas Huth fp0 = tcg_temp_new_i64(); 1099fcf5ef2aSThomas Huth fp1 = tcg_temp_new_i64(); 11001e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp0, B11_8); 11011e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp1, B7_4); 1102fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf00f) { 1103fcf5ef2aSThomas Huth case 0xf000: /* fadd Rm,Rn */ 1104fcf5ef2aSThomas Huth gen_helper_fadd_DT(fp0, cpu_env, fp0, fp1); 1105fcf5ef2aSThomas 
Huth break; 1106fcf5ef2aSThomas Huth case 0xf001: /* fsub Rm,Rn */ 1107fcf5ef2aSThomas Huth gen_helper_fsub_DT(fp0, cpu_env, fp0, fp1); 1108fcf5ef2aSThomas Huth break; 1109fcf5ef2aSThomas Huth case 0xf002: /* fmul Rm,Rn */ 1110fcf5ef2aSThomas Huth gen_helper_fmul_DT(fp0, cpu_env, fp0, fp1); 1111fcf5ef2aSThomas Huth break; 1112fcf5ef2aSThomas Huth case 0xf003: /* fdiv Rm,Rn */ 1113fcf5ef2aSThomas Huth gen_helper_fdiv_DT(fp0, cpu_env, fp0, fp1); 1114fcf5ef2aSThomas Huth break; 1115fcf5ef2aSThomas Huth case 0xf004: /* fcmp/eq Rm,Rn */ 111692f1f83eSAurelien Jarno gen_helper_fcmp_eq_DT(cpu_sr_t, cpu_env, fp0, fp1); 1117fcf5ef2aSThomas Huth return; 1118fcf5ef2aSThomas Huth case 0xf005: /* fcmp/gt Rm,Rn */ 111992f1f83eSAurelien Jarno gen_helper_fcmp_gt_DT(cpu_sr_t, cpu_env, fp0, fp1); 1120fcf5ef2aSThomas Huth return; 1121fcf5ef2aSThomas Huth } 11221e0b21d8SRichard Henderson gen_store_fpr64(ctx, fp0, B11_8); 1123fcf5ef2aSThomas Huth tcg_temp_free_i64(fp0); 1124fcf5ef2aSThomas Huth tcg_temp_free_i64(fp1); 1125fcf5ef2aSThomas Huth } else { 1126fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf00f) { 1127fcf5ef2aSThomas Huth case 0xf000: /* fadd Rm,Rn */ 11287c9f7038SRichard Henderson gen_helper_fadd_FT(FREG(B11_8), cpu_env, 11297c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1130fcf5ef2aSThomas Huth break; 1131fcf5ef2aSThomas Huth case 0xf001: /* fsub Rm,Rn */ 11327c9f7038SRichard Henderson gen_helper_fsub_FT(FREG(B11_8), cpu_env, 11337c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1134fcf5ef2aSThomas Huth break; 1135fcf5ef2aSThomas Huth case 0xf002: /* fmul Rm,Rn */ 11367c9f7038SRichard Henderson gen_helper_fmul_FT(FREG(B11_8), cpu_env, 11377c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1138fcf5ef2aSThomas Huth break; 1139fcf5ef2aSThomas Huth case 0xf003: /* fdiv Rm,Rn */ 11407c9f7038SRichard Henderson gen_helper_fdiv_FT(FREG(B11_8), cpu_env, 11417c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1142fcf5ef2aSThomas Huth break; 1143fcf5ef2aSThomas Huth case 0xf004: /* fcmp/eq Rm,Rn */ 114492f1f83eSAurelien Jarno gen_helper_fcmp_eq_FT(cpu_sr_t, cpu_env, 11457c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1146fcf5ef2aSThomas Huth return; 1147fcf5ef2aSThomas Huth case 0xf005: /* fcmp/gt Rm,Rn */ 114892f1f83eSAurelien Jarno gen_helper_fcmp_gt_FT(cpu_sr_t, cpu_env, 11497c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1150fcf5ef2aSThomas Huth return; 1151fcf5ef2aSThomas Huth } 1152fcf5ef2aSThomas Huth } 1153fcf5ef2aSThomas Huth } 1154fcf5ef2aSThomas Huth return; 1155fcf5ef2aSThomas Huth case 0xf00e: /* fmac FR0,RM,Rn */ 1156fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 11577e9f7ca8SRichard Henderson CHECK_FPSCR_PR_0 11587c9f7038SRichard Henderson gen_helper_fmac_FT(FREG(B11_8), cpu_env, 11597c9f7038SRichard Henderson FREG(0), FREG(B7_4), FREG(B11_8)); 1160fcf5ef2aSThomas Huth return; 1161fcf5ef2aSThomas Huth } 1162fcf5ef2aSThomas Huth 1163fcf5ef2aSThomas Huth switch (ctx->opcode & 0xff00) { 1164fcf5ef2aSThomas Huth case 0xc900: /* and #imm,R0 */ 1165fcf5ef2aSThomas Huth tcg_gen_andi_i32(REG(0), REG(0), B7_0); 1166fcf5ef2aSThomas Huth return; 1167fcf5ef2aSThomas Huth case 0xcd00: /* and.b #imm,@(R0,GBR) */ 1168fcf5ef2aSThomas Huth { 1169fcf5ef2aSThomas Huth TCGv addr, val; 1170fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1171fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(0), cpu_gbr); 1172fcf5ef2aSThomas Huth val = tcg_temp_new(); 1173fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB); 1174fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, val, B7_0); 1175fcf5ef2aSThomas Huth 
tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB); 1176fcf5ef2aSThomas Huth tcg_temp_free(val); 1177fcf5ef2aSThomas Huth tcg_temp_free(addr); 1178fcf5ef2aSThomas Huth } 1179fcf5ef2aSThomas Huth return; 1180fcf5ef2aSThomas Huth case 0x8b00: /* bf label */ 1181fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 11824bfa602bSRichard Henderson gen_conditional_jump(ctx, ctx->pc + 4 + B7_0s * 2, false); 1183fcf5ef2aSThomas Huth return; 1184fcf5ef2aSThomas Huth case 0x8f00: /* bf/s label */ 1185fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1186ac9707eaSAurelien Jarno tcg_gen_xori_i32(cpu_delayed_cond, cpu_sr_t, 1); 1187ac9707eaSAurelien Jarno ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2; 1188a6215749SAurelien Jarno ctx->envflags |= DELAY_SLOT_CONDITIONAL; 1189fcf5ef2aSThomas Huth return; 1190fcf5ef2aSThomas Huth case 0x8900: /* bt label */ 1191fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 11924bfa602bSRichard Henderson gen_conditional_jump(ctx, ctx->pc + 4 + B7_0s * 2, true); 1193fcf5ef2aSThomas Huth return; 1194fcf5ef2aSThomas Huth case 0x8d00: /* bt/s label */ 1195fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1196ac9707eaSAurelien Jarno tcg_gen_mov_i32(cpu_delayed_cond, cpu_sr_t); 1197ac9707eaSAurelien Jarno ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2; 1198a6215749SAurelien Jarno ctx->envflags |= DELAY_SLOT_CONDITIONAL; 1199fcf5ef2aSThomas Huth return; 1200fcf5ef2aSThomas Huth case 0x8800: /* cmp/eq #imm,R0 */ 1201fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, REG(0), B7_0s); 1202fcf5ef2aSThomas Huth return; 1203fcf5ef2aSThomas Huth case 0xc400: /* mov.b @(disp,GBR),R0 */ 1204fcf5ef2aSThomas Huth { 1205fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1206fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0); 1207fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_SB); 1208fcf5ef2aSThomas Huth tcg_temp_free(addr); 1209fcf5ef2aSThomas Huth } 1210fcf5ef2aSThomas Huth return; 1211fcf5ef2aSThomas Huth case 0xc500: /* mov.w @(disp,GBR),R0 */ 1212fcf5ef2aSThomas Huth { 1213fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1214fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2); 1215fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_TESW); 1216fcf5ef2aSThomas Huth tcg_temp_free(addr); 1217fcf5ef2aSThomas Huth } 1218fcf5ef2aSThomas Huth return; 1219fcf5ef2aSThomas Huth case 0xc600: /* mov.l @(disp,GBR),R0 */ 1220fcf5ef2aSThomas Huth { 1221fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1222fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4); 1223fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_TESL); 1224fcf5ef2aSThomas Huth tcg_temp_free(addr); 1225fcf5ef2aSThomas Huth } 1226fcf5ef2aSThomas Huth return; 1227fcf5ef2aSThomas Huth case 0xc000: /* mov.b R0,@(disp,GBR) */ 1228fcf5ef2aSThomas Huth { 1229fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1230fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0); 1231fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_UB); 1232fcf5ef2aSThomas Huth tcg_temp_free(addr); 1233fcf5ef2aSThomas Huth } 1234fcf5ef2aSThomas Huth return; 1235fcf5ef2aSThomas Huth case 0xc100: /* mov.w R0,@(disp,GBR) */ 1236fcf5ef2aSThomas Huth { 1237fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1238fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2); 1239fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_TEUW); 1240fcf5ef2aSThomas Huth tcg_temp_free(addr); 1241fcf5ef2aSThomas Huth } 1242fcf5ef2aSThomas Huth return; 1243fcf5ef2aSThomas Huth case 0xc200: /* 
mov.l R0,@(disp,GBR) */ 1244fcf5ef2aSThomas Huth { 1245fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1246fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4); 1247fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_TEUL); 1248fcf5ef2aSThomas Huth tcg_temp_free(addr); 1249fcf5ef2aSThomas Huth } 1250fcf5ef2aSThomas Huth return; 1251fcf5ef2aSThomas Huth case 0x8000: /* mov.b R0,@(disp,Rn) */ 1252fcf5ef2aSThomas Huth { 1253fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1254fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0); 1255fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_UB); 1256fcf5ef2aSThomas Huth tcg_temp_free(addr); 1257fcf5ef2aSThomas Huth } 1258fcf5ef2aSThomas Huth return; 1259fcf5ef2aSThomas Huth case 0x8100: /* mov.w R0,@(disp,Rn) */ 1260fcf5ef2aSThomas Huth { 1261fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1262fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2); 1263fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_TEUW); 1264fcf5ef2aSThomas Huth tcg_temp_free(addr); 1265fcf5ef2aSThomas Huth } 1266fcf5ef2aSThomas Huth return; 1267fcf5ef2aSThomas Huth case 0x8400: /* mov.b @(disp,Rn),R0 */ 1268fcf5ef2aSThomas Huth { 1269fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1270fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0); 1271fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_SB); 1272fcf5ef2aSThomas Huth tcg_temp_free(addr); 1273fcf5ef2aSThomas Huth } 1274fcf5ef2aSThomas Huth return; 1275fcf5ef2aSThomas Huth case 0x8500: /* mov.w @(disp,Rn),R0 */ 1276fcf5ef2aSThomas Huth { 1277fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1278fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2); 1279fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_TESW); 1280fcf5ef2aSThomas Huth tcg_temp_free(addr); 1281fcf5ef2aSThomas Huth } 1282fcf5ef2aSThomas Huth return; 1283fcf5ef2aSThomas Huth case 0xc700: /* mova @(disp,PC),R0 */ 1284fcf5ef2aSThomas Huth tcg_gen_movi_i32(REG(0), ((ctx->pc & 0xfffffffc) + 4 + B7_0 * 4) & ~3); 1285fcf5ef2aSThomas Huth return; 1286fcf5ef2aSThomas Huth case 0xcb00: /* or #imm,R0 */ 1287fcf5ef2aSThomas Huth tcg_gen_ori_i32(REG(0), REG(0), B7_0); 1288fcf5ef2aSThomas Huth return; 1289fcf5ef2aSThomas Huth case 0xcf00: /* or.b #imm,@(R0,GBR) */ 1290fcf5ef2aSThomas Huth { 1291fcf5ef2aSThomas Huth TCGv addr, val; 1292fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1293fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(0), cpu_gbr); 1294fcf5ef2aSThomas Huth val = tcg_temp_new(); 1295fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB); 1296fcf5ef2aSThomas Huth tcg_gen_ori_i32(val, val, B7_0); 1297fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB); 1298fcf5ef2aSThomas Huth tcg_temp_free(val); 1299fcf5ef2aSThomas Huth tcg_temp_free(addr); 1300fcf5ef2aSThomas Huth } 1301fcf5ef2aSThomas Huth return; 1302fcf5ef2aSThomas Huth case 0xc300: /* trapa #imm */ 1303fcf5ef2aSThomas Huth { 1304fcf5ef2aSThomas Huth TCGv imm; 1305fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1306ac9707eaSAurelien Jarno gen_save_cpu_state(ctx, true); 1307fcf5ef2aSThomas Huth imm = tcg_const_i32(B7_0); 1308fcf5ef2aSThomas Huth gen_helper_trapa(cpu_env, imm); 1309fcf5ef2aSThomas Huth tcg_temp_free(imm); 1310*4834871bSRichard Henderson ctx->bstate = DISAS_NORETURN; 1311fcf5ef2aSThomas Huth } 1312fcf5ef2aSThomas Huth return; 1313fcf5ef2aSThomas Huth case 0xc800: /* tst #imm,R0 */ 1314fcf5ef2aSThomas Huth { 
1315fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1316fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, REG(0), B7_0); 1317fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0); 1318fcf5ef2aSThomas Huth tcg_temp_free(val); 1319fcf5ef2aSThomas Huth } 1320fcf5ef2aSThomas Huth return; 1321fcf5ef2aSThomas Huth case 0xcc00: /* tst.b #imm,@(R0,GBR) */ 1322fcf5ef2aSThomas Huth { 1323fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1324fcf5ef2aSThomas Huth tcg_gen_add_i32(val, REG(0), cpu_gbr); 1325fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, val, ctx->memidx, MO_UB); 1326fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, val, B7_0); 1327fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0); 1328fcf5ef2aSThomas Huth tcg_temp_free(val); 1329fcf5ef2aSThomas Huth } 1330fcf5ef2aSThomas Huth return; 1331fcf5ef2aSThomas Huth case 0xca00: /* xor #imm,R0 */ 1332fcf5ef2aSThomas Huth tcg_gen_xori_i32(REG(0), REG(0), B7_0); 1333fcf5ef2aSThomas Huth return; 1334fcf5ef2aSThomas Huth case 0xce00: /* xor.b #imm,@(R0,GBR) */ 1335fcf5ef2aSThomas Huth { 1336fcf5ef2aSThomas Huth TCGv addr, val; 1337fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1338fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(0), cpu_gbr); 1339fcf5ef2aSThomas Huth val = tcg_temp_new(); 1340fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB); 1341fcf5ef2aSThomas Huth tcg_gen_xori_i32(val, val, B7_0); 1342fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB); 1343fcf5ef2aSThomas Huth tcg_temp_free(val); 1344fcf5ef2aSThomas Huth tcg_temp_free(addr); 1345fcf5ef2aSThomas Huth } 1346fcf5ef2aSThomas Huth return; 1347fcf5ef2aSThomas Huth } 1348fcf5ef2aSThomas Huth 1349fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf08f) { 1350fcf5ef2aSThomas Huth case 0x408e: /* ldc Rm,Rn_BANK */ 1351fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1352fcf5ef2aSThomas Huth tcg_gen_mov_i32(ALTREG(B6_4), REG(B11_8)); 1353fcf5ef2aSThomas Huth return; 1354fcf5ef2aSThomas Huth case 0x4087: /* ldc.l @Rm+,Rn_BANK */ 1355fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1356fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(ALTREG(B6_4), REG(B11_8), ctx->memidx, MO_TESL); 1357fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 1358fcf5ef2aSThomas Huth return; 1359fcf5ef2aSThomas Huth case 0x0082: /* stc Rm_BANK,Rn */ 1360fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1361fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), ALTREG(B6_4)); 1362fcf5ef2aSThomas Huth return; 1363fcf5ef2aSThomas Huth case 0x4083: /* stc.l Rm_BANK,@-Rn */ 1364fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1365fcf5ef2aSThomas Huth { 1366fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1367fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 1368fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(ALTREG(B6_4), addr, ctx->memidx, MO_TEUL); 1369fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 1370fcf5ef2aSThomas Huth tcg_temp_free(addr); 1371fcf5ef2aSThomas Huth } 1372fcf5ef2aSThomas Huth return; 1373fcf5ef2aSThomas Huth } 1374fcf5ef2aSThomas Huth 1375fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf0ff) { 1376fcf5ef2aSThomas Huth case 0x0023: /* braf Rn */ 1377fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1378fcf5ef2aSThomas Huth tcg_gen_addi_i32(cpu_delayed_pc, REG(B11_8), ctx->pc + 4); 1379a6215749SAurelien Jarno ctx->envflags |= DELAY_SLOT; 1380fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1381fcf5ef2aSThomas Huth return; 1382fcf5ef2aSThomas Huth case 0x0003: /* bsrf Rn */ 1383fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1384fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_pr, ctx->pc + 4); 
1385fcf5ef2aSThomas Huth tcg_gen_add_i32(cpu_delayed_pc, REG(B11_8), cpu_pr); 1386a6215749SAurelien Jarno ctx->envflags |= DELAY_SLOT; 1387fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1388fcf5ef2aSThomas Huth return; 1389fcf5ef2aSThomas Huth case 0x4015: /* cmp/pl Rn */ 1390fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_GT, cpu_sr_t, REG(B11_8), 0); 1391fcf5ef2aSThomas Huth return; 1392fcf5ef2aSThomas Huth case 0x4011: /* cmp/pz Rn */ 1393fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_GE, cpu_sr_t, REG(B11_8), 0); 1394fcf5ef2aSThomas Huth return; 1395fcf5ef2aSThomas Huth case 0x4010: /* dt Rn */ 1396fcf5ef2aSThomas Huth tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 1); 1397fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, REG(B11_8), 0); 1398fcf5ef2aSThomas Huth return; 1399fcf5ef2aSThomas Huth case 0x402b: /* jmp @Rn */ 1400fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1401fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8)); 1402a6215749SAurelien Jarno ctx->envflags |= DELAY_SLOT; 1403fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1404fcf5ef2aSThomas Huth return; 1405fcf5ef2aSThomas Huth case 0x400b: /* jsr @Rn */ 1406fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1407fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_pr, ctx->pc + 4); 1408fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8)); 1409a6215749SAurelien Jarno ctx->envflags |= DELAY_SLOT; 1410fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1411fcf5ef2aSThomas Huth return; 1412fcf5ef2aSThomas Huth case 0x400e: /* ldc Rm,SR */ 1413fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1414fcf5ef2aSThomas Huth { 1415fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1416fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, REG(B11_8), 0x700083f3); 1417fcf5ef2aSThomas Huth gen_write_sr(val); 1418fcf5ef2aSThomas Huth tcg_temp_free(val); 1419*4834871bSRichard Henderson ctx->bstate = DISAS_STOP; 1420fcf5ef2aSThomas Huth } 1421fcf5ef2aSThomas Huth return; 1422fcf5ef2aSThomas Huth case 0x4007: /* ldc.l @Rm+,SR */ 1423fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1424fcf5ef2aSThomas Huth { 1425fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1426fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, REG(B11_8), ctx->memidx, MO_TESL); 1427fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, val, 0x700083f3); 1428fcf5ef2aSThomas Huth gen_write_sr(val); 1429fcf5ef2aSThomas Huth tcg_temp_free(val); 1430fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 1431*4834871bSRichard Henderson ctx->bstate = DISAS_STOP; 1432fcf5ef2aSThomas Huth } 1433fcf5ef2aSThomas Huth return; 1434fcf5ef2aSThomas Huth case 0x0002: /* stc SR,Rn */ 1435fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1436fcf5ef2aSThomas Huth gen_read_sr(REG(B11_8)); 1437fcf5ef2aSThomas Huth return; 1438fcf5ef2aSThomas Huth case 0x4003: /* stc SR,@-Rn */ 1439fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1440fcf5ef2aSThomas Huth { 1441fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1442fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1443fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 1444fcf5ef2aSThomas Huth gen_read_sr(val); 1445fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_TEUL); 1446fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 1447fcf5ef2aSThomas Huth tcg_temp_free(val); 1448fcf5ef2aSThomas Huth tcg_temp_free(addr); 1449fcf5ef2aSThomas Huth } 1450fcf5ef2aSThomas Huth return; 1451fcf5ef2aSThomas Huth #define LD(reg,ldnum,ldpnum,prechk) \ 1452fcf5ef2aSThomas Huth case ldnum: \ 1453fcf5ef2aSThomas Huth prechk \ 1454fcf5ef2aSThomas Huth tcg_gen_mov_i32 
(cpu_##reg, REG(B11_8)); \ 1455fcf5ef2aSThomas Huth return; \ 1456fcf5ef2aSThomas Huth case ldpnum: \ 1457fcf5ef2aSThomas Huth prechk \ 1458fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(cpu_##reg, REG(B11_8), ctx->memidx, MO_TESL); \ 1459fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); \ 1460fcf5ef2aSThomas Huth return; 1461fcf5ef2aSThomas Huth #define ST(reg,stnum,stpnum,prechk) \ 1462fcf5ef2aSThomas Huth case stnum: \ 1463fcf5ef2aSThomas Huth prechk \ 1464fcf5ef2aSThomas Huth tcg_gen_mov_i32 (REG(B11_8), cpu_##reg); \ 1465fcf5ef2aSThomas Huth return; \ 1466fcf5ef2aSThomas Huth case stpnum: \ 1467fcf5ef2aSThomas Huth prechk \ 1468fcf5ef2aSThomas Huth { \ 1469fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); \ 1470fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); \ 1471fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(cpu_##reg, addr, ctx->memidx, MO_TEUL); \ 1472fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); \ 1473fcf5ef2aSThomas Huth tcg_temp_free(addr); \ 1474fcf5ef2aSThomas Huth } \ 1475fcf5ef2aSThomas Huth return; 1476fcf5ef2aSThomas Huth #define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk) \ 1477fcf5ef2aSThomas Huth LD(reg,ldnum,ldpnum,prechk) \ 1478fcf5ef2aSThomas Huth ST(reg,stnum,stpnum,prechk) 1479fcf5ef2aSThomas Huth LDST(gbr, 0x401e, 0x4017, 0x0012, 0x4013, {}) 1480fcf5ef2aSThomas Huth LDST(vbr, 0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED) 1481fcf5ef2aSThomas Huth LDST(ssr, 0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED) 1482fcf5ef2aSThomas Huth LDST(spc, 0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED) 1483fcf5ef2aSThomas Huth ST(sgr, 0x003a, 0x4032, CHECK_PRIVILEGED) 1484ccae24d4SRichard Henderson LD(sgr, 0x403a, 0x4036, CHECK_PRIVILEGED CHECK_SH4A) 1485fcf5ef2aSThomas Huth LDST(dbr, 0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED) 1486fcf5ef2aSThomas Huth LDST(mach, 0x400a, 0x4006, 0x000a, 0x4002, {}) 1487fcf5ef2aSThomas Huth LDST(macl, 0x401a, 0x4016, 0x001a, 0x4012, {}) 1488fcf5ef2aSThomas Huth LDST(pr, 0x402a, 0x4026, 0x002a, 0x4022, {}) 1489fcf5ef2aSThomas Huth LDST(fpul, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED}) 1490fcf5ef2aSThomas Huth case 0x406a: /* lds Rm,FPSCR */ 1491fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1492fcf5ef2aSThomas Huth gen_helper_ld_fpscr(cpu_env, REG(B11_8)); 1493*4834871bSRichard Henderson ctx->bstate = DISAS_STOP; 1494fcf5ef2aSThomas Huth return; 1495fcf5ef2aSThomas Huth case 0x4066: /* lds.l @Rm+,FPSCR */ 1496fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1497fcf5ef2aSThomas Huth { 1498fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1499fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(addr, REG(B11_8), ctx->memidx, MO_TESL); 1500fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 1501fcf5ef2aSThomas Huth gen_helper_ld_fpscr(cpu_env, addr); 1502fcf5ef2aSThomas Huth tcg_temp_free(addr); 1503*4834871bSRichard Henderson ctx->bstate = DISAS_STOP; 1504fcf5ef2aSThomas Huth } 1505fcf5ef2aSThomas Huth return; 1506fcf5ef2aSThomas Huth case 0x006a: /* sts FPSCR,Rn */ 1507fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1508fcf5ef2aSThomas Huth tcg_gen_andi_i32(REG(B11_8), cpu_fpscr, 0x003fffff); 1509fcf5ef2aSThomas Huth return; 1510fcf5ef2aSThomas Huth case 0x4062: /* sts FPSCR,@-Rn */ 1511fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1512fcf5ef2aSThomas Huth { 1513fcf5ef2aSThomas Huth TCGv addr, val; 1514fcf5ef2aSThomas Huth val = tcg_temp_new(); 1515fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, cpu_fpscr, 0x003fffff); 1516fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1517fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 
1518fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_TEUL); 1519fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 1520fcf5ef2aSThomas Huth tcg_temp_free(addr); 1521fcf5ef2aSThomas Huth tcg_temp_free(val); 1522fcf5ef2aSThomas Huth } 1523fcf5ef2aSThomas Huth return; 1524fcf5ef2aSThomas Huth case 0x00c3: /* movca.l R0,@Rm */ 1525fcf5ef2aSThomas Huth { 1526fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1527fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, REG(B11_8), ctx->memidx, MO_TEUL); 1528fcf5ef2aSThomas Huth gen_helper_movcal(cpu_env, REG(B11_8), val); 1529fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), REG(B11_8), ctx->memidx, MO_TEUL); 1530e691e0edSPhilippe Mathieu-Daudé tcg_temp_free(val); 1531fcf5ef2aSThomas Huth } 1532fcf5ef2aSThomas Huth ctx->has_movcal = 1; 1533fcf5ef2aSThomas Huth return; 1534143021b2SAurelien Jarno case 0x40a9: /* movua.l @Rm,R0 */ 1535ccae24d4SRichard Henderson CHECK_SH4A 1536143021b2SAurelien Jarno /* Load non-boundary-aligned data */ 153734257c21SAurelien Jarno tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, 153834257c21SAurelien Jarno MO_TEUL | MO_UNALN); 1539fcf5ef2aSThomas Huth return; 1540143021b2SAurelien Jarno break; 1541143021b2SAurelien Jarno case 0x40e9: /* movua.l @Rm+,R0 */ 1542ccae24d4SRichard Henderson CHECK_SH4A 1543143021b2SAurelien Jarno /* Load non-boundary-aligned data */ 154434257c21SAurelien Jarno tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, 154534257c21SAurelien Jarno MO_TEUL | MO_UNALN); 1546fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 1547fcf5ef2aSThomas Huth return; 1548143021b2SAurelien Jarno break; 1549fcf5ef2aSThomas Huth case 0x0029: /* movt Rn */ 1550fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), cpu_sr_t); 1551fcf5ef2aSThomas Huth return; 1552fcf5ef2aSThomas Huth case 0x0073: 1553fcf5ef2aSThomas Huth /* MOVCO.L 1554f85da308SRichard Henderson * LDST -> T 1555f85da308SRichard Henderson * If (T == 1) R0 -> (Rn) 1556f85da308SRichard Henderson * 0 -> LDST 1557f85da308SRichard Henderson * 1558f85da308SRichard Henderson * The above description doesn't work in a parallel context. 1559f85da308SRichard Henderson * Since we currently support no smp boards, this implies user-mode. 1560f85da308SRichard Henderson * But we can still support the official mechanism while user-mode 1561f85da308SRichard Henderson * is single-threaded. 
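 *
 * As an illustration only (the register numbers and the constant are
 * invented, not taken from the SH4 manual), a guest atomic increment
 * built on this pair looks roughly like:
 *
 *     retry:  movli.l  @r1, r0      ! load-linked, 1 -> LDST
 *             add      #1, r0
 *             movco.l  r0, @r1      ! store-conditional, T = success
 *             bf       retry        ! retry until the store succeeds
 *
 * Under CF_PARALLEL the store-conditional below is emulated with a
 * cmpxchg against the value remembered by movli.l, so it succeeds
 * whenever the memory word still holds that value.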
*/ 1562ccae24d4SRichard Henderson CHECK_SH4A 1563ccae24d4SRichard Henderson { 1564f85da308SRichard Henderson TCGLabel *fail = gen_new_label(); 1565f85da308SRichard Henderson TCGLabel *done = gen_new_label(); 1566f85da308SRichard Henderson 1567f85da308SRichard Henderson if ((tb_cflags(ctx->tb) & CF_PARALLEL)) { 1568f85da308SRichard Henderson TCGv tmp; 1569f85da308SRichard Henderson 1570f85da308SRichard Henderson tcg_gen_brcond_i32(TCG_COND_NE, REG(B11_8), 1571f85da308SRichard Henderson cpu_lock_addr, fail); 1572f85da308SRichard Henderson tmp = tcg_temp_new(); 1573f85da308SRichard Henderson tcg_gen_atomic_cmpxchg_i32(tmp, REG(B11_8), cpu_lock_value, 1574f85da308SRichard Henderson REG(0), ctx->memidx, MO_TEUL); 1575f85da308SRichard Henderson tcg_gen_setcond_i32(TCG_COND_EQ, cpu_sr_t, tmp, cpu_lock_value); 1576f85da308SRichard Henderson tcg_temp_free(tmp); 1577f85da308SRichard Henderson } else { 1578f85da308SRichard Henderson tcg_gen_brcondi_i32(TCG_COND_EQ, cpu_lock_addr, -1, fail); 1579fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), REG(B11_8), ctx->memidx, MO_TEUL); 1580f85da308SRichard Henderson tcg_gen_movi_i32(cpu_sr_t, 1); 1581ccae24d4SRichard Henderson } 1582f85da308SRichard Henderson tcg_gen_br(done); 1583f85da308SRichard Henderson 1584f85da308SRichard Henderson gen_set_label(fail); 1585f85da308SRichard Henderson tcg_gen_movi_i32(cpu_sr_t, 0); 1586f85da308SRichard Henderson 1587f85da308SRichard Henderson gen_set_label(done); 1588f85da308SRichard Henderson tcg_gen_movi_i32(cpu_lock_addr, -1); 1589f85da308SRichard Henderson } 1590f85da308SRichard Henderson return; 1591fcf5ef2aSThomas Huth case 0x0063: 1592fcf5ef2aSThomas Huth /* MOVLI.L @Rm,R0 1593f85da308SRichard Henderson * 1 -> LDST 1594f85da308SRichard Henderson * (Rm) -> R0 1595f85da308SRichard Henderson * When interrupt/exception 1596f85da308SRichard Henderson * occurred 0 -> LDST 1597f85da308SRichard Henderson * 1598f85da308SRichard Henderson * In a parallel context, we must also save the loaded value 1599f85da308SRichard Henderson * for use with the cmpxchg that we'll use with movco.l. */ 1600ccae24d4SRichard Henderson CHECK_SH4A 1601f85da308SRichard Henderson if ((tb_cflags(ctx->tb) & CF_PARALLEL)) { 1602f85da308SRichard Henderson TCGv tmp = tcg_temp_new(); 1603f85da308SRichard Henderson tcg_gen_mov_i32(tmp, REG(B11_8)); 1604fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, MO_TESL); 1605f85da308SRichard Henderson tcg_gen_mov_i32(cpu_lock_value, REG(0)); 1606f85da308SRichard Henderson tcg_gen_mov_i32(cpu_lock_addr, tmp); 1607f85da308SRichard Henderson tcg_temp_free(tmp); 1608f85da308SRichard Henderson } else { 1609f85da308SRichard Henderson tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, MO_TESL); 1610f85da308SRichard Henderson tcg_gen_movi_i32(cpu_lock_addr, 0); 1611f85da308SRichard Henderson } 1612fcf5ef2aSThomas Huth return; 1613fcf5ef2aSThomas Huth case 0x0093: /* ocbi @Rn */ 1614fcf5ef2aSThomas Huth { 1615fcf5ef2aSThomas Huth gen_helper_ocbi(cpu_env, REG(B11_8)); 1616fcf5ef2aSThomas Huth } 1617fcf5ef2aSThomas Huth return; 1618fcf5ef2aSThomas Huth case 0x00a3: /* ocbp @Rn */ 1619fcf5ef2aSThomas Huth case 0x00b3: /* ocbwb @Rn */ 1620fcf5ef2aSThomas Huth /* These instructions are supposed to do nothing in case of 1621fcf5ef2aSThomas Huth a cache miss. Given that we only partially emulate caches 1622fcf5ef2aSThomas Huth it is safe to simply ignore them. 
*/ 1623fcf5ef2aSThomas Huth return; 1624fcf5ef2aSThomas Huth case 0x0083: /* pref @Rn */ 1625fcf5ef2aSThomas Huth return; 1626fcf5ef2aSThomas Huth case 0x00d3: /* prefi @Rn */ 1627ccae24d4SRichard Henderson CHECK_SH4A 1628fcf5ef2aSThomas Huth return; 1629fcf5ef2aSThomas Huth case 0x00e3: /* icbi @Rn */ 1630ccae24d4SRichard Henderson CHECK_SH4A 1631fcf5ef2aSThomas Huth return; 1632fcf5ef2aSThomas Huth case 0x00ab: /* synco */ 1633ccae24d4SRichard Henderson CHECK_SH4A 1634aa351317SAurelien Jarno tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 1635fcf5ef2aSThomas Huth return; 1636fcf5ef2aSThomas Huth break; 1637fcf5ef2aSThomas Huth case 0x4024: /* rotcl Rn */ 1638fcf5ef2aSThomas Huth { 1639fcf5ef2aSThomas Huth TCGv tmp = tcg_temp_new(); 1640fcf5ef2aSThomas Huth tcg_gen_mov_i32(tmp, cpu_sr_t); 1641fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_t, REG(B11_8), 31); 1642fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1); 1643fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), REG(B11_8), tmp); 1644fcf5ef2aSThomas Huth tcg_temp_free(tmp); 1645fcf5ef2aSThomas Huth } 1646fcf5ef2aSThomas Huth return; 1647fcf5ef2aSThomas Huth case 0x4025: /* rotcr Rn */ 1648fcf5ef2aSThomas Huth { 1649fcf5ef2aSThomas Huth TCGv tmp = tcg_temp_new(); 1650fcf5ef2aSThomas Huth tcg_gen_shli_i32(tmp, cpu_sr_t, 31); 1651fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1652fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1); 1653fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), REG(B11_8), tmp); 1654fcf5ef2aSThomas Huth tcg_temp_free(tmp); 1655fcf5ef2aSThomas Huth } 1656fcf5ef2aSThomas Huth return; 1657fcf5ef2aSThomas Huth case 0x4004: /* rotl Rn */ 1658fcf5ef2aSThomas Huth tcg_gen_rotli_i32(REG(B11_8), REG(B11_8), 1); 1659fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 0); 1660fcf5ef2aSThomas Huth return; 1661fcf5ef2aSThomas Huth case 0x4005: /* rotr Rn */ 1662fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 0); 1663fcf5ef2aSThomas Huth tcg_gen_rotri_i32(REG(B11_8), REG(B11_8), 1); 1664fcf5ef2aSThomas Huth return; 1665fcf5ef2aSThomas Huth case 0x4000: /* shll Rn */ 1666fcf5ef2aSThomas Huth case 0x4020: /* shal Rn */ 1667fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_t, REG(B11_8), 31); 1668fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1); 1669fcf5ef2aSThomas Huth return; 1670fcf5ef2aSThomas Huth case 0x4021: /* shar Rn */ 1671fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1672fcf5ef2aSThomas Huth tcg_gen_sari_i32(REG(B11_8), REG(B11_8), 1); 1673fcf5ef2aSThomas Huth return; 1674fcf5ef2aSThomas Huth case 0x4001: /* shlr Rn */ 1675fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1676fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1); 1677fcf5ef2aSThomas Huth return; 1678fcf5ef2aSThomas Huth case 0x4008: /* shll2 Rn */ 1679fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 2); 1680fcf5ef2aSThomas Huth return; 1681fcf5ef2aSThomas Huth case 0x4018: /* shll8 Rn */ 1682fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 8); 1683fcf5ef2aSThomas Huth return; 1684fcf5ef2aSThomas Huth case 0x4028: /* shll16 Rn */ 1685fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 16); 1686fcf5ef2aSThomas Huth return; 1687fcf5ef2aSThomas Huth case 0x4009: /* shlr2 Rn */ 1688fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 2); 1689fcf5ef2aSThomas Huth return; 1690fcf5ef2aSThomas Huth case 0x4019: /* shlr8 Rn */ 1691fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 8); 1692fcf5ef2aSThomas Huth 
return; 1693fcf5ef2aSThomas Huth case 0x4029: /* shlr16 Rn */ 1694fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 16); 1695fcf5ef2aSThomas Huth return; 1696fcf5ef2aSThomas Huth case 0x401b: /* tas.b @Rn */ 1697fcf5ef2aSThomas Huth { 1698cb32f179SAurelien Jarno TCGv val = tcg_const_i32(0x80); 1699cb32f179SAurelien Jarno tcg_gen_atomic_fetch_or_i32(val, REG(B11_8), val, 1700cb32f179SAurelien Jarno ctx->memidx, MO_UB); 1701fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0); 1702fcf5ef2aSThomas Huth tcg_temp_free(val); 1703fcf5ef2aSThomas Huth } 1704fcf5ef2aSThomas Huth return; 1705fcf5ef2aSThomas Huth case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */ 1706fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17077c9f7038SRichard Henderson tcg_gen_mov_i32(FREG(B11_8), cpu_fpul); 1708fcf5ef2aSThomas Huth return; 1709fcf5ef2aSThomas Huth case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */ 1710fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17117c9f7038SRichard Henderson tcg_gen_mov_i32(cpu_fpul, FREG(B11_8)); 1712fcf5ef2aSThomas Huth return; 1713fcf5ef2aSThomas Huth case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */ 1714fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1715a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 1716fcf5ef2aSThomas Huth TCGv_i64 fp; 171793dc9c89SRichard Henderson if (ctx->opcode & 0x0100) { 171893dc9c89SRichard Henderson goto do_illegal; 171993dc9c89SRichard Henderson } 1720fcf5ef2aSThomas Huth fp = tcg_temp_new_i64(); 1721fcf5ef2aSThomas Huth gen_helper_float_DT(fp, cpu_env, cpu_fpul); 17221e0b21d8SRichard Henderson gen_store_fpr64(ctx, fp, B11_8); 1723fcf5ef2aSThomas Huth tcg_temp_free_i64(fp); 1724fcf5ef2aSThomas Huth } 1725fcf5ef2aSThomas Huth else { 17267c9f7038SRichard Henderson gen_helper_float_FT(FREG(B11_8), cpu_env, cpu_fpul); 1727fcf5ef2aSThomas Huth } 1728fcf5ef2aSThomas Huth return; 1729fcf5ef2aSThomas Huth case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */ 1730fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1731a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 1732fcf5ef2aSThomas Huth TCGv_i64 fp; 173393dc9c89SRichard Henderson if (ctx->opcode & 0x0100) { 173493dc9c89SRichard Henderson goto do_illegal; 173593dc9c89SRichard Henderson } 1736fcf5ef2aSThomas Huth fp = tcg_temp_new_i64(); 17371e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp, B11_8); 1738fcf5ef2aSThomas Huth gen_helper_ftrc_DT(cpu_fpul, cpu_env, fp); 1739fcf5ef2aSThomas Huth tcg_temp_free_i64(fp); 1740fcf5ef2aSThomas Huth } 1741fcf5ef2aSThomas Huth else { 17427c9f7038SRichard Henderson gen_helper_ftrc_FT(cpu_fpul, cpu_env, FREG(B11_8)); 1743fcf5ef2aSThomas Huth } 1744fcf5ef2aSThomas Huth return; 1745fcf5ef2aSThomas Huth case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */ 1746fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17477c9f7038SRichard Henderson tcg_gen_xori_i32(FREG(B11_8), FREG(B11_8), 0x80000000); 1748fcf5ef2aSThomas Huth return; 174957f5c1b0SAurelien Jarno case 0xf05d: /* fabs FRn/DRn - FPCSR: Nothing */ 1750fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17517c9f7038SRichard Henderson tcg_gen_andi_i32(FREG(B11_8), FREG(B11_8), 0x7fffffff); 1752fcf5ef2aSThomas Huth return; 1753fcf5ef2aSThomas Huth case 0xf06d: /* fsqrt FRn */ 1754fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1755a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 175693dc9c89SRichard Henderson if (ctx->opcode & 0x0100) { 175793dc9c89SRichard Henderson goto do_illegal; 175893dc9c89SRichard Henderson } 1759fcf5ef2aSThomas Huth TCGv_i64 fp = tcg_temp_new_i64(); 17601e0b21d8SRichard Henderson 
gen_load_fpr64(ctx, fp, B11_8); 1761fcf5ef2aSThomas Huth gen_helper_fsqrt_DT(fp, cpu_env, fp); 17621e0b21d8SRichard Henderson gen_store_fpr64(ctx, fp, B11_8); 1763fcf5ef2aSThomas Huth tcg_temp_free_i64(fp); 1764fcf5ef2aSThomas Huth } else { 17657c9f7038SRichard Henderson gen_helper_fsqrt_FT(FREG(B11_8), cpu_env, FREG(B11_8)); 1766fcf5ef2aSThomas Huth } 1767fcf5ef2aSThomas Huth return; 1768fcf5ef2aSThomas Huth case 0xf07d: /* fsrra FRn */ 1769fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 177011b7aa23SRichard Henderson CHECK_FPSCR_PR_0 177111b7aa23SRichard Henderson gen_helper_fsrra_FT(FREG(B11_8), cpu_env, FREG(B11_8)); 1772fcf5ef2aSThomas Huth break; 1773fcf5ef2aSThomas Huth case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */ 1774fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17757e9f7ca8SRichard Henderson CHECK_FPSCR_PR_0 17767c9f7038SRichard Henderson tcg_gen_movi_i32(FREG(B11_8), 0); 1777fcf5ef2aSThomas Huth return; 1778fcf5ef2aSThomas Huth case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */ 1779fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17807e9f7ca8SRichard Henderson CHECK_FPSCR_PR_0 17817c9f7038SRichard Henderson tcg_gen_movi_i32(FREG(B11_8), 0x3f800000); 1782fcf5ef2aSThomas Huth return; 1783fcf5ef2aSThomas Huth case 0xf0ad: /* fcnvsd FPUL,DRn */ 1784fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1785fcf5ef2aSThomas Huth { 1786fcf5ef2aSThomas Huth TCGv_i64 fp = tcg_temp_new_i64(); 1787fcf5ef2aSThomas Huth gen_helper_fcnvsd_FT_DT(fp, cpu_env, cpu_fpul); 17881e0b21d8SRichard Henderson gen_store_fpr64(ctx, fp, B11_8); 1789fcf5ef2aSThomas Huth tcg_temp_free_i64(fp); 1790fcf5ef2aSThomas Huth } 1791fcf5ef2aSThomas Huth return; 1792fcf5ef2aSThomas Huth case 0xf0bd: /* fcnvds DRn,FPUL */ 1793fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1794fcf5ef2aSThomas Huth { 1795fcf5ef2aSThomas Huth TCGv_i64 fp = tcg_temp_new_i64(); 17961e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp, B11_8); 1797fcf5ef2aSThomas Huth gen_helper_fcnvds_DT_FT(cpu_fpul, cpu_env, fp); 1798fcf5ef2aSThomas Huth tcg_temp_free_i64(fp); 1799fcf5ef2aSThomas Huth } 1800fcf5ef2aSThomas Huth return; 1801fcf5ef2aSThomas Huth case 0xf0ed: /* fipr FVm,FVn */ 1802fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 18037e9f7ca8SRichard Henderson CHECK_FPSCR_PR_1 18047e9f7ca8SRichard Henderson { 18057e9f7ca8SRichard Henderson TCGv m = tcg_const_i32((ctx->opcode >> 8) & 3); 18067e9f7ca8SRichard Henderson TCGv n = tcg_const_i32((ctx->opcode >> 10) & 3); 1807fcf5ef2aSThomas Huth gen_helper_fipr(cpu_env, m, n); 1808fcf5ef2aSThomas Huth tcg_temp_free(m); 1809fcf5ef2aSThomas Huth tcg_temp_free(n); 1810fcf5ef2aSThomas Huth return; 1811fcf5ef2aSThomas Huth } 1812fcf5ef2aSThomas Huth break; 1813fcf5ef2aSThomas Huth case 0xf0fd: /* ftrv XMTRX,FVn */ 1814fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 18157e9f7ca8SRichard Henderson CHECK_FPSCR_PR_1 18167e9f7ca8SRichard Henderson { 18177e9f7ca8SRichard Henderson if ((ctx->opcode & 0x0300) != 0x0100) { 18187e9f7ca8SRichard Henderson goto do_illegal; 18197e9f7ca8SRichard Henderson } 18207e9f7ca8SRichard Henderson TCGv n = tcg_const_i32((ctx->opcode >> 10) & 3); 1821fcf5ef2aSThomas Huth gen_helper_ftrv(cpu_env, n); 1822fcf5ef2aSThomas Huth tcg_temp_free(n); 1823fcf5ef2aSThomas Huth return; 1824fcf5ef2aSThomas Huth } 1825fcf5ef2aSThomas Huth break; 1826fcf5ef2aSThomas Huth } 1827fcf5ef2aSThomas Huth #if 0 1828fcf5ef2aSThomas Huth fprintf(stderr, "unknown instruction 0x%04x at pc 0x%08x\n", 1829fcf5ef2aSThomas Huth ctx->opcode, ctx->pc); 1830fcf5ef2aSThomas Huth fflush(stderr); 1831fcf5ef2aSThomas Huth #endif 18326b98213dSRichard Henderson do_illegal: 18339a562ae7SAurelien 
Jarno if (ctx->envflags & DELAY_SLOT_MASK) { 1834dec16c6eSRichard Henderson do_illegal_slot: 1835dec16c6eSRichard Henderson gen_save_cpu_state(ctx, true); 1836fcf5ef2aSThomas Huth gen_helper_raise_slot_illegal_instruction(cpu_env); 1837fcf5ef2aSThomas Huth } else { 1838dec16c6eSRichard Henderson gen_save_cpu_state(ctx, true); 1839fcf5ef2aSThomas Huth gen_helper_raise_illegal_instruction(cpu_env); 1840fcf5ef2aSThomas Huth } 1841*4834871bSRichard Henderson ctx->bstate = DISAS_NORETURN; 1842dec4f042SRichard Henderson return; 1843dec4f042SRichard Henderson 1844dec4f042SRichard Henderson do_fpu_disabled: 1845dec4f042SRichard Henderson gen_save_cpu_state(ctx, true); 1846dec4f042SRichard Henderson if (ctx->envflags & DELAY_SLOT_MASK) { 1847dec4f042SRichard Henderson gen_helper_raise_slot_fpu_disable(cpu_env); 1848dec4f042SRichard Henderson } else { 1849dec4f042SRichard Henderson gen_helper_raise_fpu_disable(cpu_env); 1850dec4f042SRichard Henderson } 1851*4834871bSRichard Henderson ctx->bstate = DISAS_NORETURN; 1852dec4f042SRichard Henderson return; 1853fcf5ef2aSThomas Huth } 1854fcf5ef2aSThomas Huth 1855fcf5ef2aSThomas Huth static void decode_opc(DisasContext * ctx) 1856fcf5ef2aSThomas Huth { 1857a6215749SAurelien Jarno uint32_t old_flags = ctx->envflags; 1858fcf5ef2aSThomas Huth 1859fcf5ef2aSThomas Huth _decode_opc(ctx); 1860fcf5ef2aSThomas Huth 18619a562ae7SAurelien Jarno if (old_flags & DELAY_SLOT_MASK) { 1862fcf5ef2aSThomas Huth /* go out of the delay slot */ 18639a562ae7SAurelien Jarno ctx->envflags &= ~DELAY_SLOT_MASK; 18644bfa602bSRichard Henderson 18654bfa602bSRichard Henderson /* When in an exclusive region, we must continue to the end 18664bfa602bSRichard Henderson for conditional branches. */ 18674bfa602bSRichard Henderson if (ctx->tbflags & GUSA_EXCLUSIVE 18684bfa602bSRichard Henderson && old_flags & DELAY_SLOT_CONDITIONAL) { 18694bfa602bSRichard Henderson gen_delayed_conditional_jump(ctx); 18704bfa602bSRichard Henderson return; 18714bfa602bSRichard Henderson } 18724bfa602bSRichard Henderson /* Otherwise this is probably an invalid gUSA region. 18734bfa602bSRichard Henderson Drop the GUSA bits so the next TB doesn't see them. */ 18744bfa602bSRichard Henderson ctx->envflags &= ~GUSA_MASK; 18754bfa602bSRichard Henderson 1876ac9707eaSAurelien Jarno tcg_gen_movi_i32(cpu_flags, ctx->envflags); 1877fcf5ef2aSThomas Huth if (old_flags & DELAY_SLOT_CONDITIONAL) { 1878fcf5ef2aSThomas Huth gen_delayed_conditional_jump(ctx); 1879be53081aSAurelien Jarno } else { 1880fcf5ef2aSThomas Huth gen_jump(ctx); 1881fcf5ef2aSThomas Huth } 18824bfa602bSRichard Henderson } 18834bfa602bSRichard Henderson } 1884fcf5ef2aSThomas Huth 18854bfa602bSRichard Henderson #ifdef CONFIG_USER_ONLY 18864bfa602bSRichard Henderson /* For uniprocessors, SH4 uses optimistic restartable atomic sequences. 18874bfa602bSRichard Henderson Upon an interrupt, a real kernel would simply notice magic values in 18884bfa602bSRichard Henderson the registers and reset the PC to the start of the sequence. 18894bfa602bSRichard Henderson 18904bfa602bSRichard Henderson For QEMU, we cannot do this in quite the same way. Instead, we notice 18914bfa602bSRichard Henderson the normal start of such a sequence (mov #-x,r15). While we can handle 18924bfa602bSRichard Henderson any sequence via cpu_exec_step_atomic, we can recognize the "normal" 18934bfa602bSRichard Henderson sequences and transform them into atomic operations as seen by the host. 
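
   As an illustration (the register numbers and the size are invented,
   not taken from any particular libc), the kind of sequence this refers
   to is roughly:

       mova   1f, r0        ! r0 = address of the end of the region
       mov    r15, r1       ! save the real stack pointer
       mov    #(0f-1f), r15 ! negative region length: gUSA is now active
    0: mov.l  @r2, r3       ! load
       add    r4, r3        ! operate
       mov.l  r3, @r2       ! store
    1: mov    r1, r15       ! restore r15: region complete

   decode_gusa() below picks the region bounds up from these values
   (pc_end and the negative "backup" offset) and, when the load/op/store
   body is recognized, emits a single host atomic operation for it.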
18944bfa602bSRichard Henderson */ 18954bfa602bSRichard Henderson static int decode_gusa(DisasContext *ctx, CPUSH4State *env, int *pmax_insns) 18964bfa602bSRichard Henderson { 1897d6a6cffdSRichard Henderson uint16_t insns[5]; 1898d6a6cffdSRichard Henderson int ld_adr, ld_dst, ld_mop; 1899d6a6cffdSRichard Henderson int op_dst, op_src, op_opc; 1900d6a6cffdSRichard Henderson int mv_src, mt_dst, st_src, st_mop; 1901d6a6cffdSRichard Henderson TCGv op_arg; 1902d6a6cffdSRichard Henderson 19034bfa602bSRichard Henderson uint32_t pc = ctx->pc; 19044bfa602bSRichard Henderson uint32_t pc_end = ctx->tb->cs_base; 19054bfa602bSRichard Henderson int backup = sextract32(ctx->tbflags, GUSA_SHIFT, 8); 19064bfa602bSRichard Henderson int max_insns = (pc_end - pc) / 2; 1907d6a6cffdSRichard Henderson int i; 19084bfa602bSRichard Henderson 19094bfa602bSRichard Henderson if (pc != pc_end + backup || max_insns < 2) { 19104bfa602bSRichard Henderson /* This is a malformed gUSA region. Don't do anything special, 19114bfa602bSRichard Henderson since the interpreter is likely to get confused. */ 19124bfa602bSRichard Henderson ctx->envflags &= ~GUSA_MASK; 19134bfa602bSRichard Henderson return 0; 1914fcf5ef2aSThomas Huth } 19154bfa602bSRichard Henderson 19164bfa602bSRichard Henderson if (ctx->tbflags & GUSA_EXCLUSIVE) { 19174bfa602bSRichard Henderson /* Regardless of single-stepping or the end of the page, 19184bfa602bSRichard Henderson we must complete execution of the gUSA region while 19194bfa602bSRichard Henderson holding the exclusive lock. */ 19204bfa602bSRichard Henderson *pmax_insns = max_insns; 19214bfa602bSRichard Henderson return 0; 1922fcf5ef2aSThomas Huth } 1923fcf5ef2aSThomas Huth 1924d6a6cffdSRichard Henderson /* The state machine below will consume only a few insns. 1925d6a6cffdSRichard Henderson If there are more than that in a region, fail now. */ 1926d6a6cffdSRichard Henderson if (max_insns > ARRAY_SIZE(insns)) { 1927d6a6cffdSRichard Henderson goto fail; 1928d6a6cffdSRichard Henderson } 1929d6a6cffdSRichard Henderson 1930d6a6cffdSRichard Henderson /* Read all of the insns for the region. */ 1931d6a6cffdSRichard Henderson for (i = 0; i < max_insns; ++i) { 1932d6a6cffdSRichard Henderson insns[i] = cpu_lduw_code(env, pc + i * 2); 1933d6a6cffdSRichard Henderson } 1934d6a6cffdSRichard Henderson 1935d6a6cffdSRichard Henderson ld_adr = ld_dst = ld_mop = -1; 1936d6a6cffdSRichard Henderson mv_src = -1; 1937d6a6cffdSRichard Henderson op_dst = op_src = op_opc = -1; 1938d6a6cffdSRichard Henderson mt_dst = -1; 1939d6a6cffdSRichard Henderson st_src = st_mop = -1; 1940d6a6cffdSRichard Henderson TCGV_UNUSED(op_arg); 1941d6a6cffdSRichard Henderson i = 0; 1942d6a6cffdSRichard Henderson 1943d6a6cffdSRichard Henderson #define NEXT_INSN \ 1944d6a6cffdSRichard Henderson do { if (i >= max_insns) goto fail; ctx->opcode = insns[i++]; } while (0) 1945d6a6cffdSRichard Henderson 1946d6a6cffdSRichard Henderson /* 1947d6a6cffdSRichard Henderson * Expect a load to begin the region. 
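     * (Overall, the shape accepted by this state machine is: a load,
     * an optional "mov Rm,Rn", an optional ALU op or a cmp/tst plus
     * conditional branch, and finally a store.  Anything else takes
     * the fail path and is re-run under the exclusive lock.)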
1948d6a6cffdSRichard Henderson */
1949d6a6cffdSRichard Henderson NEXT_INSN;
1950d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) {
1951d6a6cffdSRichard Henderson case 0x6000: /* mov.b @Rm,Rn */
1952d6a6cffdSRichard Henderson ld_mop = MO_SB;
1953d6a6cffdSRichard Henderson break;
1954d6a6cffdSRichard Henderson case 0x6001: /* mov.w @Rm,Rn */
1955d6a6cffdSRichard Henderson ld_mop = MO_TESW;
1956d6a6cffdSRichard Henderson break;
1957d6a6cffdSRichard Henderson case 0x6002: /* mov.l @Rm,Rn */
1958d6a6cffdSRichard Henderson ld_mop = MO_TESL;
1959d6a6cffdSRichard Henderson break;
1960d6a6cffdSRichard Henderson default:
1961d6a6cffdSRichard Henderson goto fail;
1962d6a6cffdSRichard Henderson }
1963d6a6cffdSRichard Henderson ld_adr = B7_4;
1964d6a6cffdSRichard Henderson ld_dst = B11_8;
1965d6a6cffdSRichard Henderson if (ld_adr == ld_dst) {
1966d6a6cffdSRichard Henderson goto fail;
1967d6a6cffdSRichard Henderson }
1968d6a6cffdSRichard Henderson /* Unless we see a mov, any two-operand operation must use ld_dst. */
1969d6a6cffdSRichard Henderson op_dst = ld_dst;
1970d6a6cffdSRichard Henderson
1971d6a6cffdSRichard Henderson /*
1972d6a6cffdSRichard Henderson * Expect an optional register move.
1973d6a6cffdSRichard Henderson */
1974d6a6cffdSRichard Henderson NEXT_INSN;
1975d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) {
1976d6a6cffdSRichard Henderson case 0x6003: /* mov Rm,Rn */
1977d6a6cffdSRichard Henderson /* Here we want to recognize ld_dst being saved for later consumption,
1978d6a6cffdSRichard Henderson or for another input register being copied so that ld_dst need not
1979d6a6cffdSRichard Henderson be clobbered during the operation. */
1980d6a6cffdSRichard Henderson op_dst = B11_8;
1981d6a6cffdSRichard Henderson mv_src = B7_4;
1982d6a6cffdSRichard Henderson if (op_dst == ld_dst) {
1983d6a6cffdSRichard Henderson /* Overwriting the load output. */
1984d6a6cffdSRichard Henderson goto fail;
1985d6a6cffdSRichard Henderson }
1986d6a6cffdSRichard Henderson if (mv_src != ld_dst) {
1987d6a6cffdSRichard Henderson /* Copying a new input; constrain op_src to match the load. */
1988d6a6cffdSRichard Henderson op_src = ld_dst;
1989d6a6cffdSRichard Henderson }
1990d6a6cffdSRichard Henderson break;
1991d6a6cffdSRichard Henderson
1992d6a6cffdSRichard Henderson default:
1993d6a6cffdSRichard Henderson /* Put back and re-examine as operation. */
1994d6a6cffdSRichard Henderson --i;
1995d6a6cffdSRichard Henderson }
1996d6a6cffdSRichard Henderson
1997d6a6cffdSRichard Henderson /*
1998d6a6cffdSRichard Henderson * Expect the operation.
1999d6a6cffdSRichard Henderson */
2000d6a6cffdSRichard Henderson NEXT_INSN;
2001d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) {
2002d6a6cffdSRichard Henderson case 0x300c: /* add Rm,Rn */
2003d6a6cffdSRichard Henderson op_opc = INDEX_op_add_i32;
2004d6a6cffdSRichard Henderson goto do_reg_op;
2005d6a6cffdSRichard Henderson case 0x2009: /* and Rm,Rn */
2006d6a6cffdSRichard Henderson op_opc = INDEX_op_and_i32;
2007d6a6cffdSRichard Henderson goto do_reg_op;
2008d6a6cffdSRichard Henderson case 0x200a: /* xor Rm,Rn */
2009d6a6cffdSRichard Henderson op_opc = INDEX_op_xor_i32;
2010d6a6cffdSRichard Henderson goto do_reg_op;
2011d6a6cffdSRichard Henderson case 0x200b: /* or Rm,Rn */
2012d6a6cffdSRichard Henderson op_opc = INDEX_op_or_i32;
2013d6a6cffdSRichard Henderson do_reg_op:
2014d6a6cffdSRichard Henderson /* The operation register should be as expected, and the
2015d6a6cffdSRichard Henderson other input cannot depend on the load.
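           (Illustrative registers: after "mov.l @r2,r0" the recognizer
           accepts e.g. "add r3,r0", which targets the loaded value, but
           an "add r0,r3", which writes some other register without a
           preceding mov, takes the fail path below.)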
*/
2016d6a6cffdSRichard Henderson if (op_dst != B11_8) {
2017d6a6cffdSRichard Henderson goto fail;
2018d6a6cffdSRichard Henderson }
2019d6a6cffdSRichard Henderson if (op_src < 0) {
2020d6a6cffdSRichard Henderson /* Unconstrained input. */
2021d6a6cffdSRichard Henderson op_src = B7_4;
2022d6a6cffdSRichard Henderson } else if (op_src == B7_4) {
2023d6a6cffdSRichard Henderson /* Constrained input matched load. All operations are
2024d6a6cffdSRichard Henderson commutative; "swap" them by "moving" the load output
2025d6a6cffdSRichard Henderson to the (implicit) first argument and the move source
2026d6a6cffdSRichard Henderson to the (explicit) second argument. */
2027d6a6cffdSRichard Henderson op_src = mv_src;
2028d6a6cffdSRichard Henderson } else {
2029d6a6cffdSRichard Henderson goto fail;
2030d6a6cffdSRichard Henderson }
2031d6a6cffdSRichard Henderson op_arg = REG(op_src);
2032d6a6cffdSRichard Henderson break;
2033d6a6cffdSRichard Henderson
2034d6a6cffdSRichard Henderson case 0x6007: /* not Rm,Rn */
2035d6a6cffdSRichard Henderson if (ld_dst != B7_4 || mv_src >= 0) {
2036d6a6cffdSRichard Henderson goto fail;
2037d6a6cffdSRichard Henderson }
2038d6a6cffdSRichard Henderson op_dst = B11_8;
2039d6a6cffdSRichard Henderson op_opc = INDEX_op_xor_i32;
2040d6a6cffdSRichard Henderson op_arg = tcg_const_i32(-1);
2041d6a6cffdSRichard Henderson break;
2042d6a6cffdSRichard Henderson
2043d6a6cffdSRichard Henderson case 0x7000 ... 0x700f: /* add #imm,Rn */
2044d6a6cffdSRichard Henderson if (op_dst != B11_8 || mv_src >= 0) {
2045d6a6cffdSRichard Henderson goto fail;
2046d6a6cffdSRichard Henderson }
2047d6a6cffdSRichard Henderson op_opc = INDEX_op_add_i32;
2048d6a6cffdSRichard Henderson op_arg = tcg_const_i32(B7_0s);
2049d6a6cffdSRichard Henderson break;
2050d6a6cffdSRichard Henderson
2051d6a6cffdSRichard Henderson case 0x3000: /* cmp/eq Rm,Rn */
2052d6a6cffdSRichard Henderson /* Looking for the middle of a compare-and-swap sequence,
2053d6a6cffdSRichard Henderson beginning with the compare. Operands can be either order,
2054d6a6cffdSRichard Henderson but with only one overlapping the load. */
2055d6a6cffdSRichard Henderson if ((ld_dst == B11_8) + (ld_dst == B7_4) != 1 || mv_src >= 0) {
2056d6a6cffdSRichard Henderson goto fail;
2057d6a6cffdSRichard Henderson }
2058d6a6cffdSRichard Henderson op_opc = INDEX_op_setcond_i32; /* placeholder */
2059d6a6cffdSRichard Henderson op_src = (ld_dst == B11_8 ? B7_4 : B11_8);
2060d6a6cffdSRichard Henderson op_arg = REG(op_src);
2061d6a6cffdSRichard Henderson
2062d6a6cffdSRichard Henderson NEXT_INSN;
2063d6a6cffdSRichard Henderson switch (ctx->opcode & 0xff00) {
2064d6a6cffdSRichard Henderson case 0x8b00: /* bf label */
2065d6a6cffdSRichard Henderson case 0x8f00: /* bf/s label */
2066d6a6cffdSRichard Henderson if (pc + (i + 1 + B7_0s) * 2 != pc_end) {
2067d6a6cffdSRichard Henderson goto fail;
2068d6a6cffdSRichard Henderson }
2069d6a6cffdSRichard Henderson if ((ctx->opcode & 0xff00) == 0x8b00) { /* bf label */
2070d6a6cffdSRichard Henderson break;
2071d6a6cffdSRichard Henderson }
2072d6a6cffdSRichard Henderson /* We're looking to unconditionally modify Rn with the
2073d6a6cffdSRichard Henderson result of the comparison, within the delay slot of
2074d6a6cffdSRichard Henderson the branch. This is used by older gcc.
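              An illustrative instance (registers invented) is:
                  mov.l  @r1,r0
                  cmp/eq r0,r2
                  bf/s   1f
                  movt   r3        ! delay slot: r3 = result of the compare
                  mov.l  r4,@r1
               1:
              which is emitted below as a cmpxchg plus a setcond/movt.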
*/ 2075d6a6cffdSRichard Henderson NEXT_INSN; 2076d6a6cffdSRichard Henderson if ((ctx->opcode & 0xf0ff) == 0x0029) { /* movt Rn */ 2077d6a6cffdSRichard Henderson mt_dst = B11_8; 2078d6a6cffdSRichard Henderson } else { 2079d6a6cffdSRichard Henderson goto fail; 2080d6a6cffdSRichard Henderson } 2081d6a6cffdSRichard Henderson break; 2082d6a6cffdSRichard Henderson 2083d6a6cffdSRichard Henderson default: 2084d6a6cffdSRichard Henderson goto fail; 2085d6a6cffdSRichard Henderson } 2086d6a6cffdSRichard Henderson break; 2087d6a6cffdSRichard Henderson 2088d6a6cffdSRichard Henderson case 0x2008: /* tst Rm,Rn */ 2089d6a6cffdSRichard Henderson /* Looking for a compare-and-swap against zero. */ 2090d6a6cffdSRichard Henderson if (ld_dst != B11_8 || ld_dst != B7_4 || mv_src >= 0) { 2091d6a6cffdSRichard Henderson goto fail; 2092d6a6cffdSRichard Henderson } 2093d6a6cffdSRichard Henderson op_opc = INDEX_op_setcond_i32; 2094d6a6cffdSRichard Henderson op_arg = tcg_const_i32(0); 2095d6a6cffdSRichard Henderson 2096d6a6cffdSRichard Henderson NEXT_INSN; 2097d6a6cffdSRichard Henderson if ((ctx->opcode & 0xff00) != 0x8900 /* bt label */ 2098d6a6cffdSRichard Henderson || pc + (i + 1 + B7_0s) * 2 != pc_end) { 2099d6a6cffdSRichard Henderson goto fail; 2100d6a6cffdSRichard Henderson } 2101d6a6cffdSRichard Henderson break; 2102d6a6cffdSRichard Henderson 2103d6a6cffdSRichard Henderson default: 2104d6a6cffdSRichard Henderson /* Put back and re-examine as store. */ 2105d6a6cffdSRichard Henderson --i; 2106d6a6cffdSRichard Henderson } 2107d6a6cffdSRichard Henderson 2108d6a6cffdSRichard Henderson /* 2109d6a6cffdSRichard Henderson * Expect the store. 2110d6a6cffdSRichard Henderson */ 2111d6a6cffdSRichard Henderson /* The store must be the last insn. */ 2112d6a6cffdSRichard Henderson if (i != max_insns - 1) { 2113d6a6cffdSRichard Henderson goto fail; 2114d6a6cffdSRichard Henderson } 2115d6a6cffdSRichard Henderson NEXT_INSN; 2116d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) { 2117d6a6cffdSRichard Henderson case 0x2000: /* mov.b Rm,@Rn */ 2118d6a6cffdSRichard Henderson st_mop = MO_UB; 2119d6a6cffdSRichard Henderson break; 2120d6a6cffdSRichard Henderson case 0x2001: /* mov.w Rm,@Rn */ 2121d6a6cffdSRichard Henderson st_mop = MO_UW; 2122d6a6cffdSRichard Henderson break; 2123d6a6cffdSRichard Henderson case 0x2002: /* mov.l Rm,@Rn */ 2124d6a6cffdSRichard Henderson st_mop = MO_UL; 2125d6a6cffdSRichard Henderson break; 2126d6a6cffdSRichard Henderson default: 2127d6a6cffdSRichard Henderson goto fail; 2128d6a6cffdSRichard Henderson } 2129d6a6cffdSRichard Henderson /* The store must match the load. */ 2130d6a6cffdSRichard Henderson if (ld_adr != B11_8 || st_mop != (ld_mop & MO_SIZE)) { 2131d6a6cffdSRichard Henderson goto fail; 2132d6a6cffdSRichard Henderson } 2133d6a6cffdSRichard Henderson st_src = B7_4; 2134d6a6cffdSRichard Henderson 2135d6a6cffdSRichard Henderson #undef NEXT_INSN 2136d6a6cffdSRichard Henderson 2137d6a6cffdSRichard Henderson /* 2138d6a6cffdSRichard Henderson * Emit the operation. 2139d6a6cffdSRichard Henderson */ 2140d6a6cffdSRichard Henderson tcg_gen_insn_start(pc, ctx->envflags); 2141d6a6cffdSRichard Henderson switch (op_opc) { 2142d6a6cffdSRichard Henderson case -1: 2143d6a6cffdSRichard Henderson /* No operation found. Look for exchange pattern. 
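       For example (illustrative registers), "mov.l @r1,r0" immediately
       followed by "mov.l r2,@r1" is turned into a single
       tcg_gen_atomic_xchg_i32() below.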
*/ 2144d6a6cffdSRichard Henderson if (st_src == ld_dst || mv_src >= 0) { 2145d6a6cffdSRichard Henderson goto fail; 2146d6a6cffdSRichard Henderson } 2147d6a6cffdSRichard Henderson tcg_gen_atomic_xchg_i32(REG(ld_dst), REG(ld_adr), REG(st_src), 2148d6a6cffdSRichard Henderson ctx->memidx, ld_mop); 2149d6a6cffdSRichard Henderson break; 2150d6a6cffdSRichard Henderson 2151d6a6cffdSRichard Henderson case INDEX_op_add_i32: 2152d6a6cffdSRichard Henderson if (op_dst != st_src) { 2153d6a6cffdSRichard Henderson goto fail; 2154d6a6cffdSRichard Henderson } 2155d6a6cffdSRichard Henderson if (op_dst == ld_dst && st_mop == MO_UL) { 2156d6a6cffdSRichard Henderson tcg_gen_atomic_add_fetch_i32(REG(ld_dst), REG(ld_adr), 2157d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2158d6a6cffdSRichard Henderson } else { 2159d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_add_i32(REG(ld_dst), REG(ld_adr), 2160d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2161d6a6cffdSRichard Henderson if (op_dst != ld_dst) { 2162d6a6cffdSRichard Henderson /* Note that mop sizes < 4 cannot use add_fetch 2163d6a6cffdSRichard Henderson because it won't carry into the higher bits. */ 2164d6a6cffdSRichard Henderson tcg_gen_add_i32(REG(op_dst), REG(ld_dst), op_arg); 2165d6a6cffdSRichard Henderson } 2166d6a6cffdSRichard Henderson } 2167d6a6cffdSRichard Henderson break; 2168d6a6cffdSRichard Henderson 2169d6a6cffdSRichard Henderson case INDEX_op_and_i32: 2170d6a6cffdSRichard Henderson if (op_dst != st_src) { 2171d6a6cffdSRichard Henderson goto fail; 2172d6a6cffdSRichard Henderson } 2173d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 2174d6a6cffdSRichard Henderson tcg_gen_atomic_and_fetch_i32(REG(ld_dst), REG(ld_adr), 2175d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2176d6a6cffdSRichard Henderson } else { 2177d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_and_i32(REG(ld_dst), REG(ld_adr), 2178d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2179d6a6cffdSRichard Henderson tcg_gen_and_i32(REG(op_dst), REG(ld_dst), op_arg); 2180d6a6cffdSRichard Henderson } 2181d6a6cffdSRichard Henderson break; 2182d6a6cffdSRichard Henderson 2183d6a6cffdSRichard Henderson case INDEX_op_or_i32: 2184d6a6cffdSRichard Henderson if (op_dst != st_src) { 2185d6a6cffdSRichard Henderson goto fail; 2186d6a6cffdSRichard Henderson } 2187d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 2188d6a6cffdSRichard Henderson tcg_gen_atomic_or_fetch_i32(REG(ld_dst), REG(ld_adr), 2189d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2190d6a6cffdSRichard Henderson } else { 2191d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_or_i32(REG(ld_dst), REG(ld_adr), 2192d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2193d6a6cffdSRichard Henderson tcg_gen_or_i32(REG(op_dst), REG(ld_dst), op_arg); 2194d6a6cffdSRichard Henderson } 2195d6a6cffdSRichard Henderson break; 2196d6a6cffdSRichard Henderson 2197d6a6cffdSRichard Henderson case INDEX_op_xor_i32: 2198d6a6cffdSRichard Henderson if (op_dst != st_src) { 2199d6a6cffdSRichard Henderson goto fail; 2200d6a6cffdSRichard Henderson } 2201d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 2202d6a6cffdSRichard Henderson tcg_gen_atomic_xor_fetch_i32(REG(ld_dst), REG(ld_adr), 2203d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2204d6a6cffdSRichard Henderson } else { 2205d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_xor_i32(REG(ld_dst), REG(ld_adr), 2206d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2207d6a6cffdSRichard Henderson tcg_gen_xor_i32(REG(op_dst), REG(ld_dst), op_arg); 
2208d6a6cffdSRichard Henderson         }
2209d6a6cffdSRichard Henderson         break;
2210d6a6cffdSRichard Henderson 
2211d6a6cffdSRichard Henderson     case INDEX_op_setcond_i32:
2212d6a6cffdSRichard Henderson         if (st_src == ld_dst) {
2213d6a6cffdSRichard Henderson             goto fail;
2214d6a6cffdSRichard Henderson         }
2215d6a6cffdSRichard Henderson         tcg_gen_atomic_cmpxchg_i32(REG(ld_dst), REG(ld_adr), op_arg,
2216d6a6cffdSRichard Henderson                                    REG(st_src), ctx->memidx, ld_mop);
2217d6a6cffdSRichard Henderson         tcg_gen_setcond_i32(TCG_COND_EQ, cpu_sr_t, REG(ld_dst), op_arg);
2218d6a6cffdSRichard Henderson         if (mt_dst >= 0) {
2219d6a6cffdSRichard Henderson             tcg_gen_mov_i32(REG(mt_dst), cpu_sr_t);
2220d6a6cffdSRichard Henderson         }
2221d6a6cffdSRichard Henderson         break;
2222d6a6cffdSRichard Henderson 
2223d6a6cffdSRichard Henderson     default:
2224d6a6cffdSRichard Henderson         g_assert_not_reached();
2225d6a6cffdSRichard Henderson     }
2226d6a6cffdSRichard Henderson 
2227d6a6cffdSRichard Henderson     /* If op_src is not a valid register, then op_arg was a constant. */
22286d56fc6cSAlex Bennée     if (op_src < 0 && !TCGV_IS_UNUSED(op_arg)) {
2229d6a6cffdSRichard Henderson         tcg_temp_free_i32(op_arg);
2230d6a6cffdSRichard Henderson     }
2231d6a6cffdSRichard Henderson 
2232d6a6cffdSRichard Henderson     /* The entire region has been translated. */
2233d6a6cffdSRichard Henderson     ctx->envflags &= ~GUSA_MASK;
2234d6a6cffdSRichard Henderson     ctx->pc = pc_end;
2235d6a6cffdSRichard Henderson     return max_insns;
2236d6a6cffdSRichard Henderson 
2237d6a6cffdSRichard Henderson  fail:
22384bfa602bSRichard Henderson     qemu_log_mask(LOG_UNIMP, "Unrecognized gUSA sequence %08x-%08x\n",
22394bfa602bSRichard Henderson                   pc, pc_end);
22404bfa602bSRichard Henderson 
22414bfa602bSRichard Henderson     /* Restart with the EXCLUSIVE bit set, within a TB run via
22424bfa602bSRichard Henderson        cpu_exec_step_atomic holding the exclusive lock. */
22434bfa602bSRichard Henderson     tcg_gen_insn_start(pc, ctx->envflags);
22444bfa602bSRichard Henderson     ctx->envflags |= GUSA_EXCLUSIVE;
22454bfa602bSRichard Henderson     gen_save_cpu_state(ctx, false);
22464bfa602bSRichard Henderson     gen_helper_exclusive(cpu_env);
2247*4834871bSRichard Henderson     ctx->bstate = DISAS_NORETURN;
22484bfa602bSRichard Henderson 
22494bfa602bSRichard Henderson     /* We're not executing an instruction, but we must report one for the
22504bfa602bSRichard Henderson        purposes of accounting within the TB. We might as well report the
22514bfa602bSRichard Henderson        entire region consumed via ctx->pc so that it's immediately available
22524bfa602bSRichard Henderson        in the disassembly dump. */
22534bfa602bSRichard Henderson     ctx->pc = pc_end;
22544bfa602bSRichard Henderson     return 1;
22554bfa602bSRichard Henderson }
22564bfa602bSRichard Henderson #endif
22574bfa602bSRichard Henderson 
22589c489ea6SLluís Vilanova void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
2259fcf5ef2aSThomas Huth {
22609c489ea6SLluís Vilanova     CPUSH4State *env = cs->env_ptr;
2261fcf5ef2aSThomas Huth     DisasContext ctx;
2262fcf5ef2aSThomas Huth     target_ulong pc_start;
2263fcf5ef2aSThomas Huth     int num_insns;
2264fcf5ef2aSThomas Huth     int max_insns;
2265fcf5ef2aSThomas Huth 
2266fcf5ef2aSThomas Huth     pc_start = tb->pc;
2267fcf5ef2aSThomas Huth     ctx.pc = pc_start;
2268a6215749SAurelien Jarno     ctx.tbflags = (uint32_t)tb->flags;
2269e1933d14SRichard Henderson     ctx.envflags = tb->flags & TB_FLAG_ENVFLAGS_MASK;
2270*4834871bSRichard Henderson     ctx.bstate = DISAS_NEXT;
2271a6215749SAurelien Jarno     ctx.memidx = (ctx.tbflags & (1u << SR_MD)) == 0 ? 1 : 0;
2272fcf5ef2aSThomas Huth     /* We don't know if the delayed pc came from a dynamic or static branch,
2273fcf5ef2aSThomas Huth        so assume it is a dynamic branch. */
2274fcf5ef2aSThomas Huth     ctx.delayed_pc = -1; /* use delayed pc from env pointer */
2275fcf5ef2aSThomas Huth     ctx.tb = tb;
2276fcf5ef2aSThomas Huth     ctx.singlestep_enabled = cs->singlestep_enabled;
2277fcf5ef2aSThomas Huth     ctx.features = env->features;
2278a6215749SAurelien Jarno     ctx.has_movcal = (ctx.tbflags & TB_FLAG_PENDING_MOVCA);
22793a3bb8d2SRichard Henderson     ctx.gbank = ((ctx.tbflags & (1 << SR_MD)) &&
22803a3bb8d2SRichard Henderson                  (ctx.tbflags & (1 << SR_RB))) * 0x10;
22815c13bad9SRichard Henderson     ctx.fbank = ctx.tbflags & FPSCR_FR ? 0x10 : 0;
2282fcf5ef2aSThomas Huth 
2283c5a49c63SEmilio G. Cota     max_insns = tb_cflags(tb) & CF_COUNT_MASK;
2284fcf5ef2aSThomas Huth     if (max_insns == 0) {
2285fcf5ef2aSThomas Huth         max_insns = CF_COUNT_MASK;
2286fcf5ef2aSThomas Huth     }
22874448a836SRichard Henderson     max_insns = MIN(max_insns, TCG_MAX_INSNS);
22884448a836SRichard Henderson 
22894448a836SRichard Henderson     /* Since the ISA is fixed-width, we can bound by the number
22904448a836SRichard Henderson        of instructions remaining on the page. */
22914448a836SRichard Henderson     num_insns = -(ctx.pc | TARGET_PAGE_MASK) / 2;
22924448a836SRichard Henderson     max_insns = MIN(max_insns, num_insns);
22934448a836SRichard Henderson 
22944448a836SRichard Henderson     /* Single stepping means just that. */
22954448a836SRichard Henderson     if (ctx.singlestep_enabled || singlestep) {
22964448a836SRichard Henderson         max_insns = 1;
2297fcf5ef2aSThomas Huth     }
2298fcf5ef2aSThomas Huth 
2299fcf5ef2aSThomas Huth     gen_tb_start(tb);
23004448a836SRichard Henderson     num_insns = 0;
23014448a836SRichard Henderson 
23024bfa602bSRichard Henderson #ifdef CONFIG_USER_ONLY
23034bfa602bSRichard Henderson     if (ctx.tbflags & GUSA_MASK) {
23044bfa602bSRichard Henderson         num_insns = decode_gusa(&ctx, env, &max_insns);
23054bfa602bSRichard Henderson     }
23064bfa602bSRichard Henderson #endif
23074bfa602bSRichard Henderson 
2308*4834871bSRichard Henderson     while (ctx.bstate == DISAS_NEXT
23094448a836SRichard Henderson            && num_insns < max_insns
23104448a836SRichard Henderson            && !tcg_op_buf_full()) {
2311a6215749SAurelien Jarno         tcg_gen_insn_start(ctx.pc, ctx.envflags);
2312fcf5ef2aSThomas Huth         num_insns++;
2313fcf5ef2aSThomas Huth 
2314fcf5ef2aSThomas Huth         if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
2315fcf5ef2aSThomas Huth             /* We have hit a breakpoint - make sure PC is up-to-date */
2316ac9707eaSAurelien Jarno             gen_save_cpu_state(&ctx, true);
2317fcf5ef2aSThomas Huth             gen_helper_debug(cpu_env);
2318*4834871bSRichard Henderson             ctx.bstate = DISAS_NORETURN;
2319fcf5ef2aSThomas Huth             /* The address covered by the breakpoint must be included in
2320fcf5ef2aSThomas Huth                [tb->pc, tb->pc + tb->size) in order for it to be
2321fcf5ef2aSThomas Huth                properly cleared -- thus we increment the PC here so that
2322fcf5ef2aSThomas Huth                the logic setting tb->size below does the right thing. */
2323fcf5ef2aSThomas Huth             ctx.pc += 2;
2324fcf5ef2aSThomas Huth             break;
2325fcf5ef2aSThomas Huth         }
2326fcf5ef2aSThomas Huth 
2327c5a49c63SEmilio G. Cota         if (num_insns == max_insns && (tb_cflags(tb) & CF_LAST_IO)) {
2328fcf5ef2aSThomas Huth             gen_io_start();
2329fcf5ef2aSThomas Huth         }
2330fcf5ef2aSThomas Huth 
2331fcf5ef2aSThomas Huth         ctx.opcode = cpu_lduw_code(env, ctx.pc);
2332fcf5ef2aSThomas Huth         decode_opc(&ctx);
2333fcf5ef2aSThomas Huth         ctx.pc += 2;
2334fcf5ef2aSThomas Huth     }
2335c5a49c63SEmilio G. Cota     if (tb_cflags(tb) & CF_LAST_IO) {
2336fcf5ef2aSThomas Huth         gen_io_end();
23374448a836SRichard Henderson     }
23384bfa602bSRichard Henderson 
23394bfa602bSRichard Henderson     if (ctx.tbflags & GUSA_EXCLUSIVE) {
23404bfa602bSRichard Henderson         /* Ending the region of exclusivity. Clear the bits. */
23414bfa602bSRichard Henderson         ctx.envflags &= ~GUSA_MASK;
23424bfa602bSRichard Henderson     }
23434bfa602bSRichard Henderson 
2344fcf5ef2aSThomas Huth     if (cs->singlestep_enabled) {
2345ac9707eaSAurelien Jarno         gen_save_cpu_state(&ctx, true);
2346fcf5ef2aSThomas Huth         gen_helper_debug(cpu_env);
2347fcf5ef2aSThomas Huth     } else {
2348fcf5ef2aSThomas Huth         switch (ctx.bstate) {
2349*4834871bSRichard Henderson         case DISAS_STOP:
2350ac9707eaSAurelien Jarno             gen_save_cpu_state(&ctx, true);
23510fc37a8bSAurelien Jarno             tcg_gen_exit_tb(0);
23520fc37a8bSAurelien Jarno             break;
2353*4834871bSRichard Henderson         case DISAS_NEXT:
2354ac9707eaSAurelien Jarno             gen_save_cpu_state(&ctx, false);
2355fcf5ef2aSThomas Huth             gen_goto_tb(&ctx, 0, ctx.pc);
2356fcf5ef2aSThomas Huth             break;
2357*4834871bSRichard Henderson         case DISAS_NORETURN:
2358fcf5ef2aSThomas Huth             break;
2359*4834871bSRichard Henderson         default:
2360*4834871bSRichard Henderson             g_assert_not_reached();
2361fcf5ef2aSThomas Huth         }
2362fcf5ef2aSThomas Huth     }
2363fcf5ef2aSThomas Huth 
2364fcf5ef2aSThomas Huth     gen_tb_end(tb, num_insns);
2365fcf5ef2aSThomas Huth 
2366fcf5ef2aSThomas Huth     tb->size = ctx.pc - pc_start;
2367fcf5ef2aSThomas Huth     tb->icount = num_insns;
2368fcf5ef2aSThomas Huth 
2369fcf5ef2aSThomas Huth #ifdef DEBUG_DISAS
2370fcf5ef2aSThomas Huth     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
2371fcf5ef2aSThomas Huth         && qemu_log_in_addr_range(pc_start)) {
2372fcf5ef2aSThomas Huth         qemu_log_lock();
2373fcf5ef2aSThomas Huth         qemu_log("IN:\n"); /* , lookup_symbol(pc_start)); */
23741d48474dSRichard Henderson         log_target_disas(cs, pc_start, ctx.pc - pc_start);
2375fcf5ef2aSThomas Huth         qemu_log("\n");
2376fcf5ef2aSThomas Huth         qemu_log_unlock();
2377fcf5ef2aSThomas Huth     }
2378fcf5ef2aSThomas Huth #endif
2379fcf5ef2aSThomas Huth }
2380fcf5ef2aSThomas Huth 
2381fcf5ef2aSThomas Huth void restore_state_to_opc(CPUSH4State *env, TranslationBlock *tb,
2382fcf5ef2aSThomas Huth                           target_ulong *data)
2383fcf5ef2aSThomas Huth {
2384fcf5ef2aSThomas Huth     env->pc = data[0];
2385fcf5ef2aSThomas Huth     env->flags = data[1];
2386ac9707eaSAurelien Jarno     /* Theoretically delayed_pc should also be restored. In practice the
2387ac9707eaSAurelien Jarno        branch instruction is re-executed after exception, so the delayed
2388ac9707eaSAurelien Jarno        branch target will be recomputed. */
2389fcf5ef2aSThomas Huth }
2390