/*
 * SH4 translation
 *
 * Copyright (c) 2005 Samuel Tardieu
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#define DEBUG_DISAS

#include "qemu/osdep.h"
#include "cpu.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "tcg/tcg-op.h"
#include "exec/cpu_ldst.h"
#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "exec/translator.h"
#include "trace-tcg.h"
#include "exec/log.h"
#include "qemu/qemu-print.h"


typedef struct DisasContext {
    DisasContextBase base;

    uint32_t tbflags;  /* should stay unmodified during the TB translation */
    uint32_t envflags; /* should stay in sync with env->flags using TCG ops */
    int memidx;
    int gbank;
    int fbank;
    uint32_t delayed_pc;
    uint32_t features;

    uint16_t opcode;

    bool has_movcal;
} DisasContext;

#if defined(CONFIG_USER_ONLY)
#define IS_USER(ctx) 1
#else
#define IS_USER(ctx) (!(ctx->tbflags & (1u << SR_MD)))
#endif

/* Target-specific values for ctx->base.is_jmp. */
/* We want to exit back to the cpu loop for some reason.
   Usually this is to recognize interrupts immediately.  */
#define DISAS_STOP    DISAS_TARGET_0

/* global register indexes */
static TCGv cpu_gregs[32];
static TCGv cpu_sr, cpu_sr_m, cpu_sr_q, cpu_sr_t;
static TCGv cpu_pc, cpu_ssr, cpu_spc, cpu_gbr;
static TCGv cpu_vbr, cpu_sgr, cpu_dbr, cpu_mach, cpu_macl;
static TCGv cpu_pr, cpu_fpscr, cpu_fpul;
static TCGv cpu_lock_addr, cpu_lock_value;
static TCGv cpu_fregs[32];

/* internal register indexes */
static TCGv cpu_flags, cpu_delayed_pc, cpu_delayed_cond;

#include "exec/gen-icount.h"

void sh4_translate_init(void)
{
    int i;
    static const char * const gregnames[24] = {
        "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0",
        "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0",
        "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15",
        "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1",
        "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1"
    };
    static const char * const fregnames[32] = {
        "FPR0_BANK0", "FPR1_BANK0", "FPR2_BANK0", "FPR3_BANK0",
        "FPR4_BANK0", "FPR5_BANK0", "FPR6_BANK0", "FPR7_BANK0",
        "FPR8_BANK0", "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0",
        "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0",
        "FPR0_BANK1", "FPR1_BANK1", "FPR2_BANK1", "FPR3_BANK1",
        "FPR4_BANK1", "FPR5_BANK1", "FPR6_BANK1", "FPR7_BANK1",
        "FPR8_BANK1", "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1",
        "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1",
    };

    for (i = 0; i < 24; i++) {
        cpu_gregs[i] = tcg_global_mem_new_i32(cpu_env,
                                              offsetof(CPUSH4State, gregs[i]),
                                              gregnames[i]);
    }
    memcpy(cpu_gregs + 24, cpu_gregs + 8, 8 * sizeof(TCGv));

    cpu_pc = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUSH4State, pc), "PC");
    cpu_sr = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUSH4State, sr), "SR");
    cpu_sr_m = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUSH4State, sr_m), "SR_M");
    cpu_sr_q = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUSH4State, sr_q), "SR_Q");
    cpu_sr_t = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUSH4State, sr_t), "SR_T");
    cpu_ssr = tcg_global_mem_new_i32(cpu_env,
                                     offsetof(CPUSH4State, ssr), "SSR");
    cpu_spc = tcg_global_mem_new_i32(cpu_env,
                                     offsetof(CPUSH4State, spc), "SPC");
    cpu_gbr = tcg_global_mem_new_i32(cpu_env,
                                     offsetof(CPUSH4State, gbr), "GBR");
    cpu_vbr = tcg_global_mem_new_i32(cpu_env,
                                     offsetof(CPUSH4State, vbr), "VBR");
    cpu_sgr = tcg_global_mem_new_i32(cpu_env,
                                     offsetof(CPUSH4State, sgr), "SGR");
    cpu_dbr = tcg_global_mem_new_i32(cpu_env,
                                     offsetof(CPUSH4State, dbr), "DBR");
    cpu_mach = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUSH4State, mach), "MACH");
    cpu_macl = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUSH4State, macl), "MACL");
    cpu_pr = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUSH4State, pr), "PR");
    cpu_fpscr = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUSH4State, fpscr), "FPSCR");
    cpu_fpul = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUSH4State, fpul), "FPUL");

    cpu_flags = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUSH4State, flags), "_flags_");
    cpu_delayed_pc = tcg_global_mem_new_i32(cpu_env,
                                            offsetof(CPUSH4State, delayed_pc),
                                            "_delayed_pc_");
    cpu_delayed_cond = tcg_global_mem_new_i32(cpu_env,
                                              offsetof(CPUSH4State,
                                                       delayed_cond),
                                              "_delayed_cond_");
    cpu_lock_addr = tcg_global_mem_new_i32(cpu_env,
                                           offsetof(CPUSH4State, lock_addr),
                                           "_lock_addr_");
    cpu_lock_value = tcg_global_mem_new_i32(cpu_env,
                                            offsetof(CPUSH4State, lock_value),
                                            "_lock_value_");

    for (i = 0; i < 32; i++)
        cpu_fregs[i] = tcg_global_mem_new_i32(cpu_env,
                                              offsetof(CPUSH4State, fregs[i]),
                                              fregnames[i]);
}

void superh_cpu_dump_state(CPUState *cs, FILE *f, int flags)
{
    SuperHCPU *cpu = SUPERH_CPU(cs);
    CPUSH4State *env = &cpu->env;
    int i;

    qemu_fprintf(f, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n",
                 env->pc, cpu_read_sr(env), env->pr, env->fpscr);
    qemu_fprintf(f, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n",
                 env->spc, env->ssr, env->gbr, env->vbr);
    qemu_fprintf(f, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n",
                 env->sgr, env->dbr, env->delayed_pc, env->fpul);
    for (i = 0; i < 24; i += 4) {
        qemu_printf("r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n",
                    i, env->gregs[i], i + 1, env->gregs[i + 1],
                    i + 2, env->gregs[i + 2], i + 3, env->gregs[i + 3]);
    }
    if (env->flags & DELAY_SLOT) {
        qemu_printf("in delay slot (delayed_pc=0x%08x)\n",
                    env->delayed_pc);
    } else if (env->flags & DELAY_SLOT_CONDITIONAL) {
        qemu_printf("in conditional delay slot (delayed_pc=0x%08x)\n",
                    env->delayed_pc);
    } else if (env->flags & DELAY_SLOT_RTE) {
        qemu_fprintf(f, "in rte delay slot (delayed_pc=0x%08x)\n",
                     env->delayed_pc);
    }
}
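
/*
 * Added note: SR is kept split across TCG globals.  cpu_sr holds every bit
 * except Q, M and T, which live in cpu_sr_q, cpu_sr_m and cpu_sr_t so they
 * can be updated individually; gen_read_sr() reassembles the architectural
 * value and gen_write_sr() splits it apart again.
 */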
static void gen_read_sr(TCGv dst)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_shli_i32(t0, cpu_sr_q, SR_Q);
    tcg_gen_or_i32(dst, dst, t0);
    tcg_gen_shli_i32(t0, cpu_sr_m, SR_M);
    tcg_gen_or_i32(dst, dst, t0);
    tcg_gen_shli_i32(t0, cpu_sr_t, SR_T);
    tcg_gen_or_i32(dst, cpu_sr, t0);
    tcg_temp_free_i32(t0);
}

static void gen_write_sr(TCGv src)
{
    tcg_gen_andi_i32(cpu_sr, src,
                     ~((1u << SR_Q) | (1u << SR_M) | (1u << SR_T)));
    tcg_gen_extract_i32(cpu_sr_q, src, SR_Q, 1);
    tcg_gen_extract_i32(cpu_sr_m, src, SR_M, 1);
    tcg_gen_extract_i32(cpu_sr_t, src, SR_T, 1);
}

static inline void gen_save_cpu_state(DisasContext *ctx, bool save_pc)
{
    if (save_pc) {
        tcg_gen_movi_i32(cpu_pc, ctx->base.pc_next);
    }
    if (ctx->delayed_pc != (uint32_t) -1) {
        tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
    }
    if ((ctx->tbflags & TB_FLAG_ENVFLAGS_MASK) != ctx->envflags) {
        tcg_gen_movi_i32(cpu_flags, ctx->envflags);
    }
}

static inline bool use_exit_tb(DisasContext *ctx)
{
    return (ctx->tbflags & GUSA_EXCLUSIVE) != 0;
}

static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
{
    /* Use a direct jump if in same page and singlestep not enabled */
    if (unlikely(ctx->base.singlestep_enabled || use_exit_tb(ctx))) {
        return false;
    }
#ifndef CONFIG_USER_ONLY
    return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
#else
    return true;
#endif
}

static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    if (use_goto_tb(ctx, dest)) {
        tcg_gen_goto_tb(n);
        tcg_gen_movi_i32(cpu_pc, dest);
        tcg_gen_exit_tb(ctx->base.tb, n);
    } else {
        tcg_gen_movi_i32(cpu_pc, dest);
        if (ctx->base.singlestep_enabled) {
            gen_helper_debug(cpu_env);
        } else if (use_exit_tb(ctx)) {
            tcg_gen_exit_tb(NULL, 0);
        } else {
            tcg_gen_lookup_and_goto_ptr();
        }
    }
    ctx->base.is_jmp = DISAS_NORETURN;
}

static void gen_jump(DisasContext * ctx)
{
    if (ctx->delayed_pc == -1) {
        /* The target is not statically known; it necessarily comes from a
           delayed jump, since immediate jumps are conditional jumps */
        tcg_gen_mov_i32(cpu_pc, cpu_delayed_pc);
        tcg_gen_discard_i32(cpu_delayed_pc);
        if (ctx->base.singlestep_enabled) {
            gen_helper_debug(cpu_env);
        } else if (use_exit_tb(ctx)) {
            tcg_gen_exit_tb(NULL, 0);
        } else {
            tcg_gen_lookup_and_goto_ptr();
        }
        ctx->base.is_jmp = DISAS_NORETURN;
    } else {
        gen_goto_tb(ctx, 0, ctx->delayed_pc);
    }
}

/* Immediate conditional jump (bt or bf) */
static void gen_conditional_jump(DisasContext *ctx, target_ulong dest,
                                 bool jump_if_true)
{
    TCGLabel *l1 = gen_new_label();
    TCGCond cond_not_taken = jump_if_true ? TCG_COND_EQ : TCG_COND_NE;

    if (ctx->tbflags & GUSA_EXCLUSIVE) {
        /* When in an exclusive region, we must continue to the end.
           Therefore, exit the region on a taken branch, but otherwise
           fall through to the next instruction.  */
        tcg_gen_brcondi_i32(cond_not_taken, cpu_sr_t, 0, l1);
        tcg_gen_movi_i32(cpu_flags, ctx->envflags & ~GUSA_MASK);
        /* Note that this won't actually use a goto_tb opcode because we
           disallow it in use_goto_tb, but it handles exit + singlestep.  */
        gen_goto_tb(ctx, 0, dest);
        gen_set_label(l1);
        ctx->base.is_jmp = DISAS_NEXT;
        return;
    }

    gen_save_cpu_state(ctx, false);
    tcg_gen_brcondi_i32(cond_not_taken, cpu_sr_t, 0, l1);
    gen_goto_tb(ctx, 0, dest);
    gen_set_label(l1);
    gen_goto_tb(ctx, 1, ctx->base.pc_next + 2);
    ctx->base.is_jmp = DISAS_NORETURN;
}

/* Delayed conditional jump (bt or bf) */
static void gen_delayed_conditional_jump(DisasContext * ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGv ds = tcg_temp_new();

    tcg_gen_mov_i32(ds, cpu_delayed_cond);
    tcg_gen_discard_i32(cpu_delayed_cond);

    if (ctx->tbflags & GUSA_EXCLUSIVE) {
        /* When in an exclusive region, we must continue to the end.
           Therefore, exit the region on a taken branch, but otherwise
           fall through to the next instruction.  */
        tcg_gen_brcondi_i32(TCG_COND_EQ, ds, 0, l1);

        /* Leave the gUSA region.  */
        tcg_gen_movi_i32(cpu_flags, ctx->envflags & ~GUSA_MASK);
        gen_jump(ctx);

        gen_set_label(l1);
        ctx->base.is_jmp = DISAS_NEXT;
        return;
    }

    tcg_gen_brcondi_i32(TCG_COND_NE, ds, 0, l1);
    gen_goto_tb(ctx, 1, ctx->base.pc_next + 2);
    gen_set_label(l1);
    gen_jump(ctx);
}

static inline void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    /* We have already signaled illegal instruction for odd Dr.  */
    tcg_debug_assert((reg & 1) == 0);
    reg ^= ctx->fbank;
    tcg_gen_concat_i32_i64(t, cpu_fregs[reg + 1], cpu_fregs[reg]);
}

static inline void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    /* We have already signaled illegal instruction for odd Dr.  */
    tcg_debug_assert((reg & 1) == 0);
    reg ^= ctx->fbank;
    tcg_gen_extr_i64_i32(cpu_fregs[reg + 1], cpu_fregs[reg], t);
}
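
/*
 * Added note: the Bx_y helpers below extract bits x..y of the 16-bit opcode
 * (the "s" variants sign-extend).  REG()/ALTREG() select the active or
 * alternate general register bank by XOR-ing the index with gbank, and
 * FREG() does the same for the FPU banks via fbank.  XHACK() moves the low
 * bit of a 64-bit fmov register designation up to bit 4, so odd encodings
 * address the opposite bank (the XD pairs).
 */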

#define B3_0 (ctx->opcode & 0xf)
#define B6_4 ((ctx->opcode >> 4) & 0x7)
#define B7_4 ((ctx->opcode >> 4) & 0xf)
#define B7_0 (ctx->opcode & 0xff)
#define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff))
#define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \
  (ctx->opcode & 0xfff))
#define B11_8 ((ctx->opcode >> 8) & 0xf)
#define B15_12 ((ctx->opcode >> 12) & 0xf)

#define REG(x)     cpu_gregs[(x) ^ ctx->gbank]
#define ALTREG(x)  cpu_gregs[(x) ^ ctx->gbank ^ 0x10]
#define FREG(x)    cpu_fregs[(x) ^ ctx->fbank]

#define XHACK(x) ((((x) & 1) << 4) | ((x) & 0xe))

#define CHECK_NOT_DELAY_SLOT \
    if (ctx->envflags & DELAY_SLOT_MASK) { \
        goto do_illegal_slot;              \
    }

#define CHECK_PRIVILEGED \
    if (IS_USER(ctx)) {  \
        goto do_illegal; \
    }

#define CHECK_FPU_ENABLED \
    if (ctx->tbflags & (1u << SR_FD)) { \
        goto do_fpu_disabled;           \
    }

#define CHECK_FPSCR_PR_0 \
    if (ctx->tbflags & FPSCR_PR) { \
        goto do_illegal;           \
    }

#define CHECK_FPSCR_PR_1 \
    if (!(ctx->tbflags & FPSCR_PR)) { \
        goto do_illegal;              \
    }

#define CHECK_SH4A \
    if (!(ctx->features & SH_FEATURE_SH4A)) { \
        goto do_illegal;                      \
    }

static void _decode_opc(DisasContext * ctx)
{
    /* This code tries to make movca.l emulation sufficiently
       accurate for Linux purposes.  This instruction writes
       memory, and prior to that, always allocates a cache line.
       It is used in two contexts:
       - in memcpy, where data is copied in blocks, the first write
         to a block uses movca.l for performance.
       - in arch/sh/mm/cache-sh4.c, the movca.l + ocbi combination is
         used to flush the cache.  Here, the data written by movca.l is
         never written to memory, and the data written is just bogus.

       To simulate this, when we emulate movca.l we store the value to
       memory, but we also remember the previous content.  If we see ocbi,
       we check whether a movca.l was done previously for that address.
       If so, the write should not have hit the memory, so we restore the
       previous content.  When we see an instruction that is neither
       movca.l nor ocbi, the previous content is discarded.

       To optimize, we only try to flush stores when we're at the start of
       TB, or if we already saw movca.l in this TB and did not flush stores
       yet.  */
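
    /* Illustrative only: the guest pattern being special-cased is roughly
           movca.l r0,@rN    ; allocate the cache line and write to it
           ...
           ocbi    @rN       ; invalidate the line without write-back
       in which case the value stored by movca.l must never reach memory. */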
    if (ctx->has_movcal)
    {
        int opcode = ctx->opcode & 0xf0ff;
        if (opcode != 0x0093 /* ocbi */
            && opcode != 0x00c3 /* movca.l */)
        {
            gen_helper_discard_movcal_backup(cpu_env);
            ctx->has_movcal = 0;
        }
    }

#if 0
    fprintf(stderr, "Translating opcode 0x%04x\n", ctx->opcode);
#endif

    switch (ctx->opcode) {
    case 0x0019: /* div0u */
        tcg_gen_movi_i32(cpu_sr_m, 0);
        tcg_gen_movi_i32(cpu_sr_q, 0);
        tcg_gen_movi_i32(cpu_sr_t, 0);
        return;
    case 0x000b: /* rts */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_mov_i32(cpu_delayed_pc, cpu_pr);
        ctx->envflags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
        return;
    case 0x0028: /* clrmac */
        tcg_gen_movi_i32(cpu_mach, 0);
        tcg_gen_movi_i32(cpu_macl, 0);
        return;
    case 0x0048: /* clrs */
        tcg_gen_andi_i32(cpu_sr, cpu_sr, ~(1u << SR_S));
        return;
    case 0x0008: /* clrt */
        tcg_gen_movi_i32(cpu_sr_t, 0);
        return;
    case 0x0038: /* ldtlb */
        CHECK_PRIVILEGED
        gen_helper_ldtlb(cpu_env);
        return;
    case 0x002b: /* rte */
        CHECK_PRIVILEGED
        CHECK_NOT_DELAY_SLOT
        gen_write_sr(cpu_ssr);
        tcg_gen_mov_i32(cpu_delayed_pc, cpu_spc);
        ctx->envflags |= DELAY_SLOT_RTE;
        ctx->delayed_pc = (uint32_t) - 1;
        ctx->base.is_jmp = DISAS_STOP;
        return;
    case 0x0058: /* sets */
        tcg_gen_ori_i32(cpu_sr, cpu_sr, (1u << SR_S));
        return;
    case 0x0018: /* sett */
        tcg_gen_movi_i32(cpu_sr_t, 1);
        return;
    case 0xfbfd: /* frchg */
        CHECK_FPSCR_PR_0
        tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_FR);
        ctx->base.is_jmp = DISAS_STOP;
        return;
    case 0xf3fd: /* fschg */
        CHECK_FPSCR_PR_0
        tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_SZ);
        ctx->base.is_jmp = DISAS_STOP;
        return;
    case 0xf7fd: /* fpchg */
        CHECK_SH4A
        tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_PR);
        ctx->base.is_jmp = DISAS_STOP;
        return;
    case 0x0009: /* nop */
        return;
    case 0x001b: /* sleep */
        CHECK_PRIVILEGED
        tcg_gen_movi_i32(cpu_pc, ctx->base.pc_next + 2);
        gen_helper_sleep(cpu_env);
        return;
    }

    switch (ctx->opcode & 0xf000) {
    case 0x1000: /* mov.l Rm,@(disp,Rn) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, REG(B11_8), B3_0 * 4);
            tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_TEUL);
            tcg_temp_free(addr);
        }
        return;
    case 0x5000: /* mov.l @(disp,Rm),Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 4);
            tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESL);
            tcg_temp_free(addr);
        }
        return;
    case 0xe000: /* mov #imm,Rn */
#ifdef CONFIG_USER_ONLY
        /* Detect the start of a gUSA region.  If so, update envflags
           and end the TB.  This will allow us to see the end of the
           region (stored in R0) in the next TB.  */
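        /* Illustrative only: in a gUSA restartable sequence the guest loads
           r15 with the negative length of the critical section and r0 with
           the address just past its end, so a "mov #imm,r15" with a negative
           immediate marks the start of such a region. */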
        if (B11_8 == 15 && B7_0s < 0 &&
            (tb_cflags(ctx->base.tb) & CF_PARALLEL)) {
            ctx->envflags = deposit32(ctx->envflags, GUSA_SHIFT, 8, B7_0s);
            ctx->base.is_jmp = DISAS_STOP;
        }
#endif
        tcg_gen_movi_i32(REG(B11_8), B7_0s);
        return;
    case 0x9000: /* mov.w @(disp,PC),Rn */
        {
            TCGv addr = tcg_const_i32(ctx->base.pc_next + 4 + B7_0 * 2);
            tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESW);
            tcg_temp_free(addr);
        }
        return;
    case 0xd000: /* mov.l @(disp,PC),Rn */
        {
            TCGv addr = tcg_const_i32((ctx->base.pc_next + 4 + B7_0 * 4) & ~3);
            tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESL);
            tcg_temp_free(addr);
        }
        return;
    case 0x7000: /* add #imm,Rn */
        tcg_gen_addi_i32(REG(B11_8), REG(B11_8), B7_0s);
        return;
    case 0xa000: /* bra disp */
        CHECK_NOT_DELAY_SLOT
        ctx->delayed_pc = ctx->base.pc_next + 4 + B11_0s * 2;
        ctx->envflags |= DELAY_SLOT;
        return;
    case 0xb000: /* bsr disp */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_movi_i32(cpu_pr, ctx->base.pc_next + 4);
        ctx->delayed_pc = ctx->base.pc_next + 4 + B11_0s * 2;
        ctx->envflags |= DELAY_SLOT;
        return;
    }

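    /* Two-operand register forms: Rm is encoded in bits 7:4 (B7_4) and
       Rn in bits 11:8 (B11_8); the FPU forms use the same fields as
       FRm/FRn. */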
    switch (ctx->opcode & 0xf00f) {
    case 0x6003: /* mov Rm,Rn */
        tcg_gen_mov_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x2000: /* mov.b Rm,@Rn */
        tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, MO_UB);
        return;
    case 0x2001: /* mov.w Rm,@Rn */
        tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, MO_TEUW);
        return;
    case 0x2002: /* mov.l Rm,@Rn */
        tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, MO_TEUL);
        return;
    case 0x6000: /* mov.b @Rm,Rn */
        tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_SB);
        return;
    case 0x6001: /* mov.w @Rm,Rn */
        tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_TESW);
        return;
    case 0x6002: /* mov.l @Rm,Rn */
        tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_TESL);
        return;
    case 0x2004: /* mov.b Rm,@-Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 1);
            /* might cause re-execution */
            tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_UB);
            tcg_gen_mov_i32(REG(B11_8), addr); /* modify register status */
            tcg_temp_free(addr);
        }
        return;
    case 0x2005: /* mov.w Rm,@-Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 2);
            tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_TEUW);
            tcg_gen_mov_i32(REG(B11_8), addr);
            tcg_temp_free(addr);
        }
        return;
    case 0x2006: /* mov.l Rm,@-Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 4);
            tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_TEUL);
            tcg_gen_mov_i32(REG(B11_8), addr);
            tcg_temp_free(addr);
        }
        return;
    case 0x6004: /* mov.b @Rm+,Rn */
        tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_SB);
        if ( B11_8 != B7_4 )
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 1);
        return;
    case 0x6005: /* mov.w @Rm+,Rn */
        tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_TESW);
        if ( B11_8 != B7_4 )
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
        return;
    case 0x6006: /* mov.l @Rm+,Rn */
        tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_TESL);
        if ( B11_8 != B7_4 )
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
        return;
    case 0x0004: /* mov.b Rm,@(R0,Rn) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B11_8), REG(0));
            tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_UB);
            tcg_temp_free(addr);
        }
        return;
    case 0x0005: /* mov.w Rm,@(R0,Rn) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B11_8), REG(0));
            tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_TEUW);
            tcg_temp_free(addr);
        }
        return;
    case 0x0006: /* mov.l Rm,@(R0,Rn) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B11_8), REG(0));
            tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_TEUL);
            tcg_temp_free(addr);
        }
        return;
    case 0x000c: /* mov.b @(R0,Rm),Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B7_4), REG(0));
            tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_SB);
            tcg_temp_free(addr);
        }
        return;
    case 0x000d: /* mov.w @(R0,Rm),Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B7_4), REG(0));
            tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESW);
            tcg_temp_free(addr);
        }
        return;
    case 0x000e: /* mov.l @(R0,Rm),Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B7_4), REG(0));
            tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESL);
            tcg_temp_free(addr);
        }
        return;
    case 0x6008: /* swap.b Rm,Rn */
        {
            TCGv low = tcg_temp_new();
            tcg_gen_ext16u_i32(low, REG(B7_4));
            tcg_gen_bswap16_i32(low, low);
            tcg_gen_deposit_i32(REG(B11_8), REG(B7_4), low, 0, 16);
            tcg_temp_free(low);
        }
        return;
    case 0x6009: /* swap.w Rm,Rn */
        tcg_gen_rotli_i32(REG(B11_8), REG(B7_4), 16);
        return;
    case 0x200d: /* xtrct Rm,Rn */
        {
            TCGv high, low;
            high = tcg_temp_new();
            tcg_gen_shli_i32(high, REG(B7_4), 16);
            low = tcg_temp_new();
            tcg_gen_shri_i32(low, REG(B11_8), 16);
            tcg_gen_or_i32(REG(B11_8), high, low);
            tcg_temp_free(low);
            tcg_temp_free(high);
        }
        return;
    case 0x300c: /* add Rm,Rn */
        tcg_gen_add_i32(REG(B11_8), REG(B11_8), REG(B7_4));
        return;
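    /* addc/subc (and negc below) track the carry/borrow through SR.T by
       chaining two 32-bit add2/sub2 operations instead of widening to
       64 bits. */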
    case 0x300e: /* addc Rm,Rn */
        {
            TCGv t0, t1;
            t0 = tcg_const_tl(0);
            t1 = tcg_temp_new();
            tcg_gen_add2_i32(t1, cpu_sr_t, cpu_sr_t, t0, REG(B7_4), t0);
            tcg_gen_add2_i32(REG(B11_8), cpu_sr_t,
                             REG(B11_8), t0, t1, cpu_sr_t);
            tcg_temp_free(t0);
            tcg_temp_free(t1);
        }
        return;
    case 0x300f: /* addv Rm,Rn */
        {
            TCGv t0, t1, t2;
            t0 = tcg_temp_new();
            tcg_gen_add_i32(t0, REG(B7_4), REG(B11_8));
            t1 = tcg_temp_new();
            tcg_gen_xor_i32(t1, t0, REG(B11_8));
            t2 = tcg_temp_new();
            tcg_gen_xor_i32(t2, REG(B7_4), REG(B11_8));
            tcg_gen_andc_i32(cpu_sr_t, t1, t2);
            tcg_temp_free(t2);
            tcg_gen_shri_i32(cpu_sr_t, cpu_sr_t, 31);
            tcg_temp_free(t1);
            tcg_gen_mov_i32(REG(B7_4), t0);
            tcg_temp_free(t0);
        }
        return;
    case 0x2009: /* and Rm,Rn */
        tcg_gen_and_i32(REG(B11_8), REG(B11_8), REG(B7_4));
        return;
    case 0x3000: /* cmp/eq Rm,Rn */
        tcg_gen_setcond_i32(TCG_COND_EQ, cpu_sr_t, REG(B11_8), REG(B7_4));
        return;
    case 0x3003: /* cmp/ge Rm,Rn */
        tcg_gen_setcond_i32(TCG_COND_GE, cpu_sr_t, REG(B11_8), REG(B7_4));
        return;
    case 0x3007: /* cmp/gt Rm,Rn */
        tcg_gen_setcond_i32(TCG_COND_GT, cpu_sr_t, REG(B11_8), REG(B7_4));
        return;
    case 0x3006: /* cmp/hi Rm,Rn */
        tcg_gen_setcond_i32(TCG_COND_GTU, cpu_sr_t, REG(B11_8), REG(B7_4));
        return;
    case 0x3002: /* cmp/hs Rm,Rn */
        tcg_gen_setcond_i32(TCG_COND_GEU, cpu_sr_t, REG(B11_8), REG(B7_4));
        return;
    case 0x200c: /* cmp/str Rm,Rn */
        {
            TCGv cmp1 = tcg_temp_new();
            TCGv cmp2 = tcg_temp_new();
            tcg_gen_xor_i32(cmp2, REG(B7_4), REG(B11_8));
            tcg_gen_subi_i32(cmp1, cmp2, 0x01010101);
            tcg_gen_andc_i32(cmp1, cmp1, cmp2);
            tcg_gen_andi_i32(cmp1, cmp1, 0x80808080);
            tcg_gen_setcondi_i32(TCG_COND_NE, cpu_sr_t, cmp1, 0);
            tcg_temp_free(cmp2);
            tcg_temp_free(cmp1);
        }
        return;
    case 0x2007: /* div0s Rm,Rn */
        tcg_gen_shri_i32(cpu_sr_q, REG(B11_8), 31);     /* SR_Q */
        tcg_gen_shri_i32(cpu_sr_m, REG(B7_4), 31);      /* SR_M */
        tcg_gen_xor_i32(cpu_sr_t, cpu_sr_q, cpu_sr_m);  /* SR_T */
        return;
    case 0x3004: /* div1 Rm,Rn */
        {
            TCGv t0 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            TCGv zero = tcg_const_i32(0);

            /* shift left arg1, saving the bit being pushed out and inserting
               T on the right */
            tcg_gen_shri_i32(t0, REG(B11_8), 31);
            tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
            tcg_gen_or_i32(REG(B11_8), REG(B11_8), cpu_sr_t);

            /* Add or subtract arg0 from arg1 depending on whether Q == M.
               To avoid using 64-bit temps, we compute arg0's high part from
               q ^ m, so that it is 0x00000000 when adding the value or
               0xffffffff when subtracting it.  */
            tcg_gen_xor_i32(t1, cpu_sr_q, cpu_sr_m);
            tcg_gen_subi_i32(t1, t1, 1);
            tcg_gen_neg_i32(t2, REG(B7_4));
            tcg_gen_movcond_i32(TCG_COND_EQ, t2, t1, zero, REG(B7_4), t2);
            tcg_gen_add2_i32(REG(B11_8), t1, REG(B11_8), zero, t2, t1);

            /* compute T and Q depending on carry */
            tcg_gen_andi_i32(t1, t1, 1);
            tcg_gen_xor_i32(t1, t1, t0);
            tcg_gen_xori_i32(cpu_sr_t, t1, 1);
            tcg_gen_xor_i32(cpu_sr_q, cpu_sr_m, t1);

            tcg_temp_free(zero);
            tcg_temp_free(t2);
            tcg_temp_free(t1);
            tcg_temp_free(t0);
        }
        return;
    case 0x300d: /* dmuls.l Rm,Rn */
        tcg_gen_muls2_i32(cpu_macl, cpu_mach, REG(B7_4), REG(B11_8));
        return;
    case 0x3005: /* dmulu.l Rm,Rn */
        tcg_gen_mulu2_i32(cpu_macl, cpu_mach, REG(B7_4), REG(B11_8));
        return;
    case 0x600e: /* exts.b Rm,Rn */
        tcg_gen_ext8s_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x600f: /* exts.w Rm,Rn */
        tcg_gen_ext16s_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x600c: /* extu.b Rm,Rn */
        tcg_gen_ext8u_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x600d: /* extu.w Rm,Rn */
        tcg_gen_ext16u_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x000f: /* mac.l @Rm+,@Rn+ */
        {
            TCGv arg0, arg1;
            arg0 = tcg_temp_new();
            tcg_gen_qemu_ld_i32(arg0, REG(B7_4), ctx->memidx, MO_TESL);
            arg1 = tcg_temp_new();
            tcg_gen_qemu_ld_i32(arg1, REG(B11_8), ctx->memidx, MO_TESL);
            gen_helper_macl(cpu_env, arg0, arg1);
            tcg_temp_free(arg1);
            tcg_temp_free(arg0);
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
            tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
        }
        return;
    case 0x400f: /* mac.w @Rm+,@Rn+ */
        {
            TCGv arg0, arg1;
            arg0 = tcg_temp_new();
            tcg_gen_qemu_ld_i32(arg0, REG(B7_4), ctx->memidx, MO_TESL);
            arg1 = tcg_temp_new();
            tcg_gen_qemu_ld_i32(arg1, REG(B11_8), ctx->memidx, MO_TESL);
            gen_helper_macw(cpu_env, arg0, arg1);
            tcg_temp_free(arg1);
            tcg_temp_free(arg0);
            tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 2);
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
        }
        return;
    case 0x0007: /* mul.l Rm,Rn */
        tcg_gen_mul_i32(cpu_macl, REG(B7_4), REG(B11_8));
        return;
    case 0x200f: /* muls.w Rm,Rn */
        {
            TCGv arg0, arg1;
            arg0 = tcg_temp_new();
            tcg_gen_ext16s_i32(arg0, REG(B7_4));
            arg1 = tcg_temp_new();
            tcg_gen_ext16s_i32(arg1, REG(B11_8));
            tcg_gen_mul_i32(cpu_macl, arg0, arg1);
            tcg_temp_free(arg1);
            tcg_temp_free(arg0);
        }
        return;
    case 0x200e: /* mulu.w Rm,Rn */
        {
            TCGv arg0, arg1;
            arg0 = tcg_temp_new();
            tcg_gen_ext16u_i32(arg0, REG(B7_4));
            arg1 = tcg_temp_new();
            tcg_gen_ext16u_i32(arg1, REG(B11_8));
            tcg_gen_mul_i32(cpu_macl, arg0, arg1);
            tcg_temp_free(arg1);
            tcg_temp_free(arg0);
        }
        return;
    case 0x600b: /* neg Rm,Rn */
        tcg_gen_neg_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x600a: /* negc Rm,Rn */
        {
            TCGv t0 = tcg_const_i32(0);
            tcg_gen_add2_i32(REG(B11_8), cpu_sr_t,
                             REG(B7_4), t0, cpu_sr_t, t0);
            tcg_gen_sub2_i32(REG(B11_8), cpu_sr_t,
                             t0, t0, REG(B11_8), cpu_sr_t);
            tcg_gen_andi_i32(cpu_sr_t, cpu_sr_t, 1);
            tcg_temp_free(t0);
        }
        return;
    case 0x6007: /* not Rm,Rn */
        tcg_gen_not_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x200b: /* or Rm,Rn */
        tcg_gen_or_i32(REG(B11_8), REG(B11_8), REG(B7_4));
        return;
    case 0x400c: /* shad Rm,Rn */
        {
            TCGv t0 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();

            tcg_gen_andi_i32(t0, REG(B7_4), 0x1f);

            /* positive case: shift to the left */
            tcg_gen_shl_i32(t1, REG(B11_8), t0);

            /* negative case: shift to the right in two steps to
               correctly handle the -32 case */
            tcg_gen_xori_i32(t0, t0, 0x1f);
            tcg_gen_sar_i32(t2, REG(B11_8), t0);
            tcg_gen_sari_i32(t2, t2, 1);

            /* select between the two cases */
            tcg_gen_movi_i32(t0, 0);
            tcg_gen_movcond_i32(TCG_COND_GE, REG(B11_8), REG(B7_4), t0, t1, t2);

            tcg_temp_free(t0);
            tcg_temp_free(t1);
            tcg_temp_free(t2);
        }
        return;
    case 0x400d: /* shld Rm,Rn */
        {
            TCGv t0 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();

            tcg_gen_andi_i32(t0, REG(B7_4), 0x1f);

            /* positive case: shift to the left */
            tcg_gen_shl_i32(t1, REG(B11_8), t0);

            /* negative case: shift to the right in two steps to
               correctly handle the -32 case */
            tcg_gen_xori_i32(t0, t0, 0x1f);
            tcg_gen_shr_i32(t2, REG(B11_8), t0);
            tcg_gen_shri_i32(t2, t2, 1);

            /* select between the two cases */
            tcg_gen_movi_i32(t0, 0);
            tcg_gen_movcond_i32(TCG_COND_GE, REG(B11_8), REG(B7_4), t0, t1, t2);

            tcg_temp_free(t0);
            tcg_temp_free(t1);
            tcg_temp_free(t2);
        }
        return;
    case 0x3008: /* sub Rm,Rn */
        tcg_gen_sub_i32(REG(B11_8), REG(B11_8), REG(B7_4));
        return;
    case 0x300a: /* subc Rm,Rn */
        {
            TCGv t0, t1;
            t0 = tcg_const_tl(0);
            t1 = tcg_temp_new();
            tcg_gen_add2_i32(t1, cpu_sr_t, cpu_sr_t, t0, REG(B7_4), t0);
            tcg_gen_sub2_i32(REG(B11_8), cpu_sr_t,
                             REG(B11_8), t0, t1, cpu_sr_t);
            tcg_gen_andi_i32(cpu_sr_t, cpu_sr_t, 1);
            tcg_temp_free(t0);
            tcg_temp_free(t1);
        }
        return;
    case 0x300b: /* subv Rm,Rn */
        {
            TCGv t0, t1, t2;
            t0 = tcg_temp_new();
            tcg_gen_sub_i32(t0, REG(B11_8), REG(B7_4));
            t1 = tcg_temp_new();
            tcg_gen_xor_i32(t1, t0, REG(B7_4));
            t2 = tcg_temp_new();
            tcg_gen_xor_i32(t2, REG(B11_8), REG(B7_4));
            tcg_gen_and_i32(t1, t1, t2);
            tcg_temp_free(t2);
            tcg_gen_shri_i32(cpu_sr_t, t1, 31);
            tcg_temp_free(t1);
            tcg_gen_mov_i32(REG(B11_8), t0);
            tcg_temp_free(t0);
        }
        return;
    case 0x2008: /* tst Rm,Rn */
        {
            TCGv val = tcg_temp_new();
            tcg_gen_and_i32(val, REG(B7_4), REG(B11_8));
            tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0);
            tcg_temp_free(val);
        }
        return;
    case 0x200a: /* xor Rm,Rn */
        tcg_gen_xor_i32(REG(B11_8), REG(B11_8), REG(B7_4));
        return;
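    /* For the fmov variants below, FPSCR.SZ selects between a 32-bit
       single-precision move and a 64-bit move of a DR/XD register pair;
       the pair index is remapped through XHACK() to pick the right bank. */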
    case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */
        CHECK_FPU_ENABLED
        if (ctx->tbflags & FPSCR_SZ) {
            int xsrc = XHACK(B7_4);
            int xdst = XHACK(B11_8);
            tcg_gen_mov_i32(FREG(xdst), FREG(xsrc));
            tcg_gen_mov_i32(FREG(xdst + 1), FREG(xsrc + 1));
        } else {
            tcg_gen_mov_i32(FREG(B11_8), FREG(B7_4));
        }
        return;
    case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */
        CHECK_FPU_ENABLED
        if (ctx->tbflags & FPSCR_SZ) {
            TCGv_i64 fp = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp, XHACK(B7_4));
            tcg_gen_qemu_st_i64(fp, REG(B11_8), ctx->memidx, MO_TEQ);
            tcg_temp_free_i64(fp);
        } else {
            tcg_gen_qemu_st_i32(FREG(B7_4), REG(B11_8), ctx->memidx, MO_TEUL);
        }
        return;
    case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */
        CHECK_FPU_ENABLED
        if (ctx->tbflags & FPSCR_SZ) {
            TCGv_i64 fp = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(fp, REG(B7_4), ctx->memidx, MO_TEQ);
            gen_store_fpr64(ctx, fp, XHACK(B11_8));
            tcg_temp_free_i64(fp);
        } else {
            tcg_gen_qemu_ld_i32(FREG(B11_8), REG(B7_4), ctx->memidx, MO_TEUL);
        }
        return;
    case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */
        CHECK_FPU_ENABLED
        if (ctx->tbflags & FPSCR_SZ) {
            TCGv_i64 fp = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(fp, REG(B7_4), ctx->memidx, MO_TEQ);
            gen_store_fpr64(ctx, fp, XHACK(B11_8));
            tcg_temp_free_i64(fp);
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 8);
        } else {
            tcg_gen_qemu_ld_i32(FREG(B11_8), REG(B7_4), ctx->memidx, MO_TEUL);
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
        }
        return;
    case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */
        CHECK_FPU_ENABLED
        {
            TCGv addr = tcg_temp_new_i32();
            if (ctx->tbflags & FPSCR_SZ) {
                TCGv_i64 fp = tcg_temp_new_i64();
                gen_load_fpr64(ctx, fp, XHACK(B7_4));
                tcg_gen_subi_i32(addr, REG(B11_8), 8);
                tcg_gen_qemu_st_i64(fp, addr, ctx->memidx, MO_TEQ);
                tcg_temp_free_i64(fp);
            } else {
                tcg_gen_subi_i32(addr, REG(B11_8), 4);
                tcg_gen_qemu_st_i32(FREG(B7_4), addr, ctx->memidx, MO_TEUL);
            }
            tcg_gen_mov_i32(REG(B11_8), addr);
            tcg_temp_free(addr);
        }
        return;
    case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rm - FPSCR: Nothing */
        CHECK_FPU_ENABLED
        {
            TCGv addr = tcg_temp_new_i32();
            tcg_gen_add_i32(addr, REG(B7_4), REG(0));
            if (ctx->tbflags & FPSCR_SZ) {
                TCGv_i64 fp = tcg_temp_new_i64();
                tcg_gen_qemu_ld_i64(fp, addr, ctx->memidx, MO_TEQ);
                gen_store_fpr64(ctx, fp, XHACK(B11_8));
                tcg_temp_free_i64(fp);
            } else {
                tcg_gen_qemu_ld_i32(FREG(B11_8), addr, ctx->memidx, MO_TEUL);
            }
            tcg_temp_free(addr);
        }
        return;
    case 0xf007: /* fmov {F,D,X}Rn,@(R0,Rn) - FPSCR: Nothing */
        CHECK_FPU_ENABLED
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B11_8), REG(0));
            if (ctx->tbflags & FPSCR_SZ) {
                TCGv_i64 fp = tcg_temp_new_i64();
                gen_load_fpr64(ctx, fp, XHACK(B7_4));
                tcg_gen_qemu_st_i64(fp, addr, ctx->memidx, MO_TEQ);
                tcg_temp_free_i64(fp);
            } else {
                tcg_gen_qemu_st_i32(FREG(B7_4), addr, ctx->memidx, MO_TEUL);
            }
            tcg_temp_free(addr);
        }
        return;
    case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
    case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
    case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
    case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
    case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
    case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
        {
            CHECK_FPU_ENABLED
            if (ctx->tbflags & FPSCR_PR) {
                TCGv_i64 fp0, fp1;

                if (ctx->opcode & 0x0110) {
                    goto do_illegal;
                }
                fp0 = tcg_temp_new_i64();
                fp1 = tcg_temp_new_i64();
                gen_load_fpr64(ctx, fp0, B11_8);
                gen_load_fpr64(ctx, fp1, B7_4);
                switch (ctx->opcode & 0xf00f) {
                case 0xf000: /* fadd Rm,Rn */
                    gen_helper_fadd_DT(fp0, cpu_env, fp0, fp1);
                    break;
                case 0xf001: /* fsub Rm,Rn */
                    gen_helper_fsub_DT(fp0, cpu_env, fp0, fp1);
                    break;
                case 0xf002: /* fmul Rm,Rn */
                    gen_helper_fmul_DT(fp0, cpu_env, fp0, fp1);
                    break;
                case 0xf003: /* fdiv Rm,Rn */
                    gen_helper_fdiv_DT(fp0, cpu_env, fp0, fp1);
                    break;
                case 0xf004: /* fcmp/eq Rm,Rn */
                    gen_helper_fcmp_eq_DT(cpu_sr_t, cpu_env, fp0, fp1);
                    return;
                case 0xf005: /* fcmp/gt Rm,Rn */
                    gen_helper_fcmp_gt_DT(cpu_sr_t, cpu_env, fp0, fp1);
                    return;
                }
                gen_store_fpr64(ctx, fp0, B11_8);
                tcg_temp_free_i64(fp0);
                tcg_temp_free_i64(fp1);
            } else {
                switch (ctx->opcode & 0xf00f) {
                case 0xf000: /* fadd Rm,Rn */
                    gen_helper_fadd_FT(FREG(B11_8), cpu_env,
                                       FREG(B11_8), FREG(B7_4));
                    break;
                case 0xf001: /* fsub Rm,Rn */
                    gen_helper_fsub_FT(FREG(B11_8), cpu_env,
                                       FREG(B11_8), FREG(B7_4));
                    break;
                case 0xf002: /* fmul Rm,Rn */
                    gen_helper_fmul_FT(FREG(B11_8), cpu_env,
                                       FREG(B11_8), FREG(B7_4));
                    break;
                case 0xf003: /* fdiv Rm,Rn */
                    gen_helper_fdiv_FT(FREG(B11_8), cpu_env,
                                       FREG(B11_8), FREG(B7_4));
                    break;
                case 0xf004: /* fcmp/eq Rm,Rn */
                    gen_helper_fcmp_eq_FT(cpu_sr_t, cpu_env,
                                          FREG(B11_8), FREG(B7_4));
                    return;
                case 0xf005: /* fcmp/gt Rm,Rn */
                    gen_helper_fcmp_gt_FT(cpu_sr_t, cpu_env,
                                          FREG(B11_8), FREG(B7_4));
                    return;
                }
            }
        }
        return;
    case 0xf00e: /* fmac FR0,RM,Rn */
        CHECK_FPU_ENABLED
        CHECK_FPSCR_PR_0
        gen_helper_fmac_FT(FREG(B11_8), cpu_env,
                           FREG(0), FREG(B7_4), FREG(B11_8));
        return;
    }

    switch (ctx->opcode & 0xff00) {
    case 0xc900: /* and #imm,R0 */
        tcg_gen_andi_i32(REG(0), REG(0), B7_0);
        return;
    case 0xcd00: /* and.b #imm,@(R0,GBR) */
        {
            TCGv addr, val;
            addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(0), cpu_gbr);
            val = tcg_temp_new();
            tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB);
            tcg_gen_andi_i32(val, val, B7_0);
            tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB);
            tcg_temp_free(val);
            tcg_temp_free(addr);
        }
        return;
    case 0x8b00: /* bf label */
        CHECK_NOT_DELAY_SLOT
        gen_conditional_jump(ctx, ctx->base.pc_next + 4 + B7_0s * 2, false);
        return;
    case 0x8f00: /* bf/s label */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_xori_i32(cpu_delayed_cond, cpu_sr_t, 1);
        ctx->delayed_pc = ctx->base.pc_next + 4 + B7_0s * 2;
        ctx->envflags
|= DELAY_SLOT_CONDITIONAL; 1193fcf5ef2aSThomas Huth return; 1194fcf5ef2aSThomas Huth case 0x8900: /* bt label */ 1195fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 11966f1c2af6SRichard Henderson gen_conditional_jump(ctx, ctx->base.pc_next + 4 + B7_0s * 2, true); 1197fcf5ef2aSThomas Huth return; 1198fcf5ef2aSThomas Huth case 0x8d00: /* bt/s label */ 1199fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1200ac9707eaSAurelien Jarno tcg_gen_mov_i32(cpu_delayed_cond, cpu_sr_t); 12016f1c2af6SRichard Henderson ctx->delayed_pc = ctx->base.pc_next + 4 + B7_0s * 2; 1202a6215749SAurelien Jarno ctx->envflags |= DELAY_SLOT_CONDITIONAL; 1203fcf5ef2aSThomas Huth return; 1204fcf5ef2aSThomas Huth case 0x8800: /* cmp/eq #imm,R0 */ 1205fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, REG(0), B7_0s); 1206fcf5ef2aSThomas Huth return; 1207fcf5ef2aSThomas Huth case 0xc400: /* mov.b @(disp,GBR),R0 */ 1208fcf5ef2aSThomas Huth { 1209fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1210fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0); 1211fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_SB); 1212fcf5ef2aSThomas Huth tcg_temp_free(addr); 1213fcf5ef2aSThomas Huth } 1214fcf5ef2aSThomas Huth return; 1215fcf5ef2aSThomas Huth case 0xc500: /* mov.w @(disp,GBR),R0 */ 1216fcf5ef2aSThomas Huth { 1217fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1218fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2); 1219fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_TESW); 1220fcf5ef2aSThomas Huth tcg_temp_free(addr); 1221fcf5ef2aSThomas Huth } 1222fcf5ef2aSThomas Huth return; 1223fcf5ef2aSThomas Huth case 0xc600: /* mov.l @(disp,GBR),R0 */ 1224fcf5ef2aSThomas Huth { 1225fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1226fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4); 1227fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_TESL); 1228fcf5ef2aSThomas Huth tcg_temp_free(addr); 1229fcf5ef2aSThomas Huth } 1230fcf5ef2aSThomas Huth return; 1231fcf5ef2aSThomas Huth case 0xc000: /* mov.b R0,@(disp,GBR) */ 1232fcf5ef2aSThomas Huth { 1233fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1234fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0); 1235fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_UB); 1236fcf5ef2aSThomas Huth tcg_temp_free(addr); 1237fcf5ef2aSThomas Huth } 1238fcf5ef2aSThomas Huth return; 1239fcf5ef2aSThomas Huth case 0xc100: /* mov.w R0,@(disp,GBR) */ 1240fcf5ef2aSThomas Huth { 1241fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1242fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2); 1243fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_TEUW); 1244fcf5ef2aSThomas Huth tcg_temp_free(addr); 1245fcf5ef2aSThomas Huth } 1246fcf5ef2aSThomas Huth return; 1247fcf5ef2aSThomas Huth case 0xc200: /* mov.l R0,@(disp,GBR) */ 1248fcf5ef2aSThomas Huth { 1249fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1250fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4); 1251fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_TEUL); 1252fcf5ef2aSThomas Huth tcg_temp_free(addr); 1253fcf5ef2aSThomas Huth } 1254fcf5ef2aSThomas Huth return; 1255fcf5ef2aSThomas Huth case 0x8000: /* mov.b R0,@(disp,Rn) */ 1256fcf5ef2aSThomas Huth { 1257fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1258fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0); 1259fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_UB); 1260fcf5ef2aSThomas Huth 
tcg_temp_free(addr); 1261fcf5ef2aSThomas Huth } 1262fcf5ef2aSThomas Huth return; 1263fcf5ef2aSThomas Huth case 0x8100: /* mov.w R0,@(disp,Rn) */ 1264fcf5ef2aSThomas Huth { 1265fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1266fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2); 1267fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_TEUW); 1268fcf5ef2aSThomas Huth tcg_temp_free(addr); 1269fcf5ef2aSThomas Huth } 1270fcf5ef2aSThomas Huth return; 1271fcf5ef2aSThomas Huth case 0x8400: /* mov.b @(disp,Rn),R0 */ 1272fcf5ef2aSThomas Huth { 1273fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1274fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0); 1275fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_SB); 1276fcf5ef2aSThomas Huth tcg_temp_free(addr); 1277fcf5ef2aSThomas Huth } 1278fcf5ef2aSThomas Huth return; 1279fcf5ef2aSThomas Huth case 0x8500: /* mov.w @(disp,Rn),R0 */ 1280fcf5ef2aSThomas Huth { 1281fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1282fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2); 1283fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_TESW); 1284fcf5ef2aSThomas Huth tcg_temp_free(addr); 1285fcf5ef2aSThomas Huth } 1286fcf5ef2aSThomas Huth return; 1287fcf5ef2aSThomas Huth case 0xc700: /* mova @(disp,PC),R0 */ 12886f1c2af6SRichard Henderson tcg_gen_movi_i32(REG(0), ((ctx->base.pc_next & 0xfffffffc) + 12896f1c2af6SRichard Henderson 4 + B7_0 * 4) & ~3); 1290fcf5ef2aSThomas Huth return; 1291fcf5ef2aSThomas Huth case 0xcb00: /* or #imm,R0 */ 1292fcf5ef2aSThomas Huth tcg_gen_ori_i32(REG(0), REG(0), B7_0); 1293fcf5ef2aSThomas Huth return; 1294fcf5ef2aSThomas Huth case 0xcf00: /* or.b #imm,@(R0,GBR) */ 1295fcf5ef2aSThomas Huth { 1296fcf5ef2aSThomas Huth TCGv addr, val; 1297fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1298fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(0), cpu_gbr); 1299fcf5ef2aSThomas Huth val = tcg_temp_new(); 1300fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB); 1301fcf5ef2aSThomas Huth tcg_gen_ori_i32(val, val, B7_0); 1302fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB); 1303fcf5ef2aSThomas Huth tcg_temp_free(val); 1304fcf5ef2aSThomas Huth tcg_temp_free(addr); 1305fcf5ef2aSThomas Huth } 1306fcf5ef2aSThomas Huth return; 1307fcf5ef2aSThomas Huth case 0xc300: /* trapa #imm */ 1308fcf5ef2aSThomas Huth { 1309fcf5ef2aSThomas Huth TCGv imm; 1310fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1311ac9707eaSAurelien Jarno gen_save_cpu_state(ctx, true); 1312fcf5ef2aSThomas Huth imm = tcg_const_i32(B7_0); 1313fcf5ef2aSThomas Huth gen_helper_trapa(cpu_env, imm); 1314fcf5ef2aSThomas Huth tcg_temp_free(imm); 13156f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 1316fcf5ef2aSThomas Huth } 1317fcf5ef2aSThomas Huth return; 1318fcf5ef2aSThomas Huth case 0xc800: /* tst #imm,R0 */ 1319fcf5ef2aSThomas Huth { 1320fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1321fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, REG(0), B7_0); 1322fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0); 1323fcf5ef2aSThomas Huth tcg_temp_free(val); 1324fcf5ef2aSThomas Huth } 1325fcf5ef2aSThomas Huth return; 1326fcf5ef2aSThomas Huth case 0xcc00: /* tst.b #imm,@(R0,GBR) */ 1327fcf5ef2aSThomas Huth { 1328fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1329fcf5ef2aSThomas Huth tcg_gen_add_i32(val, REG(0), cpu_gbr); 1330fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, val, ctx->memidx, MO_UB); 1331fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, val, B7_0); 
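            /*
             * tst only updates T: on SH-4, T = ((operand & #imm) == 0) and
             * the memory operand is not written back.  The setcond below
             * implements exactly that for this GBR-relative byte form.
             */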
1332fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0); 1333fcf5ef2aSThomas Huth tcg_temp_free(val); 1334fcf5ef2aSThomas Huth } 1335fcf5ef2aSThomas Huth return; 1336fcf5ef2aSThomas Huth case 0xca00: /* xor #imm,R0 */ 1337fcf5ef2aSThomas Huth tcg_gen_xori_i32(REG(0), REG(0), B7_0); 1338fcf5ef2aSThomas Huth return; 1339fcf5ef2aSThomas Huth case 0xce00: /* xor.b #imm,@(R0,GBR) */ 1340fcf5ef2aSThomas Huth { 1341fcf5ef2aSThomas Huth TCGv addr, val; 1342fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1343fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(0), cpu_gbr); 1344fcf5ef2aSThomas Huth val = tcg_temp_new(); 1345fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB); 1346fcf5ef2aSThomas Huth tcg_gen_xori_i32(val, val, B7_0); 1347fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB); 1348fcf5ef2aSThomas Huth tcg_temp_free(val); 1349fcf5ef2aSThomas Huth tcg_temp_free(addr); 1350fcf5ef2aSThomas Huth } 1351fcf5ef2aSThomas Huth return; 1352fcf5ef2aSThomas Huth } 1353fcf5ef2aSThomas Huth 1354fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf08f) { 1355fcf5ef2aSThomas Huth case 0x408e: /* ldc Rm,Rn_BANK */ 1356fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1357fcf5ef2aSThomas Huth tcg_gen_mov_i32(ALTREG(B6_4), REG(B11_8)); 1358fcf5ef2aSThomas Huth return; 1359fcf5ef2aSThomas Huth case 0x4087: /* ldc.l @Rm+,Rn_BANK */ 1360fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1361fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(ALTREG(B6_4), REG(B11_8), ctx->memidx, MO_TESL); 1362fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 1363fcf5ef2aSThomas Huth return; 1364fcf5ef2aSThomas Huth case 0x0082: /* stc Rm_BANK,Rn */ 1365fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1366fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), ALTREG(B6_4)); 1367fcf5ef2aSThomas Huth return; 1368fcf5ef2aSThomas Huth case 0x4083: /* stc.l Rm_BANK,@-Rn */ 1369fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1370fcf5ef2aSThomas Huth { 1371fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1372fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 1373fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(ALTREG(B6_4), addr, ctx->memidx, MO_TEUL); 1374fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 1375fcf5ef2aSThomas Huth tcg_temp_free(addr); 1376fcf5ef2aSThomas Huth } 1377fcf5ef2aSThomas Huth return; 1378fcf5ef2aSThomas Huth } 1379fcf5ef2aSThomas Huth 1380fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf0ff) { 1381fcf5ef2aSThomas Huth case 0x0023: /* braf Rn */ 1382fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 13836f1c2af6SRichard Henderson tcg_gen_addi_i32(cpu_delayed_pc, REG(B11_8), ctx->base.pc_next + 4); 1384a6215749SAurelien Jarno ctx->envflags |= DELAY_SLOT; 1385fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1386fcf5ef2aSThomas Huth return; 1387fcf5ef2aSThomas Huth case 0x0003: /* bsrf Rn */ 1388fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 13896f1c2af6SRichard Henderson tcg_gen_movi_i32(cpu_pr, ctx->base.pc_next + 4); 1390fcf5ef2aSThomas Huth tcg_gen_add_i32(cpu_delayed_pc, REG(B11_8), cpu_pr); 1391a6215749SAurelien Jarno ctx->envflags |= DELAY_SLOT; 1392fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1393fcf5ef2aSThomas Huth return; 1394fcf5ef2aSThomas Huth case 0x4015: /* cmp/pl Rn */ 1395fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_GT, cpu_sr_t, REG(B11_8), 0); 1396fcf5ef2aSThomas Huth return; 1397fcf5ef2aSThomas Huth case 0x4011: /* cmp/pz Rn */ 1398fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_GE, cpu_sr_t, REG(B11_8), 0); 1399fcf5ef2aSThomas Huth return; 1400fcf5ef2aSThomas Huth case 0x4010: 
/* dt Rn */ 1401fcf5ef2aSThomas Huth tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 1); 1402fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, REG(B11_8), 0); 1403fcf5ef2aSThomas Huth return; 1404fcf5ef2aSThomas Huth case 0x402b: /* jmp @Rn */ 1405fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1406fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8)); 1407a6215749SAurelien Jarno ctx->envflags |= DELAY_SLOT; 1408fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1409fcf5ef2aSThomas Huth return; 1410fcf5ef2aSThomas Huth case 0x400b: /* jsr @Rn */ 1411fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 14126f1c2af6SRichard Henderson tcg_gen_movi_i32(cpu_pr, ctx->base.pc_next + 4); 1413fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8)); 1414a6215749SAurelien Jarno ctx->envflags |= DELAY_SLOT; 1415fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1416fcf5ef2aSThomas Huth return; 1417fcf5ef2aSThomas Huth case 0x400e: /* ldc Rm,SR */ 1418fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1419fcf5ef2aSThomas Huth { 1420fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1421fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, REG(B11_8), 0x700083f3); 1422fcf5ef2aSThomas Huth gen_write_sr(val); 1423fcf5ef2aSThomas Huth tcg_temp_free(val); 14246f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 1425fcf5ef2aSThomas Huth } 1426fcf5ef2aSThomas Huth return; 1427fcf5ef2aSThomas Huth case 0x4007: /* ldc.l @Rm+,SR */ 1428fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1429fcf5ef2aSThomas Huth { 1430fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1431fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, REG(B11_8), ctx->memidx, MO_TESL); 1432fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, val, 0x700083f3); 1433fcf5ef2aSThomas Huth gen_write_sr(val); 1434fcf5ef2aSThomas Huth tcg_temp_free(val); 1435fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 14366f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 1437fcf5ef2aSThomas Huth } 1438fcf5ef2aSThomas Huth return; 1439fcf5ef2aSThomas Huth case 0x0002: /* stc SR,Rn */ 1440fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1441fcf5ef2aSThomas Huth gen_read_sr(REG(B11_8)); 1442fcf5ef2aSThomas Huth return; 1443fcf5ef2aSThomas Huth case 0x4003: /* stc SR,@-Rn */ 1444fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1445fcf5ef2aSThomas Huth { 1446fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1447fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1448fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 1449fcf5ef2aSThomas Huth gen_read_sr(val); 1450fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_TEUL); 1451fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 1452fcf5ef2aSThomas Huth tcg_temp_free(val); 1453fcf5ef2aSThomas Huth tcg_temp_free(addr); 1454fcf5ef2aSThomas Huth } 1455fcf5ef2aSThomas Huth return; 1456fcf5ef2aSThomas Huth #define LD(reg,ldnum,ldpnum,prechk) \ 1457fcf5ef2aSThomas Huth case ldnum: \ 1458fcf5ef2aSThomas Huth prechk \ 1459fcf5ef2aSThomas Huth tcg_gen_mov_i32 (cpu_##reg, REG(B11_8)); \ 1460fcf5ef2aSThomas Huth return; \ 1461fcf5ef2aSThomas Huth case ldpnum: \ 1462fcf5ef2aSThomas Huth prechk \ 1463fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(cpu_##reg, REG(B11_8), ctx->memidx, MO_TESL); \ 1464fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); \ 1465fcf5ef2aSThomas Huth return; 1466fcf5ef2aSThomas Huth #define ST(reg,stnum,stpnum,prechk) \ 1467fcf5ef2aSThomas Huth case stnum: \ 1468fcf5ef2aSThomas Huth prechk \ 1469fcf5ef2aSThomas Huth tcg_gen_mov_i32 (REG(B11_8), cpu_##reg); \ 1470fcf5ef2aSThomas Huth return; \ 
1471fcf5ef2aSThomas Huth case stpnum: \ 1472fcf5ef2aSThomas Huth prechk \ 1473fcf5ef2aSThomas Huth { \ 1474fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); \ 1475fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); \ 1476fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(cpu_##reg, addr, ctx->memidx, MO_TEUL); \ 1477fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); \ 1478fcf5ef2aSThomas Huth tcg_temp_free(addr); \ 1479fcf5ef2aSThomas Huth } \ 1480fcf5ef2aSThomas Huth return; 1481fcf5ef2aSThomas Huth #define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk) \ 1482fcf5ef2aSThomas Huth LD(reg,ldnum,ldpnum,prechk) \ 1483fcf5ef2aSThomas Huth ST(reg,stnum,stpnum,prechk) 1484fcf5ef2aSThomas Huth LDST(gbr, 0x401e, 0x4017, 0x0012, 0x4013, {}) 1485fcf5ef2aSThomas Huth LDST(vbr, 0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED) 1486fcf5ef2aSThomas Huth LDST(ssr, 0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED) 1487fcf5ef2aSThomas Huth LDST(spc, 0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED) 1488fcf5ef2aSThomas Huth ST(sgr, 0x003a, 0x4032, CHECK_PRIVILEGED) 1489ccae24d4SRichard Henderson LD(sgr, 0x403a, 0x4036, CHECK_PRIVILEGED CHECK_SH4A) 1490fcf5ef2aSThomas Huth LDST(dbr, 0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED) 1491fcf5ef2aSThomas Huth LDST(mach, 0x400a, 0x4006, 0x000a, 0x4002, {}) 1492fcf5ef2aSThomas Huth LDST(macl, 0x401a, 0x4016, 0x001a, 0x4012, {}) 1493fcf5ef2aSThomas Huth LDST(pr, 0x402a, 0x4026, 0x002a, 0x4022, {}) 1494fcf5ef2aSThomas Huth LDST(fpul, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED}) 1495fcf5ef2aSThomas Huth case 0x406a: /* lds Rm,FPSCR */ 1496fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1497fcf5ef2aSThomas Huth gen_helper_ld_fpscr(cpu_env, REG(B11_8)); 14986f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 1499fcf5ef2aSThomas Huth return; 1500fcf5ef2aSThomas Huth case 0x4066: /* lds.l @Rm+,FPSCR */ 1501fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1502fcf5ef2aSThomas Huth { 1503fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1504fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(addr, REG(B11_8), ctx->memidx, MO_TESL); 1505fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 1506fcf5ef2aSThomas Huth gen_helper_ld_fpscr(cpu_env, addr); 1507fcf5ef2aSThomas Huth tcg_temp_free(addr); 15086f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 1509fcf5ef2aSThomas Huth } 1510fcf5ef2aSThomas Huth return; 1511fcf5ef2aSThomas Huth case 0x006a: /* sts FPSCR,Rn */ 1512fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1513fcf5ef2aSThomas Huth tcg_gen_andi_i32(REG(B11_8), cpu_fpscr, 0x003fffff); 1514fcf5ef2aSThomas Huth return; 1515fcf5ef2aSThomas Huth case 0x4062: /* sts FPSCR,@-Rn */ 1516fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1517fcf5ef2aSThomas Huth { 1518fcf5ef2aSThomas Huth TCGv addr, val; 1519fcf5ef2aSThomas Huth val = tcg_temp_new(); 1520fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, cpu_fpscr, 0x003fffff); 1521fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1522fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 1523fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_TEUL); 1524fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 1525fcf5ef2aSThomas Huth tcg_temp_free(addr); 1526fcf5ef2aSThomas Huth tcg_temp_free(val); 1527fcf5ef2aSThomas Huth } 1528fcf5ef2aSThomas Huth return; 1529fcf5ef2aSThomas Huth case 0x00c3: /* movca.l R0,@Rm */ 1530fcf5ef2aSThomas Huth { 1531fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1532fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, REG(B11_8), ctx->memidx, MO_TEUL); 1533fcf5ef2aSThomas Huth gen_helper_movcal(cpu_env, REG(B11_8), 
val); 1534fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), REG(B11_8), ctx->memidx, MO_TEUL); 1535e691e0edSPhilippe Mathieu-Daudé tcg_temp_free(val); 1536fcf5ef2aSThomas Huth } 1537fcf5ef2aSThomas Huth ctx->has_movcal = 1; 1538fcf5ef2aSThomas Huth return; 1539143021b2SAurelien Jarno case 0x40a9: /* movua.l @Rm,R0 */ 1540ccae24d4SRichard Henderson CHECK_SH4A 1541143021b2SAurelien Jarno /* Load non-boundary-aligned data */ 154234257c21SAurelien Jarno tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, 154334257c21SAurelien Jarno MO_TEUL | MO_UNALN); 1544fcf5ef2aSThomas Huth return; 1545143021b2SAurelien Jarno case 0x40e9: /* movua.l @Rm+,R0 */ 1546ccae24d4SRichard Henderson CHECK_SH4A 1547143021b2SAurelien Jarno /* Load non-boundary-aligned data */ 154834257c21SAurelien Jarno tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, 154934257c21SAurelien Jarno MO_TEUL | MO_UNALN); 1550fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 1551fcf5ef2aSThomas Huth return; 1552fcf5ef2aSThomas Huth case 0x0029: /* movt Rn */ 1553fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), cpu_sr_t); 1554fcf5ef2aSThomas Huth return; 1555fcf5ef2aSThomas Huth case 0x0073: 1556fcf5ef2aSThomas Huth /* MOVCO.L 1557f85da308SRichard Henderson * LDST -> T 1558f85da308SRichard Henderson * If (T == 1) R0 -> (Rn) 1559f85da308SRichard Henderson * 0 -> LDST 1560f85da308SRichard Henderson * 1561f85da308SRichard Henderson * The above description doesn't work in a parallel context. 1562f85da308SRichard Henderson * Since we currently support no smp boards, this implies user-mode. 1563f85da308SRichard Henderson * But we can still support the official mechanism while user-mode 1564f85da308SRichard Henderson * is single-threaded. */ 1565ccae24d4SRichard Henderson CHECK_SH4A 1566ccae24d4SRichard Henderson { 1567f85da308SRichard Henderson TCGLabel *fail = gen_new_label(); 1568f85da308SRichard Henderson TCGLabel *done = gen_new_label(); 1569f85da308SRichard Henderson 15706f1c2af6SRichard Henderson if ((tb_cflags(ctx->base.tb) & CF_PARALLEL)) { 1571f85da308SRichard Henderson TCGv tmp; 1572f85da308SRichard Henderson 1573f85da308SRichard Henderson tcg_gen_brcond_i32(TCG_COND_NE, REG(B11_8), 1574f85da308SRichard Henderson cpu_lock_addr, fail); 1575f85da308SRichard Henderson tmp = tcg_temp_new(); 1576f85da308SRichard Henderson tcg_gen_atomic_cmpxchg_i32(tmp, REG(B11_8), cpu_lock_value, 1577f85da308SRichard Henderson REG(0), ctx->memidx, MO_TEUL); 1578f85da308SRichard Henderson tcg_gen_setcond_i32(TCG_COND_EQ, cpu_sr_t, tmp, cpu_lock_value); 1579f85da308SRichard Henderson tcg_temp_free(tmp); 1580f85da308SRichard Henderson } else { 1581f85da308SRichard Henderson tcg_gen_brcondi_i32(TCG_COND_EQ, cpu_lock_addr, -1, fail); 1582fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), REG(B11_8), ctx->memidx, MO_TEUL); 1583f85da308SRichard Henderson tcg_gen_movi_i32(cpu_sr_t, 1); 1584ccae24d4SRichard Henderson } 1585f85da308SRichard Henderson tcg_gen_br(done); 1586f85da308SRichard Henderson 1587f85da308SRichard Henderson gen_set_label(fail); 1588f85da308SRichard Henderson tcg_gen_movi_i32(cpu_sr_t, 0); 1589f85da308SRichard Henderson 1590f85da308SRichard Henderson gen_set_label(done); 1591f85da308SRichard Henderson tcg_gen_movi_i32(cpu_lock_addr, -1); 1592f85da308SRichard Henderson } 1593f85da308SRichard Henderson return; 1594fcf5ef2aSThomas Huth case 0x0063: 1595fcf5ef2aSThomas Huth /* MOVLI.L @Rm,R0 1596f85da308SRichard Henderson * 1 -> LDST 1597f85da308SRichard Henderson * (Rm) -> R0 1598f85da308SRichard Henderson * When interrupt/exception 
1599f85da308SRichard Henderson * occurred 0 -> LDST 1600f85da308SRichard Henderson * 1601f85da308SRichard Henderson * In a parallel context, we must also save the loaded value 1602f85da308SRichard Henderson * for use with the cmpxchg that we'll use with movco.l. */ 1603ccae24d4SRichard Henderson CHECK_SH4A 16046f1c2af6SRichard Henderson if ((tb_cflags(ctx->base.tb) & CF_PARALLEL)) { 1605f85da308SRichard Henderson TCGv tmp = tcg_temp_new(); 1606f85da308SRichard Henderson tcg_gen_mov_i32(tmp, REG(B11_8)); 1607fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, MO_TESL); 1608f85da308SRichard Henderson tcg_gen_mov_i32(cpu_lock_value, REG(0)); 1609f85da308SRichard Henderson tcg_gen_mov_i32(cpu_lock_addr, tmp); 1610f85da308SRichard Henderson tcg_temp_free(tmp); 1611f85da308SRichard Henderson } else { 1612f85da308SRichard Henderson tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, MO_TESL); 1613f85da308SRichard Henderson tcg_gen_movi_i32(cpu_lock_addr, 0); 1614f85da308SRichard Henderson } 1615fcf5ef2aSThomas Huth return; 1616fcf5ef2aSThomas Huth case 0x0093: /* ocbi @Rn */ 1617fcf5ef2aSThomas Huth { 1618fcf5ef2aSThomas Huth gen_helper_ocbi(cpu_env, REG(B11_8)); 1619fcf5ef2aSThomas Huth } 1620fcf5ef2aSThomas Huth return; 1621fcf5ef2aSThomas Huth case 0x00a3: /* ocbp @Rn */ 1622fcf5ef2aSThomas Huth case 0x00b3: /* ocbwb @Rn */ 1623fcf5ef2aSThomas Huth /* These instructions are supposed to do nothing in case of 1624fcf5ef2aSThomas Huth a cache miss. Given that we only partially emulate caches 1625fcf5ef2aSThomas Huth it is safe to simply ignore them. */ 1626fcf5ef2aSThomas Huth return; 1627fcf5ef2aSThomas Huth case 0x0083: /* pref @Rn */ 1628fcf5ef2aSThomas Huth return; 1629fcf5ef2aSThomas Huth case 0x00d3: /* prefi @Rn */ 1630ccae24d4SRichard Henderson CHECK_SH4A 1631fcf5ef2aSThomas Huth return; 1632fcf5ef2aSThomas Huth case 0x00e3: /* icbi @Rn */ 1633ccae24d4SRichard Henderson CHECK_SH4A 1634fcf5ef2aSThomas Huth return; 1635fcf5ef2aSThomas Huth case 0x00ab: /* synco */ 1636ccae24d4SRichard Henderson CHECK_SH4A 1637aa351317SAurelien Jarno tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 1638fcf5ef2aSThomas Huth return; 1639fcf5ef2aSThomas Huth case 0x4024: /* rotcl Rn */ 1640fcf5ef2aSThomas Huth { 1641fcf5ef2aSThomas Huth TCGv tmp = tcg_temp_new(); 1642fcf5ef2aSThomas Huth tcg_gen_mov_i32(tmp, cpu_sr_t); 1643fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_t, REG(B11_8), 31); 1644fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1); 1645fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), REG(B11_8), tmp); 1646fcf5ef2aSThomas Huth tcg_temp_free(tmp); 1647fcf5ef2aSThomas Huth } 1648fcf5ef2aSThomas Huth return; 1649fcf5ef2aSThomas Huth case 0x4025: /* rotcr Rn */ 1650fcf5ef2aSThomas Huth { 1651fcf5ef2aSThomas Huth TCGv tmp = tcg_temp_new(); 1652fcf5ef2aSThomas Huth tcg_gen_shli_i32(tmp, cpu_sr_t, 31); 1653fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1654fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1); 1655fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), REG(B11_8), tmp); 1656fcf5ef2aSThomas Huth tcg_temp_free(tmp); 1657fcf5ef2aSThomas Huth } 1658fcf5ef2aSThomas Huth return; 1659fcf5ef2aSThomas Huth case 0x4004: /* rotl Rn */ 1660fcf5ef2aSThomas Huth tcg_gen_rotli_i32(REG(B11_8), REG(B11_8), 1); 1661fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 0); 1662fcf5ef2aSThomas Huth return; 1663fcf5ef2aSThomas Huth case 0x4005: /* rotr Rn */ 1664fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 0); 1665fcf5ef2aSThomas Huth 
tcg_gen_rotri_i32(REG(B11_8), REG(B11_8), 1); 1666fcf5ef2aSThomas Huth return; 1667fcf5ef2aSThomas Huth case 0x4000: /* shll Rn */ 1668fcf5ef2aSThomas Huth case 0x4020: /* shal Rn */ 1669fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_t, REG(B11_8), 31); 1670fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1); 1671fcf5ef2aSThomas Huth return; 1672fcf5ef2aSThomas Huth case 0x4021: /* shar Rn */ 1673fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1674fcf5ef2aSThomas Huth tcg_gen_sari_i32(REG(B11_8), REG(B11_8), 1); 1675fcf5ef2aSThomas Huth return; 1676fcf5ef2aSThomas Huth case 0x4001: /* shlr Rn */ 1677fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1678fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1); 1679fcf5ef2aSThomas Huth return; 1680fcf5ef2aSThomas Huth case 0x4008: /* shll2 Rn */ 1681fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 2); 1682fcf5ef2aSThomas Huth return; 1683fcf5ef2aSThomas Huth case 0x4018: /* shll8 Rn */ 1684fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 8); 1685fcf5ef2aSThomas Huth return; 1686fcf5ef2aSThomas Huth case 0x4028: /* shll16 Rn */ 1687fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 16); 1688fcf5ef2aSThomas Huth return; 1689fcf5ef2aSThomas Huth case 0x4009: /* shlr2 Rn */ 1690fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 2); 1691fcf5ef2aSThomas Huth return; 1692fcf5ef2aSThomas Huth case 0x4019: /* shlr8 Rn */ 1693fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 8); 1694fcf5ef2aSThomas Huth return; 1695fcf5ef2aSThomas Huth case 0x4029: /* shlr16 Rn */ 1696fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 16); 1697fcf5ef2aSThomas Huth return; 1698fcf5ef2aSThomas Huth case 0x401b: /* tas.b @Rn */ 1699fcf5ef2aSThomas Huth { 1700cb32f179SAurelien Jarno TCGv val = tcg_const_i32(0x80); 1701cb32f179SAurelien Jarno tcg_gen_atomic_fetch_or_i32(val, REG(B11_8), val, 1702cb32f179SAurelien Jarno ctx->memidx, MO_UB); 1703fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0); 1704fcf5ef2aSThomas Huth tcg_temp_free(val); 1705fcf5ef2aSThomas Huth } 1706fcf5ef2aSThomas Huth return; 1707fcf5ef2aSThomas Huth case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */ 1708fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17097c9f7038SRichard Henderson tcg_gen_mov_i32(FREG(B11_8), cpu_fpul); 1710fcf5ef2aSThomas Huth return; 1711fcf5ef2aSThomas Huth case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */ 1712fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17137c9f7038SRichard Henderson tcg_gen_mov_i32(cpu_fpul, FREG(B11_8)); 1714fcf5ef2aSThomas Huth return; 1715fcf5ef2aSThomas Huth case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */ 1716fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1717a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 1718fcf5ef2aSThomas Huth TCGv_i64 fp; 171993dc9c89SRichard Henderson if (ctx->opcode & 0x0100) { 172093dc9c89SRichard Henderson goto do_illegal; 172193dc9c89SRichard Henderson } 1722fcf5ef2aSThomas Huth fp = tcg_temp_new_i64(); 1723fcf5ef2aSThomas Huth gen_helper_float_DT(fp, cpu_env, cpu_fpul); 17241e0b21d8SRichard Henderson gen_store_fpr64(ctx, fp, B11_8); 1725fcf5ef2aSThomas Huth tcg_temp_free_i64(fp); 1726fcf5ef2aSThomas Huth } 1727fcf5ef2aSThomas Huth else { 17287c9f7038SRichard Henderson gen_helper_float_FT(FREG(B11_8), cpu_env, cpu_fpul); 1729fcf5ef2aSThomas Huth } 1730fcf5ef2aSThomas Huth return; 1731fcf5ef2aSThomas Huth case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */ 
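        /*
         * ftrc converts FRm (PR=0) or DRm (PR=1) to a signed 32-bit integer
         * in FPUL, truncating toward zero.  For the PR=1 form DRm must be an
         * even-numbered register, which is what the (opcode & 0x0100) check
         * below rejects as an illegal encoding.
         */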
1732fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1733a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 1734fcf5ef2aSThomas Huth TCGv_i64 fp; 173593dc9c89SRichard Henderson if (ctx->opcode & 0x0100) { 173693dc9c89SRichard Henderson goto do_illegal; 173793dc9c89SRichard Henderson } 1738fcf5ef2aSThomas Huth fp = tcg_temp_new_i64(); 17391e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp, B11_8); 1740fcf5ef2aSThomas Huth gen_helper_ftrc_DT(cpu_fpul, cpu_env, fp); 1741fcf5ef2aSThomas Huth tcg_temp_free_i64(fp); 1742fcf5ef2aSThomas Huth } 1743fcf5ef2aSThomas Huth else { 17447c9f7038SRichard Henderson gen_helper_ftrc_FT(cpu_fpul, cpu_env, FREG(B11_8)); 1745fcf5ef2aSThomas Huth } 1746fcf5ef2aSThomas Huth return; 1747fcf5ef2aSThomas Huth case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */ 1748fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17497c9f7038SRichard Henderson tcg_gen_xori_i32(FREG(B11_8), FREG(B11_8), 0x80000000); 1750fcf5ef2aSThomas Huth return; 175157f5c1b0SAurelien Jarno case 0xf05d: /* fabs FRn/DRn - FPCSR: Nothing */ 1752fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17537c9f7038SRichard Henderson tcg_gen_andi_i32(FREG(B11_8), FREG(B11_8), 0x7fffffff); 1754fcf5ef2aSThomas Huth return; 1755fcf5ef2aSThomas Huth case 0xf06d: /* fsqrt FRn */ 1756fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1757a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 175893dc9c89SRichard Henderson if (ctx->opcode & 0x0100) { 175993dc9c89SRichard Henderson goto do_illegal; 176093dc9c89SRichard Henderson } 1761fcf5ef2aSThomas Huth TCGv_i64 fp = tcg_temp_new_i64(); 17621e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp, B11_8); 1763fcf5ef2aSThomas Huth gen_helper_fsqrt_DT(fp, cpu_env, fp); 17641e0b21d8SRichard Henderson gen_store_fpr64(ctx, fp, B11_8); 1765fcf5ef2aSThomas Huth tcg_temp_free_i64(fp); 1766fcf5ef2aSThomas Huth } else { 17677c9f7038SRichard Henderson gen_helper_fsqrt_FT(FREG(B11_8), cpu_env, FREG(B11_8)); 1768fcf5ef2aSThomas Huth } 1769fcf5ef2aSThomas Huth return; 1770fcf5ef2aSThomas Huth case 0xf07d: /* fsrra FRn */ 1771fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 177211b7aa23SRichard Henderson CHECK_FPSCR_PR_0 177311b7aa23SRichard Henderson gen_helper_fsrra_FT(FREG(B11_8), cpu_env, FREG(B11_8)); 1774fcf5ef2aSThomas Huth break; 1775fcf5ef2aSThomas Huth case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */ 1776fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17777e9f7ca8SRichard Henderson CHECK_FPSCR_PR_0 17787c9f7038SRichard Henderson tcg_gen_movi_i32(FREG(B11_8), 0); 1779fcf5ef2aSThomas Huth return; 1780fcf5ef2aSThomas Huth case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */ 1781fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17827e9f7ca8SRichard Henderson CHECK_FPSCR_PR_0 17837c9f7038SRichard Henderson tcg_gen_movi_i32(FREG(B11_8), 0x3f800000); 1784fcf5ef2aSThomas Huth return; 1785fcf5ef2aSThomas Huth case 0xf0ad: /* fcnvsd FPUL,DRn */ 1786fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1787fcf5ef2aSThomas Huth { 1788fcf5ef2aSThomas Huth TCGv_i64 fp = tcg_temp_new_i64(); 1789fcf5ef2aSThomas Huth gen_helper_fcnvsd_FT_DT(fp, cpu_env, cpu_fpul); 17901e0b21d8SRichard Henderson gen_store_fpr64(ctx, fp, B11_8); 1791fcf5ef2aSThomas Huth tcg_temp_free_i64(fp); 1792fcf5ef2aSThomas Huth } 1793fcf5ef2aSThomas Huth return; 1794fcf5ef2aSThomas Huth case 0xf0bd: /* fcnvds DRn,FPUL */ 1795fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1796fcf5ef2aSThomas Huth { 1797fcf5ef2aSThomas Huth TCGv_i64 fp = tcg_temp_new_i64(); 17981e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp, B11_8); 1799fcf5ef2aSThomas Huth gen_helper_fcnvds_DT_FT(cpu_fpul, cpu_env, fp); 1800fcf5ef2aSThomas Huth 
tcg_temp_free_i64(fp); 1801fcf5ef2aSThomas Huth } 1802fcf5ef2aSThomas Huth return; 1803fcf5ef2aSThomas Huth case 0xf0ed: /* fipr FVm,FVn */ 1804fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 18057e9f7ca8SRichard Henderson CHECK_FPSCR_PR_1 18067e9f7ca8SRichard Henderson { 18077e9f7ca8SRichard Henderson TCGv m = tcg_const_i32((ctx->opcode >> 8) & 3); 18087e9f7ca8SRichard Henderson TCGv n = tcg_const_i32((ctx->opcode >> 10) & 3); 1809fcf5ef2aSThomas Huth gen_helper_fipr(cpu_env, m, n); 1810fcf5ef2aSThomas Huth tcg_temp_free(m); 1811fcf5ef2aSThomas Huth tcg_temp_free(n); 1812fcf5ef2aSThomas Huth return; 1813fcf5ef2aSThomas Huth } 1814fcf5ef2aSThomas Huth break; 1815fcf5ef2aSThomas Huth case 0xf0fd: /* ftrv XMTRX,FVn */ 1816fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 18177e9f7ca8SRichard Henderson CHECK_FPSCR_PR_1 18187e9f7ca8SRichard Henderson { 18197e9f7ca8SRichard Henderson if ((ctx->opcode & 0x0300) != 0x0100) { 18207e9f7ca8SRichard Henderson goto do_illegal; 18217e9f7ca8SRichard Henderson } 18227e9f7ca8SRichard Henderson TCGv n = tcg_const_i32((ctx->opcode >> 10) & 3); 1823fcf5ef2aSThomas Huth gen_helper_ftrv(cpu_env, n); 1824fcf5ef2aSThomas Huth tcg_temp_free(n); 1825fcf5ef2aSThomas Huth return; 1826fcf5ef2aSThomas Huth } 1827fcf5ef2aSThomas Huth break; 1828fcf5ef2aSThomas Huth } 1829fcf5ef2aSThomas Huth #if 0 1830fcf5ef2aSThomas Huth fprintf(stderr, "unknown instruction 0x%04x at pc 0x%08x\n", 18316f1c2af6SRichard Henderson ctx->opcode, ctx->base.pc_next); 1832fcf5ef2aSThomas Huth fflush(stderr); 1833fcf5ef2aSThomas Huth #endif 18346b98213dSRichard Henderson do_illegal: 18359a562ae7SAurelien Jarno if (ctx->envflags & DELAY_SLOT_MASK) { 1836dec16c6eSRichard Henderson do_illegal_slot: 1837dec16c6eSRichard Henderson gen_save_cpu_state(ctx, true); 1838fcf5ef2aSThomas Huth gen_helper_raise_slot_illegal_instruction(cpu_env); 1839fcf5ef2aSThomas Huth } else { 1840dec16c6eSRichard Henderson gen_save_cpu_state(ctx, true); 1841fcf5ef2aSThomas Huth gen_helper_raise_illegal_instruction(cpu_env); 1842fcf5ef2aSThomas Huth } 18436f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 1844dec4f042SRichard Henderson return; 1845dec4f042SRichard Henderson 1846dec4f042SRichard Henderson do_fpu_disabled: 1847dec4f042SRichard Henderson gen_save_cpu_state(ctx, true); 1848dec4f042SRichard Henderson if (ctx->envflags & DELAY_SLOT_MASK) { 1849dec4f042SRichard Henderson gen_helper_raise_slot_fpu_disable(cpu_env); 1850dec4f042SRichard Henderson } else { 1851dec4f042SRichard Henderson gen_helper_raise_fpu_disable(cpu_env); 1852dec4f042SRichard Henderson } 18536f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 1854dec4f042SRichard Henderson return; 1855fcf5ef2aSThomas Huth } 1856fcf5ef2aSThomas Huth 1857fcf5ef2aSThomas Huth static void decode_opc(DisasContext * ctx) 1858fcf5ef2aSThomas Huth { 1859a6215749SAurelien Jarno uint32_t old_flags = ctx->envflags; 1860fcf5ef2aSThomas Huth 1861fcf5ef2aSThomas Huth _decode_opc(ctx); 1862fcf5ef2aSThomas Huth 18639a562ae7SAurelien Jarno if (old_flags & DELAY_SLOT_MASK) { 1864fcf5ef2aSThomas Huth /* go out of the delay slot */ 18659a562ae7SAurelien Jarno ctx->envflags &= ~DELAY_SLOT_MASK; 18664bfa602bSRichard Henderson 18674bfa602bSRichard Henderson /* When in an exclusive region, we must continue to the end 18684bfa602bSRichard Henderson for conditional branches. 
*/ 18694bfa602bSRichard Henderson if (ctx->tbflags & GUSA_EXCLUSIVE 18704bfa602bSRichard Henderson && old_flags & DELAY_SLOT_CONDITIONAL) { 18714bfa602bSRichard Henderson gen_delayed_conditional_jump(ctx); 18724bfa602bSRichard Henderson return; 18734bfa602bSRichard Henderson } 18744bfa602bSRichard Henderson /* Otherwise this is probably an invalid gUSA region. 18754bfa602bSRichard Henderson Drop the GUSA bits so the next TB doesn't see them. */ 18764bfa602bSRichard Henderson ctx->envflags &= ~GUSA_MASK; 18774bfa602bSRichard Henderson 1878ac9707eaSAurelien Jarno tcg_gen_movi_i32(cpu_flags, ctx->envflags); 1879fcf5ef2aSThomas Huth if (old_flags & DELAY_SLOT_CONDITIONAL) { 1880fcf5ef2aSThomas Huth gen_delayed_conditional_jump(ctx); 1881be53081aSAurelien Jarno } else { 1882fcf5ef2aSThomas Huth gen_jump(ctx); 1883fcf5ef2aSThomas Huth } 18844bfa602bSRichard Henderson } 18854bfa602bSRichard Henderson } 1886fcf5ef2aSThomas Huth 18874bfa602bSRichard Henderson #ifdef CONFIG_USER_ONLY 18884bfa602bSRichard Henderson /* For uniprocessors, SH4 uses optimistic restartable atomic sequences. 18894bfa602bSRichard Henderson Upon an interrupt, a real kernel would simply notice magic values in 18904bfa602bSRichard Henderson the registers and reset the PC to the start of the sequence. 18914bfa602bSRichard Henderson 18924bfa602bSRichard Henderson For QEMU, we cannot do this in quite the same way. Instead, we notice 18934bfa602bSRichard Henderson the normal start of such a sequence (mov #-x,r15). While we can handle 18944bfa602bSRichard Henderson any sequence via cpu_exec_step_atomic, we can recognize the "normal" 18954bfa602bSRichard Henderson sequences and transform them into atomic operations as seen by the host. 18964bfa602bSRichard Henderson */ 1897be0e3d7aSRichard Henderson static void decode_gusa(DisasContext *ctx, CPUSH4State *env) 18984bfa602bSRichard Henderson { 1899d6a6cffdSRichard Henderson uint16_t insns[5]; 1900d6a6cffdSRichard Henderson int ld_adr, ld_dst, ld_mop; 1901d6a6cffdSRichard Henderson int op_dst, op_src, op_opc; 1902d6a6cffdSRichard Henderson int mv_src, mt_dst, st_src, st_mop; 1903d6a6cffdSRichard Henderson TCGv op_arg; 19046f1c2af6SRichard Henderson uint32_t pc = ctx->base.pc_next; 19056f1c2af6SRichard Henderson uint32_t pc_end = ctx->base.tb->cs_base; 19064bfa602bSRichard Henderson int max_insns = (pc_end - pc) / 2; 1907d6a6cffdSRichard Henderson int i; 19084bfa602bSRichard Henderson 1909d6a6cffdSRichard Henderson /* The state machine below will consume only a few insns. 1910d6a6cffdSRichard Henderson If there are more than that in a region, fail now. */ 1911d6a6cffdSRichard Henderson if (max_insns > ARRAY_SIZE(insns)) { 1912d6a6cffdSRichard Henderson goto fail; 1913d6a6cffdSRichard Henderson } 1914d6a6cffdSRichard Henderson 1915d6a6cffdSRichard Henderson /* Read all of the insns for the region. */ 1916d6a6cffdSRichard Henderson for (i = 0; i < max_insns; ++i) { 1917da94123fSEmilio G. 
Cota insns[i] = translator_lduw(env, pc + i * 2); 1918d6a6cffdSRichard Henderson } 1919d6a6cffdSRichard Henderson 1920d6a6cffdSRichard Henderson ld_adr = ld_dst = ld_mop = -1; 1921d6a6cffdSRichard Henderson mv_src = -1; 1922d6a6cffdSRichard Henderson op_dst = op_src = op_opc = -1; 1923d6a6cffdSRichard Henderson mt_dst = -1; 1924d6a6cffdSRichard Henderson st_src = st_mop = -1; 1925f764718dSRichard Henderson op_arg = NULL; 1926d6a6cffdSRichard Henderson i = 0; 1927d6a6cffdSRichard Henderson 1928d6a6cffdSRichard Henderson #define NEXT_INSN \ 1929d6a6cffdSRichard Henderson do { if (i >= max_insns) goto fail; ctx->opcode = insns[i++]; } while (0) 1930d6a6cffdSRichard Henderson 1931d6a6cffdSRichard Henderson /* 1932d6a6cffdSRichard Henderson * Expect a load to begin the region. 1933d6a6cffdSRichard Henderson */ 1934d6a6cffdSRichard Henderson NEXT_INSN; 1935d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) { 1936d6a6cffdSRichard Henderson case 0x6000: /* mov.b @Rm,Rn */ 1937d6a6cffdSRichard Henderson ld_mop = MO_SB; 1938d6a6cffdSRichard Henderson break; 1939d6a6cffdSRichard Henderson case 0x6001: /* mov.w @Rm,Rn */ 1940d6a6cffdSRichard Henderson ld_mop = MO_TESW; 1941d6a6cffdSRichard Henderson break; 1942d6a6cffdSRichard Henderson case 0x6002: /* mov.l @Rm,Rn */ 1943d6a6cffdSRichard Henderson ld_mop = MO_TESL; 1944d6a6cffdSRichard Henderson break; 1945d6a6cffdSRichard Henderson default: 1946d6a6cffdSRichard Henderson goto fail; 1947d6a6cffdSRichard Henderson } 1948d6a6cffdSRichard Henderson ld_adr = B7_4; 1949d6a6cffdSRichard Henderson ld_dst = B11_8; 1950d6a6cffdSRichard Henderson if (ld_adr == ld_dst) { 1951d6a6cffdSRichard Henderson goto fail; 1952d6a6cffdSRichard Henderson } 1953d6a6cffdSRichard Henderson /* Unless we see a mov, any two-operand operation must use ld_dst. */ 1954d6a6cffdSRichard Henderson op_dst = ld_dst; 1955d6a6cffdSRichard Henderson 1956d6a6cffdSRichard Henderson /* 1957d6a6cffdSRichard Henderson * Expect an optional register move. 1958d6a6cffdSRichard Henderson */ 1959d6a6cffdSRichard Henderson NEXT_INSN; 1960d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) { 1961d6a6cffdSRichard Henderson case 0x6003: /* mov Rm,Rn */ 196202b8e735SPhilippe Mathieu-Daudé /* 1963*23b5d9faSLichang Zhao * Here we want to recognize ld_dst being saved for later consumption, 196402b8e735SPhilippe Mathieu-Daudé * or for another input register being copied so that ld_dst need not 196502b8e735SPhilippe Mathieu-Daudé * be clobbered during the operation. 196602b8e735SPhilippe Mathieu-Daudé */ 1967d6a6cffdSRichard Henderson op_dst = B11_8; 1968d6a6cffdSRichard Henderson mv_src = B7_4; 1969d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 1970d6a6cffdSRichard Henderson /* Overwriting the load output. */ 1971d6a6cffdSRichard Henderson goto fail; 1972d6a6cffdSRichard Henderson } 1973d6a6cffdSRichard Henderson if (mv_src != ld_dst) { 1974d6a6cffdSRichard Henderson /* Copying a new input; constrain op_src to match the load. */ 1975d6a6cffdSRichard Henderson op_src = ld_dst; 1976d6a6cffdSRichard Henderson } 1977d6a6cffdSRichard Henderson break; 1978d6a6cffdSRichard Henderson 1979d6a6cffdSRichard Henderson default: 1980d6a6cffdSRichard Henderson /* Put back and re-examine as operation. */ 1981d6a6cffdSRichard Henderson --i; 1982d6a6cffdSRichard Henderson } 1983d6a6cffdSRichard Henderson 1984d6a6cffdSRichard Henderson /* 1985d6a6cffdSRichard Henderson * Expect the operation. 
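     *
     * For orientation, a gUSA atomic-add region in the style of the SH
     * glibc/kernel atomic helpers looks roughly like this (illustrative
     * only; register choices and the length constant vary):
     *
     *         mova   1f, r0        ! r0 = address of the end of the region
     *         mov    r15, r1       ! save the real stack pointer
     *         mov    #-6, r15      ! negative region length: gUSA is active
     *         mov.l  @r4, r2       ! the load matched above
     *         add    r3, r2        ! the operation matched by this switch
     *         mov.l  r2, @r4       ! the store matched further down
     *      1: mov    r1, r15       ! restore r15, leaving the region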
1986d6a6cffdSRichard Henderson */
1987d6a6cffdSRichard Henderson NEXT_INSN;
1988d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) {
1989d6a6cffdSRichard Henderson case 0x300c: /* add Rm,Rn */
1990d6a6cffdSRichard Henderson op_opc = INDEX_op_add_i32;
1991d6a6cffdSRichard Henderson goto do_reg_op;
1992d6a6cffdSRichard Henderson case 0x2009: /* and Rm,Rn */
1993d6a6cffdSRichard Henderson op_opc = INDEX_op_and_i32;
1994d6a6cffdSRichard Henderson goto do_reg_op;
1995d6a6cffdSRichard Henderson case 0x200a: /* xor Rm,Rn */
1996d6a6cffdSRichard Henderson op_opc = INDEX_op_xor_i32;
1997d6a6cffdSRichard Henderson goto do_reg_op;
1998d6a6cffdSRichard Henderson case 0x200b: /* or Rm,Rn */
1999d6a6cffdSRichard Henderson op_opc = INDEX_op_or_i32;
2000d6a6cffdSRichard Henderson do_reg_op:
2001d6a6cffdSRichard Henderson /* The operation register should be as expected, and the
2002d6a6cffdSRichard Henderson other input cannot depend on the load. */
2003d6a6cffdSRichard Henderson if (op_dst != B11_8) {
2004d6a6cffdSRichard Henderson goto fail;
2005d6a6cffdSRichard Henderson }
2006d6a6cffdSRichard Henderson if (op_src < 0) {
2007d6a6cffdSRichard Henderson /* Unconstrained input. */
2008d6a6cffdSRichard Henderson op_src = B7_4;
2009d6a6cffdSRichard Henderson } else if (op_src == B7_4) {
2010d6a6cffdSRichard Henderson /* Constrained input matched load. All operations are
2011d6a6cffdSRichard Henderson commutative; "swap" them by "moving" the load output
2012d6a6cffdSRichard Henderson to the (implicit) first argument and the move source
2013d6a6cffdSRichard Henderson to the (explicit) second argument. */
2014d6a6cffdSRichard Henderson op_src = mv_src;
2015d6a6cffdSRichard Henderson } else {
2016d6a6cffdSRichard Henderson goto fail;
2017d6a6cffdSRichard Henderson }
2018d6a6cffdSRichard Henderson op_arg = REG(op_src);
2019d6a6cffdSRichard Henderson break;
2020d6a6cffdSRichard Henderson 
2021d6a6cffdSRichard Henderson case 0x6007: /* not Rm,Rn */
2022d6a6cffdSRichard Henderson if (ld_dst != B7_4 || mv_src >= 0) {
2023d6a6cffdSRichard Henderson goto fail;
2024d6a6cffdSRichard Henderson }
2025d6a6cffdSRichard Henderson op_dst = B11_8;
2026d6a6cffdSRichard Henderson op_opc = INDEX_op_xor_i32;
2027d6a6cffdSRichard Henderson op_arg = tcg_const_i32(-1);
2028d6a6cffdSRichard Henderson break;
2029d6a6cffdSRichard Henderson 
2030d6a6cffdSRichard Henderson case 0x7000 ... 0x700f: /* add #imm,Rn */
2031d6a6cffdSRichard Henderson if (op_dst != B11_8 || mv_src >= 0) {
2032d6a6cffdSRichard Henderson goto fail;
2033d6a6cffdSRichard Henderson }
2034d6a6cffdSRichard Henderson op_opc = INDEX_op_add_i32;
2035d6a6cffdSRichard Henderson op_arg = tcg_const_i32(B7_0s);
2036d6a6cffdSRichard Henderson break;
2037d6a6cffdSRichard Henderson 
2038d6a6cffdSRichard Henderson case 0x3000: /* cmp/eq Rm,Rn */
2039d6a6cffdSRichard Henderson /* Looking for the middle of a compare-and-swap sequence,
2040d6a6cffdSRichard Henderson beginning with the compare. Operands can be either order,
2041d6a6cffdSRichard Henderson but with only one overlapping the load. */
2042d6a6cffdSRichard Henderson if ((ld_dst == B11_8) + (ld_dst == B7_4) != 1 || mv_src >= 0) {
2043d6a6cffdSRichard Henderson goto fail;
2044d6a6cffdSRichard Henderson }
2045d6a6cffdSRichard Henderson op_opc = INDEX_op_setcond_i32; /* placeholder */
2046d6a6cffdSRichard Henderson op_src = (ld_dst == B11_8 ?
B7_4 : B11_8); 2047d6a6cffdSRichard Henderson op_arg = REG(op_src); 2048d6a6cffdSRichard Henderson 2049d6a6cffdSRichard Henderson NEXT_INSN; 2050d6a6cffdSRichard Henderson switch (ctx->opcode & 0xff00) { 2051d6a6cffdSRichard Henderson case 0x8b00: /* bf label */ 2052d6a6cffdSRichard Henderson case 0x8f00: /* bf/s label */ 2053d6a6cffdSRichard Henderson if (pc + (i + 1 + B7_0s) * 2 != pc_end) { 2054d6a6cffdSRichard Henderson goto fail; 2055d6a6cffdSRichard Henderson } 2056d6a6cffdSRichard Henderson if ((ctx->opcode & 0xff00) == 0x8b00) { /* bf label */ 2057d6a6cffdSRichard Henderson break; 2058d6a6cffdSRichard Henderson } 2059d6a6cffdSRichard Henderson /* We're looking to unconditionally modify Rn with the 2060d6a6cffdSRichard Henderson result of the comparison, within the delay slot of 2061d6a6cffdSRichard Henderson the branch. This is used by older gcc. */ 2062d6a6cffdSRichard Henderson NEXT_INSN; 2063d6a6cffdSRichard Henderson if ((ctx->opcode & 0xf0ff) == 0x0029) { /* movt Rn */ 2064d6a6cffdSRichard Henderson mt_dst = B11_8; 2065d6a6cffdSRichard Henderson } else { 2066d6a6cffdSRichard Henderson goto fail; 2067d6a6cffdSRichard Henderson } 2068d6a6cffdSRichard Henderson break; 2069d6a6cffdSRichard Henderson 2070d6a6cffdSRichard Henderson default: 2071d6a6cffdSRichard Henderson goto fail; 2072d6a6cffdSRichard Henderson } 2073d6a6cffdSRichard Henderson break; 2074d6a6cffdSRichard Henderson 2075d6a6cffdSRichard Henderson case 0x2008: /* tst Rm,Rn */ 2076d6a6cffdSRichard Henderson /* Looking for a compare-and-swap against zero. */ 2077d6a6cffdSRichard Henderson if (ld_dst != B11_8 || ld_dst != B7_4 || mv_src >= 0) { 2078d6a6cffdSRichard Henderson goto fail; 2079d6a6cffdSRichard Henderson } 2080d6a6cffdSRichard Henderson op_opc = INDEX_op_setcond_i32; 2081d6a6cffdSRichard Henderson op_arg = tcg_const_i32(0); 2082d6a6cffdSRichard Henderson 2083d6a6cffdSRichard Henderson NEXT_INSN; 2084d6a6cffdSRichard Henderson if ((ctx->opcode & 0xff00) != 0x8900 /* bt label */ 2085d6a6cffdSRichard Henderson || pc + (i + 1 + B7_0s) * 2 != pc_end) { 2086d6a6cffdSRichard Henderson goto fail; 2087d6a6cffdSRichard Henderson } 2088d6a6cffdSRichard Henderson break; 2089d6a6cffdSRichard Henderson 2090d6a6cffdSRichard Henderson default: 2091d6a6cffdSRichard Henderson /* Put back and re-examine as store. */ 2092d6a6cffdSRichard Henderson --i; 2093d6a6cffdSRichard Henderson } 2094d6a6cffdSRichard Henderson 2095d6a6cffdSRichard Henderson /* 2096d6a6cffdSRichard Henderson * Expect the store. 2097d6a6cffdSRichard Henderson */ 2098d6a6cffdSRichard Henderson /* The store must be the last insn. */ 2099d6a6cffdSRichard Henderson if (i != max_insns - 1) { 2100d6a6cffdSRichard Henderson goto fail; 2101d6a6cffdSRichard Henderson } 2102d6a6cffdSRichard Henderson NEXT_INSN; 2103d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) { 2104d6a6cffdSRichard Henderson case 0x2000: /* mov.b Rm,@Rn */ 2105d6a6cffdSRichard Henderson st_mop = MO_UB; 2106d6a6cffdSRichard Henderson break; 2107d6a6cffdSRichard Henderson case 0x2001: /* mov.w Rm,@Rn */ 2108d6a6cffdSRichard Henderson st_mop = MO_UW; 2109d6a6cffdSRichard Henderson break; 2110d6a6cffdSRichard Henderson case 0x2002: /* mov.l Rm,@Rn */ 2111d6a6cffdSRichard Henderson st_mop = MO_UL; 2112d6a6cffdSRichard Henderson break; 2113d6a6cffdSRichard Henderson default: 2114d6a6cffdSRichard Henderson goto fail; 2115d6a6cffdSRichard Henderson } 2116d6a6cffdSRichard Henderson /* The store must match the load. 
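       That is, it must use the same address register and the same access
       width as the initial load (e.g. a region that loads with mov.l @Rn
       must store back with mov.l to the same @Rn); anything else falls
       back to the exclusive-region path via the "fail" label.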
*/ 2117d6a6cffdSRichard Henderson if (ld_adr != B11_8 || st_mop != (ld_mop & MO_SIZE)) { 2118d6a6cffdSRichard Henderson goto fail; 2119d6a6cffdSRichard Henderson } 2120d6a6cffdSRichard Henderson st_src = B7_4; 2121d6a6cffdSRichard Henderson 2122d6a6cffdSRichard Henderson #undef NEXT_INSN 2123d6a6cffdSRichard Henderson 2124d6a6cffdSRichard Henderson /* 2125d6a6cffdSRichard Henderson * Emit the operation. 2126d6a6cffdSRichard Henderson */ 2127d6a6cffdSRichard Henderson switch (op_opc) { 2128d6a6cffdSRichard Henderson case -1: 2129d6a6cffdSRichard Henderson /* No operation found. Look for exchange pattern. */ 2130d6a6cffdSRichard Henderson if (st_src == ld_dst || mv_src >= 0) { 2131d6a6cffdSRichard Henderson goto fail; 2132d6a6cffdSRichard Henderson } 2133d6a6cffdSRichard Henderson tcg_gen_atomic_xchg_i32(REG(ld_dst), REG(ld_adr), REG(st_src), 2134d6a6cffdSRichard Henderson ctx->memidx, ld_mop); 2135d6a6cffdSRichard Henderson break; 2136d6a6cffdSRichard Henderson 2137d6a6cffdSRichard Henderson case INDEX_op_add_i32: 2138d6a6cffdSRichard Henderson if (op_dst != st_src) { 2139d6a6cffdSRichard Henderson goto fail; 2140d6a6cffdSRichard Henderson } 2141d6a6cffdSRichard Henderson if (op_dst == ld_dst && st_mop == MO_UL) { 2142d6a6cffdSRichard Henderson tcg_gen_atomic_add_fetch_i32(REG(ld_dst), REG(ld_adr), 2143d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2144d6a6cffdSRichard Henderson } else { 2145d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_add_i32(REG(ld_dst), REG(ld_adr), 2146d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2147d6a6cffdSRichard Henderson if (op_dst != ld_dst) { 2148d6a6cffdSRichard Henderson /* Note that mop sizes < 4 cannot use add_fetch 2149d6a6cffdSRichard Henderson because it won't carry into the higher bits. */ 2150d6a6cffdSRichard Henderson tcg_gen_add_i32(REG(op_dst), REG(ld_dst), op_arg); 2151d6a6cffdSRichard Henderson } 2152d6a6cffdSRichard Henderson } 2153d6a6cffdSRichard Henderson break; 2154d6a6cffdSRichard Henderson 2155d6a6cffdSRichard Henderson case INDEX_op_and_i32: 2156d6a6cffdSRichard Henderson if (op_dst != st_src) { 2157d6a6cffdSRichard Henderson goto fail; 2158d6a6cffdSRichard Henderson } 2159d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 2160d6a6cffdSRichard Henderson tcg_gen_atomic_and_fetch_i32(REG(ld_dst), REG(ld_adr), 2161d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2162d6a6cffdSRichard Henderson } else { 2163d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_and_i32(REG(ld_dst), REG(ld_adr), 2164d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2165d6a6cffdSRichard Henderson tcg_gen_and_i32(REG(op_dst), REG(ld_dst), op_arg); 2166d6a6cffdSRichard Henderson } 2167d6a6cffdSRichard Henderson break; 2168d6a6cffdSRichard Henderson 2169d6a6cffdSRichard Henderson case INDEX_op_or_i32: 2170d6a6cffdSRichard Henderson if (op_dst != st_src) { 2171d6a6cffdSRichard Henderson goto fail; 2172d6a6cffdSRichard Henderson } 2173d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 2174d6a6cffdSRichard Henderson tcg_gen_atomic_or_fetch_i32(REG(ld_dst), REG(ld_adr), 2175d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2176d6a6cffdSRichard Henderson } else { 2177d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_or_i32(REG(ld_dst), REG(ld_adr), 2178d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2179d6a6cffdSRichard Henderson tcg_gen_or_i32(REG(op_dst), REG(ld_dst), op_arg); 2180d6a6cffdSRichard Henderson } 2181d6a6cffdSRichard Henderson break; 2182d6a6cffdSRichard Henderson 2183d6a6cffdSRichard Henderson case 
INDEX_op_xor_i32: 2184d6a6cffdSRichard Henderson if (op_dst != st_src) { 2185d6a6cffdSRichard Henderson goto fail; 2186d6a6cffdSRichard Henderson } 2187d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 2188d6a6cffdSRichard Henderson tcg_gen_atomic_xor_fetch_i32(REG(ld_dst), REG(ld_adr), 2189d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2190d6a6cffdSRichard Henderson } else { 2191d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_xor_i32(REG(ld_dst), REG(ld_adr), 2192d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2193d6a6cffdSRichard Henderson tcg_gen_xor_i32(REG(op_dst), REG(ld_dst), op_arg); 2194d6a6cffdSRichard Henderson } 2195d6a6cffdSRichard Henderson break; 2196d6a6cffdSRichard Henderson 2197d6a6cffdSRichard Henderson case INDEX_op_setcond_i32: 2198d6a6cffdSRichard Henderson if (st_src == ld_dst) { 2199d6a6cffdSRichard Henderson goto fail; 2200d6a6cffdSRichard Henderson } 2201d6a6cffdSRichard Henderson tcg_gen_atomic_cmpxchg_i32(REG(ld_dst), REG(ld_adr), op_arg, 2202d6a6cffdSRichard Henderson REG(st_src), ctx->memidx, ld_mop); 2203d6a6cffdSRichard Henderson tcg_gen_setcond_i32(TCG_COND_EQ, cpu_sr_t, REG(ld_dst), op_arg); 2204d6a6cffdSRichard Henderson if (mt_dst >= 0) { 2205d6a6cffdSRichard Henderson tcg_gen_mov_i32(REG(mt_dst), cpu_sr_t); 2206d6a6cffdSRichard Henderson } 2207d6a6cffdSRichard Henderson break; 2208d6a6cffdSRichard Henderson 2209d6a6cffdSRichard Henderson default: 2210d6a6cffdSRichard Henderson g_assert_not_reached(); 2211d6a6cffdSRichard Henderson } 2212d6a6cffdSRichard Henderson 2213d6a6cffdSRichard Henderson /* If op_src is not a valid register, then op_arg was a constant. */ 2214f764718dSRichard Henderson if (op_src < 0 && op_arg) { 2215d6a6cffdSRichard Henderson tcg_temp_free_i32(op_arg); 2216d6a6cffdSRichard Henderson } 2217d6a6cffdSRichard Henderson 2218d6a6cffdSRichard Henderson /* The entire region has been translated. */ 2219d6a6cffdSRichard Henderson ctx->envflags &= ~GUSA_MASK; 22206f1c2af6SRichard Henderson ctx->base.pc_next = pc_end; 2221be0e3d7aSRichard Henderson ctx->base.num_insns += max_insns - 1; 2222be0e3d7aSRichard Henderson return; 2223d6a6cffdSRichard Henderson 2224d6a6cffdSRichard Henderson fail: 22254bfa602bSRichard Henderson qemu_log_mask(LOG_UNIMP, "Unrecognized gUSA sequence %08x-%08x\n", 22264bfa602bSRichard Henderson pc, pc_end); 22274bfa602bSRichard Henderson 22284bfa602bSRichard Henderson /* Restart with the EXCLUSIVE bit set, within a TB run via 22294bfa602bSRichard Henderson cpu_exec_step_atomic holding the exclusive lock. */ 22304bfa602bSRichard Henderson ctx->envflags |= GUSA_EXCLUSIVE; 22314bfa602bSRichard Henderson gen_save_cpu_state(ctx, false); 22324bfa602bSRichard Henderson gen_helper_exclusive(cpu_env); 22336f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 22344bfa602bSRichard Henderson 22354bfa602bSRichard Henderson /* We're not executing an instruction, but we must report one for the 22364bfa602bSRichard Henderson purposes of accounting within the TB. We might as well report the 22376f1c2af6SRichard Henderson entire region consumed via ctx->base.pc_next so that it's immediately 22386f1c2af6SRichard Henderson available in the disassembly dump. */ 22396f1c2af6SRichard Henderson ctx->base.pc_next = pc_end; 2240be0e3d7aSRichard Henderson ctx->base.num_insns += max_insns - 1; 22414bfa602bSRichard Henderson } 22424bfa602bSRichard Henderson #endif 22434bfa602bSRichard Henderson 2244fd1b3d38SEmilio G. 
static void sh4_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUSH4State *env = cs->env_ptr;
    uint32_t tbflags;
    int bound;

    ctx->tbflags = tbflags = ctx->base.tb->flags;
    ctx->envflags = tbflags & TB_FLAG_ENVFLAGS_MASK;
    ctx->memidx = (tbflags & (1u << SR_MD)) == 0 ? 1 : 0;
    /* We don't know if the delayed pc came from a dynamic or static branch,
       so assume it is a dynamic branch.  */
    ctx->delayed_pc = -1; /* use delayed pc from env pointer */
    ctx->features = env->features;
    ctx->has_movcal = (tbflags & TB_FLAG_PENDING_MOVCA);
    ctx->gbank = ((tbflags & (1 << SR_MD)) &&
                  (tbflags & (1 << SR_RB))) * 0x10;
    ctx->fbank = tbflags & FPSCR_FR ? 0x10 : 0;

    if (tbflags & GUSA_MASK) {
        uint32_t pc = ctx->base.pc_next;
        uint32_t pc_end = ctx->base.tb->cs_base;
        int backup = sextract32(ctx->tbflags, GUSA_SHIFT, 8);
        int max_insns = (pc_end - pc) / 2;

        if (pc != pc_end + backup || max_insns < 2) {
            /* This is a malformed gUSA region.  Don't do anything special,
               since the interpreter is likely to get confused.  */
            ctx->envflags &= ~GUSA_MASK;
        } else if (tbflags & GUSA_EXCLUSIVE) {
            /* Regardless of single-stepping or the end of the page,
               we must complete execution of the gUSA region while
               holding the exclusive lock.  */
            ctx->base.max_insns = max_insns;
            return;
        }
    }

    /* Since the ISA is fixed-width, we can bound by the number
       of instructions remaining on the page.  */
    bound = -(ctx->base.pc_next | TARGET_PAGE_MASK) / 2;
    ctx->base.max_insns = MIN(ctx->base.max_insns, bound);
}

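/*
 * Worked example for the page bound computed above, assuming 4 KiB target
 * pages (TARGET_PAGE_MASK == 0xfffff000); the address is illustrative:
 *   pc_next = 0x8c001ffa
 *   -(0x8c001ffa | 0xfffff000) = -(-6) = 6 bytes left on this page,
 * so at most 3 more 16-bit instructions before crossing the page boundary.
 */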
static bool sh4_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
                                    const CPUBreakpoint *bp)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    /* We have hit a breakpoint - make sure PC is up-to-date */
    gen_save_cpu_state(ctx, true);
    gen_helper_debug(cpu_env);
    ctx->base.is_jmp = DISAS_NORETURN;
    /* The address covered by the breakpoint must be included in
       [tb->pc, tb->pc + tb->size) in order for it to be
       properly cleared -- thus we increment the PC here so that
       the logic setting tb->size below does the right thing.  */
    ctx->base.pc_next += 2;
    return true;
}

static void sh4_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    CPUSH4State *env = cs->env_ptr;
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

#ifdef CONFIG_USER_ONLY
    if (unlikely(ctx->envflags & GUSA_MASK)
        && !(ctx->envflags & GUSA_EXCLUSIVE)) {
        /* We're in a gUSA region, and we have not already fallen
           back on using an exclusive region.  Attempt to parse the
           region into a single supported atomic operation.  Failure
           is handled within the parser by raising an exception to
           retry using an exclusive region.  */
        decode_gusa(ctx, env);
        return;
    }
#endif

    ctx->opcode = translator_lduw(env, ctx->base.pc_next);
    decode_opc(ctx);
    ctx->base.pc_next += 2;
}

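/*
 * tb_stop below finishes the block according to ctx->base.is_jmp:
 * DISAS_NORETURN means the translated code above has already ended the TB;
 * DISAS_STOP saves the CPU state and exits to the main loop (or raises a
 * debug exception when single-stepping); DISAS_NEXT and DISAS_TOO_MANY
 * save the state and chain to the following TB via gen_goto_tb.
 */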
static void sh4_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    if (ctx->tbflags & GUSA_EXCLUSIVE) {
        /* Ending the region of exclusivity.  Clear the bits.  */
        ctx->envflags &= ~GUSA_MASK;
    }

    switch (ctx->base.is_jmp) {
    case DISAS_STOP:
        gen_save_cpu_state(ctx, true);
        if (ctx->base.singlestep_enabled) {
            gen_helper_debug(cpu_env);
        } else {
            tcg_gen_exit_tb(NULL, 0);
        }
        break;
    case DISAS_NEXT:
    case DISAS_TOO_MANY:
        gen_save_cpu_state(ctx, false);
        gen_goto_tb(ctx, 0, ctx->base.pc_next);
        break;
    case DISAS_NORETURN:
        break;
    default:
        g_assert_not_reached();
    }
}

static void sh4_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
{
    qemu_log("IN:\n"); /* , lookup_symbol(dcbase->pc_first)); */
    log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
}

static const TranslatorOps sh4_tr_ops = {
    .init_disas_context = sh4_tr_init_disas_context,
    .tb_start           = sh4_tr_tb_start,
    .insn_start         = sh4_tr_insn_start,
    .breakpoint_check   = sh4_tr_breakpoint_check,
    .translate_insn     = sh4_tr_translate_insn,
    .tb_stop            = sh4_tr_tb_stop,
    .disas_log          = sh4_tr_disas_log,
};

void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns)
{
    DisasContext ctx;

    translator_loop(&sh4_tr_ops, &ctx.base, cs, tb, max_insns);
}

void restore_state_to_opc(CPUSH4State *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->pc = data[0];
    env->flags = data[1];
    /* Theoretically delayed_pc should also be restored.  In practice the
       branch instruction is re-executed after exception, so the delayed
       branch target will be recomputed.  */
}
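/*
 * Note on restore_state_to_opc above: data[0] and data[1] are the two values
 * recorded per instruction by tcg_gen_insn_start(ctx->base.pc_next,
 * ctx->envflags) in sh4_tr_insn_start, so unwinding in the middle of a TB
 * restores both the PC and the translation-time flags.
 */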