1fcf5ef2aSThomas Huth /* 2fcf5ef2aSThomas Huth * SH4 translation 3fcf5ef2aSThomas Huth * 4fcf5ef2aSThomas Huth * Copyright (c) 2005 Samuel Tardieu 5fcf5ef2aSThomas Huth * 6fcf5ef2aSThomas Huth * This library is free software; you can redistribute it and/or 7fcf5ef2aSThomas Huth * modify it under the terms of the GNU Lesser General Public 8fcf5ef2aSThomas Huth * License as published by the Free Software Foundation; either 96faf2b6cSThomas Huth * version 2.1 of the License, or (at your option) any later version. 10fcf5ef2aSThomas Huth * 11fcf5ef2aSThomas Huth * This library is distributed in the hope that it will be useful, 12fcf5ef2aSThomas Huth * but WITHOUT ANY WARRANTY; without even the implied warranty of 13fcf5ef2aSThomas Huth * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 14fcf5ef2aSThomas Huth * Lesser General Public License for more details. 15fcf5ef2aSThomas Huth * 16fcf5ef2aSThomas Huth * You should have received a copy of the GNU Lesser General Public 17fcf5ef2aSThomas Huth * License along with this library; if not, see <http://www.gnu.org/licenses/>. 18fcf5ef2aSThomas Huth */ 19fcf5ef2aSThomas Huth 20fcf5ef2aSThomas Huth #include "qemu/osdep.h" 21fcf5ef2aSThomas Huth #include "cpu.h" 22fcf5ef2aSThomas Huth #include "disas/disas.h" 23fcf5ef2aSThomas Huth #include "exec/exec-all.h" 24dcb32f1dSPhilippe Mathieu-Daudé #include "tcg/tcg-op.h" 25fcf5ef2aSThomas Huth #include "exec/helper-proto.h" 26fcf5ef2aSThomas Huth #include "exec/helper-gen.h" 274834871bSRichard Henderson #include "exec/translator.h" 28fcf5ef2aSThomas Huth #include "exec/log.h" 2990c84c56SMarkus Armbruster #include "qemu/qemu-print.h" 30fcf5ef2aSThomas Huth 31d53106c9SRichard Henderson #define HELPER_H "helper.h" 32d53106c9SRichard Henderson #include "exec/helper-info.c.inc" 33d53106c9SRichard Henderson #undef HELPER_H 34d53106c9SRichard Henderson 35fcf5ef2aSThomas Huth 36fcf5ef2aSThomas Huth typedef struct DisasContext { 376f1c2af6SRichard Henderson DisasContextBase base; 386f1c2af6SRichard Henderson 39a6215749SAurelien Jarno uint32_t tbflags; /* should stay unmodified during the TB translation */ 40a6215749SAurelien Jarno uint32_t envflags; /* should stay in sync with env->flags using TCG ops */ 41fcf5ef2aSThomas Huth int memidx; 423a3bb8d2SRichard Henderson int gbank; 435c13bad9SRichard Henderson int fbank; 44fcf5ef2aSThomas Huth uint32_t delayed_pc; 45fcf5ef2aSThomas Huth uint32_t features; 466f1c2af6SRichard Henderson 476f1c2af6SRichard Henderson uint16_t opcode; 486f1c2af6SRichard Henderson 496f1c2af6SRichard Henderson bool has_movcal; 50fcf5ef2aSThomas Huth } DisasContext; 51fcf5ef2aSThomas Huth 52fcf5ef2aSThomas Huth #if defined(CONFIG_USER_ONLY) 53fcf5ef2aSThomas Huth #define IS_USER(ctx) 1 544da06fb3SRichard Henderson #define UNALIGN(C) (ctx->tbflags & TB_FLAG_UNALIGN ? MO_UNALN : MO_ALIGN) 55fcf5ef2aSThomas Huth #else 56a6215749SAurelien Jarno #define IS_USER(ctx) (!(ctx->tbflags & (1u << SR_MD))) 574da06fb3SRichard Henderson #define UNALIGN(C) 0 58fcf5ef2aSThomas Huth #endif 59fcf5ef2aSThomas Huth 606f1c2af6SRichard Henderson /* Target-specific values for ctx->base.is_jmp. */ 614834871bSRichard Henderson /* We want to exit back to the cpu loop for some reason. 624834871bSRichard Henderson Usually this is to recognize interrupts immediately. 
*/ 634834871bSRichard Henderson #define DISAS_STOP DISAS_TARGET_0 64fcf5ef2aSThomas Huth 65fcf5ef2aSThomas Huth /* global register indexes */ 663a3bb8d2SRichard Henderson static TCGv cpu_gregs[32]; 67fcf5ef2aSThomas Huth static TCGv cpu_sr, cpu_sr_m, cpu_sr_q, cpu_sr_t; 68fcf5ef2aSThomas Huth static TCGv cpu_pc, cpu_ssr, cpu_spc, cpu_gbr; 69fcf5ef2aSThomas Huth static TCGv cpu_vbr, cpu_sgr, cpu_dbr, cpu_mach, cpu_macl; 70f85da308SRichard Henderson static TCGv cpu_pr, cpu_fpscr, cpu_fpul; 71f85da308SRichard Henderson static TCGv cpu_lock_addr, cpu_lock_value; 72fcf5ef2aSThomas Huth static TCGv cpu_fregs[32]; 73fcf5ef2aSThomas Huth 74fcf5ef2aSThomas Huth /* internal register indexes */ 7547b9f4d5SAurelien Jarno static TCGv cpu_flags, cpu_delayed_pc, cpu_delayed_cond; 76fcf5ef2aSThomas Huth 77fcf5ef2aSThomas Huth void sh4_translate_init(void) 78fcf5ef2aSThomas Huth { 79fcf5ef2aSThomas Huth int i; 80fcf5ef2aSThomas Huth static const char * const gregnames[24] = { 81fcf5ef2aSThomas Huth "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0", 82fcf5ef2aSThomas Huth "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0", 83fcf5ef2aSThomas Huth "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15", 84fcf5ef2aSThomas Huth "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1", 85fcf5ef2aSThomas Huth "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1" 86fcf5ef2aSThomas Huth }; 87fcf5ef2aSThomas Huth static const char * const fregnames[32] = { 88fcf5ef2aSThomas Huth "FPR0_BANK0", "FPR1_BANK0", "FPR2_BANK0", "FPR3_BANK0", 89fcf5ef2aSThomas Huth "FPR4_BANK0", "FPR5_BANK0", "FPR6_BANK0", "FPR7_BANK0", 90fcf5ef2aSThomas Huth "FPR8_BANK0", "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0", 91fcf5ef2aSThomas Huth "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0", 92fcf5ef2aSThomas Huth "FPR0_BANK1", "FPR1_BANK1", "FPR2_BANK1", "FPR3_BANK1", 93fcf5ef2aSThomas Huth "FPR4_BANK1", "FPR5_BANK1", "FPR6_BANK1", "FPR7_BANK1", 94fcf5ef2aSThomas Huth "FPR8_BANK1", "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1", 95fcf5ef2aSThomas Huth "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1", 96fcf5ef2aSThomas Huth }; 97fcf5ef2aSThomas Huth 983a3bb8d2SRichard Henderson for (i = 0; i < 24; i++) { 99ad75a51eSRichard Henderson cpu_gregs[i] = tcg_global_mem_new_i32(tcg_env, 100fcf5ef2aSThomas Huth offsetof(CPUSH4State, gregs[i]), 101fcf5ef2aSThomas Huth gregnames[i]); 1023a3bb8d2SRichard Henderson } 1033a3bb8d2SRichard Henderson memcpy(cpu_gregs + 24, cpu_gregs + 8, 8 * sizeof(TCGv)); 104fcf5ef2aSThomas Huth 105ad75a51eSRichard Henderson cpu_pc = tcg_global_mem_new_i32(tcg_env, 106fcf5ef2aSThomas Huth offsetof(CPUSH4State, pc), "PC"); 107ad75a51eSRichard Henderson cpu_sr = tcg_global_mem_new_i32(tcg_env, 108fcf5ef2aSThomas Huth offsetof(CPUSH4State, sr), "SR"); 109ad75a51eSRichard Henderson cpu_sr_m = tcg_global_mem_new_i32(tcg_env, 110fcf5ef2aSThomas Huth offsetof(CPUSH4State, sr_m), "SR_M"); 111ad75a51eSRichard Henderson cpu_sr_q = tcg_global_mem_new_i32(tcg_env, 112fcf5ef2aSThomas Huth offsetof(CPUSH4State, sr_q), "SR_Q"); 113ad75a51eSRichard Henderson cpu_sr_t = tcg_global_mem_new_i32(tcg_env, 114fcf5ef2aSThomas Huth offsetof(CPUSH4State, sr_t), "SR_T"); 115ad75a51eSRichard Henderson cpu_ssr = tcg_global_mem_new_i32(tcg_env, 116fcf5ef2aSThomas Huth offsetof(CPUSH4State, ssr), "SSR"); 117ad75a51eSRichard Henderson cpu_spc = tcg_global_mem_new_i32(tcg_env, 118fcf5ef2aSThomas Huth offsetof(CPUSH4State, spc), "SPC"); 119ad75a51eSRichard Henderson cpu_gbr = tcg_global_mem_new_i32(tcg_env, 120fcf5ef2aSThomas Huth offsetof(CPUSH4State, gbr), "GBR"); 
121ad75a51eSRichard Henderson cpu_vbr = tcg_global_mem_new_i32(tcg_env, 122fcf5ef2aSThomas Huth offsetof(CPUSH4State, vbr), "VBR"); 123ad75a51eSRichard Henderson cpu_sgr = tcg_global_mem_new_i32(tcg_env, 124fcf5ef2aSThomas Huth offsetof(CPUSH4State, sgr), "SGR"); 125ad75a51eSRichard Henderson cpu_dbr = tcg_global_mem_new_i32(tcg_env, 126fcf5ef2aSThomas Huth offsetof(CPUSH4State, dbr), "DBR"); 127ad75a51eSRichard Henderson cpu_mach = tcg_global_mem_new_i32(tcg_env, 128fcf5ef2aSThomas Huth offsetof(CPUSH4State, mach), "MACH"); 129ad75a51eSRichard Henderson cpu_macl = tcg_global_mem_new_i32(tcg_env, 130fcf5ef2aSThomas Huth offsetof(CPUSH4State, macl), "MACL"); 131ad75a51eSRichard Henderson cpu_pr = tcg_global_mem_new_i32(tcg_env, 132fcf5ef2aSThomas Huth offsetof(CPUSH4State, pr), "PR"); 133ad75a51eSRichard Henderson cpu_fpscr = tcg_global_mem_new_i32(tcg_env, 134fcf5ef2aSThomas Huth offsetof(CPUSH4State, fpscr), "FPSCR"); 135ad75a51eSRichard Henderson cpu_fpul = tcg_global_mem_new_i32(tcg_env, 136fcf5ef2aSThomas Huth offsetof(CPUSH4State, fpul), "FPUL"); 137fcf5ef2aSThomas Huth 138ad75a51eSRichard Henderson cpu_flags = tcg_global_mem_new_i32(tcg_env, 139fcf5ef2aSThomas Huth offsetof(CPUSH4State, flags), "_flags_"); 140ad75a51eSRichard Henderson cpu_delayed_pc = tcg_global_mem_new_i32(tcg_env, 141fcf5ef2aSThomas Huth offsetof(CPUSH4State, delayed_pc), 142fcf5ef2aSThomas Huth "_delayed_pc_"); 143ad75a51eSRichard Henderson cpu_delayed_cond = tcg_global_mem_new_i32(tcg_env, 14447b9f4d5SAurelien Jarno offsetof(CPUSH4State, 14547b9f4d5SAurelien Jarno delayed_cond), 14647b9f4d5SAurelien Jarno "_delayed_cond_"); 147ad75a51eSRichard Henderson cpu_lock_addr = tcg_global_mem_new_i32(tcg_env, 148f85da308SRichard Henderson offsetof(CPUSH4State, lock_addr), 149f85da308SRichard Henderson "_lock_addr_"); 150ad75a51eSRichard Henderson cpu_lock_value = tcg_global_mem_new_i32(tcg_env, 151f85da308SRichard Henderson offsetof(CPUSH4State, lock_value), 152f85da308SRichard Henderson "_lock_value_"); 153fcf5ef2aSThomas Huth 154fcf5ef2aSThomas Huth for (i = 0; i < 32; i++) 155ad75a51eSRichard Henderson cpu_fregs[i] = tcg_global_mem_new_i32(tcg_env, 156fcf5ef2aSThomas Huth offsetof(CPUSH4State, fregs[i]), 157fcf5ef2aSThomas Huth fregnames[i]); 158fcf5ef2aSThomas Huth } 159fcf5ef2aSThomas Huth 16090c84c56SMarkus Armbruster void superh_cpu_dump_state(CPUState *cs, FILE *f, int flags) 161fcf5ef2aSThomas Huth { 162795bec96SPhilippe Mathieu-Daudé CPUSH4State *env = cpu_env(cs); 163fcf5ef2aSThomas Huth int i; 16490c84c56SMarkus Armbruster 16590c84c56SMarkus Armbruster qemu_fprintf(f, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n", 166fcf5ef2aSThomas Huth env->pc, cpu_read_sr(env), env->pr, env->fpscr); 16790c84c56SMarkus Armbruster qemu_fprintf(f, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n", 168fcf5ef2aSThomas Huth env->spc, env->ssr, env->gbr, env->vbr); 16990c84c56SMarkus Armbruster qemu_fprintf(f, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n", 170fcf5ef2aSThomas Huth env->sgr, env->dbr, env->delayed_pc, env->fpul); 171fcf5ef2aSThomas Huth for (i = 0; i < 24; i += 4) { 172ad4052f1SIlya Leoshkevich qemu_fprintf(f, "r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n", 173fcf5ef2aSThomas Huth i, env->gregs[i], i + 1, env->gregs[i + 1], 174fcf5ef2aSThomas Huth i + 2, env->gregs[i + 2], i + 3, env->gregs[i + 3]); 175fcf5ef2aSThomas Huth } 176ab419fd8SRichard Henderson if (env->flags & TB_FLAG_DELAY_SLOT) { 177ad4052f1SIlya Leoshkevich qemu_fprintf(f, "in delay slot (delayed_pc=0x%08x)\n", 178fcf5ef2aSThomas Huth 
env->delayed_pc);
179ab419fd8SRichard Henderson } else if (env->flags & TB_FLAG_DELAY_SLOT_COND) {
180ad4052f1SIlya Leoshkevich qemu_fprintf(f, "in conditional delay slot (delayed_pc=0x%08x)\n",
181fcf5ef2aSThomas Huth env->delayed_pc);
182ab419fd8SRichard Henderson } else if (env->flags & TB_FLAG_DELAY_SLOT_RTE) {
18390c84c56SMarkus Armbruster qemu_fprintf(f, "in rte delay slot (delayed_pc=0x%08x)\n",
184be53081aSAurelien Jarno env->delayed_pc);
185fcf5ef2aSThomas Huth }
186fcf5ef2aSThomas Huth }
187fcf5ef2aSThomas Huth 
188fcf5ef2aSThomas Huth static void gen_read_sr(TCGv dst)
189fcf5ef2aSThomas Huth {
190fcf5ef2aSThomas Huth TCGv t0 = tcg_temp_new();
191fcf5ef2aSThomas Huth tcg_gen_shli_i32(t0, cpu_sr_q, SR_Q);
192fcf5ef2aSThomas Huth tcg_gen_or_i32(dst, cpu_sr, t0);
193fcf5ef2aSThomas Huth tcg_gen_shli_i32(t0, cpu_sr_m, SR_M);
194fcf5ef2aSThomas Huth tcg_gen_or_i32(dst, dst, t0);
195fcf5ef2aSThomas Huth tcg_gen_shli_i32(t0, cpu_sr_t, SR_T);
196fcf5ef2aSThomas Huth tcg_gen_or_i32(dst, dst, t0);
197fcf5ef2aSThomas Huth }
198fcf5ef2aSThomas Huth 
199fcf5ef2aSThomas Huth static void gen_write_sr(TCGv src)
200fcf5ef2aSThomas Huth {
201fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr, src,
202fcf5ef2aSThomas Huth ~((1u << SR_Q) | (1u << SR_M) | (1u << SR_T)));
203a380f9dbSAurelien Jarno tcg_gen_extract_i32(cpu_sr_q, src, SR_Q, 1);
204a380f9dbSAurelien Jarno tcg_gen_extract_i32(cpu_sr_m, src, SR_M, 1);
205a380f9dbSAurelien Jarno tcg_gen_extract_i32(cpu_sr_t, src, SR_T, 1);
206fcf5ef2aSThomas Huth }
207fcf5ef2aSThomas Huth 
208ac9707eaSAurelien Jarno static inline void gen_save_cpu_state(DisasContext *ctx, bool save_pc)
209ac9707eaSAurelien Jarno {
210ac9707eaSAurelien Jarno if (save_pc) {
2116f1c2af6SRichard Henderson tcg_gen_movi_i32(cpu_pc, ctx->base.pc_next);
212ac9707eaSAurelien Jarno }
213ac9707eaSAurelien Jarno if (ctx->delayed_pc != (uint32_t) -1) {
214ac9707eaSAurelien Jarno tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
215ac9707eaSAurelien Jarno }
216e1933d14SRichard Henderson if ((ctx->tbflags & TB_FLAG_ENVFLAGS_MASK) != ctx->envflags) {
217ac9707eaSAurelien Jarno tcg_gen_movi_i32(cpu_flags, ctx->envflags);
218ac9707eaSAurelien Jarno }
219ac9707eaSAurelien Jarno }
220ac9707eaSAurelien Jarno 
221ec2eb22eSRichard Henderson static inline bool use_exit_tb(DisasContext *ctx)
222ec2eb22eSRichard Henderson {
223ab419fd8SRichard Henderson return (ctx->tbflags & TB_FLAG_GUSA_EXCLUSIVE) != 0;
224ec2eb22eSRichard Henderson }
225ec2eb22eSRichard Henderson 
2263f1e2098SRichard Henderson static bool use_goto_tb(DisasContext *ctx, target_ulong dest)
227fcf5ef2aSThomas Huth {
2283f1e2098SRichard Henderson if (use_exit_tb(ctx)) {
2294bfa602bSRichard Henderson return false;
2304bfa602bSRichard Henderson }
2313f1e2098SRichard Henderson return translator_use_goto_tb(&ctx->base, dest);
232fcf5ef2aSThomas Huth }
233fcf5ef2aSThomas Huth 
234fcf5ef2aSThomas Huth static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
235fcf5ef2aSThomas Huth {
236fcf5ef2aSThomas Huth if (use_goto_tb(ctx, dest)) {
237fcf5ef2aSThomas Huth tcg_gen_goto_tb(n);
238fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_pc, dest);
23907ea28b4SRichard Henderson tcg_gen_exit_tb(ctx->base.tb, n);
240fcf5ef2aSThomas Huth } else {
241fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_pc, dest);
24252df5adcSRichard Henderson if (use_exit_tb(ctx)) {
24307ea28b4SRichard Henderson tcg_gen_exit_tb(NULL, 0);
244ec2eb22eSRichard Henderson } else {
2457f11636dSEmilio G. 
Cota tcg_gen_lookup_and_goto_ptr();
246ec2eb22eSRichard Henderson }
247fcf5ef2aSThomas Huth }
2486f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN;
249fcf5ef2aSThomas Huth }
250fcf5ef2aSThomas Huth 
251fcf5ef2aSThomas Huth static void gen_jump(DisasContext * ctx)
252fcf5ef2aSThomas Huth {
253ec2eb22eSRichard Henderson if (ctx->delayed_pc == -1) {
254fcf5ef2aSThomas Huth /* Target is not statically known; it comes necessarily from a
255fcf5ef2aSThomas Huth delayed jump, as immediate jumps are conditional jumps */
256fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_pc, cpu_delayed_pc);
257ac9707eaSAurelien Jarno tcg_gen_discard_i32(cpu_delayed_pc);
25852df5adcSRichard Henderson if (use_exit_tb(ctx)) {
25907ea28b4SRichard Henderson tcg_gen_exit_tb(NULL, 0);
260fcf5ef2aSThomas Huth } else {
2617f11636dSEmilio G. Cota tcg_gen_lookup_and_goto_ptr();
262ec2eb22eSRichard Henderson }
2636f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN;
264ec2eb22eSRichard Henderson } else {
265fcf5ef2aSThomas Huth gen_goto_tb(ctx, 0, ctx->delayed_pc);
266fcf5ef2aSThomas Huth }
267fcf5ef2aSThomas Huth }
268fcf5ef2aSThomas Huth 
269fcf5ef2aSThomas Huth /* Immediate conditional jump (bt or bf) */
2704bfa602bSRichard Henderson static void gen_conditional_jump(DisasContext *ctx, target_ulong dest,
2714bfa602bSRichard Henderson bool jump_if_true)
272fcf5ef2aSThomas Huth {
273fcf5ef2aSThomas Huth TCGLabel *l1 = gen_new_label();
2744bfa602bSRichard Henderson TCGCond cond_not_taken = jump_if_true ? TCG_COND_EQ : TCG_COND_NE;
2754bfa602bSRichard Henderson 
276ab419fd8SRichard Henderson if (ctx->tbflags & TB_FLAG_GUSA_EXCLUSIVE) {
2774bfa602bSRichard Henderson /* When in an exclusive region, we must continue to the end.
2784bfa602bSRichard Henderson Therefore, exit the region on a taken branch, but otherwise
2794bfa602bSRichard Henderson fall through to the next instruction. */
2804bfa602bSRichard Henderson tcg_gen_brcondi_i32(cond_not_taken, cpu_sr_t, 0, l1);
281ab419fd8SRichard Henderson tcg_gen_movi_i32(cpu_flags, ctx->envflags & ~TB_FLAG_GUSA_MASK);
2824bfa602bSRichard Henderson /* Note that this won't actually use a goto_tb opcode because we
2834bfa602bSRichard Henderson disallow it in use_goto_tb, but it handles exit + singlestep.
*/ 2844bfa602bSRichard Henderson gen_goto_tb(ctx, 0, dest); 285fcf5ef2aSThomas Huth gen_set_label(l1); 2865b38d026SLaurent Vivier ctx->base.is_jmp = DISAS_NEXT; 2874bfa602bSRichard Henderson return; 2884bfa602bSRichard Henderson } 2894bfa602bSRichard Henderson 2904bfa602bSRichard Henderson gen_save_cpu_state(ctx, false); 2914bfa602bSRichard Henderson tcg_gen_brcondi_i32(cond_not_taken, cpu_sr_t, 0, l1); 2924bfa602bSRichard Henderson gen_goto_tb(ctx, 0, dest); 2934bfa602bSRichard Henderson gen_set_label(l1); 2946f1c2af6SRichard Henderson gen_goto_tb(ctx, 1, ctx->base.pc_next + 2); 2956f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 296fcf5ef2aSThomas Huth } 297fcf5ef2aSThomas Huth 298fcf5ef2aSThomas Huth /* Delayed conditional jump (bt or bf) */ 299fcf5ef2aSThomas Huth static void gen_delayed_conditional_jump(DisasContext * ctx) 300fcf5ef2aSThomas Huth { 3014bfa602bSRichard Henderson TCGLabel *l1 = gen_new_label(); 3024bfa602bSRichard Henderson TCGv ds = tcg_temp_new(); 303fcf5ef2aSThomas Huth 30447b9f4d5SAurelien Jarno tcg_gen_mov_i32(ds, cpu_delayed_cond); 30547b9f4d5SAurelien Jarno tcg_gen_discard_i32(cpu_delayed_cond); 3064bfa602bSRichard Henderson 307ab419fd8SRichard Henderson if (ctx->tbflags & TB_FLAG_GUSA_EXCLUSIVE) { 3084bfa602bSRichard Henderson /* When in an exclusive region, we must continue to the end. 3094bfa602bSRichard Henderson Therefore, exit the region on a taken branch, but otherwise 3104bfa602bSRichard Henderson fall through to the next instruction. */ 3114bfa602bSRichard Henderson tcg_gen_brcondi_i32(TCG_COND_EQ, ds, 0, l1); 3124bfa602bSRichard Henderson 3134bfa602bSRichard Henderson /* Leave the gUSA region. */ 314ab419fd8SRichard Henderson tcg_gen_movi_i32(cpu_flags, ctx->envflags & ~TB_FLAG_GUSA_MASK); 3154bfa602bSRichard Henderson gen_jump(ctx); 3164bfa602bSRichard Henderson 3174bfa602bSRichard Henderson gen_set_label(l1); 3186f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NEXT; 3194bfa602bSRichard Henderson return; 3204bfa602bSRichard Henderson } 3214bfa602bSRichard Henderson 322fcf5ef2aSThomas Huth tcg_gen_brcondi_i32(TCG_COND_NE, ds, 0, l1); 3236f1c2af6SRichard Henderson gen_goto_tb(ctx, 1, ctx->base.pc_next + 2); 324fcf5ef2aSThomas Huth gen_set_label(l1); 325fcf5ef2aSThomas Huth gen_jump(ctx); 326fcf5ef2aSThomas Huth } 327fcf5ef2aSThomas Huth 328e5d8053eSRichard Henderson static inline void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg) 329fcf5ef2aSThomas Huth { 3301e0b21d8SRichard Henderson /* We have already signaled illegal instruction for odd Dr. */ 3311e0b21d8SRichard Henderson tcg_debug_assert((reg & 1) == 0); 3321e0b21d8SRichard Henderson reg ^= ctx->fbank; 333fcf5ef2aSThomas Huth tcg_gen_concat_i32_i64(t, cpu_fregs[reg + 1], cpu_fregs[reg]); 334fcf5ef2aSThomas Huth } 335fcf5ef2aSThomas Huth 336e5d8053eSRichard Henderson static inline void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg) 337fcf5ef2aSThomas Huth { 3381e0b21d8SRichard Henderson /* We have already signaled illegal instruction for odd Dr. 
*/
3391e0b21d8SRichard Henderson tcg_debug_assert((reg & 1) == 0);
3401e0b21d8SRichard Henderson reg ^= ctx->fbank;
34158d2a9aeSAurelien Jarno tcg_gen_extr_i64_i32(cpu_fregs[reg + 1], cpu_fregs[reg], t);
342fcf5ef2aSThomas Huth }
343fcf5ef2aSThomas Huth 
344fcf5ef2aSThomas Huth #define B3_0 (ctx->opcode & 0xf)
345fcf5ef2aSThomas Huth #define B6_4 ((ctx->opcode >> 4) & 0x7)
346fcf5ef2aSThomas Huth #define B7_4 ((ctx->opcode >> 4) & 0xf)
347fcf5ef2aSThomas Huth #define B7_0 (ctx->opcode & 0xff)
348fcf5ef2aSThomas Huth #define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff))
349fcf5ef2aSThomas Huth #define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \
350fcf5ef2aSThomas Huth (ctx->opcode & 0xfff))
351fcf5ef2aSThomas Huth #define B11_8 ((ctx->opcode >> 8) & 0xf)
352fcf5ef2aSThomas Huth #define B15_12 ((ctx->opcode >> 12) & 0xf)
353fcf5ef2aSThomas Huth 
3543a3bb8d2SRichard Henderson #define REG(x) cpu_gregs[(x) ^ ctx->gbank]
3553a3bb8d2SRichard Henderson #define ALTREG(x) cpu_gregs[(x) ^ ctx->gbank ^ 0x10]
3565c13bad9SRichard Henderson #define FREG(x) cpu_fregs[(x) ^ ctx->fbank]
357fcf5ef2aSThomas Huth 
358fcf5ef2aSThomas Huth #define XHACK(x) ((((x) & 1 ) << 4) | ((x) & 0xe))
359fcf5ef2aSThomas Huth 
360fcf5ef2aSThomas Huth #define CHECK_NOT_DELAY_SLOT \
361ab419fd8SRichard Henderson if (ctx->envflags & TB_FLAG_DELAY_SLOT_MASK) { \
362dec16c6eSRichard Henderson goto do_illegal_slot; \
363fcf5ef2aSThomas Huth }
364fcf5ef2aSThomas Huth 
365fcf5ef2aSThomas Huth #define CHECK_PRIVILEGED \
366fcf5ef2aSThomas Huth if (IS_USER(ctx)) { \
3676b98213dSRichard Henderson goto do_illegal; \
368fcf5ef2aSThomas Huth }
369fcf5ef2aSThomas Huth 
370fcf5ef2aSThomas Huth #define CHECK_FPU_ENABLED \
371a6215749SAurelien Jarno if (ctx->tbflags & (1u << SR_FD)) { \
372dec4f042SRichard Henderson goto do_fpu_disabled; \
373fcf5ef2aSThomas Huth }
374fcf5ef2aSThomas Huth 
3757e9f7ca8SRichard Henderson #define CHECK_FPSCR_PR_0 \
3767e9f7ca8SRichard Henderson if (ctx->tbflags & FPSCR_PR) { \
3777e9f7ca8SRichard Henderson goto do_illegal; \
3787e9f7ca8SRichard Henderson }
3797e9f7ca8SRichard Henderson 
3807e9f7ca8SRichard Henderson #define CHECK_FPSCR_PR_1 \
3817e9f7ca8SRichard Henderson if (!(ctx->tbflags & FPSCR_PR)) { \
3827e9f7ca8SRichard Henderson goto do_illegal; \
3837e9f7ca8SRichard Henderson }
3847e9f7ca8SRichard Henderson 
385ccae24d4SRichard Henderson #define CHECK_SH4A \
386ccae24d4SRichard Henderson if (!(ctx->features & SH_FEATURE_SH4A)) { \
387ccae24d4SRichard Henderson goto do_illegal; \
388ccae24d4SRichard Henderson }
389ccae24d4SRichard Henderson 
390fcf5ef2aSThomas Huth static void _decode_opc(DisasContext * ctx)
391fcf5ef2aSThomas Huth {
392fcf5ef2aSThomas Huth /* This code tries to make movcal emulation sufficiently
393fcf5ef2aSThomas Huth accurate for Linux purposes. This instruction writes
394fcf5ef2aSThomas Huth memory, and prior to that, always allocates a cache line.
395fcf5ef2aSThomas Huth It is used in two contexts:
396fcf5ef2aSThomas Huth - in memcpy, where data is copied in blocks, the first write
397fcf5ef2aSThomas Huth to a block uses movca.l for performance.
398fcf5ef2aSThomas Huth - in arch/sh/mm/cache-sh4.c, the movca.l + ocbi combination is used
399fcf5ef2aSThomas Huth to flush the cache. Here, the data written by movca.l is never
400fcf5ef2aSThomas Huth written to memory, and the data written is just bogus.
401fcf5ef2aSThomas Huth 
402fcf5ef2aSThomas Huth To simulate this, when simulating movca.l we store the value to memory,
403fcf5ef2aSThomas Huth but we also remember the previous content. If we see ocbi, we check
404fcf5ef2aSThomas Huth if movca.l for that address was done previously. If so, the write should
405fcf5ef2aSThomas Huth not have hit the memory, so we restore the previous content.
406fcf5ef2aSThomas Huth When we see an instruction that is neither movca.l
407fcf5ef2aSThomas Huth nor ocbi, the previous content is discarded.
408fcf5ef2aSThomas Huth 
409fcf5ef2aSThomas Huth To optimize, we only try to flush stores when we're at the start of
410fcf5ef2aSThomas Huth TB, or if we already saw movca.l in this TB and did not flush stores
411fcf5ef2aSThomas Huth yet. */
412fcf5ef2aSThomas Huth if (ctx->has_movcal)
413fcf5ef2aSThomas Huth {
414fcf5ef2aSThomas Huth int opcode = ctx->opcode & 0xf0ff;
415fcf5ef2aSThomas Huth if (opcode != 0x0093 /* ocbi */
416fcf5ef2aSThomas Huth && opcode != 0x00c3 /* movca.l */)
417fcf5ef2aSThomas Huth {
418ad75a51eSRichard Henderson gen_helper_discard_movcal_backup(tcg_env);
419fcf5ef2aSThomas Huth ctx->has_movcal = 0;
420fcf5ef2aSThomas Huth }
421fcf5ef2aSThomas Huth }
422fcf5ef2aSThomas Huth 
423fcf5ef2aSThomas Huth #if 0
424fcf5ef2aSThomas Huth fprintf(stderr, "Translating opcode 0x%04x\n", ctx->opcode);
425fcf5ef2aSThomas Huth #endif
426fcf5ef2aSThomas Huth 
427fcf5ef2aSThomas Huth switch (ctx->opcode) {
428fcf5ef2aSThomas Huth case 0x0019: /* div0u */
429fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_sr_m, 0);
430fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_sr_q, 0);
431fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_sr_t, 0);
432fcf5ef2aSThomas Huth return;
433fcf5ef2aSThomas Huth case 0x000b: /* rts */
434fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT
435fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_delayed_pc, cpu_pr);
436ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT;
437fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1;
438fcf5ef2aSThomas Huth return;
439fcf5ef2aSThomas Huth case 0x0028: /* clrmac */
440fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_mach, 0);
441fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_macl, 0);
442fcf5ef2aSThomas Huth return;
443fcf5ef2aSThomas Huth case 0x0048: /* clrs */
444fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr, cpu_sr, ~(1u << SR_S));
445fcf5ef2aSThomas Huth return;
446fcf5ef2aSThomas Huth case 0x0008: /* clrt */
447fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_sr_t, 0);
448fcf5ef2aSThomas Huth return;
449fcf5ef2aSThomas Huth case 0x0038: /* ldtlb */
450fcf5ef2aSThomas Huth CHECK_PRIVILEGED
451ad75a51eSRichard Henderson gen_helper_ldtlb(tcg_env);
452fcf5ef2aSThomas Huth return;
453fcf5ef2aSThomas Huth case 0x002b: /* rte */
454fcf5ef2aSThomas Huth CHECK_PRIVILEGED
455fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT
456fcf5ef2aSThomas Huth gen_write_sr(cpu_ssr);
457fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_delayed_pc, cpu_spc);
458ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT_RTE;
459fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1;
4606f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP;
461fcf5ef2aSThomas Huth return;
462fcf5ef2aSThomas Huth case 0x0058: /* sets */
463fcf5ef2aSThomas Huth tcg_gen_ori_i32(cpu_sr, cpu_sr, (1u << SR_S));
464fcf5ef2aSThomas Huth return;
465fcf5ef2aSThomas Huth case 0x0018: /* sett */
466fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_sr_t, 1);
467fcf5ef2aSThomas Huth return;
468fcf5ef2aSThomas Huth case 0xfbfd: /* frchg */
46961dedf2aSRichard Henderson CHECK_FPSCR_PR_0
470fcf5ef2aSThomas Huth
tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_FR); 4716f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 472fcf5ef2aSThomas Huth return; 473fcf5ef2aSThomas Huth case 0xf3fd: /* fschg */ 47461dedf2aSRichard Henderson CHECK_FPSCR_PR_0 475fcf5ef2aSThomas Huth tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_SZ); 4766f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 477fcf5ef2aSThomas Huth return; 478907759f9SRichard Henderson case 0xf7fd: /* fpchg */ 479907759f9SRichard Henderson CHECK_SH4A 480907759f9SRichard Henderson tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_PR); 4816f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 482907759f9SRichard Henderson return; 483fcf5ef2aSThomas Huth case 0x0009: /* nop */ 484fcf5ef2aSThomas Huth return; 485fcf5ef2aSThomas Huth case 0x001b: /* sleep */ 486fcf5ef2aSThomas Huth CHECK_PRIVILEGED 4876f1c2af6SRichard Henderson tcg_gen_movi_i32(cpu_pc, ctx->base.pc_next + 2); 488ad75a51eSRichard Henderson gen_helper_sleep(tcg_env); 489fcf5ef2aSThomas Huth return; 490fcf5ef2aSThomas Huth } 491fcf5ef2aSThomas Huth 492fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf000) { 493fcf5ef2aSThomas Huth case 0x1000: /* mov.l Rm,@(disp,Rn) */ 494fcf5ef2aSThomas Huth { 495fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 496fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B11_8), B3_0 * 4); 4974da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, 4984da06fb3SRichard Henderson MO_TEUL | UNALIGN(ctx)); 499fcf5ef2aSThomas Huth } 500fcf5ef2aSThomas Huth return; 501fcf5ef2aSThomas Huth case 0x5000: /* mov.l @(disp,Rm),Rn */ 502fcf5ef2aSThomas Huth { 503fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 504fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 4); 5054da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, 5064da06fb3SRichard Henderson MO_TESL | UNALIGN(ctx)); 507fcf5ef2aSThomas Huth } 508fcf5ef2aSThomas Huth return; 509fcf5ef2aSThomas Huth case 0xe000: /* mov #imm,Rn */ 5104bfa602bSRichard Henderson #ifdef CONFIG_USER_ONLY 511ab419fd8SRichard Henderson /* 512ab419fd8SRichard Henderson * Detect the start of a gUSA region (mov #-n, r15). 513ab419fd8SRichard Henderson * If so, update envflags and end the TB. This will allow us 514ab419fd8SRichard Henderson * to see the end of the region (stored in R0) in the next TB. 
515ab419fd8SRichard Henderson */ 5166f1c2af6SRichard Henderson if (B11_8 == 15 && B7_0s < 0 && 5176f1c2af6SRichard Henderson (tb_cflags(ctx->base.tb) & CF_PARALLEL)) { 518ab419fd8SRichard Henderson ctx->envflags = 519ab419fd8SRichard Henderson deposit32(ctx->envflags, TB_FLAG_GUSA_SHIFT, 8, B7_0s); 5206f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 5214bfa602bSRichard Henderson } 5224bfa602bSRichard Henderson #endif 523fcf5ef2aSThomas Huth tcg_gen_movi_i32(REG(B11_8), B7_0s); 524fcf5ef2aSThomas Huth return; 525fcf5ef2aSThomas Huth case 0x9000: /* mov.w @(disp,PC),Rn */ 526b754cb2dSZack Buhman CHECK_NOT_DELAY_SLOT 527fcf5ef2aSThomas Huth { 528950b91beSRichard Henderson TCGv addr = tcg_constant_i32(ctx->base.pc_next + 4 + B7_0 * 2); 52903a0d87eSRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, 53003a0d87eSRichard Henderson MO_TESW | MO_ALIGN); 531fcf5ef2aSThomas Huth } 532fcf5ef2aSThomas Huth return; 533fcf5ef2aSThomas Huth case 0xd000: /* mov.l @(disp,PC),Rn */ 534b754cb2dSZack Buhman CHECK_NOT_DELAY_SLOT 535fcf5ef2aSThomas Huth { 536950b91beSRichard Henderson TCGv addr = tcg_constant_i32((ctx->base.pc_next + 4 + B7_0 * 4) & ~3); 53703a0d87eSRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, 53803a0d87eSRichard Henderson MO_TESL | MO_ALIGN); 539fcf5ef2aSThomas Huth } 540fcf5ef2aSThomas Huth return; 541fcf5ef2aSThomas Huth case 0x7000: /* add #imm,Rn */ 542fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), B7_0s); 543fcf5ef2aSThomas Huth return; 544fcf5ef2aSThomas Huth case 0xa000: /* bra disp */ 545fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 5466f1c2af6SRichard Henderson ctx->delayed_pc = ctx->base.pc_next + 4 + B11_0s * 2; 547ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT; 548fcf5ef2aSThomas Huth return; 549fcf5ef2aSThomas Huth case 0xb000: /* bsr disp */ 550fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 5516f1c2af6SRichard Henderson tcg_gen_movi_i32(cpu_pr, ctx->base.pc_next + 4); 5526f1c2af6SRichard Henderson ctx->delayed_pc = ctx->base.pc_next + 4 + B11_0s * 2; 553ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT; 554fcf5ef2aSThomas Huth return; 555fcf5ef2aSThomas Huth } 556fcf5ef2aSThomas Huth 557fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf00f) { 558fcf5ef2aSThomas Huth case 0x6003: /* mov Rm,Rn */ 559fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), REG(B7_4)); 560fcf5ef2aSThomas Huth return; 561fcf5ef2aSThomas Huth case 0x2000: /* mov.b Rm,@Rn */ 562fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, MO_UB); 563fcf5ef2aSThomas Huth return; 564fcf5ef2aSThomas Huth case 0x2001: /* mov.w Rm,@Rn */ 5654da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, 5664da06fb3SRichard Henderson MO_TEUW | UNALIGN(ctx)); 567fcf5ef2aSThomas Huth return; 568fcf5ef2aSThomas Huth case 0x2002: /* mov.l Rm,@Rn */ 5694da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, 5704da06fb3SRichard Henderson MO_TEUL | UNALIGN(ctx)); 571fcf5ef2aSThomas Huth return; 572fcf5ef2aSThomas Huth case 0x6000: /* mov.b @Rm,Rn */ 573fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_SB); 574fcf5ef2aSThomas Huth return; 575fcf5ef2aSThomas Huth case 0x6001: /* mov.w @Rm,Rn */ 5764da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, 5774da06fb3SRichard Henderson MO_TESW | UNALIGN(ctx)); 578fcf5ef2aSThomas Huth return; 579fcf5ef2aSThomas Huth case 0x6002: /* mov.l @Rm,Rn */ 5804da06fb3SRichard Henderson 
tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, 5814da06fb3SRichard Henderson MO_TESL | UNALIGN(ctx)); 582fcf5ef2aSThomas Huth return; 583fcf5ef2aSThomas Huth case 0x2004: /* mov.b Rm,@-Rn */ 584fcf5ef2aSThomas Huth { 585fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 586fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 1); 587fcf5ef2aSThomas Huth /* might cause re-execution */ 588fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_UB); 589fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); /* modify register status */ 590fcf5ef2aSThomas Huth } 591fcf5ef2aSThomas Huth return; 592fcf5ef2aSThomas Huth case 0x2005: /* mov.w Rm,@-Rn */ 593fcf5ef2aSThomas Huth { 594fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 595fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 2); 5964da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, 5974da06fb3SRichard Henderson MO_TEUW | UNALIGN(ctx)); 598fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 599fcf5ef2aSThomas Huth } 600fcf5ef2aSThomas Huth return; 601fcf5ef2aSThomas Huth case 0x2006: /* mov.l Rm,@-Rn */ 602fcf5ef2aSThomas Huth { 603fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 604fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 6054da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, 6064da06fb3SRichard Henderson MO_TEUL | UNALIGN(ctx)); 607fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 608fcf5ef2aSThomas Huth } 609fcf5ef2aSThomas Huth return; 610fcf5ef2aSThomas Huth case 0x6004: /* mov.b @Rm+,Rn */ 611fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_SB); 612fcf5ef2aSThomas Huth if ( B11_8 != B7_4 ) 613fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 1); 614fcf5ef2aSThomas Huth return; 615fcf5ef2aSThomas Huth case 0x6005: /* mov.w @Rm+,Rn */ 6164da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, 6174da06fb3SRichard Henderson MO_TESW | UNALIGN(ctx)); 618fcf5ef2aSThomas Huth if ( B11_8 != B7_4 ) 619fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2); 620fcf5ef2aSThomas Huth return; 621fcf5ef2aSThomas Huth case 0x6006: /* mov.l @Rm+,Rn */ 6224da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, 6234da06fb3SRichard Henderson MO_TESL | UNALIGN(ctx)); 624fcf5ef2aSThomas Huth if ( B11_8 != B7_4 ) 625fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4); 626fcf5ef2aSThomas Huth return; 627fcf5ef2aSThomas Huth case 0x0004: /* mov.b Rm,@(R0,Rn) */ 628fcf5ef2aSThomas Huth { 629fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 630fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B11_8), REG(0)); 631fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_UB); 632fcf5ef2aSThomas Huth } 633fcf5ef2aSThomas Huth return; 634fcf5ef2aSThomas Huth case 0x0005: /* mov.w Rm,@(R0,Rn) */ 635fcf5ef2aSThomas Huth { 636fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 637fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B11_8), REG(0)); 6384da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, 6394da06fb3SRichard Henderson MO_TEUW | UNALIGN(ctx)); 640fcf5ef2aSThomas Huth } 641fcf5ef2aSThomas Huth return; 642fcf5ef2aSThomas Huth case 0x0006: /* mov.l Rm,@(R0,Rn) */ 643fcf5ef2aSThomas Huth { 644fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 645fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B11_8), REG(0)); 6464da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, 6474da06fb3SRichard 
Henderson MO_TEUL | UNALIGN(ctx)); 648fcf5ef2aSThomas Huth } 649fcf5ef2aSThomas Huth return; 650fcf5ef2aSThomas Huth case 0x000c: /* mov.b @(R0,Rm),Rn */ 651fcf5ef2aSThomas Huth { 652fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 653fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B7_4), REG(0)); 654fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_SB); 655fcf5ef2aSThomas Huth } 656fcf5ef2aSThomas Huth return; 657fcf5ef2aSThomas Huth case 0x000d: /* mov.w @(R0,Rm),Rn */ 658fcf5ef2aSThomas Huth { 659fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 660fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B7_4), REG(0)); 6614da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, 6624da06fb3SRichard Henderson MO_TESW | UNALIGN(ctx)); 663fcf5ef2aSThomas Huth } 664fcf5ef2aSThomas Huth return; 665fcf5ef2aSThomas Huth case 0x000e: /* mov.l @(R0,Rm),Rn */ 666fcf5ef2aSThomas Huth { 667fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 668fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B7_4), REG(0)); 6694da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, 6704da06fb3SRichard Henderson MO_TESL | UNALIGN(ctx)); 671fcf5ef2aSThomas Huth } 672fcf5ef2aSThomas Huth return; 673fcf5ef2aSThomas Huth case 0x6008: /* swap.b Rm,Rn */ 674fcf5ef2aSThomas Huth { 6753c254ab8SLadi Prosek TCGv low = tcg_temp_new(); 676b983a0e1SRichard Henderson tcg_gen_bswap16_i32(low, REG(B7_4), 0); 677fcf5ef2aSThomas Huth tcg_gen_deposit_i32(REG(B11_8), REG(B7_4), low, 0, 16); 678fcf5ef2aSThomas Huth } 679fcf5ef2aSThomas Huth return; 680fcf5ef2aSThomas Huth case 0x6009: /* swap.w Rm,Rn */ 681fcf5ef2aSThomas Huth tcg_gen_rotli_i32(REG(B11_8), REG(B7_4), 16); 682fcf5ef2aSThomas Huth return; 683fcf5ef2aSThomas Huth case 0x200d: /* xtrct Rm,Rn */ 684fcf5ef2aSThomas Huth { 685fcf5ef2aSThomas Huth TCGv high, low; 686fcf5ef2aSThomas Huth high = tcg_temp_new(); 687fcf5ef2aSThomas Huth tcg_gen_shli_i32(high, REG(B7_4), 16); 688fcf5ef2aSThomas Huth low = tcg_temp_new(); 689fcf5ef2aSThomas Huth tcg_gen_shri_i32(low, REG(B11_8), 16); 690fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), high, low); 691fcf5ef2aSThomas Huth } 692fcf5ef2aSThomas Huth return; 693fcf5ef2aSThomas Huth case 0x300c: /* add Rm,Rn */ 694fcf5ef2aSThomas Huth tcg_gen_add_i32(REG(B11_8), REG(B11_8), REG(B7_4)); 695fcf5ef2aSThomas Huth return; 696fcf5ef2aSThomas Huth case 0x300e: /* addc Rm,Rn */ 697fcf5ef2aSThomas Huth { 698fcf5ef2aSThomas Huth TCGv t0, t1; 699950b91beSRichard Henderson t0 = tcg_constant_tl(0); 700fcf5ef2aSThomas Huth t1 = tcg_temp_new(); 701fcf5ef2aSThomas Huth tcg_gen_add2_i32(t1, cpu_sr_t, cpu_sr_t, t0, REG(B7_4), t0); 702fcf5ef2aSThomas Huth tcg_gen_add2_i32(REG(B11_8), cpu_sr_t, 703fcf5ef2aSThomas Huth REG(B11_8), t0, t1, cpu_sr_t); 704fcf5ef2aSThomas Huth } 705fcf5ef2aSThomas Huth return; 706fcf5ef2aSThomas Huth case 0x300f: /* addv Rm,Rn */ 707fcf5ef2aSThomas Huth { 70840ed073fSPhilippe Mathieu-Daudé TCGv Rn = REG(B11_8); 70940ed073fSPhilippe Mathieu-Daudé TCGv Rm = REG(B7_4); 71040ed073fSPhilippe Mathieu-Daudé TCGv result, t1, t2; 71140ed073fSPhilippe Mathieu-Daudé 71240ed073fSPhilippe Mathieu-Daudé result = tcg_temp_new(); 713fcf5ef2aSThomas Huth t1 = tcg_temp_new(); 714fcf5ef2aSThomas Huth t2 = tcg_temp_new(); 71540ed073fSPhilippe Mathieu-Daudé tcg_gen_add_i32(result, Rm, Rn); 71640ed073fSPhilippe Mathieu-Daudé /* T = ((Rn ^ Rm) & (Result ^ Rn)) >> 31 */ 71740ed073fSPhilippe Mathieu-Daudé tcg_gen_xor_i32(t1, result, Rn); 71840ed073fSPhilippe Mathieu-Daudé tcg_gen_xor_i32(t2, Rm, Rn); 
719fcf5ef2aSThomas Huth tcg_gen_andc_i32(cpu_sr_t, t1, t2); 720fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_t, cpu_sr_t, 31); 72140ed073fSPhilippe Mathieu-Daudé tcg_gen_mov_i32(Rn, result); 722fcf5ef2aSThomas Huth } 723fcf5ef2aSThomas Huth return; 724fcf5ef2aSThomas Huth case 0x2009: /* and Rm,Rn */ 725fcf5ef2aSThomas Huth tcg_gen_and_i32(REG(B11_8), REG(B11_8), REG(B7_4)); 726fcf5ef2aSThomas Huth return; 727fcf5ef2aSThomas Huth case 0x3000: /* cmp/eq Rm,Rn */ 728fcf5ef2aSThomas Huth tcg_gen_setcond_i32(TCG_COND_EQ, cpu_sr_t, REG(B11_8), REG(B7_4)); 729fcf5ef2aSThomas Huth return; 730fcf5ef2aSThomas Huth case 0x3003: /* cmp/ge Rm,Rn */ 731fcf5ef2aSThomas Huth tcg_gen_setcond_i32(TCG_COND_GE, cpu_sr_t, REG(B11_8), REG(B7_4)); 732fcf5ef2aSThomas Huth return; 733fcf5ef2aSThomas Huth case 0x3007: /* cmp/gt Rm,Rn */ 734fcf5ef2aSThomas Huth tcg_gen_setcond_i32(TCG_COND_GT, cpu_sr_t, REG(B11_8), REG(B7_4)); 735fcf5ef2aSThomas Huth return; 736fcf5ef2aSThomas Huth case 0x3006: /* cmp/hi Rm,Rn */ 737fcf5ef2aSThomas Huth tcg_gen_setcond_i32(TCG_COND_GTU, cpu_sr_t, REG(B11_8), REG(B7_4)); 738fcf5ef2aSThomas Huth return; 739fcf5ef2aSThomas Huth case 0x3002: /* cmp/hs Rm,Rn */ 740fcf5ef2aSThomas Huth tcg_gen_setcond_i32(TCG_COND_GEU, cpu_sr_t, REG(B11_8), REG(B7_4)); 741fcf5ef2aSThomas Huth return; 742fcf5ef2aSThomas Huth case 0x200c: /* cmp/str Rm,Rn */ 743fcf5ef2aSThomas Huth { 744fcf5ef2aSThomas Huth TCGv cmp1 = tcg_temp_new(); 745fcf5ef2aSThomas Huth TCGv cmp2 = tcg_temp_new(); 746fcf5ef2aSThomas Huth tcg_gen_xor_i32(cmp2, REG(B7_4), REG(B11_8)); 747fcf5ef2aSThomas Huth tcg_gen_subi_i32(cmp1, cmp2, 0x01010101); 748fcf5ef2aSThomas Huth tcg_gen_andc_i32(cmp1, cmp1, cmp2); 749fcf5ef2aSThomas Huth tcg_gen_andi_i32(cmp1, cmp1, 0x80808080); 750fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_NE, cpu_sr_t, cmp1, 0); 751fcf5ef2aSThomas Huth } 752fcf5ef2aSThomas Huth return; 753fcf5ef2aSThomas Huth case 0x2007: /* div0s Rm,Rn */ 754fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_q, REG(B11_8), 31); /* SR_Q */ 755fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_m, REG(B7_4), 31); /* SR_M */ 756fcf5ef2aSThomas Huth tcg_gen_xor_i32(cpu_sr_t, cpu_sr_q, cpu_sr_m); /* SR_T */ 757fcf5ef2aSThomas Huth return; 758fcf5ef2aSThomas Huth case 0x3004: /* div1 Rm,Rn */ 759fcf5ef2aSThomas Huth { 760fcf5ef2aSThomas Huth TCGv t0 = tcg_temp_new(); 761fcf5ef2aSThomas Huth TCGv t1 = tcg_temp_new(); 762fcf5ef2aSThomas Huth TCGv t2 = tcg_temp_new(); 763950b91beSRichard Henderson TCGv zero = tcg_constant_i32(0); 764fcf5ef2aSThomas Huth 765fcf5ef2aSThomas Huth /* shift left arg1, saving the bit being pushed out and inserting 766fcf5ef2aSThomas Huth T on the right */ 767fcf5ef2aSThomas Huth tcg_gen_shri_i32(t0, REG(B11_8), 31); 768fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1); 769fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), REG(B11_8), cpu_sr_t); 770fcf5ef2aSThomas Huth 771fcf5ef2aSThomas Huth /* Add or subtract arg0 from arg1 depending if Q == M. To avoid 772fcf5ef2aSThomas Huth using 64-bit temps, we compute arg0's high part from q ^ m, so 773fcf5ef2aSThomas Huth that it is 0x00000000 when adding the value or 0xffffffff when 774fcf5ef2aSThomas Huth subtracting it. 
*/ 775fcf5ef2aSThomas Huth tcg_gen_xor_i32(t1, cpu_sr_q, cpu_sr_m); 776fcf5ef2aSThomas Huth tcg_gen_subi_i32(t1, t1, 1); 777fcf5ef2aSThomas Huth tcg_gen_neg_i32(t2, REG(B7_4)); 778fcf5ef2aSThomas Huth tcg_gen_movcond_i32(TCG_COND_EQ, t2, t1, zero, REG(B7_4), t2); 779fcf5ef2aSThomas Huth tcg_gen_add2_i32(REG(B11_8), t1, REG(B11_8), zero, t2, t1); 780fcf5ef2aSThomas Huth 781fcf5ef2aSThomas Huth /* compute T and Q depending on carry */ 782fcf5ef2aSThomas Huth tcg_gen_andi_i32(t1, t1, 1); 783fcf5ef2aSThomas Huth tcg_gen_xor_i32(t1, t1, t0); 784fcf5ef2aSThomas Huth tcg_gen_xori_i32(cpu_sr_t, t1, 1); 785fcf5ef2aSThomas Huth tcg_gen_xor_i32(cpu_sr_q, cpu_sr_m, t1); 786fcf5ef2aSThomas Huth } 787fcf5ef2aSThomas Huth return; 788fcf5ef2aSThomas Huth case 0x300d: /* dmuls.l Rm,Rn */ 789fcf5ef2aSThomas Huth tcg_gen_muls2_i32(cpu_macl, cpu_mach, REG(B7_4), REG(B11_8)); 790fcf5ef2aSThomas Huth return; 791fcf5ef2aSThomas Huth case 0x3005: /* dmulu.l Rm,Rn */ 792fcf5ef2aSThomas Huth tcg_gen_mulu2_i32(cpu_macl, cpu_mach, REG(B7_4), REG(B11_8)); 793fcf5ef2aSThomas Huth return; 794fcf5ef2aSThomas Huth case 0x600e: /* exts.b Rm,Rn */ 795fcf5ef2aSThomas Huth tcg_gen_ext8s_i32(REG(B11_8), REG(B7_4)); 796fcf5ef2aSThomas Huth return; 797fcf5ef2aSThomas Huth case 0x600f: /* exts.w Rm,Rn */ 798fcf5ef2aSThomas Huth tcg_gen_ext16s_i32(REG(B11_8), REG(B7_4)); 799fcf5ef2aSThomas Huth return; 800fcf5ef2aSThomas Huth case 0x600c: /* extu.b Rm,Rn */ 801fcf5ef2aSThomas Huth tcg_gen_ext8u_i32(REG(B11_8), REG(B7_4)); 802fcf5ef2aSThomas Huth return; 803fcf5ef2aSThomas Huth case 0x600d: /* extu.w Rm,Rn */ 804fcf5ef2aSThomas Huth tcg_gen_ext16u_i32(REG(B11_8), REG(B7_4)); 805fcf5ef2aSThomas Huth return; 806fcf5ef2aSThomas Huth case 0x000f: /* mac.l @Rm+,@Rn+ */ 807fcf5ef2aSThomas Huth { 808fcf5ef2aSThomas Huth TCGv arg0, arg1; 809fcf5ef2aSThomas Huth arg0 = tcg_temp_new(); 81003a0d87eSRichard Henderson tcg_gen_qemu_ld_i32(arg0, REG(B7_4), ctx->memidx, 81103a0d87eSRichard Henderson MO_TESL | MO_ALIGN); 812fcf5ef2aSThomas Huth arg1 = tcg_temp_new(); 81303a0d87eSRichard Henderson tcg_gen_qemu_ld_i32(arg1, REG(B11_8), ctx->memidx, 81403a0d87eSRichard Henderson MO_TESL | MO_ALIGN); 815ad75a51eSRichard Henderson gen_helper_macl(tcg_env, arg0, arg1); 816fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4); 817fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 818fcf5ef2aSThomas Huth } 819fcf5ef2aSThomas Huth return; 820fcf5ef2aSThomas Huth case 0x400f: /* mac.w @Rm+,@Rn+ */ 821fcf5ef2aSThomas Huth { 822fcf5ef2aSThomas Huth TCGv arg0, arg1; 823fcf5ef2aSThomas Huth arg0 = tcg_temp_new(); 82403a0d87eSRichard Henderson tcg_gen_qemu_ld_i32(arg0, REG(B7_4), ctx->memidx, 825b0f2f297SZack Buhman MO_TESW | MO_ALIGN); 826fcf5ef2aSThomas Huth arg1 = tcg_temp_new(); 82703a0d87eSRichard Henderson tcg_gen_qemu_ld_i32(arg1, REG(B11_8), ctx->memidx, 828b0f2f297SZack Buhman MO_TESW | MO_ALIGN); 829ad75a51eSRichard Henderson gen_helper_macw(tcg_env, arg0, arg1); 830fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 2); 831fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2); 832fcf5ef2aSThomas Huth } 833fcf5ef2aSThomas Huth return; 834fcf5ef2aSThomas Huth case 0x0007: /* mul.l Rm,Rn */ 835fcf5ef2aSThomas Huth tcg_gen_mul_i32(cpu_macl, REG(B7_4), REG(B11_8)); 836fcf5ef2aSThomas Huth return; 837fcf5ef2aSThomas Huth case 0x200f: /* muls.w Rm,Rn */ 838fcf5ef2aSThomas Huth { 839fcf5ef2aSThomas Huth TCGv arg0, arg1; 840fcf5ef2aSThomas Huth arg0 = tcg_temp_new(); 841fcf5ef2aSThomas Huth tcg_gen_ext16s_i32(arg0, 
REG(B7_4)); 842fcf5ef2aSThomas Huth arg1 = tcg_temp_new(); 843fcf5ef2aSThomas Huth tcg_gen_ext16s_i32(arg1, REG(B11_8)); 844fcf5ef2aSThomas Huth tcg_gen_mul_i32(cpu_macl, arg0, arg1); 845fcf5ef2aSThomas Huth } 846fcf5ef2aSThomas Huth return; 847fcf5ef2aSThomas Huth case 0x200e: /* mulu.w Rm,Rn */ 848fcf5ef2aSThomas Huth { 849fcf5ef2aSThomas Huth TCGv arg0, arg1; 850fcf5ef2aSThomas Huth arg0 = tcg_temp_new(); 851fcf5ef2aSThomas Huth tcg_gen_ext16u_i32(arg0, REG(B7_4)); 852fcf5ef2aSThomas Huth arg1 = tcg_temp_new(); 853fcf5ef2aSThomas Huth tcg_gen_ext16u_i32(arg1, REG(B11_8)); 854fcf5ef2aSThomas Huth tcg_gen_mul_i32(cpu_macl, arg0, arg1); 855fcf5ef2aSThomas Huth } 856fcf5ef2aSThomas Huth return; 857fcf5ef2aSThomas Huth case 0x600b: /* neg Rm,Rn */ 858fcf5ef2aSThomas Huth tcg_gen_neg_i32(REG(B11_8), REG(B7_4)); 859fcf5ef2aSThomas Huth return; 860fcf5ef2aSThomas Huth case 0x600a: /* negc Rm,Rn */ 861fcf5ef2aSThomas Huth { 862950b91beSRichard Henderson TCGv t0 = tcg_constant_i32(0); 863fcf5ef2aSThomas Huth tcg_gen_add2_i32(REG(B11_8), cpu_sr_t, 864fcf5ef2aSThomas Huth REG(B7_4), t0, cpu_sr_t, t0); 865fcf5ef2aSThomas Huth tcg_gen_sub2_i32(REG(B11_8), cpu_sr_t, 866fcf5ef2aSThomas Huth t0, t0, REG(B11_8), cpu_sr_t); 867fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, cpu_sr_t, 1); 868fcf5ef2aSThomas Huth } 869fcf5ef2aSThomas Huth return; 870fcf5ef2aSThomas Huth case 0x6007: /* not Rm,Rn */ 871fcf5ef2aSThomas Huth tcg_gen_not_i32(REG(B11_8), REG(B7_4)); 872fcf5ef2aSThomas Huth return; 873fcf5ef2aSThomas Huth case 0x200b: /* or Rm,Rn */ 874fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), REG(B11_8), REG(B7_4)); 875fcf5ef2aSThomas Huth return; 876fcf5ef2aSThomas Huth case 0x400c: /* shad Rm,Rn */ 877fcf5ef2aSThomas Huth { 878fcf5ef2aSThomas Huth TCGv t0 = tcg_temp_new(); 879fcf5ef2aSThomas Huth TCGv t1 = tcg_temp_new(); 880fcf5ef2aSThomas Huth TCGv t2 = tcg_temp_new(); 881fcf5ef2aSThomas Huth 882fcf5ef2aSThomas Huth tcg_gen_andi_i32(t0, REG(B7_4), 0x1f); 883fcf5ef2aSThomas Huth 884fcf5ef2aSThomas Huth /* positive case: shift to the left */ 885fcf5ef2aSThomas Huth tcg_gen_shl_i32(t1, REG(B11_8), t0); 886fcf5ef2aSThomas Huth 887fcf5ef2aSThomas Huth /* negative case: shift to the right in two steps to 888fcf5ef2aSThomas Huth correctly handle the -32 case */ 889fcf5ef2aSThomas Huth tcg_gen_xori_i32(t0, t0, 0x1f); 890fcf5ef2aSThomas Huth tcg_gen_sar_i32(t2, REG(B11_8), t0); 891fcf5ef2aSThomas Huth tcg_gen_sari_i32(t2, t2, 1); 892fcf5ef2aSThomas Huth 893fcf5ef2aSThomas Huth /* select between the two cases */ 894fcf5ef2aSThomas Huth tcg_gen_movi_i32(t0, 0); 895fcf5ef2aSThomas Huth tcg_gen_movcond_i32(TCG_COND_GE, REG(B11_8), REG(B7_4), t0, t1, t2); 896fcf5ef2aSThomas Huth } 897fcf5ef2aSThomas Huth return; 898fcf5ef2aSThomas Huth case 0x400d: /* shld Rm,Rn */ 899fcf5ef2aSThomas Huth { 900fcf5ef2aSThomas Huth TCGv t0 = tcg_temp_new(); 901fcf5ef2aSThomas Huth TCGv t1 = tcg_temp_new(); 902fcf5ef2aSThomas Huth TCGv t2 = tcg_temp_new(); 903fcf5ef2aSThomas Huth 904fcf5ef2aSThomas Huth tcg_gen_andi_i32(t0, REG(B7_4), 0x1f); 905fcf5ef2aSThomas Huth 906fcf5ef2aSThomas Huth /* positive case: shift to the left */ 907fcf5ef2aSThomas Huth tcg_gen_shl_i32(t1, REG(B11_8), t0); 908fcf5ef2aSThomas Huth 909fcf5ef2aSThomas Huth /* negative case: shift to the right in two steps to 910fcf5ef2aSThomas Huth correctly handle the -32 case */ 911fcf5ef2aSThomas Huth tcg_gen_xori_i32(t0, t0, 0x1f); 912fcf5ef2aSThomas Huth tcg_gen_shr_i32(t2, REG(B11_8), t0); 913fcf5ef2aSThomas Huth tcg_gen_shri_i32(t2, t2, 1); 914fcf5ef2aSThomas Huth 
915fcf5ef2aSThomas Huth /* select between the two cases */ 916fcf5ef2aSThomas Huth tcg_gen_movi_i32(t0, 0); 917fcf5ef2aSThomas Huth tcg_gen_movcond_i32(TCG_COND_GE, REG(B11_8), REG(B7_4), t0, t1, t2); 918fcf5ef2aSThomas Huth } 919fcf5ef2aSThomas Huth return; 920fcf5ef2aSThomas Huth case 0x3008: /* sub Rm,Rn */ 921fcf5ef2aSThomas Huth tcg_gen_sub_i32(REG(B11_8), REG(B11_8), REG(B7_4)); 922fcf5ef2aSThomas Huth return; 923fcf5ef2aSThomas Huth case 0x300a: /* subc Rm,Rn */ 924fcf5ef2aSThomas Huth { 925fcf5ef2aSThomas Huth TCGv t0, t1; 926950b91beSRichard Henderson t0 = tcg_constant_tl(0); 927fcf5ef2aSThomas Huth t1 = tcg_temp_new(); 928fcf5ef2aSThomas Huth tcg_gen_add2_i32(t1, cpu_sr_t, cpu_sr_t, t0, REG(B7_4), t0); 929fcf5ef2aSThomas Huth tcg_gen_sub2_i32(REG(B11_8), cpu_sr_t, 930fcf5ef2aSThomas Huth REG(B11_8), t0, t1, cpu_sr_t); 931fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, cpu_sr_t, 1); 932fcf5ef2aSThomas Huth } 933fcf5ef2aSThomas Huth return; 934fcf5ef2aSThomas Huth case 0x300b: /* subv Rm,Rn */ 935fcf5ef2aSThomas Huth { 936*942ba09dSPhilippe Mathieu-Daudé TCGv Rn = REG(B11_8); 937*942ba09dSPhilippe Mathieu-Daudé TCGv Rm = REG(B7_4); 938*942ba09dSPhilippe Mathieu-Daudé TCGv result, t1, t2; 939*942ba09dSPhilippe Mathieu-Daudé 940*942ba09dSPhilippe Mathieu-Daudé result = tcg_temp_new(); 941fcf5ef2aSThomas Huth t1 = tcg_temp_new(); 942fcf5ef2aSThomas Huth t2 = tcg_temp_new(); 943*942ba09dSPhilippe Mathieu-Daudé tcg_gen_sub_i32(result, Rn, Rm); 944*942ba09dSPhilippe Mathieu-Daudé /* T = ((Rn ^ Rm) & (Result ^ Rn)) >> 31 */ 945*942ba09dSPhilippe Mathieu-Daudé tcg_gen_xor_i32(t1, result, Rn); 946*942ba09dSPhilippe Mathieu-Daudé tcg_gen_xor_i32(t2, Rn, Rm); 947fcf5ef2aSThomas Huth tcg_gen_and_i32(t1, t1, t2); 948fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_t, t1, 31); 949*942ba09dSPhilippe Mathieu-Daudé tcg_gen_mov_i32(Rn, result); 950fcf5ef2aSThomas Huth } 951fcf5ef2aSThomas Huth return; 952fcf5ef2aSThomas Huth case 0x2008: /* tst Rm,Rn */ 953fcf5ef2aSThomas Huth { 954fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 955fcf5ef2aSThomas Huth tcg_gen_and_i32(val, REG(B7_4), REG(B11_8)); 956fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0); 957fcf5ef2aSThomas Huth } 958fcf5ef2aSThomas Huth return; 959fcf5ef2aSThomas Huth case 0x200a: /* xor Rm,Rn */ 960fcf5ef2aSThomas Huth tcg_gen_xor_i32(REG(B11_8), REG(B11_8), REG(B7_4)); 961fcf5ef2aSThomas Huth return; 962fcf5ef2aSThomas Huth case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */ 963fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 964a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 965bdcb3739SRichard Henderson int xsrc = XHACK(B7_4); 966bdcb3739SRichard Henderson int xdst = XHACK(B11_8); 967bdcb3739SRichard Henderson tcg_gen_mov_i32(FREG(xdst), FREG(xsrc)); 968bdcb3739SRichard Henderson tcg_gen_mov_i32(FREG(xdst + 1), FREG(xsrc + 1)); 969fcf5ef2aSThomas Huth } else { 9707c9f7038SRichard Henderson tcg_gen_mov_i32(FREG(B11_8), FREG(B7_4)); 971fcf5ef2aSThomas Huth } 972fcf5ef2aSThomas Huth return; 973fcf5ef2aSThomas Huth case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */ 974fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 975a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 9764d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 9774d57fa50SRichard Henderson gen_load_fpr64(ctx, fp, XHACK(B7_4)); 97803a0d87eSRichard Henderson tcg_gen_qemu_st_i64(fp, REG(B11_8), ctx->memidx, 97903a0d87eSRichard Henderson MO_TEUQ | MO_ALIGN); 980fcf5ef2aSThomas Huth } else { 98103a0d87eSRichard Henderson 
tcg_gen_qemu_st_i32(FREG(B7_4), REG(B11_8), ctx->memidx, 98203a0d87eSRichard Henderson MO_TEUL | MO_ALIGN); 983fcf5ef2aSThomas Huth } 984fcf5ef2aSThomas Huth return; 985fcf5ef2aSThomas Huth case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */ 986fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 987a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 9884d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 98903a0d87eSRichard Henderson tcg_gen_qemu_ld_i64(fp, REG(B7_4), ctx->memidx, 99003a0d87eSRichard Henderson MO_TEUQ | MO_ALIGN); 9914d57fa50SRichard Henderson gen_store_fpr64(ctx, fp, XHACK(B11_8)); 992fcf5ef2aSThomas Huth } else { 99303a0d87eSRichard Henderson tcg_gen_qemu_ld_i32(FREG(B11_8), REG(B7_4), ctx->memidx, 99403a0d87eSRichard Henderson MO_TEUL | MO_ALIGN); 995fcf5ef2aSThomas Huth } 996fcf5ef2aSThomas Huth return; 997fcf5ef2aSThomas Huth case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */ 998fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 999a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 10004d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 100103a0d87eSRichard Henderson tcg_gen_qemu_ld_i64(fp, REG(B7_4), ctx->memidx, 100203a0d87eSRichard Henderson MO_TEUQ | MO_ALIGN); 10034d57fa50SRichard Henderson gen_store_fpr64(ctx, fp, XHACK(B11_8)); 1004fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 8); 1005fcf5ef2aSThomas Huth } else { 100603a0d87eSRichard Henderson tcg_gen_qemu_ld_i32(FREG(B11_8), REG(B7_4), ctx->memidx, 100703a0d87eSRichard Henderson MO_TEUL | MO_ALIGN); 1008fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4); 1009fcf5ef2aSThomas Huth } 1010fcf5ef2aSThomas Huth return; 1011fcf5ef2aSThomas Huth case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */ 1012fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 10134d57fa50SRichard Henderson { 1014fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new_i32(); 1015a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 10164d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 10174d57fa50SRichard Henderson gen_load_fpr64(ctx, fp, XHACK(B7_4)); 10184d57fa50SRichard Henderson tcg_gen_subi_i32(addr, REG(B11_8), 8); 101903a0d87eSRichard Henderson tcg_gen_qemu_st_i64(fp, addr, ctx->memidx, 102003a0d87eSRichard Henderson MO_TEUQ | MO_ALIGN); 1021fcf5ef2aSThomas Huth } else { 10224d57fa50SRichard Henderson tcg_gen_subi_i32(addr, REG(B11_8), 4); 102303a0d87eSRichard Henderson tcg_gen_qemu_st_i32(FREG(B7_4), addr, ctx->memidx, 102403a0d87eSRichard Henderson MO_TEUL | MO_ALIGN); 1025fcf5ef2aSThomas Huth } 1026fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 10274d57fa50SRichard Henderson } 1028fcf5ef2aSThomas Huth return; 1029fcf5ef2aSThomas Huth case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rm - FPSCR: Nothing */ 1030fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1031fcf5ef2aSThomas Huth { 1032fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new_i32(); 1033fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B7_4), REG(0)); 1034a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 10354d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 103603a0d87eSRichard Henderson tcg_gen_qemu_ld_i64(fp, addr, ctx->memidx, 103703a0d87eSRichard Henderson MO_TEUQ | MO_ALIGN); 10384d57fa50SRichard Henderson gen_store_fpr64(ctx, fp, XHACK(B11_8)); 1039fcf5ef2aSThomas Huth } else { 104003a0d87eSRichard Henderson tcg_gen_qemu_ld_i32(FREG(B11_8), addr, ctx->memidx, 104103a0d87eSRichard Henderson MO_TEUL | MO_ALIGN); 1042fcf5ef2aSThomas Huth } 1043fcf5ef2aSThomas Huth } 1044fcf5ef2aSThomas Huth return; 1045fcf5ef2aSThomas Huth case 0xf007: /* fmov 
{F,D,X}Rn,@(R0,Rn) - FPSCR: Nothing */ 1046fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1047fcf5ef2aSThomas Huth { 1048fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1049fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B11_8), REG(0)); 1050a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 10514d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 10524d57fa50SRichard Henderson gen_load_fpr64(ctx, fp, XHACK(B7_4)); 105303a0d87eSRichard Henderson tcg_gen_qemu_st_i64(fp, addr, ctx->memidx, 105403a0d87eSRichard Henderson MO_TEUQ | MO_ALIGN); 1055fcf5ef2aSThomas Huth } else { 105603a0d87eSRichard Henderson tcg_gen_qemu_st_i32(FREG(B7_4), addr, ctx->memidx, 105703a0d87eSRichard Henderson MO_TEUL | MO_ALIGN); 1058fcf5ef2aSThomas Huth } 1059fcf5ef2aSThomas Huth } 1060fcf5ef2aSThomas Huth return; 1061fcf5ef2aSThomas Huth case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */ 1062fcf5ef2aSThomas Huth case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */ 1063fcf5ef2aSThomas Huth case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */ 1064fcf5ef2aSThomas Huth case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */ 1065fcf5ef2aSThomas Huth case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */ 1066fcf5ef2aSThomas Huth case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */ 1067fcf5ef2aSThomas Huth { 1068fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1069a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 1070fcf5ef2aSThomas Huth TCGv_i64 fp0, fp1; 1071fcf5ef2aSThomas Huth 107293dc9c89SRichard Henderson if (ctx->opcode & 0x0110) { 107393dc9c89SRichard Henderson goto do_illegal; 107493dc9c89SRichard Henderson } 1075fcf5ef2aSThomas Huth fp0 = tcg_temp_new_i64(); 1076fcf5ef2aSThomas Huth fp1 = tcg_temp_new_i64(); 10771e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp0, B11_8); 10781e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp1, B7_4); 1079fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf00f) { 1080fcf5ef2aSThomas Huth case 0xf000: /* fadd Rm,Rn */ 1081ad75a51eSRichard Henderson gen_helper_fadd_DT(fp0, tcg_env, fp0, fp1); 1082fcf5ef2aSThomas Huth break; 1083fcf5ef2aSThomas Huth case 0xf001: /* fsub Rm,Rn */ 1084ad75a51eSRichard Henderson gen_helper_fsub_DT(fp0, tcg_env, fp0, fp1); 1085fcf5ef2aSThomas Huth break; 1086fcf5ef2aSThomas Huth case 0xf002: /* fmul Rm,Rn */ 1087ad75a51eSRichard Henderson gen_helper_fmul_DT(fp0, tcg_env, fp0, fp1); 1088fcf5ef2aSThomas Huth break; 1089fcf5ef2aSThomas Huth case 0xf003: /* fdiv Rm,Rn */ 1090ad75a51eSRichard Henderson gen_helper_fdiv_DT(fp0, tcg_env, fp0, fp1); 1091fcf5ef2aSThomas Huth break; 1092fcf5ef2aSThomas Huth case 0xf004: /* fcmp/eq Rm,Rn */ 1093ad75a51eSRichard Henderson gen_helper_fcmp_eq_DT(cpu_sr_t, tcg_env, fp0, fp1); 1094fcf5ef2aSThomas Huth return; 1095fcf5ef2aSThomas Huth case 0xf005: /* fcmp/gt Rm,Rn */ 1096ad75a51eSRichard Henderson gen_helper_fcmp_gt_DT(cpu_sr_t, tcg_env, fp0, fp1); 1097fcf5ef2aSThomas Huth return; 1098fcf5ef2aSThomas Huth } 10991e0b21d8SRichard Henderson gen_store_fpr64(ctx, fp0, B11_8); 1100fcf5ef2aSThomas Huth } else { 1101fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf00f) { 1102fcf5ef2aSThomas Huth case 0xf000: /* fadd Rm,Rn */ 1103ad75a51eSRichard Henderson gen_helper_fadd_FT(FREG(B11_8), tcg_env, 11047c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1105fcf5ef2aSThomas Huth break; 1106fcf5ef2aSThomas Huth case 0xf001: /* fsub Rm,Rn */ 1107ad75a51eSRichard Henderson gen_helper_fsub_FT(FREG(B11_8), tcg_env, 11087c9f7038SRichard 
Henderson FREG(B11_8), FREG(B7_4)); 1109fcf5ef2aSThomas Huth break; 1110fcf5ef2aSThomas Huth case 0xf002: /* fmul Rm,Rn */ 1111ad75a51eSRichard Henderson gen_helper_fmul_FT(FREG(B11_8), tcg_env, 11127c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1113fcf5ef2aSThomas Huth break; 1114fcf5ef2aSThomas Huth case 0xf003: /* fdiv Rm,Rn */ 1115ad75a51eSRichard Henderson gen_helper_fdiv_FT(FREG(B11_8), tcg_env, 11167c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1117fcf5ef2aSThomas Huth break; 1118fcf5ef2aSThomas Huth case 0xf004: /* fcmp/eq Rm,Rn */ 1119ad75a51eSRichard Henderson gen_helper_fcmp_eq_FT(cpu_sr_t, tcg_env, 11207c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1121fcf5ef2aSThomas Huth return; 1122fcf5ef2aSThomas Huth case 0xf005: /* fcmp/gt Rm,Rn */ 1123ad75a51eSRichard Henderson gen_helper_fcmp_gt_FT(cpu_sr_t, tcg_env, 11247c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1125fcf5ef2aSThomas Huth return; 1126fcf5ef2aSThomas Huth } 1127fcf5ef2aSThomas Huth } 1128fcf5ef2aSThomas Huth } 1129fcf5ef2aSThomas Huth return; 1130fcf5ef2aSThomas Huth case 0xf00e: /* fmac FR0,RM,Rn */ 1131fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 11327e9f7ca8SRichard Henderson CHECK_FPSCR_PR_0 1133ad75a51eSRichard Henderson gen_helper_fmac_FT(FREG(B11_8), tcg_env, 11347c9f7038SRichard Henderson FREG(0), FREG(B7_4), FREG(B11_8)); 1135fcf5ef2aSThomas Huth return; 1136fcf5ef2aSThomas Huth } 1137fcf5ef2aSThomas Huth 1138fcf5ef2aSThomas Huth switch (ctx->opcode & 0xff00) { 1139fcf5ef2aSThomas Huth case 0xc900: /* and #imm,R0 */ 1140fcf5ef2aSThomas Huth tcg_gen_andi_i32(REG(0), REG(0), B7_0); 1141fcf5ef2aSThomas Huth return; 1142fcf5ef2aSThomas Huth case 0xcd00: /* and.b #imm,@(R0,GBR) */ 1143fcf5ef2aSThomas Huth { 1144fcf5ef2aSThomas Huth TCGv addr, val; 1145fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1146fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(0), cpu_gbr); 1147fcf5ef2aSThomas Huth val = tcg_temp_new(); 1148fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB); 1149fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, val, B7_0); 1150fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB); 1151fcf5ef2aSThomas Huth } 1152fcf5ef2aSThomas Huth return; 1153fcf5ef2aSThomas Huth case 0x8b00: /* bf label */ 1154fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 11556f1c2af6SRichard Henderson gen_conditional_jump(ctx, ctx->base.pc_next + 4 + B7_0s * 2, false); 1156fcf5ef2aSThomas Huth return; 1157fcf5ef2aSThomas Huth case 0x8f00: /* bf/s label */ 1158fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1159ac9707eaSAurelien Jarno tcg_gen_xori_i32(cpu_delayed_cond, cpu_sr_t, 1); 11606f1c2af6SRichard Henderson ctx->delayed_pc = ctx->base.pc_next + 4 + B7_0s * 2; 1161ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT_COND; 1162fcf5ef2aSThomas Huth return; 1163fcf5ef2aSThomas Huth case 0x8900: /* bt label */ 1164fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 11656f1c2af6SRichard Henderson gen_conditional_jump(ctx, ctx->base.pc_next + 4 + B7_0s * 2, true); 1166fcf5ef2aSThomas Huth return; 1167fcf5ef2aSThomas Huth case 0x8d00: /* bt/s label */ 1168fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1169ac9707eaSAurelien Jarno tcg_gen_mov_i32(cpu_delayed_cond, cpu_sr_t); 11706f1c2af6SRichard Henderson ctx->delayed_pc = ctx->base.pc_next + 4 + B7_0s * 2; 1171ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT_COND; 1172fcf5ef2aSThomas Huth return; 1173fcf5ef2aSThomas Huth case 0x8800: /* cmp/eq #imm,R0 */ 1174fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, REG(0), B7_0s); 
1175fcf5ef2aSThomas Huth return; 1176fcf5ef2aSThomas Huth case 0xc400: /* mov.b @(disp,GBR),R0 */ 1177fcf5ef2aSThomas Huth { 1178fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1179fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0); 1180fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_SB); 1181fcf5ef2aSThomas Huth } 1182fcf5ef2aSThomas Huth return; 1183fcf5ef2aSThomas Huth case 0xc500: /* mov.w @(disp,GBR),R0 */ 1184fcf5ef2aSThomas Huth { 1185fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1186fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2); 118703a0d87eSRichard Henderson tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_TESW | MO_ALIGN); 1188fcf5ef2aSThomas Huth } 1189fcf5ef2aSThomas Huth return; 1190fcf5ef2aSThomas Huth case 0xc600: /* mov.l @(disp,GBR),R0 */ 1191fcf5ef2aSThomas Huth { 1192fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1193fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4); 119403a0d87eSRichard Henderson tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_TESL | MO_ALIGN); 1195fcf5ef2aSThomas Huth } 1196fcf5ef2aSThomas Huth return; 1197fcf5ef2aSThomas Huth case 0xc000: /* mov.b R0,@(disp,GBR) */ 1198fcf5ef2aSThomas Huth { 1199fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1200fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0); 1201fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_UB); 1202fcf5ef2aSThomas Huth } 1203fcf5ef2aSThomas Huth return; 1204fcf5ef2aSThomas Huth case 0xc100: /* mov.w R0,@(disp,GBR) */ 1205fcf5ef2aSThomas Huth { 1206fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1207fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2); 120803a0d87eSRichard Henderson tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_TEUW | MO_ALIGN); 1209fcf5ef2aSThomas Huth } 1210fcf5ef2aSThomas Huth return; 1211fcf5ef2aSThomas Huth case 0xc200: /* mov.l R0,@(disp,GBR) */ 1212fcf5ef2aSThomas Huth { 1213fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1214fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4); 121503a0d87eSRichard Henderson tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_TEUL | MO_ALIGN); 1216fcf5ef2aSThomas Huth } 1217fcf5ef2aSThomas Huth return; 1218fcf5ef2aSThomas Huth case 0x8000: /* mov.b R0,@(disp,Rn) */ 1219fcf5ef2aSThomas Huth { 1220fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1221fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0); 1222fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_UB); 1223fcf5ef2aSThomas Huth } 1224fcf5ef2aSThomas Huth return; 1225fcf5ef2aSThomas Huth case 0x8100: /* mov.w R0,@(disp,Rn) */ 1226fcf5ef2aSThomas Huth { 1227fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1228fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2); 12294da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, 12304da06fb3SRichard Henderson MO_TEUW | UNALIGN(ctx)); 1231fcf5ef2aSThomas Huth } 1232fcf5ef2aSThomas Huth return; 1233fcf5ef2aSThomas Huth case 0x8400: /* mov.b @(disp,Rn),R0 */ 1234fcf5ef2aSThomas Huth { 1235fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1236fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0); 1237fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_SB); 1238fcf5ef2aSThomas Huth } 1239fcf5ef2aSThomas Huth return; 1240fcf5ef2aSThomas Huth case 0x8500: /* mov.w @(disp,Rn),R0 */ 1241fcf5ef2aSThomas Huth { 1242fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1243fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2); 
12444da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, 12454da06fb3SRichard Henderson MO_TESW | UNALIGN(ctx)); 1246fcf5ef2aSThomas Huth } 1247fcf5ef2aSThomas Huth return; 1248fcf5ef2aSThomas Huth case 0xc700: /* mova @(disp,PC),R0 */ 1249b754cb2dSZack Buhman CHECK_NOT_DELAY_SLOT 12506f1c2af6SRichard Henderson tcg_gen_movi_i32(REG(0), ((ctx->base.pc_next & 0xfffffffc) + 12516f1c2af6SRichard Henderson 4 + B7_0 * 4) & ~3); 1252fcf5ef2aSThomas Huth return; 1253fcf5ef2aSThomas Huth case 0xcb00: /* or #imm,R0 */ 1254fcf5ef2aSThomas Huth tcg_gen_ori_i32(REG(0), REG(0), B7_0); 1255fcf5ef2aSThomas Huth return; 1256fcf5ef2aSThomas Huth case 0xcf00: /* or.b #imm,@(R0,GBR) */ 1257fcf5ef2aSThomas Huth { 1258fcf5ef2aSThomas Huth TCGv addr, val; 1259fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1260fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(0), cpu_gbr); 1261fcf5ef2aSThomas Huth val = tcg_temp_new(); 1262fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB); 1263fcf5ef2aSThomas Huth tcg_gen_ori_i32(val, val, B7_0); 1264fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB); 1265fcf5ef2aSThomas Huth } 1266fcf5ef2aSThomas Huth return; 1267fcf5ef2aSThomas Huth case 0xc300: /* trapa #imm */ 1268fcf5ef2aSThomas Huth { 1269fcf5ef2aSThomas Huth TCGv imm; 1270fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1271ac9707eaSAurelien Jarno gen_save_cpu_state(ctx, true); 1272950b91beSRichard Henderson imm = tcg_constant_i32(B7_0); 1273ad75a51eSRichard Henderson gen_helper_trapa(tcg_env, imm); 12746f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 1275fcf5ef2aSThomas Huth } 1276fcf5ef2aSThomas Huth return; 1277fcf5ef2aSThomas Huth case 0xc800: /* tst #imm,R0 */ 1278fcf5ef2aSThomas Huth { 1279fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1280fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, REG(0), B7_0); 1281fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0); 1282fcf5ef2aSThomas Huth } 1283fcf5ef2aSThomas Huth return; 1284fcf5ef2aSThomas Huth case 0xcc00: /* tst.b #imm,@(R0,GBR) */ 1285fcf5ef2aSThomas Huth { 1286fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1287fcf5ef2aSThomas Huth tcg_gen_add_i32(val, REG(0), cpu_gbr); 1288fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, val, ctx->memidx, MO_UB); 1289fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, val, B7_0); 1290fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0); 1291fcf5ef2aSThomas Huth } 1292fcf5ef2aSThomas Huth return; 1293fcf5ef2aSThomas Huth case 0xca00: /* xor #imm,R0 */ 1294fcf5ef2aSThomas Huth tcg_gen_xori_i32(REG(0), REG(0), B7_0); 1295fcf5ef2aSThomas Huth return; 1296fcf5ef2aSThomas Huth case 0xce00: /* xor.b #imm,@(R0,GBR) */ 1297fcf5ef2aSThomas Huth { 1298fcf5ef2aSThomas Huth TCGv addr, val; 1299fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1300fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(0), cpu_gbr); 1301fcf5ef2aSThomas Huth val = tcg_temp_new(); 1302fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB); 1303fcf5ef2aSThomas Huth tcg_gen_xori_i32(val, val, B7_0); 1304fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB); 1305fcf5ef2aSThomas Huth } 1306fcf5ef2aSThomas Huth return; 1307fcf5ef2aSThomas Huth } 1308fcf5ef2aSThomas Huth 1309fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf08f) { 1310fcf5ef2aSThomas Huth case 0x408e: /* ldc Rm,Rn_BANK */ 1311fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1312fcf5ef2aSThomas Huth tcg_gen_mov_i32(ALTREG(B6_4), REG(B11_8)); 1313fcf5ef2aSThomas Huth return; 1314fcf5ef2aSThomas Huth case 0x4087: 
/* ldc.l @Rm+,Rn_BANK */ 1315fcf5ef2aSThomas Huth CHECK_PRIVILEGED 131603a0d87eSRichard Henderson tcg_gen_qemu_ld_i32(ALTREG(B6_4), REG(B11_8), ctx->memidx, 131703a0d87eSRichard Henderson MO_TESL | MO_ALIGN); 1318fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 1319fcf5ef2aSThomas Huth return; 1320fcf5ef2aSThomas Huth case 0x0082: /* stc Rm_BANK,Rn */ 1321fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1322fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), ALTREG(B6_4)); 1323fcf5ef2aSThomas Huth return; 1324fcf5ef2aSThomas Huth case 0x4083: /* stc.l Rm_BANK,@-Rn */ 1325fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1326fcf5ef2aSThomas Huth { 1327fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1328fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 132903a0d87eSRichard Henderson tcg_gen_qemu_st_i32(ALTREG(B6_4), addr, ctx->memidx, 133003a0d87eSRichard Henderson MO_TEUL | MO_ALIGN); 1331fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 1332fcf5ef2aSThomas Huth } 1333fcf5ef2aSThomas Huth return; 1334fcf5ef2aSThomas Huth } 1335fcf5ef2aSThomas Huth 1336fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf0ff) { 1337fcf5ef2aSThomas Huth case 0x0023: /* braf Rn */ 1338fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 13396f1c2af6SRichard Henderson tcg_gen_addi_i32(cpu_delayed_pc, REG(B11_8), ctx->base.pc_next + 4); 1340ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT; 1341fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1342fcf5ef2aSThomas Huth return; 1343fcf5ef2aSThomas Huth case 0x0003: /* bsrf Rn */ 1344fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 13456f1c2af6SRichard Henderson tcg_gen_movi_i32(cpu_pr, ctx->base.pc_next + 4); 1346fcf5ef2aSThomas Huth tcg_gen_add_i32(cpu_delayed_pc, REG(B11_8), cpu_pr); 1347ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT; 1348fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1349fcf5ef2aSThomas Huth return; 1350fcf5ef2aSThomas Huth case 0x4015: /* cmp/pl Rn */ 1351fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_GT, cpu_sr_t, REG(B11_8), 0); 1352fcf5ef2aSThomas Huth return; 1353fcf5ef2aSThomas Huth case 0x4011: /* cmp/pz Rn */ 1354fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_GE, cpu_sr_t, REG(B11_8), 0); 1355fcf5ef2aSThomas Huth return; 1356fcf5ef2aSThomas Huth case 0x4010: /* dt Rn */ 1357fcf5ef2aSThomas Huth tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 1); 1358fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, REG(B11_8), 0); 1359fcf5ef2aSThomas Huth return; 1360fcf5ef2aSThomas Huth case 0x402b: /* jmp @Rn */ 1361fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1362fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8)); 1363ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT; 1364fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1365fcf5ef2aSThomas Huth return; 1366fcf5ef2aSThomas Huth case 0x400b: /* jsr @Rn */ 1367fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 13686f1c2af6SRichard Henderson tcg_gen_movi_i32(cpu_pr, ctx->base.pc_next + 4); 1369fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8)); 1370ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT; 1371fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1372fcf5ef2aSThomas Huth return; 1373fcf5ef2aSThomas Huth case 0x400e: /* ldc Rm,SR */ 1374fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1375fcf5ef2aSThomas Huth { 1376fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1377fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, REG(B11_8), 0x700083f3); 1378fcf5ef2aSThomas Huth gen_write_sr(val); 13796f1c2af6SRichard Henderson 
ctx->base.is_jmp = DISAS_STOP; 1380fcf5ef2aSThomas Huth } 1381fcf5ef2aSThomas Huth return; 1382fcf5ef2aSThomas Huth case 0x4007: /* ldc.l @Rm+,SR */ 1383fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1384fcf5ef2aSThomas Huth { 1385fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 138603a0d87eSRichard Henderson tcg_gen_qemu_ld_i32(val, REG(B11_8), ctx->memidx, 138703a0d87eSRichard Henderson MO_TESL | MO_ALIGN); 1388fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, val, 0x700083f3); 1389fcf5ef2aSThomas Huth gen_write_sr(val); 1390fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 13916f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 1392fcf5ef2aSThomas Huth } 1393fcf5ef2aSThomas Huth return; 1394fcf5ef2aSThomas Huth case 0x0002: /* stc SR,Rn */ 1395fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1396fcf5ef2aSThomas Huth gen_read_sr(REG(B11_8)); 1397fcf5ef2aSThomas Huth return; 1398fcf5ef2aSThomas Huth case 0x4003: /* stc SR,@-Rn */ 1399fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1400fcf5ef2aSThomas Huth { 1401fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1402fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1403fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 1404fcf5ef2aSThomas Huth gen_read_sr(val); 140503a0d87eSRichard Henderson tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_TEUL | MO_ALIGN); 1406fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 1407fcf5ef2aSThomas Huth } 1408fcf5ef2aSThomas Huth return; 1409fcf5ef2aSThomas Huth #define LD(reg,ldnum,ldpnum,prechk) \ 1410fcf5ef2aSThomas Huth case ldnum: \ 1411fcf5ef2aSThomas Huth prechk \ 1412fcf5ef2aSThomas Huth tcg_gen_mov_i32 (cpu_##reg, REG(B11_8)); \ 1413fcf5ef2aSThomas Huth return; \ 1414fcf5ef2aSThomas Huth case ldpnum: \ 1415fcf5ef2aSThomas Huth prechk \ 141603a0d87eSRichard Henderson tcg_gen_qemu_ld_i32(cpu_##reg, REG(B11_8), ctx->memidx, \ 141703a0d87eSRichard Henderson MO_TESL | MO_ALIGN); \ 1418fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); \ 1419fcf5ef2aSThomas Huth return; 1420fcf5ef2aSThomas Huth #define ST(reg,stnum,stpnum,prechk) \ 1421fcf5ef2aSThomas Huth case stnum: \ 1422fcf5ef2aSThomas Huth prechk \ 1423fcf5ef2aSThomas Huth tcg_gen_mov_i32 (REG(B11_8), cpu_##reg); \ 1424fcf5ef2aSThomas Huth return; \ 1425fcf5ef2aSThomas Huth case stpnum: \ 1426fcf5ef2aSThomas Huth prechk \ 1427fcf5ef2aSThomas Huth { \ 1428fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); \ 1429fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); \ 143003a0d87eSRichard Henderson tcg_gen_qemu_st_i32(cpu_##reg, addr, ctx->memidx, \ 143103a0d87eSRichard Henderson MO_TEUL | MO_ALIGN); \ 1432fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); \ 1433fcf5ef2aSThomas Huth } \ 1434fcf5ef2aSThomas Huth return; 1435fcf5ef2aSThomas Huth #define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk) \ 1436fcf5ef2aSThomas Huth LD(reg,ldnum,ldpnum,prechk) \ 1437fcf5ef2aSThomas Huth ST(reg,stnum,stpnum,prechk) 1438fcf5ef2aSThomas Huth LDST(gbr, 0x401e, 0x4017, 0x0012, 0x4013, {}) 1439fcf5ef2aSThomas Huth LDST(vbr, 0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED) 1440fcf5ef2aSThomas Huth LDST(ssr, 0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED) 1441fcf5ef2aSThomas Huth LDST(spc, 0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED) 1442fcf5ef2aSThomas Huth ST(sgr, 0x003a, 0x4032, CHECK_PRIVILEGED) 1443ccae24d4SRichard Henderson LD(sgr, 0x403a, 0x4036, CHECK_PRIVILEGED CHECK_SH4A) 1444fcf5ef2aSThomas Huth LDST(dbr, 0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED) 1445fcf5ef2aSThomas Huth LDST(mach, 0x400a, 0x4006, 0x000a, 0x4002, {}) 
1446fcf5ef2aSThomas Huth LDST(macl, 0x401a, 0x4016, 0x001a, 0x4012, {}) 1447fcf5ef2aSThomas Huth LDST(pr, 0x402a, 0x4026, 0x002a, 0x4022, {}) 1448fcf5ef2aSThomas Huth LDST(fpul, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED}) 1449fcf5ef2aSThomas Huth case 0x406a: /* lds Rm,FPSCR */ 1450fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1451ad75a51eSRichard Henderson gen_helper_ld_fpscr(tcg_env, REG(B11_8)); 14526f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 1453fcf5ef2aSThomas Huth return; 1454fcf5ef2aSThomas Huth case 0x4066: /* lds.l @Rm+,FPSCR */ 1455fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1456fcf5ef2aSThomas Huth { 1457fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 145803a0d87eSRichard Henderson tcg_gen_qemu_ld_i32(addr, REG(B11_8), ctx->memidx, 145903a0d87eSRichard Henderson MO_TESL | MO_ALIGN); 1460fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 1461ad75a51eSRichard Henderson gen_helper_ld_fpscr(tcg_env, addr); 14626f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 1463fcf5ef2aSThomas Huth } 1464fcf5ef2aSThomas Huth return; 1465fcf5ef2aSThomas Huth case 0x006a: /* sts FPSCR,Rn */ 1466fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1467fcf5ef2aSThomas Huth tcg_gen_andi_i32(REG(B11_8), cpu_fpscr, 0x003fffff); 1468fcf5ef2aSThomas Huth return; 1469fcf5ef2aSThomas Huth case 0x4062: /* sts FPSCR,@-Rn */ 1470fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1471fcf5ef2aSThomas Huth { 1472fcf5ef2aSThomas Huth TCGv addr, val; 1473fcf5ef2aSThomas Huth val = tcg_temp_new(); 1474fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, cpu_fpscr, 0x003fffff); 1475fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1476fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 147703a0d87eSRichard Henderson tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_TEUL | MO_ALIGN); 1478fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 1479fcf5ef2aSThomas Huth } 1480fcf5ef2aSThomas Huth return; 1481fcf5ef2aSThomas Huth case 0x00c3: /* movca.l R0,@Rm */ 1482fcf5ef2aSThomas Huth { 1483fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 148403a0d87eSRichard Henderson tcg_gen_qemu_ld_i32(val, REG(B11_8), ctx->memidx, 148503a0d87eSRichard Henderson MO_TEUL | MO_ALIGN); 1486ad75a51eSRichard Henderson gen_helper_movcal(tcg_env, REG(B11_8), val); 148703a0d87eSRichard Henderson tcg_gen_qemu_st_i32(REG(0), REG(B11_8), ctx->memidx, 148803a0d87eSRichard Henderson MO_TEUL | MO_ALIGN); 1489fcf5ef2aSThomas Huth } 1490fcf5ef2aSThomas Huth ctx->has_movcal = 1; 1491fcf5ef2aSThomas Huth return; 1492143021b2SAurelien Jarno case 0x40a9: /* movua.l @Rm,R0 */ 1493ccae24d4SRichard Henderson CHECK_SH4A 1494143021b2SAurelien Jarno /* Load non-boundary-aligned data */ 149534257c21SAurelien Jarno tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, 149634257c21SAurelien Jarno MO_TEUL | MO_UNALN); 1497fcf5ef2aSThomas Huth return; 1498143021b2SAurelien Jarno case 0x40e9: /* movua.l @Rm+,R0 */ 1499ccae24d4SRichard Henderson CHECK_SH4A 1500143021b2SAurelien Jarno /* Load non-boundary-aligned data */ 150134257c21SAurelien Jarno tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, 150234257c21SAurelien Jarno MO_TEUL | MO_UNALN); 1503fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 1504fcf5ef2aSThomas Huth return; 1505fcf5ef2aSThomas Huth case 0x0029: /* movt Rn */ 1506fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), cpu_sr_t); 1507fcf5ef2aSThomas Huth return; 1508fcf5ef2aSThomas Huth case 0x0073: 1509fcf5ef2aSThomas Huth /* MOVCO.L 1510f85da308SRichard Henderson * LDST -> T 1511f85da308SRichard Henderson * If (T == 1) R0 -> (Rn) 
1512f85da308SRichard Henderson * 0 -> LDST 1513f85da308SRichard Henderson * 1514f85da308SRichard Henderson * The above description doesn't work in a parallel context. 1515f85da308SRichard Henderson * Since we currently support no smp boards, this implies user-mode. 1516f85da308SRichard Henderson * But we can still support the official mechanism while user-mode 1517f85da308SRichard Henderson * is single-threaded. */ 1518ccae24d4SRichard Henderson CHECK_SH4A 1519ccae24d4SRichard Henderson { 1520f85da308SRichard Henderson TCGLabel *fail = gen_new_label(); 1521f85da308SRichard Henderson TCGLabel *done = gen_new_label(); 1522f85da308SRichard Henderson 15236f1c2af6SRichard Henderson if ((tb_cflags(ctx->base.tb) & CF_PARALLEL)) { 1524f85da308SRichard Henderson TCGv tmp; 1525f85da308SRichard Henderson 1526f85da308SRichard Henderson tcg_gen_brcond_i32(TCG_COND_NE, REG(B11_8), 1527f85da308SRichard Henderson cpu_lock_addr, fail); 1528f85da308SRichard Henderson tmp = tcg_temp_new(); 1529f85da308SRichard Henderson tcg_gen_atomic_cmpxchg_i32(tmp, REG(B11_8), cpu_lock_value, 153003a0d87eSRichard Henderson REG(0), ctx->memidx, 153103a0d87eSRichard Henderson MO_TEUL | MO_ALIGN); 1532f85da308SRichard Henderson tcg_gen_setcond_i32(TCG_COND_EQ, cpu_sr_t, tmp, cpu_lock_value); 1533f85da308SRichard Henderson } else { 1534f85da308SRichard Henderson tcg_gen_brcondi_i32(TCG_COND_EQ, cpu_lock_addr, -1, fail); 153503a0d87eSRichard Henderson tcg_gen_qemu_st_i32(REG(0), REG(B11_8), ctx->memidx, 153603a0d87eSRichard Henderson MO_TEUL | MO_ALIGN); 1537f85da308SRichard Henderson tcg_gen_movi_i32(cpu_sr_t, 1); 1538ccae24d4SRichard Henderson } 1539f85da308SRichard Henderson tcg_gen_br(done); 1540f85da308SRichard Henderson 1541f85da308SRichard Henderson gen_set_label(fail); 1542f85da308SRichard Henderson tcg_gen_movi_i32(cpu_sr_t, 0); 1543f85da308SRichard Henderson 1544f85da308SRichard Henderson gen_set_label(done); 1545f85da308SRichard Henderson tcg_gen_movi_i32(cpu_lock_addr, -1); 1546f85da308SRichard Henderson } 1547f85da308SRichard Henderson return; 1548fcf5ef2aSThomas Huth case 0x0063: 1549fcf5ef2aSThomas Huth /* MOVLI.L @Rm,R0 1550f85da308SRichard Henderson * 1 -> LDST 1551f85da308SRichard Henderson * (Rm) -> R0 1552f85da308SRichard Henderson * When interrupt/exception 1553f85da308SRichard Henderson * occurred 0 -> LDST 1554f85da308SRichard Henderson * 1555f85da308SRichard Henderson * In a parallel context, we must also save the loaded value 1556f85da308SRichard Henderson * for use with the cmpxchg that we'll use with movco.l. 
*/ 1557ccae24d4SRichard Henderson CHECK_SH4A 15586f1c2af6SRichard Henderson if ((tb_cflags(ctx->base.tb) & CF_PARALLEL)) { 1559f85da308SRichard Henderson TCGv tmp = tcg_temp_new(); 1560f85da308SRichard Henderson tcg_gen_mov_i32(tmp, REG(B11_8)); 156103a0d87eSRichard Henderson tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, 156203a0d87eSRichard Henderson MO_TESL | MO_ALIGN); 1563f85da308SRichard Henderson tcg_gen_mov_i32(cpu_lock_value, REG(0)); 1564f85da308SRichard Henderson tcg_gen_mov_i32(cpu_lock_addr, tmp); 1565f85da308SRichard Henderson } else { 156603a0d87eSRichard Henderson tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, 156703a0d87eSRichard Henderson MO_TESL | MO_ALIGN); 1568f85da308SRichard Henderson tcg_gen_movi_i32(cpu_lock_addr, 0); 1569f85da308SRichard Henderson } 1570fcf5ef2aSThomas Huth return; 1571fcf5ef2aSThomas Huth case 0x0093: /* ocbi @Rn */ 1572fcf5ef2aSThomas Huth { 1573ad75a51eSRichard Henderson gen_helper_ocbi(tcg_env, REG(B11_8)); 1574fcf5ef2aSThomas Huth } 1575fcf5ef2aSThomas Huth return; 1576fcf5ef2aSThomas Huth case 0x00a3: /* ocbp @Rn */ 1577fcf5ef2aSThomas Huth case 0x00b3: /* ocbwb @Rn */ 1578fcf5ef2aSThomas Huth /* These instructions are supposed to do nothing in case of 1579fcf5ef2aSThomas Huth a cache miss. Given that we only partially emulate caches 1580fcf5ef2aSThomas Huth it is safe to simply ignore them. */ 1581fcf5ef2aSThomas Huth return; 1582fcf5ef2aSThomas Huth case 0x0083: /* pref @Rn */ 1583fcf5ef2aSThomas Huth return; 1584fcf5ef2aSThomas Huth case 0x00d3: /* prefi @Rn */ 1585ccae24d4SRichard Henderson CHECK_SH4A 1586fcf5ef2aSThomas Huth return; 1587fcf5ef2aSThomas Huth case 0x00e3: /* icbi @Rn */ 1588ccae24d4SRichard Henderson CHECK_SH4A 1589fcf5ef2aSThomas Huth return; 1590fcf5ef2aSThomas Huth case 0x00ab: /* synco */ 1591ccae24d4SRichard Henderson CHECK_SH4A 1592aa351317SAurelien Jarno tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 1593fcf5ef2aSThomas Huth return; 1594fcf5ef2aSThomas Huth case 0x4024: /* rotcl Rn */ 1595fcf5ef2aSThomas Huth { 1596fcf5ef2aSThomas Huth TCGv tmp = tcg_temp_new(); 1597fcf5ef2aSThomas Huth tcg_gen_mov_i32(tmp, cpu_sr_t); 1598fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_t, REG(B11_8), 31); 1599fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1); 1600fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), REG(B11_8), tmp); 1601fcf5ef2aSThomas Huth } 1602fcf5ef2aSThomas Huth return; 1603fcf5ef2aSThomas Huth case 0x4025: /* rotcr Rn */ 1604fcf5ef2aSThomas Huth { 1605fcf5ef2aSThomas Huth TCGv tmp = tcg_temp_new(); 1606fcf5ef2aSThomas Huth tcg_gen_shli_i32(tmp, cpu_sr_t, 31); 1607fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1608fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1); 1609fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), REG(B11_8), tmp); 1610fcf5ef2aSThomas Huth } 1611fcf5ef2aSThomas Huth return; 1612fcf5ef2aSThomas Huth case 0x4004: /* rotl Rn */ 1613fcf5ef2aSThomas Huth tcg_gen_rotli_i32(REG(B11_8), REG(B11_8), 1); 1614fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1615fcf5ef2aSThomas Huth return; 1616fcf5ef2aSThomas Huth case 0x4005: /* rotr Rn */ 1617fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1618fcf5ef2aSThomas Huth tcg_gen_rotri_i32(REG(B11_8), REG(B11_8), 1); 1619fcf5ef2aSThomas Huth return; 1620fcf5ef2aSThomas Huth case 0x4000: /* shll Rn */ 1621fcf5ef2aSThomas Huth case 0x4020: /* shal Rn */ 1622fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_t, REG(B11_8), 31); 1623fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
1624fcf5ef2aSThomas Huth return; 1625fcf5ef2aSThomas Huth case 0x4021: /* shar Rn */ 1626fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1627fcf5ef2aSThomas Huth tcg_gen_sari_i32(REG(B11_8), REG(B11_8), 1); 1628fcf5ef2aSThomas Huth return; 1629fcf5ef2aSThomas Huth case 0x4001: /* shlr Rn */ 1630fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1631fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1); 1632fcf5ef2aSThomas Huth return; 1633fcf5ef2aSThomas Huth case 0x4008: /* shll2 Rn */ 1634fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 2); 1635fcf5ef2aSThomas Huth return; 1636fcf5ef2aSThomas Huth case 0x4018: /* shll8 Rn */ 1637fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 8); 1638fcf5ef2aSThomas Huth return; 1639fcf5ef2aSThomas Huth case 0x4028: /* shll16 Rn */ 1640fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 16); 1641fcf5ef2aSThomas Huth return; 1642fcf5ef2aSThomas Huth case 0x4009: /* shlr2 Rn */ 1643fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 2); 1644fcf5ef2aSThomas Huth return; 1645fcf5ef2aSThomas Huth case 0x4019: /* shlr8 Rn */ 1646fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 8); 1647fcf5ef2aSThomas Huth return; 1648fcf5ef2aSThomas Huth case 0x4029: /* shlr16 Rn */ 1649fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 16); 1650fcf5ef2aSThomas Huth return; 1651fcf5ef2aSThomas Huth case 0x401b: /* tas.b @Rn */ 1652d3c2b2b3SRichard Henderson tcg_gen_atomic_fetch_or_i32(cpu_sr_t, REG(B11_8), 1653d3c2b2b3SRichard Henderson tcg_constant_i32(0x80), ctx->memidx, MO_UB); 1654d3c2b2b3SRichard Henderson tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, cpu_sr_t, 0); 1655fcf5ef2aSThomas Huth return; 1656fcf5ef2aSThomas Huth case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */ 1657fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 16587c9f7038SRichard Henderson tcg_gen_mov_i32(FREG(B11_8), cpu_fpul); 1659fcf5ef2aSThomas Huth return; 1660fcf5ef2aSThomas Huth case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */ 1661fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 16627c9f7038SRichard Henderson tcg_gen_mov_i32(cpu_fpul, FREG(B11_8)); 1663fcf5ef2aSThomas Huth return; 1664fcf5ef2aSThomas Huth case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */ 1665fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1666a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 1667fcf5ef2aSThomas Huth TCGv_i64 fp; 166893dc9c89SRichard Henderson if (ctx->opcode & 0x0100) { 166993dc9c89SRichard Henderson goto do_illegal; 167093dc9c89SRichard Henderson } 1671fcf5ef2aSThomas Huth fp = tcg_temp_new_i64(); 1672ad75a51eSRichard Henderson gen_helper_float_DT(fp, tcg_env, cpu_fpul); 16731e0b21d8SRichard Henderson gen_store_fpr64(ctx, fp, B11_8); 1674fcf5ef2aSThomas Huth } 1675fcf5ef2aSThomas Huth else { 1676ad75a51eSRichard Henderson gen_helper_float_FT(FREG(B11_8), tcg_env, cpu_fpul); 1677fcf5ef2aSThomas Huth } 1678fcf5ef2aSThomas Huth return; 1679fcf5ef2aSThomas Huth case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */ 1680fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1681a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 1682fcf5ef2aSThomas Huth TCGv_i64 fp; 168393dc9c89SRichard Henderson if (ctx->opcode & 0x0100) { 168493dc9c89SRichard Henderson goto do_illegal; 168593dc9c89SRichard Henderson } 1686fcf5ef2aSThomas Huth fp = tcg_temp_new_i64(); 16871e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp, B11_8); 1688ad75a51eSRichard Henderson gen_helper_ftrc_DT(cpu_fpul, tcg_env, fp); 
1689fcf5ef2aSThomas Huth } 1690fcf5ef2aSThomas Huth else { 1691ad75a51eSRichard Henderson gen_helper_ftrc_FT(cpu_fpul, tcg_env, FREG(B11_8)); 1692fcf5ef2aSThomas Huth } 1693fcf5ef2aSThomas Huth return; 1694fcf5ef2aSThomas Huth case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */ 1695fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 16967c9f7038SRichard Henderson tcg_gen_xori_i32(FREG(B11_8), FREG(B11_8), 0x80000000); 1697fcf5ef2aSThomas Huth return; 169857f5c1b0SAurelien Jarno case 0xf05d: /* fabs FRn/DRn - FPCSR: Nothing */ 1699fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17007c9f7038SRichard Henderson tcg_gen_andi_i32(FREG(B11_8), FREG(B11_8), 0x7fffffff); 1701fcf5ef2aSThomas Huth return; 1702fcf5ef2aSThomas Huth case 0xf06d: /* fsqrt FRn */ 1703fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1704a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 170593dc9c89SRichard Henderson if (ctx->opcode & 0x0100) { 170693dc9c89SRichard Henderson goto do_illegal; 170793dc9c89SRichard Henderson } 1708fcf5ef2aSThomas Huth TCGv_i64 fp = tcg_temp_new_i64(); 17091e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp, B11_8); 1710ad75a51eSRichard Henderson gen_helper_fsqrt_DT(fp, tcg_env, fp); 17111e0b21d8SRichard Henderson gen_store_fpr64(ctx, fp, B11_8); 1712fcf5ef2aSThomas Huth } else { 1713ad75a51eSRichard Henderson gen_helper_fsqrt_FT(FREG(B11_8), tcg_env, FREG(B11_8)); 1714fcf5ef2aSThomas Huth } 1715fcf5ef2aSThomas Huth return; 1716fcf5ef2aSThomas Huth case 0xf07d: /* fsrra FRn */ 1717fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 171811b7aa23SRichard Henderson CHECK_FPSCR_PR_0 1719ad75a51eSRichard Henderson gen_helper_fsrra_FT(FREG(B11_8), tcg_env, FREG(B11_8)); 1720fcf5ef2aSThomas Huth break; 1721fcf5ef2aSThomas Huth case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */ 1722fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17237e9f7ca8SRichard Henderson CHECK_FPSCR_PR_0 17247c9f7038SRichard Henderson tcg_gen_movi_i32(FREG(B11_8), 0); 1725fcf5ef2aSThomas Huth return; 1726fcf5ef2aSThomas Huth case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */ 1727fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17287e9f7ca8SRichard Henderson CHECK_FPSCR_PR_0 17297c9f7038SRichard Henderson tcg_gen_movi_i32(FREG(B11_8), 0x3f800000); 1730fcf5ef2aSThomas Huth return; 1731fcf5ef2aSThomas Huth case 0xf0ad: /* fcnvsd FPUL,DRn */ 1732fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1733fcf5ef2aSThomas Huth { 1734fcf5ef2aSThomas Huth TCGv_i64 fp = tcg_temp_new_i64(); 1735ad75a51eSRichard Henderson gen_helper_fcnvsd_FT_DT(fp, tcg_env, cpu_fpul); 17361e0b21d8SRichard Henderson gen_store_fpr64(ctx, fp, B11_8); 1737fcf5ef2aSThomas Huth } 1738fcf5ef2aSThomas Huth return; 1739fcf5ef2aSThomas Huth case 0xf0bd: /* fcnvds DRn,FPUL */ 1740fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1741fcf5ef2aSThomas Huth { 1742fcf5ef2aSThomas Huth TCGv_i64 fp = tcg_temp_new_i64(); 17431e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp, B11_8); 1744ad75a51eSRichard Henderson gen_helper_fcnvds_DT_FT(cpu_fpul, tcg_env, fp); 1745fcf5ef2aSThomas Huth } 1746fcf5ef2aSThomas Huth return; 1747fcf5ef2aSThomas Huth case 0xf0ed: /* fipr FVm,FVn */ 1748fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17497e9f7ca8SRichard Henderson CHECK_FPSCR_PR_1 17507e9f7ca8SRichard Henderson { 1751950b91beSRichard Henderson TCGv m = tcg_constant_i32((ctx->opcode >> 8) & 3); 1752950b91beSRichard Henderson TCGv n = tcg_constant_i32((ctx->opcode >> 10) & 3); 1753ad75a51eSRichard Henderson gen_helper_fipr(tcg_env, m, n); 1754fcf5ef2aSThomas Huth return; 1755fcf5ef2aSThomas Huth } 1756fcf5ef2aSThomas Huth break; 1757fcf5ef2aSThomas Huth case 0xf0fd: /* ftrv XMTRX,FVn */ 
1758fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17597e9f7ca8SRichard Henderson CHECK_FPSCR_PR_1 17607e9f7ca8SRichard Henderson { 17617e9f7ca8SRichard Henderson if ((ctx->opcode & 0x0300) != 0x0100) { 17627e9f7ca8SRichard Henderson goto do_illegal; 17637e9f7ca8SRichard Henderson } 1764950b91beSRichard Henderson TCGv n = tcg_constant_i32((ctx->opcode >> 10) & 3); 1765ad75a51eSRichard Henderson gen_helper_ftrv(tcg_env, n); 1766fcf5ef2aSThomas Huth return; 1767fcf5ef2aSThomas Huth } 1768fcf5ef2aSThomas Huth break; 1769fcf5ef2aSThomas Huth } 1770fcf5ef2aSThomas Huth #if 0 1771fcf5ef2aSThomas Huth fprintf(stderr, "unknown instruction 0x%04x at pc 0x%08x\n", 17726f1c2af6SRichard Henderson ctx->opcode, ctx->base.pc_next); 1773fcf5ef2aSThomas Huth fflush(stderr); 1774fcf5ef2aSThomas Huth #endif 17756b98213dSRichard Henderson do_illegal: 1776ab419fd8SRichard Henderson if (ctx->envflags & TB_FLAG_DELAY_SLOT_MASK) { 1777dec16c6eSRichard Henderson do_illegal_slot: 1778dec16c6eSRichard Henderson gen_save_cpu_state(ctx, true); 1779ad75a51eSRichard Henderson gen_helper_raise_slot_illegal_instruction(tcg_env); 1780fcf5ef2aSThomas Huth } else { 1781dec16c6eSRichard Henderson gen_save_cpu_state(ctx, true); 1782ad75a51eSRichard Henderson gen_helper_raise_illegal_instruction(tcg_env); 1783fcf5ef2aSThomas Huth } 17846f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 1785dec4f042SRichard Henderson return; 1786dec4f042SRichard Henderson 1787dec4f042SRichard Henderson do_fpu_disabled: 1788dec4f042SRichard Henderson gen_save_cpu_state(ctx, true); 1789ab419fd8SRichard Henderson if (ctx->envflags & TB_FLAG_DELAY_SLOT_MASK) { 1790ad75a51eSRichard Henderson gen_helper_raise_slot_fpu_disable(tcg_env); 1791dec4f042SRichard Henderson } else { 1792ad75a51eSRichard Henderson gen_helper_raise_fpu_disable(tcg_env); 1793dec4f042SRichard Henderson } 17946f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 1795dec4f042SRichard Henderson return; 1796fcf5ef2aSThomas Huth } 1797fcf5ef2aSThomas Huth 1798fcf5ef2aSThomas Huth static void decode_opc(DisasContext * ctx) 1799fcf5ef2aSThomas Huth { 1800a6215749SAurelien Jarno uint32_t old_flags = ctx->envflags; 1801fcf5ef2aSThomas Huth 1802fcf5ef2aSThomas Huth _decode_opc(ctx); 1803fcf5ef2aSThomas Huth 1804ab419fd8SRichard Henderson if (old_flags & TB_FLAG_DELAY_SLOT_MASK) { 1805fcf5ef2aSThomas Huth /* go out of the delay slot */ 1806ab419fd8SRichard Henderson ctx->envflags &= ~TB_FLAG_DELAY_SLOT_MASK; 18074bfa602bSRichard Henderson 18084bfa602bSRichard Henderson /* When in an exclusive region, we must continue to the end 18094bfa602bSRichard Henderson for conditional branches. */ 1810ab419fd8SRichard Henderson if (ctx->tbflags & TB_FLAG_GUSA_EXCLUSIVE 1811ab419fd8SRichard Henderson && old_flags & TB_FLAG_DELAY_SLOT_COND) { 18124bfa602bSRichard Henderson gen_delayed_conditional_jump(ctx); 18134bfa602bSRichard Henderson return; 18144bfa602bSRichard Henderson } 18154bfa602bSRichard Henderson /* Otherwise this is probably an invalid gUSA region. 18164bfa602bSRichard Henderson Drop the GUSA bits so the next TB doesn't see them. 
*/ 1817ab419fd8SRichard Henderson ctx->envflags &= ~TB_FLAG_GUSA_MASK; 18184bfa602bSRichard Henderson 1819ac9707eaSAurelien Jarno tcg_gen_movi_i32(cpu_flags, ctx->envflags); 1820ab419fd8SRichard Henderson if (old_flags & TB_FLAG_DELAY_SLOT_COND) { 1821fcf5ef2aSThomas Huth gen_delayed_conditional_jump(ctx); 1822be53081aSAurelien Jarno } else { 1823fcf5ef2aSThomas Huth gen_jump(ctx); 1824fcf5ef2aSThomas Huth } 18254bfa602bSRichard Henderson } 18264bfa602bSRichard Henderson } 1827fcf5ef2aSThomas Huth 18284bfa602bSRichard Henderson #ifdef CONFIG_USER_ONLY 18294f9ef4eeSRichard Henderson /* 18304f9ef4eeSRichard Henderson * Restart with the EXCLUSIVE bit set, within a TB run via 18314f9ef4eeSRichard Henderson * cpu_exec_step_atomic holding the exclusive lock. 18324f9ef4eeSRichard Henderson */ 18334f9ef4eeSRichard Henderson static void gen_restart_exclusive(DisasContext *ctx) 18344f9ef4eeSRichard Henderson { 18354f9ef4eeSRichard Henderson ctx->envflags |= TB_FLAG_GUSA_EXCLUSIVE; 18364f9ef4eeSRichard Henderson gen_save_cpu_state(ctx, false); 18374f9ef4eeSRichard Henderson gen_helper_exclusive(tcg_env); 18384f9ef4eeSRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 18394f9ef4eeSRichard Henderson } 18404f9ef4eeSRichard Henderson 18414bfa602bSRichard Henderson /* For uniprocessors, SH4 uses optimistic restartable atomic sequences. 18424bfa602bSRichard Henderson Upon an interrupt, a real kernel would simply notice magic values in 18434bfa602bSRichard Henderson the registers and reset the PC to the start of the sequence. 18444bfa602bSRichard Henderson 18454bfa602bSRichard Henderson For QEMU, we cannot do this in quite the same way. Instead, we notice 18464bfa602bSRichard Henderson the normal start of such a sequence (mov #-x,r15). While we can handle 18474bfa602bSRichard Henderson any sequence via cpu_exec_step_atomic, we can recognize the "normal" 18484bfa602bSRichard Henderson sequences and transform them into atomic operations as seen by the host. 18494bfa602bSRichard Henderson */ 1850be0e3d7aSRichard Henderson static void decode_gusa(DisasContext *ctx, CPUSH4State *env) 18514bfa602bSRichard Henderson { 1852d6a6cffdSRichard Henderson uint16_t insns[5]; 1853d6a6cffdSRichard Henderson int ld_adr, ld_dst, ld_mop; 1854d6a6cffdSRichard Henderson int op_dst, op_src, op_opc; 1855d6a6cffdSRichard Henderson int mv_src, mt_dst, st_src, st_mop; 1856d6a6cffdSRichard Henderson TCGv op_arg; 18576f1c2af6SRichard Henderson uint32_t pc = ctx->base.pc_next; 18586f1c2af6SRichard Henderson uint32_t pc_end = ctx->base.tb->cs_base; 18594bfa602bSRichard Henderson int max_insns = (pc_end - pc) / 2; 1860d6a6cffdSRichard Henderson int i; 18614bfa602bSRichard Henderson 1862d6a6cffdSRichard Henderson /* The state machine below will consume only a few insns. 1863d6a6cffdSRichard Henderson If there are more than that in a region, fail now. */ 1864d6a6cffdSRichard Henderson if (max_insns > ARRAY_SIZE(insns)) { 1865d6a6cffdSRichard Henderson goto fail; 1866d6a6cffdSRichard Henderson } 1867d6a6cffdSRichard Henderson 1868d6a6cffdSRichard Henderson /* Read all of the insns for the region. 
*/ 1869d6a6cffdSRichard Henderson for (i = 0; i < max_insns; ++i) { 18704e116893SIlya Leoshkevich insns[i] = translator_lduw(env, &ctx->base, pc + i * 2); 1871d6a6cffdSRichard Henderson } 1872d6a6cffdSRichard Henderson 1873d6a6cffdSRichard Henderson ld_adr = ld_dst = ld_mop = -1; 1874d6a6cffdSRichard Henderson mv_src = -1; 1875d6a6cffdSRichard Henderson op_dst = op_src = op_opc = -1; 1876d6a6cffdSRichard Henderson mt_dst = -1; 1877d6a6cffdSRichard Henderson st_src = st_mop = -1; 1878f764718dSRichard Henderson op_arg = NULL; 1879d6a6cffdSRichard Henderson i = 0; 1880d6a6cffdSRichard Henderson 1881d6a6cffdSRichard Henderson #define NEXT_INSN \ 1882d6a6cffdSRichard Henderson do { if (i >= max_insns) goto fail; ctx->opcode = insns[i++]; } while (0) 1883d6a6cffdSRichard Henderson 1884d6a6cffdSRichard Henderson /* 1885d6a6cffdSRichard Henderson * Expect a load to begin the region. 1886d6a6cffdSRichard Henderson */ 1887d6a6cffdSRichard Henderson NEXT_INSN; 1888d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) { 1889d6a6cffdSRichard Henderson case 0x6000: /* mov.b @Rm,Rn */ 1890d6a6cffdSRichard Henderson ld_mop = MO_SB; 1891d6a6cffdSRichard Henderson break; 1892d6a6cffdSRichard Henderson case 0x6001: /* mov.w @Rm,Rn */ 1893d6a6cffdSRichard Henderson ld_mop = MO_TESW; 1894d6a6cffdSRichard Henderson break; 1895d6a6cffdSRichard Henderson case 0x6002: /* mov.l @Rm,Rn */ 1896d6a6cffdSRichard Henderson ld_mop = MO_TESL; 1897d6a6cffdSRichard Henderson break; 1898d6a6cffdSRichard Henderson default: 1899d6a6cffdSRichard Henderson goto fail; 1900d6a6cffdSRichard Henderson } 1901d6a6cffdSRichard Henderson ld_adr = B7_4; 1902d6a6cffdSRichard Henderson ld_dst = B11_8; 1903d6a6cffdSRichard Henderson if (ld_adr == ld_dst) { 1904d6a6cffdSRichard Henderson goto fail; 1905d6a6cffdSRichard Henderson } 1906d6a6cffdSRichard Henderson /* Unless we see a mov, any two-operand operation must use ld_dst. */ 1907d6a6cffdSRichard Henderson op_dst = ld_dst; 1908d6a6cffdSRichard Henderson 1909d6a6cffdSRichard Henderson /* 1910d6a6cffdSRichard Henderson * Expect an optional register move. 1911d6a6cffdSRichard Henderson */ 1912d6a6cffdSRichard Henderson NEXT_INSN; 1913d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) { 1914d6a6cffdSRichard Henderson case 0x6003: /* mov Rm,Rn */ 191502b8e735SPhilippe Mathieu-Daudé /* 191623b5d9faSLichang Zhao * Here we want to recognize ld_dst being saved for later consumption, 191702b8e735SPhilippe Mathieu-Daudé * or for another input register being copied so that ld_dst need not 191802b8e735SPhilippe Mathieu-Daudé * be clobbered during the operation. 191902b8e735SPhilippe Mathieu-Daudé */ 1920d6a6cffdSRichard Henderson op_dst = B11_8; 1921d6a6cffdSRichard Henderson mv_src = B7_4; 1922d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 1923d6a6cffdSRichard Henderson /* Overwriting the load output. */ 1924d6a6cffdSRichard Henderson goto fail; 1925d6a6cffdSRichard Henderson } 1926d6a6cffdSRichard Henderson if (mv_src != ld_dst) { 1927d6a6cffdSRichard Henderson /* Copying a new input; constrain op_src to match the load. */ 1928d6a6cffdSRichard Henderson op_src = ld_dst; 1929d6a6cffdSRichard Henderson } 1930d6a6cffdSRichard Henderson break; 1931d6a6cffdSRichard Henderson 1932d6a6cffdSRichard Henderson default: 1933d6a6cffdSRichard Henderson /* Put back and re-examine as operation. */ 1934d6a6cffdSRichard Henderson --i; 1935d6a6cffdSRichard Henderson } 1936d6a6cffdSRichard Henderson 1937d6a6cffdSRichard Henderson /* 1938d6a6cffdSRichard Henderson * Expect the operation. 
1939d6a6cffdSRichard Henderson */ 1940d6a6cffdSRichard Henderson NEXT_INSN; 1941d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) { 1942d6a6cffdSRichard Henderson case 0x300c: /* add Rm,Rn */ 1943d6a6cffdSRichard Henderson op_opc = INDEX_op_add_i32; 1944d6a6cffdSRichard Henderson goto do_reg_op; 1945d6a6cffdSRichard Henderson case 0x2009: /* and Rm,Rn */ 1946d6a6cffdSRichard Henderson op_opc = INDEX_op_and_i32; 1947d6a6cffdSRichard Henderson goto do_reg_op; 1948d6a6cffdSRichard Henderson case 0x200a: /* xor Rm,Rn */ 1949d6a6cffdSRichard Henderson op_opc = INDEX_op_xor_i32; 1950d6a6cffdSRichard Henderson goto do_reg_op; 1951d6a6cffdSRichard Henderson case 0x200b: /* or Rm,Rn */ 1952d6a6cffdSRichard Henderson op_opc = INDEX_op_or_i32; 1953d6a6cffdSRichard Henderson do_reg_op: 1954d6a6cffdSRichard Henderson /* The operation register should be as expected, and the 1955d6a6cffdSRichard Henderson other input cannot depend on the load. */ 1956d6a6cffdSRichard Henderson if (op_dst != B11_8) { 1957d6a6cffdSRichard Henderson goto fail; 1958d6a6cffdSRichard Henderson } 1959d6a6cffdSRichard Henderson if (op_src < 0) { 1960d6a6cffdSRichard Henderson /* Unconstrainted input. */ 1961d6a6cffdSRichard Henderson op_src = B7_4; 1962d6a6cffdSRichard Henderson } else if (op_src == B7_4) { 1963d6a6cffdSRichard Henderson /* Constrained input matched load. All operations are 1964d6a6cffdSRichard Henderson commutative; "swap" them by "moving" the load output 1965d6a6cffdSRichard Henderson to the (implicit) first argument and the move source 1966d6a6cffdSRichard Henderson to the (explicit) second argument. */ 1967d6a6cffdSRichard Henderson op_src = mv_src; 1968d6a6cffdSRichard Henderson } else { 1969d6a6cffdSRichard Henderson goto fail; 1970d6a6cffdSRichard Henderson } 1971d6a6cffdSRichard Henderson op_arg = REG(op_src); 1972d6a6cffdSRichard Henderson break; 1973d6a6cffdSRichard Henderson 1974d6a6cffdSRichard Henderson case 0x6007: /* not Rm,Rn */ 1975d6a6cffdSRichard Henderson if (ld_dst != B7_4 || mv_src >= 0) { 1976d6a6cffdSRichard Henderson goto fail; 1977d6a6cffdSRichard Henderson } 1978d6a6cffdSRichard Henderson op_dst = B11_8; 1979d6a6cffdSRichard Henderson op_opc = INDEX_op_xor_i32; 1980950b91beSRichard Henderson op_arg = tcg_constant_i32(-1); 1981d6a6cffdSRichard Henderson break; 1982d6a6cffdSRichard Henderson 1983d6a6cffdSRichard Henderson case 0x7000 ... 0x700f: /* add #imm,Rn */ 1984d6a6cffdSRichard Henderson if (op_dst != B11_8 || mv_src >= 0) { 1985d6a6cffdSRichard Henderson goto fail; 1986d6a6cffdSRichard Henderson } 1987d6a6cffdSRichard Henderson op_opc = INDEX_op_add_i32; 1988950b91beSRichard Henderson op_arg = tcg_constant_i32(B7_0s); 1989d6a6cffdSRichard Henderson break; 1990d6a6cffdSRichard Henderson 1991d6a6cffdSRichard Henderson case 0x3000: /* cmp/eq Rm,Rn */ 1992d6a6cffdSRichard Henderson /* Looking for the middle of a compare-and-swap sequence, 1993d6a6cffdSRichard Henderson beginning with the compare. Operands can be either order, 1994d6a6cffdSRichard Henderson but with only one overlapping the load. */ 1995d6a6cffdSRichard Henderson if ((ld_dst == B11_8) + (ld_dst == B7_4) != 1 || mv_src >= 0) { 1996d6a6cffdSRichard Henderson goto fail; 1997d6a6cffdSRichard Henderson } 1998d6a6cffdSRichard Henderson op_opc = INDEX_op_setcond_i32; /* placeholder */ 1999d6a6cffdSRichard Henderson op_src = (ld_dst == B11_8 ? 
B7_4 : B11_8); 2000d6a6cffdSRichard Henderson op_arg = REG(op_src); 2001d6a6cffdSRichard Henderson 2002d6a6cffdSRichard Henderson NEXT_INSN; 2003d6a6cffdSRichard Henderson switch (ctx->opcode & 0xff00) { 2004d6a6cffdSRichard Henderson case 0x8b00: /* bf label */ 2005d6a6cffdSRichard Henderson case 0x8f00: /* bf/s label */ 2006d6a6cffdSRichard Henderson if (pc + (i + 1 + B7_0s) * 2 != pc_end) { 2007d6a6cffdSRichard Henderson goto fail; 2008d6a6cffdSRichard Henderson } 2009d6a6cffdSRichard Henderson if ((ctx->opcode & 0xff00) == 0x8b00) { /* bf label */ 2010d6a6cffdSRichard Henderson break; 2011d6a6cffdSRichard Henderson } 2012d6a6cffdSRichard Henderson /* We're looking to unconditionally modify Rn with the 2013d6a6cffdSRichard Henderson result of the comparison, within the delay slot of 2014d6a6cffdSRichard Henderson the branch. This is used by older gcc. */ 2015d6a6cffdSRichard Henderson NEXT_INSN; 2016d6a6cffdSRichard Henderson if ((ctx->opcode & 0xf0ff) == 0x0029) { /* movt Rn */ 2017d6a6cffdSRichard Henderson mt_dst = B11_8; 2018d6a6cffdSRichard Henderson } else { 2019d6a6cffdSRichard Henderson goto fail; 2020d6a6cffdSRichard Henderson } 2021d6a6cffdSRichard Henderson break; 2022d6a6cffdSRichard Henderson 2023d6a6cffdSRichard Henderson default: 2024d6a6cffdSRichard Henderson goto fail; 2025d6a6cffdSRichard Henderson } 2026d6a6cffdSRichard Henderson break; 2027d6a6cffdSRichard Henderson 2028d6a6cffdSRichard Henderson case 0x2008: /* tst Rm,Rn */ 2029d6a6cffdSRichard Henderson /* Looking for a compare-and-swap against zero. */ 2030d6a6cffdSRichard Henderson if (ld_dst != B11_8 || ld_dst != B7_4 || mv_src >= 0) { 2031d6a6cffdSRichard Henderson goto fail; 2032d6a6cffdSRichard Henderson } 2033d6a6cffdSRichard Henderson op_opc = INDEX_op_setcond_i32; 2034950b91beSRichard Henderson op_arg = tcg_constant_i32(0); 2035d6a6cffdSRichard Henderson 2036d6a6cffdSRichard Henderson NEXT_INSN; 2037d6a6cffdSRichard Henderson if ((ctx->opcode & 0xff00) != 0x8900 /* bt label */ 2038d6a6cffdSRichard Henderson || pc + (i + 1 + B7_0s) * 2 != pc_end) { 2039d6a6cffdSRichard Henderson goto fail; 2040d6a6cffdSRichard Henderson } 2041d6a6cffdSRichard Henderson break; 2042d6a6cffdSRichard Henderson 2043d6a6cffdSRichard Henderson default: 2044d6a6cffdSRichard Henderson /* Put back and re-examine as store. */ 2045d6a6cffdSRichard Henderson --i; 2046d6a6cffdSRichard Henderson } 2047d6a6cffdSRichard Henderson 2048d6a6cffdSRichard Henderson /* 2049d6a6cffdSRichard Henderson * Expect the store. 2050d6a6cffdSRichard Henderson */ 2051d6a6cffdSRichard Henderson /* The store must be the last insn. */ 2052d6a6cffdSRichard Henderson if (i != max_insns - 1) { 2053d6a6cffdSRichard Henderson goto fail; 2054d6a6cffdSRichard Henderson } 2055d6a6cffdSRichard Henderson NEXT_INSN; 2056d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) { 2057d6a6cffdSRichard Henderson case 0x2000: /* mov.b Rm,@Rn */ 2058d6a6cffdSRichard Henderson st_mop = MO_UB; 2059d6a6cffdSRichard Henderson break; 2060d6a6cffdSRichard Henderson case 0x2001: /* mov.w Rm,@Rn */ 2061d6a6cffdSRichard Henderson st_mop = MO_UW; 2062d6a6cffdSRichard Henderson break; 2063d6a6cffdSRichard Henderson case 0x2002: /* mov.l Rm,@Rn */ 2064d6a6cffdSRichard Henderson st_mop = MO_UL; 2065d6a6cffdSRichard Henderson break; 2066d6a6cffdSRichard Henderson default: 2067d6a6cffdSRichard Henderson goto fail; 2068d6a6cffdSRichard Henderson } 2069d6a6cffdSRichard Henderson /* The store must match the load. 
*/ 2070d6a6cffdSRichard Henderson if (ld_adr != B11_8 || st_mop != (ld_mop & MO_SIZE)) { 2071d6a6cffdSRichard Henderson goto fail; 2072d6a6cffdSRichard Henderson } 2073d6a6cffdSRichard Henderson st_src = B7_4; 2074d6a6cffdSRichard Henderson 2075d6a6cffdSRichard Henderson #undef NEXT_INSN 2076d6a6cffdSRichard Henderson 2077d6a6cffdSRichard Henderson /* 2078d6a6cffdSRichard Henderson * Emit the operation. 2079d6a6cffdSRichard Henderson */ 2080d6a6cffdSRichard Henderson switch (op_opc) { 2081d6a6cffdSRichard Henderson case -1: 2082d6a6cffdSRichard Henderson /* No operation found. Look for exchange pattern. */ 2083d6a6cffdSRichard Henderson if (st_src == ld_dst || mv_src >= 0) { 2084d6a6cffdSRichard Henderson goto fail; 2085d6a6cffdSRichard Henderson } 2086d6a6cffdSRichard Henderson tcg_gen_atomic_xchg_i32(REG(ld_dst), REG(ld_adr), REG(st_src), 2087d6a6cffdSRichard Henderson ctx->memidx, ld_mop); 2088d6a6cffdSRichard Henderson break; 2089d6a6cffdSRichard Henderson 2090d6a6cffdSRichard Henderson case INDEX_op_add_i32: 2091d6a6cffdSRichard Henderson if (op_dst != st_src) { 2092d6a6cffdSRichard Henderson goto fail; 2093d6a6cffdSRichard Henderson } 2094d6a6cffdSRichard Henderson if (op_dst == ld_dst && st_mop == MO_UL) { 2095d6a6cffdSRichard Henderson tcg_gen_atomic_add_fetch_i32(REG(ld_dst), REG(ld_adr), 2096d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2097d6a6cffdSRichard Henderson } else { 2098d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_add_i32(REG(ld_dst), REG(ld_adr), 2099d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2100d6a6cffdSRichard Henderson if (op_dst != ld_dst) { 2101d6a6cffdSRichard Henderson /* Note that mop sizes < 4 cannot use add_fetch 2102d6a6cffdSRichard Henderson because it won't carry into the higher bits. */ 2103d6a6cffdSRichard Henderson tcg_gen_add_i32(REG(op_dst), REG(ld_dst), op_arg); 2104d6a6cffdSRichard Henderson } 2105d6a6cffdSRichard Henderson } 2106d6a6cffdSRichard Henderson break; 2107d6a6cffdSRichard Henderson 2108d6a6cffdSRichard Henderson case INDEX_op_and_i32: 2109d6a6cffdSRichard Henderson if (op_dst != st_src) { 2110d6a6cffdSRichard Henderson goto fail; 2111d6a6cffdSRichard Henderson } 2112d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 2113d6a6cffdSRichard Henderson tcg_gen_atomic_and_fetch_i32(REG(ld_dst), REG(ld_adr), 2114d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2115d6a6cffdSRichard Henderson } else { 2116d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_and_i32(REG(ld_dst), REG(ld_adr), 2117d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2118d6a6cffdSRichard Henderson tcg_gen_and_i32(REG(op_dst), REG(ld_dst), op_arg); 2119d6a6cffdSRichard Henderson } 2120d6a6cffdSRichard Henderson break; 2121d6a6cffdSRichard Henderson 2122d6a6cffdSRichard Henderson case INDEX_op_or_i32: 2123d6a6cffdSRichard Henderson if (op_dst != st_src) { 2124d6a6cffdSRichard Henderson goto fail; 2125d6a6cffdSRichard Henderson } 2126d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 2127d6a6cffdSRichard Henderson tcg_gen_atomic_or_fetch_i32(REG(ld_dst), REG(ld_adr), 2128d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2129d6a6cffdSRichard Henderson } else { 2130d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_or_i32(REG(ld_dst), REG(ld_adr), 2131d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2132d6a6cffdSRichard Henderson tcg_gen_or_i32(REG(op_dst), REG(ld_dst), op_arg); 2133d6a6cffdSRichard Henderson } 2134d6a6cffdSRichard Henderson break; 2135d6a6cffdSRichard Henderson 2136d6a6cffdSRichard Henderson case 
INDEX_op_xor_i32: 2137d6a6cffdSRichard Henderson if (op_dst != st_src) { 2138d6a6cffdSRichard Henderson goto fail; 2139d6a6cffdSRichard Henderson } 2140d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 2141d6a6cffdSRichard Henderson tcg_gen_atomic_xor_fetch_i32(REG(ld_dst), REG(ld_adr), 2142d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2143d6a6cffdSRichard Henderson } else { 2144d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_xor_i32(REG(ld_dst), REG(ld_adr), 2145d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2146d6a6cffdSRichard Henderson tcg_gen_xor_i32(REG(op_dst), REG(ld_dst), op_arg); 2147d6a6cffdSRichard Henderson } 2148d6a6cffdSRichard Henderson break; 2149d6a6cffdSRichard Henderson 2150d6a6cffdSRichard Henderson case INDEX_op_setcond_i32: 2151d6a6cffdSRichard Henderson if (st_src == ld_dst) { 2152d6a6cffdSRichard Henderson goto fail; 2153d6a6cffdSRichard Henderson } 2154d6a6cffdSRichard Henderson tcg_gen_atomic_cmpxchg_i32(REG(ld_dst), REG(ld_adr), op_arg, 2155d6a6cffdSRichard Henderson REG(st_src), ctx->memidx, ld_mop); 2156d6a6cffdSRichard Henderson tcg_gen_setcond_i32(TCG_COND_EQ, cpu_sr_t, REG(ld_dst), op_arg); 2157d6a6cffdSRichard Henderson if (mt_dst >= 0) { 2158d6a6cffdSRichard Henderson tcg_gen_mov_i32(REG(mt_dst), cpu_sr_t); 2159d6a6cffdSRichard Henderson } 2160d6a6cffdSRichard Henderson break; 2161d6a6cffdSRichard Henderson 2162d6a6cffdSRichard Henderson default: 2163d6a6cffdSRichard Henderson g_assert_not_reached(); 2164d6a6cffdSRichard Henderson } 2165d6a6cffdSRichard Henderson 2166d6a6cffdSRichard Henderson /* The entire region has been translated. */ 2167ab419fd8SRichard Henderson ctx->envflags &= ~TB_FLAG_GUSA_MASK; 2168e03291cdSRichard Henderson goto done; 2169d6a6cffdSRichard Henderson 2170d6a6cffdSRichard Henderson fail: 21714bfa602bSRichard Henderson qemu_log_mask(LOG_UNIMP, "Unrecognized gUSA sequence %08x-%08x\n", 21724bfa602bSRichard Henderson pc, pc_end); 21734bfa602bSRichard Henderson 21744f9ef4eeSRichard Henderson gen_restart_exclusive(ctx); 21754bfa602bSRichard Henderson 21764bfa602bSRichard Henderson /* We're not executing an instruction, but we must report one for the 21774bfa602bSRichard Henderson purposes of accounting within the TB. We might as well report the 21786f1c2af6SRichard Henderson entire region consumed via ctx->base.pc_next so that it's immediately 21796f1c2af6SRichard Henderson available in the disassembly dump. */ 2180e03291cdSRichard Henderson 2181e03291cdSRichard Henderson done: 21826f1c2af6SRichard Henderson ctx->base.pc_next = pc_end; 2183be0e3d7aSRichard Henderson ctx->base.num_insns += max_insns - 1; 2184e03291cdSRichard Henderson 2185e03291cdSRichard Henderson /* 2186e03291cdSRichard Henderson * Emit insn_start to cover each of the insns in the region. 2187e03291cdSRichard Henderson * This matches an assert in tcg.c making sure that we have 2188e03291cdSRichard Henderson * tb->icount * insn_start. 2189e03291cdSRichard Henderson */ 2190e03291cdSRichard Henderson for (i = 1; i < max_insns; ++i) { 2191e03291cdSRichard Henderson tcg_gen_insn_start(pc + i * 2, ctx->envflags); 2192e03291cdSRichard Henderson } 21934bfa602bSRichard Henderson } 21944bfa602bSRichard Henderson #endif 21954bfa602bSRichard Henderson 2196fd1b3d38SEmilio G. Cota static void sh4_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs) 2197fcf5ef2aSThomas Huth { 2198fd1b3d38SEmilio G. Cota DisasContext *ctx = container_of(dcbase, DisasContext, base); 2199be0e3d7aSRichard Henderson uint32_t tbflags; 2200fd1b3d38SEmilio G. 
2201fcf5ef2aSThomas Huth 
2202be0e3d7aSRichard Henderson     ctx->tbflags = tbflags = ctx->base.tb->flags;
2203be0e3d7aSRichard Henderson     ctx->envflags = tbflags & TB_FLAG_ENVFLAGS_MASK;
2204be0e3d7aSRichard Henderson     ctx->memidx = (tbflags & (1u << SR_MD)) == 0 ? 1 : 0;
2205fcf5ef2aSThomas Huth     /* We don't know if the delayed pc came from a dynamic or static branch,
2206fcf5ef2aSThomas Huth        so assume it is a dynamic branch. */
2207fd1b3d38SEmilio G. Cota     ctx->delayed_pc = -1; /* use delayed pc from env pointer */
2208795bec96SPhilippe Mathieu-Daudé     ctx->features = cpu_env(cs)->features;
2209be0e3d7aSRichard Henderson     ctx->has_movcal = (tbflags & TB_FLAG_PENDING_MOVCA);
2210be0e3d7aSRichard Henderson     ctx->gbank = ((tbflags & (1 << SR_MD)) &&
2211be0e3d7aSRichard Henderson                   (tbflags & (1 << SR_RB))) * 0x10;
2212be0e3d7aSRichard Henderson     ctx->fbank = tbflags & FPSCR_FR ? 0x10 : 0;
2213be0e3d7aSRichard Henderson 
2214ab419fd8SRichard Henderson #ifdef CONFIG_USER_ONLY
2215ab419fd8SRichard Henderson     if (tbflags & TB_FLAG_GUSA_MASK) {
2216ab419fd8SRichard Henderson         /* In gUSA exclusive region. */
2217be0e3d7aSRichard Henderson         uint32_t pc = ctx->base.pc_next;
2218be0e3d7aSRichard Henderson         uint32_t pc_end = ctx->base.tb->cs_base;
2219ab419fd8SRichard Henderson         int backup = sextract32(ctx->tbflags, TB_FLAG_GUSA_SHIFT, 8);
2220be0e3d7aSRichard Henderson         int max_insns = (pc_end - pc) / 2;
2221be0e3d7aSRichard Henderson 
2222be0e3d7aSRichard Henderson         if (pc != pc_end + backup || max_insns < 2) {
2223be0e3d7aSRichard Henderson             /* This is a malformed gUSA region. Don't do anything special,
2224be0e3d7aSRichard Henderson                since the interpreter is likely to get confused. */
2225ab419fd8SRichard Henderson             ctx->envflags &= ~TB_FLAG_GUSA_MASK;
2226ab419fd8SRichard Henderson         } else if (tbflags & TB_FLAG_GUSA_EXCLUSIVE) {
2227be0e3d7aSRichard Henderson             /* Regardless of single-stepping or the end of the page,
2228be0e3d7aSRichard Henderson                we must complete execution of the gUSA region while
2229be0e3d7aSRichard Henderson                holding the exclusive lock. */
2230be0e3d7aSRichard Henderson             ctx->base.max_insns = max_insns;
2231be0e3d7aSRichard Henderson             return;
2232be0e3d7aSRichard Henderson         }
2233be0e3d7aSRichard Henderson     }
2234ab419fd8SRichard Henderson #endif
22354448a836SRichard Henderson 
22364448a836SRichard Henderson     /* Since the ISA is fixed-width, we can bound by the number
22374448a836SRichard Henderson        of instructions remaining on the page. */
2238fd1b3d38SEmilio G. Cota     bound = -(ctx->base.pc_next | TARGET_PAGE_MASK) / 2;
2239fd1b3d38SEmilio G. Cota     ctx->base.max_insns = MIN(ctx->base.max_insns, bound);
2240fcf5ef2aSThomas Huth }
2241fcf5ef2aSThomas Huth 
2242fd1b3d38SEmilio G. Cota static void sh4_tr_tb_start(DisasContextBase *dcbase, CPUState *cs)
2243fd1b3d38SEmilio G. Cota {
2244fd1b3d38SEmilio G. Cota }
22454bfa602bSRichard Henderson 
2246fd1b3d38SEmilio G. Cota static void sh4_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
2247fd1b3d38SEmilio G. Cota {
2248fd1b3d38SEmilio G. Cota     DisasContext *ctx = container_of(dcbase, DisasContext, base);
2249fcf5ef2aSThomas Huth 
2250fd1b3d38SEmilio G. Cota     tcg_gen_insn_start(ctx->base.pc_next, ctx->envflags);
2251fd1b3d38SEmilio G. Cota }
2252fd1b3d38SEmilio G. Cota 
2253fd1b3d38SEmilio G. Cota static void sh4_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
2254fd1b3d38SEmilio G. Cota {
2255b77af26eSRichard Henderson     CPUSH4State *env = cpu_env(cs);
2256fd1b3d38SEmilio G. Cota     DisasContext *ctx = container_of(dcbase, DisasContext, base);
2257fd1b3d38SEmilio G. Cota 
2258be0e3d7aSRichard Henderson #ifdef CONFIG_USER_ONLY
2259ab419fd8SRichard Henderson     if (unlikely(ctx->envflags & TB_FLAG_GUSA_MASK)
2260ab419fd8SRichard Henderson         && !(ctx->envflags & TB_FLAG_GUSA_EXCLUSIVE)) {
22614f9ef4eeSRichard Henderson         /*
22624f9ef4eeSRichard Henderson          * We're in a gUSA region, and we have not already fallen
22634f9ef4eeSRichard Henderson          * back on using an exclusive region. Attempt to parse the
22644f9ef4eeSRichard Henderson          * region into a single supported atomic operation. Failure
22654f9ef4eeSRichard Henderson          * is handled within the parser by raising an exception to
22664f9ef4eeSRichard Henderson          * retry using an exclusive region.
22674f9ef4eeSRichard Henderson          *
22684f9ef4eeSRichard Henderson          * Parsing the region in one block conflicts with plugins,
22694f9ef4eeSRichard Henderson          * so always use exclusive mode if plugins enabled.
22704f9ef4eeSRichard Henderson          */
22714f9ef4eeSRichard Henderson         if (ctx->base.plugin_enabled) {
22724f9ef4eeSRichard Henderson             gen_restart_exclusive(ctx);
22734f9ef4eeSRichard Henderson             ctx->base.pc_next += 2;
22744f9ef4eeSRichard Henderson         } else {
2275be0e3d7aSRichard Henderson             decode_gusa(ctx, env);
22764f9ef4eeSRichard Henderson         }
2277be0e3d7aSRichard Henderson         return;
2278be0e3d7aSRichard Henderson     }
2279be0e3d7aSRichard Henderson #endif
2280be0e3d7aSRichard Henderson 
22814e116893SIlya Leoshkevich     ctx->opcode = translator_lduw(env, &ctx->base, ctx->base.pc_next);
2282fd1b3d38SEmilio G. Cota     decode_opc(ctx);
2283fd1b3d38SEmilio G. Cota     ctx->base.pc_next += 2;
2284fcf5ef2aSThomas Huth }
2285fcf5ef2aSThomas Huth 
2286fd1b3d38SEmilio G. Cota static void sh4_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
2287fd1b3d38SEmilio G. Cota {
2288fd1b3d38SEmilio G. Cota     DisasContext *ctx = container_of(dcbase, DisasContext, base);
22894bfa602bSRichard Henderson 
2290ab419fd8SRichard Henderson     if (ctx->tbflags & TB_FLAG_GUSA_EXCLUSIVE) {
22914bfa602bSRichard Henderson         /* Ending the region of exclusivity. Clear the bits. */
2292ab419fd8SRichard Henderson         ctx->envflags &= ~TB_FLAG_GUSA_MASK;
22934bfa602bSRichard Henderson     }
22944bfa602bSRichard Henderson 
2295fd1b3d38SEmilio G. Cota     switch (ctx->base.is_jmp) {
22964834871bSRichard Henderson     case DISAS_STOP:
2297fd1b3d38SEmilio G. Cota         gen_save_cpu_state(ctx, true);
229807ea28b4SRichard Henderson         tcg_gen_exit_tb(NULL, 0);
22990fc37a8bSAurelien Jarno         break;
23004834871bSRichard Henderson     case DISAS_NEXT:
2301fd1b3d38SEmilio G. Cota     case DISAS_TOO_MANY:
2302fd1b3d38SEmilio G. Cota         gen_save_cpu_state(ctx, false);
2303fd1b3d38SEmilio G. Cota         gen_goto_tb(ctx, 0, ctx->base.pc_next);
2304fcf5ef2aSThomas Huth         break;
23054834871bSRichard Henderson     case DISAS_NORETURN:
2306fcf5ef2aSThomas Huth         break;
23074834871bSRichard Henderson     default:
23084834871bSRichard Henderson         g_assert_not_reached();
2309fcf5ef2aSThomas Huth     }
2310fcf5ef2aSThomas Huth }
2311fd1b3d38SEmilio G. Cota 
23128eb806a7SRichard Henderson static void sh4_tr_disas_log(const DisasContextBase *dcbase,
23138eb806a7SRichard Henderson                              CPUState *cs, FILE *logfile)
2314fd1b3d38SEmilio G. Cota {
23158eb806a7SRichard Henderson     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
23168eb806a7SRichard Henderson     target_disas(logfile, cs, dcbase->pc_first, dcbase->tb->size);
2317fd1b3d38SEmilio G. Cota }
2318fd1b3d38SEmilio G. Cota 
2319fd1b3d38SEmilio G. Cota static const TranslatorOps sh4_tr_ops = {
2320fd1b3d38SEmilio G. Cota     .init_disas_context = sh4_tr_init_disas_context,
2321fd1b3d38SEmilio G. Cota     .tb_start = sh4_tr_tb_start,
2322fd1b3d38SEmilio G. Cota     .insn_start = sh4_tr_insn_start,
2323fd1b3d38SEmilio G. Cota     .translate_insn = sh4_tr_translate_insn,
2324fd1b3d38SEmilio G. Cota     .tb_stop = sh4_tr_tb_stop,
2325fd1b3d38SEmilio G. Cota     .disas_log = sh4_tr_disas_log,
2326fd1b3d38SEmilio G. Cota };
2327fd1b3d38SEmilio G. Cota 
2328597f9b2dSRichard Henderson void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
232932f0c394SAnton Johansson                            vaddr pc, void *host_pc)
2330fd1b3d38SEmilio G. Cota {
2331fd1b3d38SEmilio G. Cota     DisasContext ctx;
2332fd1b3d38SEmilio G. Cota 
2333306c8721SRichard Henderson     translator_loop(cs, tb, max_insns, pc, host_pc, &sh4_tr_ops, &ctx.base);
2334fcf5ef2aSThomas Huth }
2335