1fcf5ef2aSThomas Huth /* 2fcf5ef2aSThomas Huth * SH4 translation 3fcf5ef2aSThomas Huth * 4fcf5ef2aSThomas Huth * Copyright (c) 2005 Samuel Tardieu 5fcf5ef2aSThomas Huth * 6fcf5ef2aSThomas Huth * This library is free software; you can redistribute it and/or 7fcf5ef2aSThomas Huth * modify it under the terms of the GNU Lesser General Public 8fcf5ef2aSThomas Huth * License as published by the Free Software Foundation; either 96faf2b6cSThomas Huth * version 2.1 of the License, or (at your option) any later version. 10fcf5ef2aSThomas Huth * 11fcf5ef2aSThomas Huth * This library is distributed in the hope that it will be useful, 12fcf5ef2aSThomas Huth * but WITHOUT ANY WARRANTY; without even the implied warranty of 13fcf5ef2aSThomas Huth * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 14fcf5ef2aSThomas Huth * Lesser General Public License for more details. 15fcf5ef2aSThomas Huth * 16fcf5ef2aSThomas Huth * You should have received a copy of the GNU Lesser General Public 17fcf5ef2aSThomas Huth * License along with this library; if not, see <http://www.gnu.org/licenses/>. 18fcf5ef2aSThomas Huth */ 19fcf5ef2aSThomas Huth 20fcf5ef2aSThomas Huth #define DEBUG_DISAS 21fcf5ef2aSThomas Huth 22fcf5ef2aSThomas Huth #include "qemu/osdep.h" 23fcf5ef2aSThomas Huth #include "cpu.h" 24fcf5ef2aSThomas Huth #include "disas/disas.h" 25fcf5ef2aSThomas Huth #include "exec/exec-all.h" 26dcb32f1dSPhilippe Mathieu-Daudé #include "tcg/tcg-op.h" 27fcf5ef2aSThomas Huth #include "exec/cpu_ldst.h" 28fcf5ef2aSThomas Huth #include "exec/helper-proto.h" 29fcf5ef2aSThomas Huth #include "exec/helper-gen.h" 304834871bSRichard Henderson #include "exec/translator.h" 31fcf5ef2aSThomas Huth #include "exec/log.h" 3290c84c56SMarkus Armbruster #include "qemu/qemu-print.h" 33fcf5ef2aSThomas Huth 34fcf5ef2aSThomas Huth 35fcf5ef2aSThomas Huth typedef struct DisasContext { 366f1c2af6SRichard Henderson DisasContextBase base; 376f1c2af6SRichard Henderson 38a6215749SAurelien Jarno uint32_t tbflags; /* should stay unmodified during the TB translation */ 39a6215749SAurelien Jarno uint32_t envflags; /* should stay in sync with env->flags using TCG ops */ 40fcf5ef2aSThomas Huth int memidx; 413a3bb8d2SRichard Henderson int gbank; 425c13bad9SRichard Henderson int fbank; 43fcf5ef2aSThomas Huth uint32_t delayed_pc; 44fcf5ef2aSThomas Huth uint32_t features; 456f1c2af6SRichard Henderson 466f1c2af6SRichard Henderson uint16_t opcode; 476f1c2af6SRichard Henderson 486f1c2af6SRichard Henderson bool has_movcal; 49fcf5ef2aSThomas Huth } DisasContext; 50fcf5ef2aSThomas Huth 51fcf5ef2aSThomas Huth #if defined(CONFIG_USER_ONLY) 52fcf5ef2aSThomas Huth #define IS_USER(ctx) 1 534da06fb3SRichard Henderson #define UNALIGN(C) (ctx->tbflags & TB_FLAG_UNALIGN ? MO_UNALN : MO_ALIGN) 54fcf5ef2aSThomas Huth #else 55a6215749SAurelien Jarno #define IS_USER(ctx) (!(ctx->tbflags & (1u << SR_MD))) 564da06fb3SRichard Henderson #define UNALIGN(C) 0 57fcf5ef2aSThomas Huth #endif 58fcf5ef2aSThomas Huth 596f1c2af6SRichard Henderson /* Target-specific values for ctx->base.is_jmp. */ 604834871bSRichard Henderson /* We want to exit back to the cpu loop for some reason. 614834871bSRichard Henderson Usually this is to recognize interrupts immediately. 
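In this file it is set, for example, after rte, frchg, fschg and fpchg below, which change SR or FPSCR bits that are cached in ctx->tbflags for the rest of the TB.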
*/ 624834871bSRichard Henderson #define DISAS_STOP DISAS_TARGET_0 63fcf5ef2aSThomas Huth 64fcf5ef2aSThomas Huth /* global register indexes */ 653a3bb8d2SRichard Henderson static TCGv cpu_gregs[32]; 66fcf5ef2aSThomas Huth static TCGv cpu_sr, cpu_sr_m, cpu_sr_q, cpu_sr_t; 67fcf5ef2aSThomas Huth static TCGv cpu_pc, cpu_ssr, cpu_spc, cpu_gbr; 68fcf5ef2aSThomas Huth static TCGv cpu_vbr, cpu_sgr, cpu_dbr, cpu_mach, cpu_macl; 69f85da308SRichard Henderson static TCGv cpu_pr, cpu_fpscr, cpu_fpul; 70f85da308SRichard Henderson static TCGv cpu_lock_addr, cpu_lock_value; 71fcf5ef2aSThomas Huth static TCGv cpu_fregs[32]; 72fcf5ef2aSThomas Huth 73fcf5ef2aSThomas Huth /* internal register indexes */ 7447b9f4d5SAurelien Jarno static TCGv cpu_flags, cpu_delayed_pc, cpu_delayed_cond; 75fcf5ef2aSThomas Huth 76fcf5ef2aSThomas Huth #include "exec/gen-icount.h" 77fcf5ef2aSThomas Huth 78fcf5ef2aSThomas Huth void sh4_translate_init(void) 79fcf5ef2aSThomas Huth { 80fcf5ef2aSThomas Huth int i; 81fcf5ef2aSThomas Huth static const char * const gregnames[24] = { 82fcf5ef2aSThomas Huth "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0", 83fcf5ef2aSThomas Huth "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0", 84fcf5ef2aSThomas Huth "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15", 85fcf5ef2aSThomas Huth "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1", 86fcf5ef2aSThomas Huth "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1" 87fcf5ef2aSThomas Huth }; 88fcf5ef2aSThomas Huth static const char * const fregnames[32] = { 89fcf5ef2aSThomas Huth "FPR0_BANK0", "FPR1_BANK0", "FPR2_BANK0", "FPR3_BANK0", 90fcf5ef2aSThomas Huth "FPR4_BANK0", "FPR5_BANK0", "FPR6_BANK0", "FPR7_BANK0", 91fcf5ef2aSThomas Huth "FPR8_BANK0", "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0", 92fcf5ef2aSThomas Huth "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0", 93fcf5ef2aSThomas Huth "FPR0_BANK1", "FPR1_BANK1", "FPR2_BANK1", "FPR3_BANK1", 94fcf5ef2aSThomas Huth "FPR4_BANK1", "FPR5_BANK1", "FPR6_BANK1", "FPR7_BANK1", 95fcf5ef2aSThomas Huth "FPR8_BANK1", "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1", 96fcf5ef2aSThomas Huth "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1", 97fcf5ef2aSThomas Huth }; 98fcf5ef2aSThomas Huth 993a3bb8d2SRichard Henderson for (i = 0; i < 24; i++) { 100fcf5ef2aSThomas Huth cpu_gregs[i] = tcg_global_mem_new_i32(cpu_env, 101fcf5ef2aSThomas Huth offsetof(CPUSH4State, gregs[i]), 102fcf5ef2aSThomas Huth gregnames[i]); 1033a3bb8d2SRichard Henderson } 1043a3bb8d2SRichard Henderson memcpy(cpu_gregs + 24, cpu_gregs + 8, 8 * sizeof(TCGv)); 105fcf5ef2aSThomas Huth 106fcf5ef2aSThomas Huth cpu_pc = tcg_global_mem_new_i32(cpu_env, 107fcf5ef2aSThomas Huth offsetof(CPUSH4State, pc), "PC"); 108fcf5ef2aSThomas Huth cpu_sr = tcg_global_mem_new_i32(cpu_env, 109fcf5ef2aSThomas Huth offsetof(CPUSH4State, sr), "SR"); 110fcf5ef2aSThomas Huth cpu_sr_m = tcg_global_mem_new_i32(cpu_env, 111fcf5ef2aSThomas Huth offsetof(CPUSH4State, sr_m), "SR_M"); 112fcf5ef2aSThomas Huth cpu_sr_q = tcg_global_mem_new_i32(cpu_env, 113fcf5ef2aSThomas Huth offsetof(CPUSH4State, sr_q), "SR_Q"); 114fcf5ef2aSThomas Huth cpu_sr_t = tcg_global_mem_new_i32(cpu_env, 115fcf5ef2aSThomas Huth offsetof(CPUSH4State, sr_t), "SR_T"); 116fcf5ef2aSThomas Huth cpu_ssr = tcg_global_mem_new_i32(cpu_env, 117fcf5ef2aSThomas Huth offsetof(CPUSH4State, ssr), "SSR"); 118fcf5ef2aSThomas Huth cpu_spc = tcg_global_mem_new_i32(cpu_env, 119fcf5ef2aSThomas Huth offsetof(CPUSH4State, spc), "SPC"); 120fcf5ef2aSThomas Huth cpu_gbr = tcg_global_mem_new_i32(cpu_env, 121fcf5ef2aSThomas Huth 
offsetof(CPUSH4State, gbr), "GBR"); 122fcf5ef2aSThomas Huth cpu_vbr = tcg_global_mem_new_i32(cpu_env, 123fcf5ef2aSThomas Huth offsetof(CPUSH4State, vbr), "VBR"); 124fcf5ef2aSThomas Huth cpu_sgr = tcg_global_mem_new_i32(cpu_env, 125fcf5ef2aSThomas Huth offsetof(CPUSH4State, sgr), "SGR"); 126fcf5ef2aSThomas Huth cpu_dbr = tcg_global_mem_new_i32(cpu_env, 127fcf5ef2aSThomas Huth offsetof(CPUSH4State, dbr), "DBR"); 128fcf5ef2aSThomas Huth cpu_mach = tcg_global_mem_new_i32(cpu_env, 129fcf5ef2aSThomas Huth offsetof(CPUSH4State, mach), "MACH"); 130fcf5ef2aSThomas Huth cpu_macl = tcg_global_mem_new_i32(cpu_env, 131fcf5ef2aSThomas Huth offsetof(CPUSH4State, macl), "MACL"); 132fcf5ef2aSThomas Huth cpu_pr = tcg_global_mem_new_i32(cpu_env, 133fcf5ef2aSThomas Huth offsetof(CPUSH4State, pr), "PR"); 134fcf5ef2aSThomas Huth cpu_fpscr = tcg_global_mem_new_i32(cpu_env, 135fcf5ef2aSThomas Huth offsetof(CPUSH4State, fpscr), "FPSCR"); 136fcf5ef2aSThomas Huth cpu_fpul = tcg_global_mem_new_i32(cpu_env, 137fcf5ef2aSThomas Huth offsetof(CPUSH4State, fpul), "FPUL"); 138fcf5ef2aSThomas Huth 139fcf5ef2aSThomas Huth cpu_flags = tcg_global_mem_new_i32(cpu_env, 140fcf5ef2aSThomas Huth offsetof(CPUSH4State, flags), "_flags_"); 141fcf5ef2aSThomas Huth cpu_delayed_pc = tcg_global_mem_new_i32(cpu_env, 142fcf5ef2aSThomas Huth offsetof(CPUSH4State, delayed_pc), 143fcf5ef2aSThomas Huth "_delayed_pc_"); 14447b9f4d5SAurelien Jarno cpu_delayed_cond = tcg_global_mem_new_i32(cpu_env, 14547b9f4d5SAurelien Jarno offsetof(CPUSH4State, 14647b9f4d5SAurelien Jarno delayed_cond), 14747b9f4d5SAurelien Jarno "_delayed_cond_"); 148f85da308SRichard Henderson cpu_lock_addr = tcg_global_mem_new_i32(cpu_env, 149f85da308SRichard Henderson offsetof(CPUSH4State, lock_addr), 150f85da308SRichard Henderson "_lock_addr_"); 151f85da308SRichard Henderson cpu_lock_value = tcg_global_mem_new_i32(cpu_env, 152f85da308SRichard Henderson offsetof(CPUSH4State, lock_value), 153f85da308SRichard Henderson "_lock_value_"); 154fcf5ef2aSThomas Huth 155fcf5ef2aSThomas Huth for (i = 0; i < 32; i++) 156fcf5ef2aSThomas Huth cpu_fregs[i] = tcg_global_mem_new_i32(cpu_env, 157fcf5ef2aSThomas Huth offsetof(CPUSH4State, fregs[i]), 158fcf5ef2aSThomas Huth fregnames[i]); 159fcf5ef2aSThomas Huth } 160fcf5ef2aSThomas Huth 16190c84c56SMarkus Armbruster void superh_cpu_dump_state(CPUState *cs, FILE *f, int flags) 162fcf5ef2aSThomas Huth { 163fcf5ef2aSThomas Huth SuperHCPU *cpu = SUPERH_CPU(cs); 164fcf5ef2aSThomas Huth CPUSH4State *env = &cpu->env; 165fcf5ef2aSThomas Huth int i; 16690c84c56SMarkus Armbruster 16790c84c56SMarkus Armbruster qemu_fprintf(f, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n", 168fcf5ef2aSThomas Huth env->pc, cpu_read_sr(env), env->pr, env->fpscr); 16990c84c56SMarkus Armbruster qemu_fprintf(f, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n", 170fcf5ef2aSThomas Huth env->spc, env->ssr, env->gbr, env->vbr); 17190c84c56SMarkus Armbruster qemu_fprintf(f, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n", 172fcf5ef2aSThomas Huth env->sgr, env->dbr, env->delayed_pc, env->fpul); 173fcf5ef2aSThomas Huth for (i = 0; i < 24; i += 4) { 174*ad4052f1SIlya Leoshkevich qemu_fprintf(f, "r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n", 175fcf5ef2aSThomas Huth i, env->gregs[i], i + 1, env->gregs[i + 1], 176fcf5ef2aSThomas Huth i + 2, env->gregs[i + 2], i + 3, env->gregs[i + 3]); 177fcf5ef2aSThomas Huth } 178ab419fd8SRichard Henderson if (env->flags & TB_FLAG_DELAY_SLOT) { 179*ad4052f1SIlya Leoshkevich qemu_fprintf(f, "in delay slot (delayed_pc=0x%08x)\n", 
180fcf5ef2aSThomas Huth env->delayed_pc); 181ab419fd8SRichard Henderson } else if (env->flags & TB_FLAG_DELAY_SLOT_COND) { 182*ad4052f1SIlya Leoshkevich qemu_fprintf(f, "in conditional delay slot (delayed_pc=0x%08x)\n", 183fcf5ef2aSThomas Huth env->delayed_pc); 184ab419fd8SRichard Henderson } else if (env->flags & TB_FLAG_DELAY_SLOT_RTE) { 18590c84c56SMarkus Armbruster qemu_fprintf(f, "in rte delay slot (delayed_pc=0x%08x)\n", 186be53081aSAurelien Jarno env->delayed_pc); 187fcf5ef2aSThomas Huth } 188fcf5ef2aSThomas Huth } 189fcf5ef2aSThomas Huth 190fcf5ef2aSThomas Huth static void gen_read_sr(TCGv dst) 191fcf5ef2aSThomas Huth { 192fcf5ef2aSThomas Huth TCGv t0 = tcg_temp_new(); 193fcf5ef2aSThomas Huth tcg_gen_shli_i32(t0, cpu_sr_q, SR_Q); 194fcf5ef2aSThomas Huth tcg_gen_or_i32(dst, dst, t0); 195fcf5ef2aSThomas Huth tcg_gen_shli_i32(t0, cpu_sr_m, SR_M); 196fcf5ef2aSThomas Huth tcg_gen_or_i32(dst, dst, t0); 197fcf5ef2aSThomas Huth tcg_gen_shli_i32(t0, cpu_sr_t, SR_T); 198fcf5ef2aSThomas Huth tcg_gen_or_i32(dst, cpu_sr, t0); 199fcf5ef2aSThomas Huth } 200fcf5ef2aSThomas Huth 201fcf5ef2aSThomas Huth static void gen_write_sr(TCGv src) 202fcf5ef2aSThomas Huth { 203fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr, src, 204fcf5ef2aSThomas Huth ~((1u << SR_Q) | (1u << SR_M) | (1u << SR_T))); 205a380f9dbSAurelien Jarno tcg_gen_extract_i32(cpu_sr_q, src, SR_Q, 1); 206a380f9dbSAurelien Jarno tcg_gen_extract_i32(cpu_sr_m, src, SR_M, 1); 207a380f9dbSAurelien Jarno tcg_gen_extract_i32(cpu_sr_t, src, SR_T, 1); 208fcf5ef2aSThomas Huth } 209fcf5ef2aSThomas Huth 210ac9707eaSAurelien Jarno static inline void gen_save_cpu_state(DisasContext *ctx, bool save_pc) 211ac9707eaSAurelien Jarno { 212ac9707eaSAurelien Jarno if (save_pc) { 2136f1c2af6SRichard Henderson tcg_gen_movi_i32(cpu_pc, ctx->base.pc_next); 214ac9707eaSAurelien Jarno } 215ac9707eaSAurelien Jarno if (ctx->delayed_pc != (uint32_t) -1) { 216ac9707eaSAurelien Jarno tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc); 217ac9707eaSAurelien Jarno } 218e1933d14SRichard Henderson if ((ctx->tbflags & TB_FLAG_ENVFLAGS_MASK) != ctx->envflags) { 219ac9707eaSAurelien Jarno tcg_gen_movi_i32(cpu_flags, ctx->envflags); 220ac9707eaSAurelien Jarno } 221ac9707eaSAurelien Jarno } 222ac9707eaSAurelien Jarno 223ec2eb22eSRichard Henderson static inline bool use_exit_tb(DisasContext *ctx) 224ec2eb22eSRichard Henderson { 225ab419fd8SRichard Henderson return (ctx->tbflags & TB_FLAG_GUSA_EXCLUSIVE) != 0; 226ec2eb22eSRichard Henderson } 227ec2eb22eSRichard Henderson 2283f1e2098SRichard Henderson static bool use_goto_tb(DisasContext *ctx, target_ulong dest) 229fcf5ef2aSThomas Huth { 2303f1e2098SRichard Henderson if (use_exit_tb(ctx)) { 2314bfa602bSRichard Henderson return false; 2324bfa602bSRichard Henderson } 2333f1e2098SRichard Henderson return translator_use_goto_tb(&ctx->base, dest); 234fcf5ef2aSThomas Huth } 235fcf5ef2aSThomas Huth 236fcf5ef2aSThomas Huth static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest) 237fcf5ef2aSThomas Huth { 238fcf5ef2aSThomas Huth if (use_goto_tb(ctx, dest)) { 239fcf5ef2aSThomas Huth tcg_gen_goto_tb(n); 240fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_pc, dest); 24107ea28b4SRichard Henderson tcg_gen_exit_tb(ctx->base.tb, n); 242fcf5ef2aSThomas Huth } else { 243fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_pc, dest); 24452df5adcSRichard Henderson if (use_exit_tb(ctx)) { 24507ea28b4SRichard Henderson tcg_gen_exit_tb(NULL, 0); 246ec2eb22eSRichard Henderson } else { 2477f11636dSEmilio G. 
Cota tcg_gen_lookup_and_goto_ptr();
248ec2eb22eSRichard Henderson }
249fcf5ef2aSThomas Huth }
2506f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN;
251fcf5ef2aSThomas Huth }
252fcf5ef2aSThomas Huth
253fcf5ef2aSThomas Huth static void gen_jump(DisasContext * ctx)
254fcf5ef2aSThomas Huth {
255ec2eb22eSRichard Henderson if (ctx->delayed_pc == -1) {
256fcf5ef2aSThomas Huth /* Target is not statically known, it comes necessarily from a
257fcf5ef2aSThomas Huth delayed jump, as immediate jumps are conditional jumps */
258fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_pc, cpu_delayed_pc);
259ac9707eaSAurelien Jarno tcg_gen_discard_i32(cpu_delayed_pc);
26052df5adcSRichard Henderson if (use_exit_tb(ctx)) {
26107ea28b4SRichard Henderson tcg_gen_exit_tb(NULL, 0);
262fcf5ef2aSThomas Huth } else {
2637f11636dSEmilio G. Cota tcg_gen_lookup_and_goto_ptr();
264ec2eb22eSRichard Henderson }
2656f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN;
266ec2eb22eSRichard Henderson } else {
267fcf5ef2aSThomas Huth gen_goto_tb(ctx, 0, ctx->delayed_pc);
268fcf5ef2aSThomas Huth }
269fcf5ef2aSThomas Huth }
270fcf5ef2aSThomas Huth
271fcf5ef2aSThomas Huth /* Immediate conditional jump (bt or bf) */
2724bfa602bSRichard Henderson static void gen_conditional_jump(DisasContext *ctx, target_ulong dest,
2734bfa602bSRichard Henderson bool jump_if_true)
274fcf5ef2aSThomas Huth {
275fcf5ef2aSThomas Huth TCGLabel *l1 = gen_new_label();
2764bfa602bSRichard Henderson TCGCond cond_not_taken = jump_if_true ? TCG_COND_EQ : TCG_COND_NE;
2774bfa602bSRichard Henderson
278ab419fd8SRichard Henderson if (ctx->tbflags & TB_FLAG_GUSA_EXCLUSIVE) {
2794bfa602bSRichard Henderson /* When in an exclusive region, we must continue to the end.
2804bfa602bSRichard Henderson Therefore, exit the region on a taken branch, but otherwise
2814bfa602bSRichard Henderson fall through to the next instruction. */
2824bfa602bSRichard Henderson tcg_gen_brcondi_i32(cond_not_taken, cpu_sr_t, 0, l1);
283ab419fd8SRichard Henderson tcg_gen_movi_i32(cpu_flags, ctx->envflags & ~TB_FLAG_GUSA_MASK);
2844bfa602bSRichard Henderson /* Note that this won't actually use a goto_tb opcode because we
2854bfa602bSRichard Henderson disallow it in use_goto_tb, but it handles exit + singlestep.
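(use_goto_tb rejects the jump because use_exit_tb is true whenever TB_FLAG_GUSA_EXCLUSIVE is set, so gen_goto_tb falls back to tcg_gen_exit_tb.)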
*/ 2864bfa602bSRichard Henderson gen_goto_tb(ctx, 0, dest); 287fcf5ef2aSThomas Huth gen_set_label(l1); 2885b38d026SLaurent Vivier ctx->base.is_jmp = DISAS_NEXT; 2894bfa602bSRichard Henderson return; 2904bfa602bSRichard Henderson } 2914bfa602bSRichard Henderson 2924bfa602bSRichard Henderson gen_save_cpu_state(ctx, false); 2934bfa602bSRichard Henderson tcg_gen_brcondi_i32(cond_not_taken, cpu_sr_t, 0, l1); 2944bfa602bSRichard Henderson gen_goto_tb(ctx, 0, dest); 2954bfa602bSRichard Henderson gen_set_label(l1); 2966f1c2af6SRichard Henderson gen_goto_tb(ctx, 1, ctx->base.pc_next + 2); 2976f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 298fcf5ef2aSThomas Huth } 299fcf5ef2aSThomas Huth 300fcf5ef2aSThomas Huth /* Delayed conditional jump (bt or bf) */ 301fcf5ef2aSThomas Huth static void gen_delayed_conditional_jump(DisasContext * ctx) 302fcf5ef2aSThomas Huth { 3034bfa602bSRichard Henderson TCGLabel *l1 = gen_new_label(); 3044bfa602bSRichard Henderson TCGv ds = tcg_temp_new(); 305fcf5ef2aSThomas Huth 30647b9f4d5SAurelien Jarno tcg_gen_mov_i32(ds, cpu_delayed_cond); 30747b9f4d5SAurelien Jarno tcg_gen_discard_i32(cpu_delayed_cond); 3084bfa602bSRichard Henderson 309ab419fd8SRichard Henderson if (ctx->tbflags & TB_FLAG_GUSA_EXCLUSIVE) { 3104bfa602bSRichard Henderson /* When in an exclusive region, we must continue to the end. 3114bfa602bSRichard Henderson Therefore, exit the region on a taken branch, but otherwise 3124bfa602bSRichard Henderson fall through to the next instruction. */ 3134bfa602bSRichard Henderson tcg_gen_brcondi_i32(TCG_COND_EQ, ds, 0, l1); 3144bfa602bSRichard Henderson 3154bfa602bSRichard Henderson /* Leave the gUSA region. */ 316ab419fd8SRichard Henderson tcg_gen_movi_i32(cpu_flags, ctx->envflags & ~TB_FLAG_GUSA_MASK); 3174bfa602bSRichard Henderson gen_jump(ctx); 3184bfa602bSRichard Henderson 3194bfa602bSRichard Henderson gen_set_label(l1); 3206f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NEXT; 3214bfa602bSRichard Henderson return; 3224bfa602bSRichard Henderson } 3234bfa602bSRichard Henderson 324fcf5ef2aSThomas Huth tcg_gen_brcondi_i32(TCG_COND_NE, ds, 0, l1); 3256f1c2af6SRichard Henderson gen_goto_tb(ctx, 1, ctx->base.pc_next + 2); 326fcf5ef2aSThomas Huth gen_set_label(l1); 327fcf5ef2aSThomas Huth gen_jump(ctx); 328fcf5ef2aSThomas Huth } 329fcf5ef2aSThomas Huth 330e5d8053eSRichard Henderson static inline void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg) 331fcf5ef2aSThomas Huth { 3321e0b21d8SRichard Henderson /* We have already signaled illegal instruction for odd Dr. */ 3331e0b21d8SRichard Henderson tcg_debug_assert((reg & 1) == 0); 3341e0b21d8SRichard Henderson reg ^= ctx->fbank; 335fcf5ef2aSThomas Huth tcg_gen_concat_i32_i64(t, cpu_fregs[reg + 1], cpu_fregs[reg]); 336fcf5ef2aSThomas Huth } 337fcf5ef2aSThomas Huth 338e5d8053eSRichard Henderson static inline void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg) 339fcf5ef2aSThomas Huth { 3401e0b21d8SRichard Henderson /* We have already signaled illegal instruction for odd Dr. 
*/
3411e0b21d8SRichard Henderson tcg_debug_assert((reg & 1) == 0);
3421e0b21d8SRichard Henderson reg ^= ctx->fbank;
34358d2a9aeSAurelien Jarno tcg_gen_extr_i64_i32(cpu_fregs[reg + 1], cpu_fregs[reg], t);
344fcf5ef2aSThomas Huth }
345fcf5ef2aSThomas Huth
346fcf5ef2aSThomas Huth #define B3_0 (ctx->opcode & 0xf)
347fcf5ef2aSThomas Huth #define B6_4 ((ctx->opcode >> 4) & 0x7)
348fcf5ef2aSThomas Huth #define B7_4 ((ctx->opcode >> 4) & 0xf)
349fcf5ef2aSThomas Huth #define B7_0 (ctx->opcode & 0xff)
350fcf5ef2aSThomas Huth #define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff))
351fcf5ef2aSThomas Huth #define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \
352fcf5ef2aSThomas Huth (ctx->opcode & 0xfff))
353fcf5ef2aSThomas Huth #define B11_8 ((ctx->opcode >> 8) & 0xf)
354fcf5ef2aSThomas Huth #define B15_12 ((ctx->opcode >> 12) & 0xf)
355fcf5ef2aSThomas Huth
3563a3bb8d2SRichard Henderson #define REG(x) cpu_gregs[(x) ^ ctx->gbank]
3573a3bb8d2SRichard Henderson #define ALTREG(x) cpu_gregs[(x) ^ ctx->gbank ^ 0x10]
3585c13bad9SRichard Henderson #define FREG(x) cpu_fregs[(x) ^ ctx->fbank]
359fcf5ef2aSThomas Huth
360fcf5ef2aSThomas Huth #define XHACK(x) ((((x) & 1 ) << 4) | ((x) & 0xe))
361fcf5ef2aSThomas Huth
362fcf5ef2aSThomas Huth #define CHECK_NOT_DELAY_SLOT \
363ab419fd8SRichard Henderson if (ctx->envflags & TB_FLAG_DELAY_SLOT_MASK) { \
364dec16c6eSRichard Henderson goto do_illegal_slot; \
365fcf5ef2aSThomas Huth }
366fcf5ef2aSThomas Huth
367fcf5ef2aSThomas Huth #define CHECK_PRIVILEGED \
368fcf5ef2aSThomas Huth if (IS_USER(ctx)) { \
3696b98213dSRichard Henderson goto do_illegal; \
370fcf5ef2aSThomas Huth }
371fcf5ef2aSThomas Huth
372fcf5ef2aSThomas Huth #define CHECK_FPU_ENABLED \
373a6215749SAurelien Jarno if (ctx->tbflags & (1u << SR_FD)) { \
374dec4f042SRichard Henderson goto do_fpu_disabled; \
375fcf5ef2aSThomas Huth }
376fcf5ef2aSThomas Huth
3777e9f7ca8SRichard Henderson #define CHECK_FPSCR_PR_0 \
3787e9f7ca8SRichard Henderson if (ctx->tbflags & FPSCR_PR) { \
3797e9f7ca8SRichard Henderson goto do_illegal; \
3807e9f7ca8SRichard Henderson }
3817e9f7ca8SRichard Henderson
3827e9f7ca8SRichard Henderson #define CHECK_FPSCR_PR_1 \
3837e9f7ca8SRichard Henderson if (!(ctx->tbflags & FPSCR_PR)) { \
3847e9f7ca8SRichard Henderson goto do_illegal; \
3857e9f7ca8SRichard Henderson }
3867e9f7ca8SRichard Henderson
387ccae24d4SRichard Henderson #define CHECK_SH4A \
388ccae24d4SRichard Henderson if (!(ctx->features & SH_FEATURE_SH4A)) { \
389ccae24d4SRichard Henderson goto do_illegal; \
390ccae24d4SRichard Henderson }
391ccae24d4SRichard Henderson
392fcf5ef2aSThomas Huth static void _decode_opc(DisasContext * ctx)
393fcf5ef2aSThomas Huth {
394fcf5ef2aSThomas Huth /* This code tries to make movcal emulation sufficiently
395fcf5ef2aSThomas Huth accurate for Linux purposes. This instruction writes
396fcf5ef2aSThomas Huth memory, and prior to that, always allocates a cache line.
397fcf5ef2aSThomas Huth It is used in two contexts:
398fcf5ef2aSThomas Huth - in memcpy, where data is copied in blocks, the first write
399fcf5ef2aSThomas Huth to a block uses movca.l for performance.
400fcf5ef2aSThomas Huth - in arch/sh/mm/cache-sh4.c, a movcal.l + ocbi combination is used
401fcf5ef2aSThomas Huth to flush the cache. Here, the data written by movcal.l is never
402fcf5ef2aSThomas Huth written to memory, and the data written is just bogus.
403fcf5ef2aSThomas Huth 404fcf5ef2aSThomas Huth To simulate this, we simulate movcal.l, we store the value to memory, 405fcf5ef2aSThomas Huth but we also remember the previous content. If we see ocbi, we check 406fcf5ef2aSThomas Huth if movcal.l for that address was done previously. If so, the write should 407fcf5ef2aSThomas Huth not have hit the memory, so we restore the previous content. 408fcf5ef2aSThomas Huth When we see an instruction that is neither movca.l 409fcf5ef2aSThomas Huth nor ocbi, the previous content is discarded. 410fcf5ef2aSThomas Huth 411fcf5ef2aSThomas Huth To optimize, we only try to flush stores when we're at the start of 412fcf5ef2aSThomas Huth TB, or if we already saw movca.l in this TB and did not flush stores 413fcf5ef2aSThomas Huth yet. */ 414fcf5ef2aSThomas Huth if (ctx->has_movcal) 415fcf5ef2aSThomas Huth { 416fcf5ef2aSThomas Huth int opcode = ctx->opcode & 0xf0ff; 417fcf5ef2aSThomas Huth if (opcode != 0x0093 /* ocbi */ 418fcf5ef2aSThomas Huth && opcode != 0x00c3 /* movca.l */) 419fcf5ef2aSThomas Huth { 420fcf5ef2aSThomas Huth gen_helper_discard_movcal_backup(cpu_env); 421fcf5ef2aSThomas Huth ctx->has_movcal = 0; 422fcf5ef2aSThomas Huth } 423fcf5ef2aSThomas Huth } 424fcf5ef2aSThomas Huth 425fcf5ef2aSThomas Huth #if 0 426fcf5ef2aSThomas Huth fprintf(stderr, "Translating opcode 0x%04x\n", ctx->opcode); 427fcf5ef2aSThomas Huth #endif 428fcf5ef2aSThomas Huth 429fcf5ef2aSThomas Huth switch (ctx->opcode) { 430fcf5ef2aSThomas Huth case 0x0019: /* div0u */ 431fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_sr_m, 0); 432fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_sr_q, 0); 433fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_sr_t, 0); 434fcf5ef2aSThomas Huth return; 435fcf5ef2aSThomas Huth case 0x000b: /* rts */ 436fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 437fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_delayed_pc, cpu_pr); 438ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT; 439fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 440fcf5ef2aSThomas Huth return; 441fcf5ef2aSThomas Huth case 0x0028: /* clrmac */ 442fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_mach, 0); 443fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_macl, 0); 444fcf5ef2aSThomas Huth return; 445fcf5ef2aSThomas Huth case 0x0048: /* clrs */ 446fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr, cpu_sr, ~(1u << SR_S)); 447fcf5ef2aSThomas Huth return; 448fcf5ef2aSThomas Huth case 0x0008: /* clrt */ 449fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_sr_t, 0); 450fcf5ef2aSThomas Huth return; 451fcf5ef2aSThomas Huth case 0x0038: /* ldtlb */ 452fcf5ef2aSThomas Huth CHECK_PRIVILEGED 453fcf5ef2aSThomas Huth gen_helper_ldtlb(cpu_env); 454fcf5ef2aSThomas Huth return; 455fcf5ef2aSThomas Huth case 0x002b: /* rte */ 456fcf5ef2aSThomas Huth CHECK_PRIVILEGED 457fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 458fcf5ef2aSThomas Huth gen_write_sr(cpu_ssr); 459fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_delayed_pc, cpu_spc); 460ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT_RTE; 461fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 4626f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 463fcf5ef2aSThomas Huth return; 464fcf5ef2aSThomas Huth case 0x0058: /* sets */ 465fcf5ef2aSThomas Huth tcg_gen_ori_i32(cpu_sr, cpu_sr, (1u << SR_S)); 466fcf5ef2aSThomas Huth return; 467fcf5ef2aSThomas Huth case 0x0018: /* sett */ 468fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_sr_t, 1); 469fcf5ef2aSThomas Huth return; 470fcf5ef2aSThomas Huth case 0xfbfd: /* frchg */ 47161dedf2aSRichard Henderson CHECK_FPSCR_PR_0 472fcf5ef2aSThomas Huth 
tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_FR); 4736f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 474fcf5ef2aSThomas Huth return; 475fcf5ef2aSThomas Huth case 0xf3fd: /* fschg */ 47661dedf2aSRichard Henderson CHECK_FPSCR_PR_0 477fcf5ef2aSThomas Huth tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_SZ); 4786f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 479fcf5ef2aSThomas Huth return; 480907759f9SRichard Henderson case 0xf7fd: /* fpchg */ 481907759f9SRichard Henderson CHECK_SH4A 482907759f9SRichard Henderson tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_PR); 4836f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 484907759f9SRichard Henderson return; 485fcf5ef2aSThomas Huth case 0x0009: /* nop */ 486fcf5ef2aSThomas Huth return; 487fcf5ef2aSThomas Huth case 0x001b: /* sleep */ 488fcf5ef2aSThomas Huth CHECK_PRIVILEGED 4896f1c2af6SRichard Henderson tcg_gen_movi_i32(cpu_pc, ctx->base.pc_next + 2); 490fcf5ef2aSThomas Huth gen_helper_sleep(cpu_env); 491fcf5ef2aSThomas Huth return; 492fcf5ef2aSThomas Huth } 493fcf5ef2aSThomas Huth 494fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf000) { 495fcf5ef2aSThomas Huth case 0x1000: /* mov.l Rm,@(disp,Rn) */ 496fcf5ef2aSThomas Huth { 497fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 498fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B11_8), B3_0 * 4); 4994da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, 5004da06fb3SRichard Henderson MO_TEUL | UNALIGN(ctx)); 501fcf5ef2aSThomas Huth } 502fcf5ef2aSThomas Huth return; 503fcf5ef2aSThomas Huth case 0x5000: /* mov.l @(disp,Rm),Rn */ 504fcf5ef2aSThomas Huth { 505fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 506fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 4); 5074da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, 5084da06fb3SRichard Henderson MO_TESL | UNALIGN(ctx)); 509fcf5ef2aSThomas Huth } 510fcf5ef2aSThomas Huth return; 511fcf5ef2aSThomas Huth case 0xe000: /* mov #imm,Rn */ 5124bfa602bSRichard Henderson #ifdef CONFIG_USER_ONLY 513ab419fd8SRichard Henderson /* 514ab419fd8SRichard Henderson * Detect the start of a gUSA region (mov #-n, r15). 515ab419fd8SRichard Henderson * If so, update envflags and end the TB. This will allow us 516ab419fd8SRichard Henderson * to see the end of the region (stored in R0) in the next TB. 
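* The negative immediate itself is saved into the TB_FLAG_GUSA field of envflags by the deposit32() below.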
517ab419fd8SRichard Henderson */ 5186f1c2af6SRichard Henderson if (B11_8 == 15 && B7_0s < 0 && 5196f1c2af6SRichard Henderson (tb_cflags(ctx->base.tb) & CF_PARALLEL)) { 520ab419fd8SRichard Henderson ctx->envflags = 521ab419fd8SRichard Henderson deposit32(ctx->envflags, TB_FLAG_GUSA_SHIFT, 8, B7_0s); 5226f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 5234bfa602bSRichard Henderson } 5244bfa602bSRichard Henderson #endif 525fcf5ef2aSThomas Huth tcg_gen_movi_i32(REG(B11_8), B7_0s); 526fcf5ef2aSThomas Huth return; 527fcf5ef2aSThomas Huth case 0x9000: /* mov.w @(disp,PC),Rn */ 528fcf5ef2aSThomas Huth { 529950b91beSRichard Henderson TCGv addr = tcg_constant_i32(ctx->base.pc_next + 4 + B7_0 * 2); 530fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESW); 531fcf5ef2aSThomas Huth } 532fcf5ef2aSThomas Huth return; 533fcf5ef2aSThomas Huth case 0xd000: /* mov.l @(disp,PC),Rn */ 534fcf5ef2aSThomas Huth { 535950b91beSRichard Henderson TCGv addr = tcg_constant_i32((ctx->base.pc_next + 4 + B7_0 * 4) & ~3); 536fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESL); 537fcf5ef2aSThomas Huth } 538fcf5ef2aSThomas Huth return; 539fcf5ef2aSThomas Huth case 0x7000: /* add #imm,Rn */ 540fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), B7_0s); 541fcf5ef2aSThomas Huth return; 542fcf5ef2aSThomas Huth case 0xa000: /* bra disp */ 543fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 5446f1c2af6SRichard Henderson ctx->delayed_pc = ctx->base.pc_next + 4 + B11_0s * 2; 545ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT; 546fcf5ef2aSThomas Huth return; 547fcf5ef2aSThomas Huth case 0xb000: /* bsr disp */ 548fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 5496f1c2af6SRichard Henderson tcg_gen_movi_i32(cpu_pr, ctx->base.pc_next + 4); 5506f1c2af6SRichard Henderson ctx->delayed_pc = ctx->base.pc_next + 4 + B11_0s * 2; 551ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT; 552fcf5ef2aSThomas Huth return; 553fcf5ef2aSThomas Huth } 554fcf5ef2aSThomas Huth 555fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf00f) { 556fcf5ef2aSThomas Huth case 0x6003: /* mov Rm,Rn */ 557fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), REG(B7_4)); 558fcf5ef2aSThomas Huth return; 559fcf5ef2aSThomas Huth case 0x2000: /* mov.b Rm,@Rn */ 560fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, MO_UB); 561fcf5ef2aSThomas Huth return; 562fcf5ef2aSThomas Huth case 0x2001: /* mov.w Rm,@Rn */ 5634da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, 5644da06fb3SRichard Henderson MO_TEUW | UNALIGN(ctx)); 565fcf5ef2aSThomas Huth return; 566fcf5ef2aSThomas Huth case 0x2002: /* mov.l Rm,@Rn */ 5674da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, 5684da06fb3SRichard Henderson MO_TEUL | UNALIGN(ctx)); 569fcf5ef2aSThomas Huth return; 570fcf5ef2aSThomas Huth case 0x6000: /* mov.b @Rm,Rn */ 571fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_SB); 572fcf5ef2aSThomas Huth return; 573fcf5ef2aSThomas Huth case 0x6001: /* mov.w @Rm,Rn */ 5744da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, 5754da06fb3SRichard Henderson MO_TESW | UNALIGN(ctx)); 576fcf5ef2aSThomas Huth return; 577fcf5ef2aSThomas Huth case 0x6002: /* mov.l @Rm,Rn */ 5784da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, 5794da06fb3SRichard Henderson MO_TESL | UNALIGN(ctx)); 580fcf5ef2aSThomas Huth return; 581fcf5ef2aSThomas Huth case 0x2004: /* mov.b 
Rm,@-Rn */ 582fcf5ef2aSThomas Huth { 583fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 584fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 1); 585fcf5ef2aSThomas Huth /* might cause re-execution */ 586fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_UB); 587fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); /* modify register status */ 588fcf5ef2aSThomas Huth } 589fcf5ef2aSThomas Huth return; 590fcf5ef2aSThomas Huth case 0x2005: /* mov.w Rm,@-Rn */ 591fcf5ef2aSThomas Huth { 592fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 593fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 2); 5944da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, 5954da06fb3SRichard Henderson MO_TEUW | UNALIGN(ctx)); 596fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 597fcf5ef2aSThomas Huth } 598fcf5ef2aSThomas Huth return; 599fcf5ef2aSThomas Huth case 0x2006: /* mov.l Rm,@-Rn */ 600fcf5ef2aSThomas Huth { 601fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 602fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 6034da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, 6044da06fb3SRichard Henderson MO_TEUL | UNALIGN(ctx)); 605fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 606fcf5ef2aSThomas Huth } 607fcf5ef2aSThomas Huth return; 608fcf5ef2aSThomas Huth case 0x6004: /* mov.b @Rm+,Rn */ 609fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_SB); 610fcf5ef2aSThomas Huth if ( B11_8 != B7_4 ) 611fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 1); 612fcf5ef2aSThomas Huth return; 613fcf5ef2aSThomas Huth case 0x6005: /* mov.w @Rm+,Rn */ 6144da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, 6154da06fb3SRichard Henderson MO_TESW | UNALIGN(ctx)); 616fcf5ef2aSThomas Huth if ( B11_8 != B7_4 ) 617fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2); 618fcf5ef2aSThomas Huth return; 619fcf5ef2aSThomas Huth case 0x6006: /* mov.l @Rm+,Rn */ 6204da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, 6214da06fb3SRichard Henderson MO_TESL | UNALIGN(ctx)); 622fcf5ef2aSThomas Huth if ( B11_8 != B7_4 ) 623fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4); 624fcf5ef2aSThomas Huth return; 625fcf5ef2aSThomas Huth case 0x0004: /* mov.b Rm,@(R0,Rn) */ 626fcf5ef2aSThomas Huth { 627fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 628fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B11_8), REG(0)); 629fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_UB); 630fcf5ef2aSThomas Huth } 631fcf5ef2aSThomas Huth return; 632fcf5ef2aSThomas Huth case 0x0005: /* mov.w Rm,@(R0,Rn) */ 633fcf5ef2aSThomas Huth { 634fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 635fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B11_8), REG(0)); 6364da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, 6374da06fb3SRichard Henderson MO_TEUW | UNALIGN(ctx)); 638fcf5ef2aSThomas Huth } 639fcf5ef2aSThomas Huth return; 640fcf5ef2aSThomas Huth case 0x0006: /* mov.l Rm,@(R0,Rn) */ 641fcf5ef2aSThomas Huth { 642fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 643fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B11_8), REG(0)); 6444da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, 6454da06fb3SRichard Henderson MO_TEUL | UNALIGN(ctx)); 646fcf5ef2aSThomas Huth } 647fcf5ef2aSThomas Huth return; 648fcf5ef2aSThomas Huth case 0x000c: /* mov.b @(R0,Rm),Rn */ 649fcf5ef2aSThomas Huth { 
650fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 651fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B7_4), REG(0)); 652fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_SB); 653fcf5ef2aSThomas Huth } 654fcf5ef2aSThomas Huth return; 655fcf5ef2aSThomas Huth case 0x000d: /* mov.w @(R0,Rm),Rn */ 656fcf5ef2aSThomas Huth { 657fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 658fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B7_4), REG(0)); 6594da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, 6604da06fb3SRichard Henderson MO_TESW | UNALIGN(ctx)); 661fcf5ef2aSThomas Huth } 662fcf5ef2aSThomas Huth return; 663fcf5ef2aSThomas Huth case 0x000e: /* mov.l @(R0,Rm),Rn */ 664fcf5ef2aSThomas Huth { 665fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 666fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B7_4), REG(0)); 6674da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, 6684da06fb3SRichard Henderson MO_TESL | UNALIGN(ctx)); 669fcf5ef2aSThomas Huth } 670fcf5ef2aSThomas Huth return; 671fcf5ef2aSThomas Huth case 0x6008: /* swap.b Rm,Rn */ 672fcf5ef2aSThomas Huth { 6733c254ab8SLadi Prosek TCGv low = tcg_temp_new(); 674b983a0e1SRichard Henderson tcg_gen_bswap16_i32(low, REG(B7_4), 0); 675fcf5ef2aSThomas Huth tcg_gen_deposit_i32(REG(B11_8), REG(B7_4), low, 0, 16); 676fcf5ef2aSThomas Huth } 677fcf5ef2aSThomas Huth return; 678fcf5ef2aSThomas Huth case 0x6009: /* swap.w Rm,Rn */ 679fcf5ef2aSThomas Huth tcg_gen_rotli_i32(REG(B11_8), REG(B7_4), 16); 680fcf5ef2aSThomas Huth return; 681fcf5ef2aSThomas Huth case 0x200d: /* xtrct Rm,Rn */ 682fcf5ef2aSThomas Huth { 683fcf5ef2aSThomas Huth TCGv high, low; 684fcf5ef2aSThomas Huth high = tcg_temp_new(); 685fcf5ef2aSThomas Huth tcg_gen_shli_i32(high, REG(B7_4), 16); 686fcf5ef2aSThomas Huth low = tcg_temp_new(); 687fcf5ef2aSThomas Huth tcg_gen_shri_i32(low, REG(B11_8), 16); 688fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), high, low); 689fcf5ef2aSThomas Huth } 690fcf5ef2aSThomas Huth return; 691fcf5ef2aSThomas Huth case 0x300c: /* add Rm,Rn */ 692fcf5ef2aSThomas Huth tcg_gen_add_i32(REG(B11_8), REG(B11_8), REG(B7_4)); 693fcf5ef2aSThomas Huth return; 694fcf5ef2aSThomas Huth case 0x300e: /* addc Rm,Rn */ 695fcf5ef2aSThomas Huth { 696fcf5ef2aSThomas Huth TCGv t0, t1; 697950b91beSRichard Henderson t0 = tcg_constant_tl(0); 698fcf5ef2aSThomas Huth t1 = tcg_temp_new(); 699fcf5ef2aSThomas Huth tcg_gen_add2_i32(t1, cpu_sr_t, cpu_sr_t, t0, REG(B7_4), t0); 700fcf5ef2aSThomas Huth tcg_gen_add2_i32(REG(B11_8), cpu_sr_t, 701fcf5ef2aSThomas Huth REG(B11_8), t0, t1, cpu_sr_t); 702fcf5ef2aSThomas Huth } 703fcf5ef2aSThomas Huth return; 704fcf5ef2aSThomas Huth case 0x300f: /* addv Rm,Rn */ 705fcf5ef2aSThomas Huth { 706fcf5ef2aSThomas Huth TCGv t0, t1, t2; 707fcf5ef2aSThomas Huth t0 = tcg_temp_new(); 708fcf5ef2aSThomas Huth tcg_gen_add_i32(t0, REG(B7_4), REG(B11_8)); 709fcf5ef2aSThomas Huth t1 = tcg_temp_new(); 710fcf5ef2aSThomas Huth tcg_gen_xor_i32(t1, t0, REG(B11_8)); 711fcf5ef2aSThomas Huth t2 = tcg_temp_new(); 712fcf5ef2aSThomas Huth tcg_gen_xor_i32(t2, REG(B7_4), REG(B11_8)); 713fcf5ef2aSThomas Huth tcg_gen_andc_i32(cpu_sr_t, t1, t2); 714fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_t, cpu_sr_t, 31); 715fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B7_4), t0); 716fcf5ef2aSThomas Huth } 717fcf5ef2aSThomas Huth return; 718fcf5ef2aSThomas Huth case 0x2009: /* and Rm,Rn */ 719fcf5ef2aSThomas Huth tcg_gen_and_i32(REG(B11_8), REG(B11_8), REG(B7_4)); 720fcf5ef2aSThomas Huth return; 721fcf5ef2aSThomas Huth case 0x3000: /* 
cmp/eq Rm,Rn */ 722fcf5ef2aSThomas Huth tcg_gen_setcond_i32(TCG_COND_EQ, cpu_sr_t, REG(B11_8), REG(B7_4)); 723fcf5ef2aSThomas Huth return; 724fcf5ef2aSThomas Huth case 0x3003: /* cmp/ge Rm,Rn */ 725fcf5ef2aSThomas Huth tcg_gen_setcond_i32(TCG_COND_GE, cpu_sr_t, REG(B11_8), REG(B7_4)); 726fcf5ef2aSThomas Huth return; 727fcf5ef2aSThomas Huth case 0x3007: /* cmp/gt Rm,Rn */ 728fcf5ef2aSThomas Huth tcg_gen_setcond_i32(TCG_COND_GT, cpu_sr_t, REG(B11_8), REG(B7_4)); 729fcf5ef2aSThomas Huth return; 730fcf5ef2aSThomas Huth case 0x3006: /* cmp/hi Rm,Rn */ 731fcf5ef2aSThomas Huth tcg_gen_setcond_i32(TCG_COND_GTU, cpu_sr_t, REG(B11_8), REG(B7_4)); 732fcf5ef2aSThomas Huth return; 733fcf5ef2aSThomas Huth case 0x3002: /* cmp/hs Rm,Rn */ 734fcf5ef2aSThomas Huth tcg_gen_setcond_i32(TCG_COND_GEU, cpu_sr_t, REG(B11_8), REG(B7_4)); 735fcf5ef2aSThomas Huth return; 736fcf5ef2aSThomas Huth case 0x200c: /* cmp/str Rm,Rn */ 737fcf5ef2aSThomas Huth { 738fcf5ef2aSThomas Huth TCGv cmp1 = tcg_temp_new(); 739fcf5ef2aSThomas Huth TCGv cmp2 = tcg_temp_new(); 740fcf5ef2aSThomas Huth tcg_gen_xor_i32(cmp2, REG(B7_4), REG(B11_8)); 741fcf5ef2aSThomas Huth tcg_gen_subi_i32(cmp1, cmp2, 0x01010101); 742fcf5ef2aSThomas Huth tcg_gen_andc_i32(cmp1, cmp1, cmp2); 743fcf5ef2aSThomas Huth tcg_gen_andi_i32(cmp1, cmp1, 0x80808080); 744fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_NE, cpu_sr_t, cmp1, 0); 745fcf5ef2aSThomas Huth } 746fcf5ef2aSThomas Huth return; 747fcf5ef2aSThomas Huth case 0x2007: /* div0s Rm,Rn */ 748fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_q, REG(B11_8), 31); /* SR_Q */ 749fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_m, REG(B7_4), 31); /* SR_M */ 750fcf5ef2aSThomas Huth tcg_gen_xor_i32(cpu_sr_t, cpu_sr_q, cpu_sr_m); /* SR_T */ 751fcf5ef2aSThomas Huth return; 752fcf5ef2aSThomas Huth case 0x3004: /* div1 Rm,Rn */ 753fcf5ef2aSThomas Huth { 754fcf5ef2aSThomas Huth TCGv t0 = tcg_temp_new(); 755fcf5ef2aSThomas Huth TCGv t1 = tcg_temp_new(); 756fcf5ef2aSThomas Huth TCGv t2 = tcg_temp_new(); 757950b91beSRichard Henderson TCGv zero = tcg_constant_i32(0); 758fcf5ef2aSThomas Huth 759fcf5ef2aSThomas Huth /* shift left arg1, saving the bit being pushed out and inserting 760fcf5ef2aSThomas Huth T on the right */ 761fcf5ef2aSThomas Huth tcg_gen_shri_i32(t0, REG(B11_8), 31); 762fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1); 763fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), REG(B11_8), cpu_sr_t); 764fcf5ef2aSThomas Huth 765fcf5ef2aSThomas Huth /* Add or subtract arg0 from arg1 depending if Q == M. To avoid 766fcf5ef2aSThomas Huth using 64-bit temps, we compute arg0's high part from q ^ m, so 767fcf5ef2aSThomas Huth that it is 0x00000000 when adding the value or 0xffffffff when 768fcf5ef2aSThomas Huth subtracting it. 
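Concretely, t1 = (q ^ m) - 1 below is exactly that high part, and the movcond picks +arg0 or -arg0 to match before the two-word add2.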
*/ 769fcf5ef2aSThomas Huth tcg_gen_xor_i32(t1, cpu_sr_q, cpu_sr_m); 770fcf5ef2aSThomas Huth tcg_gen_subi_i32(t1, t1, 1); 771fcf5ef2aSThomas Huth tcg_gen_neg_i32(t2, REG(B7_4)); 772fcf5ef2aSThomas Huth tcg_gen_movcond_i32(TCG_COND_EQ, t2, t1, zero, REG(B7_4), t2); 773fcf5ef2aSThomas Huth tcg_gen_add2_i32(REG(B11_8), t1, REG(B11_8), zero, t2, t1); 774fcf5ef2aSThomas Huth 775fcf5ef2aSThomas Huth /* compute T and Q depending on carry */ 776fcf5ef2aSThomas Huth tcg_gen_andi_i32(t1, t1, 1); 777fcf5ef2aSThomas Huth tcg_gen_xor_i32(t1, t1, t0); 778fcf5ef2aSThomas Huth tcg_gen_xori_i32(cpu_sr_t, t1, 1); 779fcf5ef2aSThomas Huth tcg_gen_xor_i32(cpu_sr_q, cpu_sr_m, t1); 780fcf5ef2aSThomas Huth } 781fcf5ef2aSThomas Huth return; 782fcf5ef2aSThomas Huth case 0x300d: /* dmuls.l Rm,Rn */ 783fcf5ef2aSThomas Huth tcg_gen_muls2_i32(cpu_macl, cpu_mach, REG(B7_4), REG(B11_8)); 784fcf5ef2aSThomas Huth return; 785fcf5ef2aSThomas Huth case 0x3005: /* dmulu.l Rm,Rn */ 786fcf5ef2aSThomas Huth tcg_gen_mulu2_i32(cpu_macl, cpu_mach, REG(B7_4), REG(B11_8)); 787fcf5ef2aSThomas Huth return; 788fcf5ef2aSThomas Huth case 0x600e: /* exts.b Rm,Rn */ 789fcf5ef2aSThomas Huth tcg_gen_ext8s_i32(REG(B11_8), REG(B7_4)); 790fcf5ef2aSThomas Huth return; 791fcf5ef2aSThomas Huth case 0x600f: /* exts.w Rm,Rn */ 792fcf5ef2aSThomas Huth tcg_gen_ext16s_i32(REG(B11_8), REG(B7_4)); 793fcf5ef2aSThomas Huth return; 794fcf5ef2aSThomas Huth case 0x600c: /* extu.b Rm,Rn */ 795fcf5ef2aSThomas Huth tcg_gen_ext8u_i32(REG(B11_8), REG(B7_4)); 796fcf5ef2aSThomas Huth return; 797fcf5ef2aSThomas Huth case 0x600d: /* extu.w Rm,Rn */ 798fcf5ef2aSThomas Huth tcg_gen_ext16u_i32(REG(B11_8), REG(B7_4)); 799fcf5ef2aSThomas Huth return; 800fcf5ef2aSThomas Huth case 0x000f: /* mac.l @Rm+,@Rn+ */ 801fcf5ef2aSThomas Huth { 802fcf5ef2aSThomas Huth TCGv arg0, arg1; 803fcf5ef2aSThomas Huth arg0 = tcg_temp_new(); 804fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(arg0, REG(B7_4), ctx->memidx, MO_TESL); 805fcf5ef2aSThomas Huth arg1 = tcg_temp_new(); 806fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(arg1, REG(B11_8), ctx->memidx, MO_TESL); 807fcf5ef2aSThomas Huth gen_helper_macl(cpu_env, arg0, arg1); 808fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4); 809fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 810fcf5ef2aSThomas Huth } 811fcf5ef2aSThomas Huth return; 812fcf5ef2aSThomas Huth case 0x400f: /* mac.w @Rm+,@Rn+ */ 813fcf5ef2aSThomas Huth { 814fcf5ef2aSThomas Huth TCGv arg0, arg1; 815fcf5ef2aSThomas Huth arg0 = tcg_temp_new(); 816fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(arg0, REG(B7_4), ctx->memidx, MO_TESL); 817fcf5ef2aSThomas Huth arg1 = tcg_temp_new(); 818fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(arg1, REG(B11_8), ctx->memidx, MO_TESL); 819fcf5ef2aSThomas Huth gen_helper_macw(cpu_env, arg0, arg1); 820fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 2); 821fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2); 822fcf5ef2aSThomas Huth } 823fcf5ef2aSThomas Huth return; 824fcf5ef2aSThomas Huth case 0x0007: /* mul.l Rm,Rn */ 825fcf5ef2aSThomas Huth tcg_gen_mul_i32(cpu_macl, REG(B7_4), REG(B11_8)); 826fcf5ef2aSThomas Huth return; 827fcf5ef2aSThomas Huth case 0x200f: /* muls.w Rm,Rn */ 828fcf5ef2aSThomas Huth { 829fcf5ef2aSThomas Huth TCGv arg0, arg1; 830fcf5ef2aSThomas Huth arg0 = tcg_temp_new(); 831fcf5ef2aSThomas Huth tcg_gen_ext16s_i32(arg0, REG(B7_4)); 832fcf5ef2aSThomas Huth arg1 = tcg_temp_new(); 833fcf5ef2aSThomas Huth tcg_gen_ext16s_i32(arg1, REG(B11_8)); 834fcf5ef2aSThomas Huth tcg_gen_mul_i32(cpu_macl, arg0, arg1); 
835fcf5ef2aSThomas Huth } 836fcf5ef2aSThomas Huth return; 837fcf5ef2aSThomas Huth case 0x200e: /* mulu.w Rm,Rn */ 838fcf5ef2aSThomas Huth { 839fcf5ef2aSThomas Huth TCGv arg0, arg1; 840fcf5ef2aSThomas Huth arg0 = tcg_temp_new(); 841fcf5ef2aSThomas Huth tcg_gen_ext16u_i32(arg0, REG(B7_4)); 842fcf5ef2aSThomas Huth arg1 = tcg_temp_new(); 843fcf5ef2aSThomas Huth tcg_gen_ext16u_i32(arg1, REG(B11_8)); 844fcf5ef2aSThomas Huth tcg_gen_mul_i32(cpu_macl, arg0, arg1); 845fcf5ef2aSThomas Huth } 846fcf5ef2aSThomas Huth return; 847fcf5ef2aSThomas Huth case 0x600b: /* neg Rm,Rn */ 848fcf5ef2aSThomas Huth tcg_gen_neg_i32(REG(B11_8), REG(B7_4)); 849fcf5ef2aSThomas Huth return; 850fcf5ef2aSThomas Huth case 0x600a: /* negc Rm,Rn */ 851fcf5ef2aSThomas Huth { 852950b91beSRichard Henderson TCGv t0 = tcg_constant_i32(0); 853fcf5ef2aSThomas Huth tcg_gen_add2_i32(REG(B11_8), cpu_sr_t, 854fcf5ef2aSThomas Huth REG(B7_4), t0, cpu_sr_t, t0); 855fcf5ef2aSThomas Huth tcg_gen_sub2_i32(REG(B11_8), cpu_sr_t, 856fcf5ef2aSThomas Huth t0, t0, REG(B11_8), cpu_sr_t); 857fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, cpu_sr_t, 1); 858fcf5ef2aSThomas Huth } 859fcf5ef2aSThomas Huth return; 860fcf5ef2aSThomas Huth case 0x6007: /* not Rm,Rn */ 861fcf5ef2aSThomas Huth tcg_gen_not_i32(REG(B11_8), REG(B7_4)); 862fcf5ef2aSThomas Huth return; 863fcf5ef2aSThomas Huth case 0x200b: /* or Rm,Rn */ 864fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), REG(B11_8), REG(B7_4)); 865fcf5ef2aSThomas Huth return; 866fcf5ef2aSThomas Huth case 0x400c: /* shad Rm,Rn */ 867fcf5ef2aSThomas Huth { 868fcf5ef2aSThomas Huth TCGv t0 = tcg_temp_new(); 869fcf5ef2aSThomas Huth TCGv t1 = tcg_temp_new(); 870fcf5ef2aSThomas Huth TCGv t2 = tcg_temp_new(); 871fcf5ef2aSThomas Huth 872fcf5ef2aSThomas Huth tcg_gen_andi_i32(t0, REG(B7_4), 0x1f); 873fcf5ef2aSThomas Huth 874fcf5ef2aSThomas Huth /* positive case: shift to the left */ 875fcf5ef2aSThomas Huth tcg_gen_shl_i32(t1, REG(B11_8), t0); 876fcf5ef2aSThomas Huth 877fcf5ef2aSThomas Huth /* negative case: shift to the right in two steps to 878fcf5ef2aSThomas Huth correctly handle the -32 case */ 879fcf5ef2aSThomas Huth tcg_gen_xori_i32(t0, t0, 0x1f); 880fcf5ef2aSThomas Huth tcg_gen_sar_i32(t2, REG(B11_8), t0); 881fcf5ef2aSThomas Huth tcg_gen_sari_i32(t2, t2, 1); 882fcf5ef2aSThomas Huth 883fcf5ef2aSThomas Huth /* select between the two cases */ 884fcf5ef2aSThomas Huth tcg_gen_movi_i32(t0, 0); 885fcf5ef2aSThomas Huth tcg_gen_movcond_i32(TCG_COND_GE, REG(B11_8), REG(B7_4), t0, t1, t2); 886fcf5ef2aSThomas Huth } 887fcf5ef2aSThomas Huth return; 888fcf5ef2aSThomas Huth case 0x400d: /* shld Rm,Rn */ 889fcf5ef2aSThomas Huth { 890fcf5ef2aSThomas Huth TCGv t0 = tcg_temp_new(); 891fcf5ef2aSThomas Huth TCGv t1 = tcg_temp_new(); 892fcf5ef2aSThomas Huth TCGv t2 = tcg_temp_new(); 893fcf5ef2aSThomas Huth 894fcf5ef2aSThomas Huth tcg_gen_andi_i32(t0, REG(B7_4), 0x1f); 895fcf5ef2aSThomas Huth 896fcf5ef2aSThomas Huth /* positive case: shift to the left */ 897fcf5ef2aSThomas Huth tcg_gen_shl_i32(t1, REG(B11_8), t0); 898fcf5ef2aSThomas Huth 899fcf5ef2aSThomas Huth /* negative case: shift to the right in two steps to 900fcf5ef2aSThomas Huth correctly handle the -32 case */ 901fcf5ef2aSThomas Huth tcg_gen_xori_i32(t0, t0, 0x1f); 902fcf5ef2aSThomas Huth tcg_gen_shr_i32(t2, REG(B11_8), t0); 903fcf5ef2aSThomas Huth tcg_gen_shri_i32(t2, t2, 1); 904fcf5ef2aSThomas Huth 905fcf5ef2aSThomas Huth /* select between the two cases */ 906fcf5ef2aSThomas Huth tcg_gen_movi_i32(t0, 0); 907fcf5ef2aSThomas Huth tcg_gen_movcond_i32(TCG_COND_GE, REG(B11_8), 
REG(B7_4), t0, t1, t2); 908fcf5ef2aSThomas Huth } 909fcf5ef2aSThomas Huth return; 910fcf5ef2aSThomas Huth case 0x3008: /* sub Rm,Rn */ 911fcf5ef2aSThomas Huth tcg_gen_sub_i32(REG(B11_8), REG(B11_8), REG(B7_4)); 912fcf5ef2aSThomas Huth return; 913fcf5ef2aSThomas Huth case 0x300a: /* subc Rm,Rn */ 914fcf5ef2aSThomas Huth { 915fcf5ef2aSThomas Huth TCGv t0, t1; 916950b91beSRichard Henderson t0 = tcg_constant_tl(0); 917fcf5ef2aSThomas Huth t1 = tcg_temp_new(); 918fcf5ef2aSThomas Huth tcg_gen_add2_i32(t1, cpu_sr_t, cpu_sr_t, t0, REG(B7_4), t0); 919fcf5ef2aSThomas Huth tcg_gen_sub2_i32(REG(B11_8), cpu_sr_t, 920fcf5ef2aSThomas Huth REG(B11_8), t0, t1, cpu_sr_t); 921fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, cpu_sr_t, 1); 922fcf5ef2aSThomas Huth } 923fcf5ef2aSThomas Huth return; 924fcf5ef2aSThomas Huth case 0x300b: /* subv Rm,Rn */ 925fcf5ef2aSThomas Huth { 926fcf5ef2aSThomas Huth TCGv t0, t1, t2; 927fcf5ef2aSThomas Huth t0 = tcg_temp_new(); 928fcf5ef2aSThomas Huth tcg_gen_sub_i32(t0, REG(B11_8), REG(B7_4)); 929fcf5ef2aSThomas Huth t1 = tcg_temp_new(); 930fcf5ef2aSThomas Huth tcg_gen_xor_i32(t1, t0, REG(B7_4)); 931fcf5ef2aSThomas Huth t2 = tcg_temp_new(); 932fcf5ef2aSThomas Huth tcg_gen_xor_i32(t2, REG(B11_8), REG(B7_4)); 933fcf5ef2aSThomas Huth tcg_gen_and_i32(t1, t1, t2); 934fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_t, t1, 31); 935fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), t0); 936fcf5ef2aSThomas Huth } 937fcf5ef2aSThomas Huth return; 938fcf5ef2aSThomas Huth case 0x2008: /* tst Rm,Rn */ 939fcf5ef2aSThomas Huth { 940fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 941fcf5ef2aSThomas Huth tcg_gen_and_i32(val, REG(B7_4), REG(B11_8)); 942fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0); 943fcf5ef2aSThomas Huth } 944fcf5ef2aSThomas Huth return; 945fcf5ef2aSThomas Huth case 0x200a: /* xor Rm,Rn */ 946fcf5ef2aSThomas Huth tcg_gen_xor_i32(REG(B11_8), REG(B11_8), REG(B7_4)); 947fcf5ef2aSThomas Huth return; 948fcf5ef2aSThomas Huth case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */ 949fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 950a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 951bdcb3739SRichard Henderson int xsrc = XHACK(B7_4); 952bdcb3739SRichard Henderson int xdst = XHACK(B11_8); 953bdcb3739SRichard Henderson tcg_gen_mov_i32(FREG(xdst), FREG(xsrc)); 954bdcb3739SRichard Henderson tcg_gen_mov_i32(FREG(xdst + 1), FREG(xsrc + 1)); 955fcf5ef2aSThomas Huth } else { 9567c9f7038SRichard Henderson tcg_gen_mov_i32(FREG(B11_8), FREG(B7_4)); 957fcf5ef2aSThomas Huth } 958fcf5ef2aSThomas Huth return; 959fcf5ef2aSThomas Huth case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */ 960fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 961a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 9624d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 9634d57fa50SRichard Henderson gen_load_fpr64(ctx, fp, XHACK(B7_4)); 964fc313c64SFrédéric Pétrot tcg_gen_qemu_st_i64(fp, REG(B11_8), ctx->memidx, MO_TEUQ); 965fcf5ef2aSThomas Huth } else { 9667c9f7038SRichard Henderson tcg_gen_qemu_st_i32(FREG(B7_4), REG(B11_8), ctx->memidx, MO_TEUL); 967fcf5ef2aSThomas Huth } 968fcf5ef2aSThomas Huth return; 969fcf5ef2aSThomas Huth case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */ 970fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 971a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 9724d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 973fc313c64SFrédéric Pétrot tcg_gen_qemu_ld_i64(fp, REG(B7_4), ctx->memidx, MO_TEUQ); 9744d57fa50SRichard Henderson gen_store_fpr64(ctx, fp, XHACK(B11_8)); 
975fcf5ef2aSThomas Huth } else { 9767c9f7038SRichard Henderson tcg_gen_qemu_ld_i32(FREG(B11_8), REG(B7_4), ctx->memidx, MO_TEUL); 977fcf5ef2aSThomas Huth } 978fcf5ef2aSThomas Huth return; 979fcf5ef2aSThomas Huth case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */ 980fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 981a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 9824d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 983fc313c64SFrédéric Pétrot tcg_gen_qemu_ld_i64(fp, REG(B7_4), ctx->memidx, MO_TEUQ); 9844d57fa50SRichard Henderson gen_store_fpr64(ctx, fp, XHACK(B11_8)); 985fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 8); 986fcf5ef2aSThomas Huth } else { 9877c9f7038SRichard Henderson tcg_gen_qemu_ld_i32(FREG(B11_8), REG(B7_4), ctx->memidx, MO_TEUL); 988fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4); 989fcf5ef2aSThomas Huth } 990fcf5ef2aSThomas Huth return; 991fcf5ef2aSThomas Huth case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */ 992fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 9934d57fa50SRichard Henderson { 994fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new_i32(); 995a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 9964d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 9974d57fa50SRichard Henderson gen_load_fpr64(ctx, fp, XHACK(B7_4)); 9984d57fa50SRichard Henderson tcg_gen_subi_i32(addr, REG(B11_8), 8); 999fc313c64SFrédéric Pétrot tcg_gen_qemu_st_i64(fp, addr, ctx->memidx, MO_TEUQ); 1000fcf5ef2aSThomas Huth } else { 10014d57fa50SRichard Henderson tcg_gen_subi_i32(addr, REG(B11_8), 4); 10027c9f7038SRichard Henderson tcg_gen_qemu_st_i32(FREG(B7_4), addr, ctx->memidx, MO_TEUL); 1003fcf5ef2aSThomas Huth } 1004fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 10054d57fa50SRichard Henderson } 1006fcf5ef2aSThomas Huth return; 1007fcf5ef2aSThomas Huth case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rm - FPSCR: Nothing */ 1008fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1009fcf5ef2aSThomas Huth { 1010fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new_i32(); 1011fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B7_4), REG(0)); 1012a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 10134d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 1014fc313c64SFrédéric Pétrot tcg_gen_qemu_ld_i64(fp, addr, ctx->memidx, MO_TEUQ); 10154d57fa50SRichard Henderson gen_store_fpr64(ctx, fp, XHACK(B11_8)); 1016fcf5ef2aSThomas Huth } else { 10177c9f7038SRichard Henderson tcg_gen_qemu_ld_i32(FREG(B11_8), addr, ctx->memidx, MO_TEUL); 1018fcf5ef2aSThomas Huth } 1019fcf5ef2aSThomas Huth } 1020fcf5ef2aSThomas Huth return; 1021fcf5ef2aSThomas Huth case 0xf007: /* fmov {F,D,X}Rn,@(R0,Rn) - FPSCR: Nothing */ 1022fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1023fcf5ef2aSThomas Huth { 1024fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1025fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B11_8), REG(0)); 1026a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 10274d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 10284d57fa50SRichard Henderson gen_load_fpr64(ctx, fp, XHACK(B7_4)); 1029fc313c64SFrédéric Pétrot tcg_gen_qemu_st_i64(fp, addr, ctx->memidx, MO_TEUQ); 1030fcf5ef2aSThomas Huth } else { 10317c9f7038SRichard Henderson tcg_gen_qemu_st_i32(FREG(B7_4), addr, ctx->memidx, MO_TEUL); 1032fcf5ef2aSThomas Huth } 1033fcf5ef2aSThomas Huth } 1034fcf5ef2aSThomas Huth return; 1035fcf5ef2aSThomas Huth case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */ 1036fcf5ef2aSThomas Huth case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */ 
1037fcf5ef2aSThomas Huth case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */ 1038fcf5ef2aSThomas Huth case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */ 1039fcf5ef2aSThomas Huth case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */ 1040fcf5ef2aSThomas Huth case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */ 1041fcf5ef2aSThomas Huth { 1042fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1043a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 1044fcf5ef2aSThomas Huth TCGv_i64 fp0, fp1; 1045fcf5ef2aSThomas Huth 104693dc9c89SRichard Henderson if (ctx->opcode & 0x0110) { 104793dc9c89SRichard Henderson goto do_illegal; 104893dc9c89SRichard Henderson } 1049fcf5ef2aSThomas Huth fp0 = tcg_temp_new_i64(); 1050fcf5ef2aSThomas Huth fp1 = tcg_temp_new_i64(); 10511e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp0, B11_8); 10521e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp1, B7_4); 1053fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf00f) { 1054fcf5ef2aSThomas Huth case 0xf000: /* fadd Rm,Rn */ 1055fcf5ef2aSThomas Huth gen_helper_fadd_DT(fp0, cpu_env, fp0, fp1); 1056fcf5ef2aSThomas Huth break; 1057fcf5ef2aSThomas Huth case 0xf001: /* fsub Rm,Rn */ 1058fcf5ef2aSThomas Huth gen_helper_fsub_DT(fp0, cpu_env, fp0, fp1); 1059fcf5ef2aSThomas Huth break; 1060fcf5ef2aSThomas Huth case 0xf002: /* fmul Rm,Rn */ 1061fcf5ef2aSThomas Huth gen_helper_fmul_DT(fp0, cpu_env, fp0, fp1); 1062fcf5ef2aSThomas Huth break; 1063fcf5ef2aSThomas Huth case 0xf003: /* fdiv Rm,Rn */ 1064fcf5ef2aSThomas Huth gen_helper_fdiv_DT(fp0, cpu_env, fp0, fp1); 1065fcf5ef2aSThomas Huth break; 1066fcf5ef2aSThomas Huth case 0xf004: /* fcmp/eq Rm,Rn */ 106792f1f83eSAurelien Jarno gen_helper_fcmp_eq_DT(cpu_sr_t, cpu_env, fp0, fp1); 1068fcf5ef2aSThomas Huth return; 1069fcf5ef2aSThomas Huth case 0xf005: /* fcmp/gt Rm,Rn */ 107092f1f83eSAurelien Jarno gen_helper_fcmp_gt_DT(cpu_sr_t, cpu_env, fp0, fp1); 1071fcf5ef2aSThomas Huth return; 1072fcf5ef2aSThomas Huth } 10731e0b21d8SRichard Henderson gen_store_fpr64(ctx, fp0, B11_8); 1074fcf5ef2aSThomas Huth } else { 1075fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf00f) { 1076fcf5ef2aSThomas Huth case 0xf000: /* fadd Rm,Rn */ 10777c9f7038SRichard Henderson gen_helper_fadd_FT(FREG(B11_8), cpu_env, 10787c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1079fcf5ef2aSThomas Huth break; 1080fcf5ef2aSThomas Huth case 0xf001: /* fsub Rm,Rn */ 10817c9f7038SRichard Henderson gen_helper_fsub_FT(FREG(B11_8), cpu_env, 10827c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1083fcf5ef2aSThomas Huth break; 1084fcf5ef2aSThomas Huth case 0xf002: /* fmul Rm,Rn */ 10857c9f7038SRichard Henderson gen_helper_fmul_FT(FREG(B11_8), cpu_env, 10867c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1087fcf5ef2aSThomas Huth break; 1088fcf5ef2aSThomas Huth case 0xf003: /* fdiv Rm,Rn */ 10897c9f7038SRichard Henderson gen_helper_fdiv_FT(FREG(B11_8), cpu_env, 10907c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1091fcf5ef2aSThomas Huth break; 1092fcf5ef2aSThomas Huth case 0xf004: /* fcmp/eq Rm,Rn */ 109392f1f83eSAurelien Jarno gen_helper_fcmp_eq_FT(cpu_sr_t, cpu_env, 10947c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1095fcf5ef2aSThomas Huth return; 1096fcf5ef2aSThomas Huth case 0xf005: /* fcmp/gt Rm,Rn */ 109792f1f83eSAurelien Jarno gen_helper_fcmp_gt_FT(cpu_sr_t, cpu_env, 10987c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1099fcf5ef2aSThomas Huth return; 1100fcf5ef2aSThomas Huth } 1101fcf5ef2aSThomas Huth } 1102fcf5ef2aSThomas Huth } 
1103fcf5ef2aSThomas Huth return; 1104fcf5ef2aSThomas Huth case 0xf00e: /* fmac FR0,RM,Rn */ 1105fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 11067e9f7ca8SRichard Henderson CHECK_FPSCR_PR_0 11077c9f7038SRichard Henderson gen_helper_fmac_FT(FREG(B11_8), cpu_env, 11087c9f7038SRichard Henderson FREG(0), FREG(B7_4), FREG(B11_8)); 1109fcf5ef2aSThomas Huth return; 1110fcf5ef2aSThomas Huth } 1111fcf5ef2aSThomas Huth 1112fcf5ef2aSThomas Huth switch (ctx->opcode & 0xff00) { 1113fcf5ef2aSThomas Huth case 0xc900: /* and #imm,R0 */ 1114fcf5ef2aSThomas Huth tcg_gen_andi_i32(REG(0), REG(0), B7_0); 1115fcf5ef2aSThomas Huth return; 1116fcf5ef2aSThomas Huth case 0xcd00: /* and.b #imm,@(R0,GBR) */ 1117fcf5ef2aSThomas Huth { 1118fcf5ef2aSThomas Huth TCGv addr, val; 1119fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1120fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(0), cpu_gbr); 1121fcf5ef2aSThomas Huth val = tcg_temp_new(); 1122fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB); 1123fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, val, B7_0); 1124fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB); 1125fcf5ef2aSThomas Huth } 1126fcf5ef2aSThomas Huth return; 1127fcf5ef2aSThomas Huth case 0x8b00: /* bf label */ 1128fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 11296f1c2af6SRichard Henderson gen_conditional_jump(ctx, ctx->base.pc_next + 4 + B7_0s * 2, false); 1130fcf5ef2aSThomas Huth return; 1131fcf5ef2aSThomas Huth case 0x8f00: /* bf/s label */ 1132fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1133ac9707eaSAurelien Jarno tcg_gen_xori_i32(cpu_delayed_cond, cpu_sr_t, 1); 11346f1c2af6SRichard Henderson ctx->delayed_pc = ctx->base.pc_next + 4 + B7_0s * 2; 1135ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT_COND; 1136fcf5ef2aSThomas Huth return; 1137fcf5ef2aSThomas Huth case 0x8900: /* bt label */ 1138fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 11396f1c2af6SRichard Henderson gen_conditional_jump(ctx, ctx->base.pc_next + 4 + B7_0s * 2, true); 1140fcf5ef2aSThomas Huth return; 1141fcf5ef2aSThomas Huth case 0x8d00: /* bt/s label */ 1142fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1143ac9707eaSAurelien Jarno tcg_gen_mov_i32(cpu_delayed_cond, cpu_sr_t); 11446f1c2af6SRichard Henderson ctx->delayed_pc = ctx->base.pc_next + 4 + B7_0s * 2; 1145ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT_COND; 1146fcf5ef2aSThomas Huth return; 1147fcf5ef2aSThomas Huth case 0x8800: /* cmp/eq #imm,R0 */ 1148fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, REG(0), B7_0s); 1149fcf5ef2aSThomas Huth return; 1150fcf5ef2aSThomas Huth case 0xc400: /* mov.b @(disp,GBR),R0 */ 1151fcf5ef2aSThomas Huth { 1152fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1153fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0); 1154fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_SB); 1155fcf5ef2aSThomas Huth } 1156fcf5ef2aSThomas Huth return; 1157fcf5ef2aSThomas Huth case 0xc500: /* mov.w @(disp,GBR),R0 */ 1158fcf5ef2aSThomas Huth { 1159fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1160fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2); 1161fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_TESW); 1162fcf5ef2aSThomas Huth } 1163fcf5ef2aSThomas Huth return; 1164fcf5ef2aSThomas Huth case 0xc600: /* mov.l @(disp,GBR),R0 */ 1165fcf5ef2aSThomas Huth { 1166fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1167fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4); 1168fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_TESL); 
1169fcf5ef2aSThomas Huth } 1170fcf5ef2aSThomas Huth return; 1171fcf5ef2aSThomas Huth case 0xc000: /* mov.b R0,@(disp,GBR) */ 1172fcf5ef2aSThomas Huth { 1173fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1174fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0); 1175fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_UB); 1176fcf5ef2aSThomas Huth } 1177fcf5ef2aSThomas Huth return; 1178fcf5ef2aSThomas Huth case 0xc100: /* mov.w R0,@(disp,GBR) */ 1179fcf5ef2aSThomas Huth { 1180fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1181fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2); 1182fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_TEUW); 1183fcf5ef2aSThomas Huth } 1184fcf5ef2aSThomas Huth return; 1185fcf5ef2aSThomas Huth case 0xc200: /* mov.l R0,@(disp,GBR) */ 1186fcf5ef2aSThomas Huth { 1187fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1188fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4); 1189fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_TEUL); 1190fcf5ef2aSThomas Huth } 1191fcf5ef2aSThomas Huth return; 1192fcf5ef2aSThomas Huth case 0x8000: /* mov.b R0,@(disp,Rn) */ 1193fcf5ef2aSThomas Huth { 1194fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1195fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0); 1196fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_UB); 1197fcf5ef2aSThomas Huth } 1198fcf5ef2aSThomas Huth return; 1199fcf5ef2aSThomas Huth case 0x8100: /* mov.w R0,@(disp,Rn) */ 1200fcf5ef2aSThomas Huth { 1201fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1202fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2); 12034da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, 12044da06fb3SRichard Henderson MO_TEUW | UNALIGN(ctx)); 1205fcf5ef2aSThomas Huth } 1206fcf5ef2aSThomas Huth return; 1207fcf5ef2aSThomas Huth case 0x8400: /* mov.b @(disp,Rn),R0 */ 1208fcf5ef2aSThomas Huth { 1209fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1210fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0); 1211fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_SB); 1212fcf5ef2aSThomas Huth } 1213fcf5ef2aSThomas Huth return; 1214fcf5ef2aSThomas Huth case 0x8500: /* mov.w @(disp,Rn),R0 */ 1215fcf5ef2aSThomas Huth { 1216fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1217fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2); 12184da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, 12194da06fb3SRichard Henderson MO_TESW | UNALIGN(ctx)); 1220fcf5ef2aSThomas Huth } 1221fcf5ef2aSThomas Huth return; 1222fcf5ef2aSThomas Huth case 0xc700: /* mova @(disp,PC),R0 */ 12236f1c2af6SRichard Henderson tcg_gen_movi_i32(REG(0), ((ctx->base.pc_next & 0xfffffffc) + 12246f1c2af6SRichard Henderson 4 + B7_0 * 4) & ~3); 1225fcf5ef2aSThomas Huth return; 1226fcf5ef2aSThomas Huth case 0xcb00: /* or #imm,R0 */ 1227fcf5ef2aSThomas Huth tcg_gen_ori_i32(REG(0), REG(0), B7_0); 1228fcf5ef2aSThomas Huth return; 1229fcf5ef2aSThomas Huth case 0xcf00: /* or.b #imm,@(R0,GBR) */ 1230fcf5ef2aSThomas Huth { 1231fcf5ef2aSThomas Huth TCGv addr, val; 1232fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1233fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(0), cpu_gbr); 1234fcf5ef2aSThomas Huth val = tcg_temp_new(); 1235fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB); 1236fcf5ef2aSThomas Huth tcg_gen_ori_i32(val, val, B7_0); 1237fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB); 1238fcf5ef2aSThomas 
Huth } 1239fcf5ef2aSThomas Huth return; 1240fcf5ef2aSThomas Huth case 0xc300: /* trapa #imm */ 1241fcf5ef2aSThomas Huth { 1242fcf5ef2aSThomas Huth TCGv imm; 1243fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1244ac9707eaSAurelien Jarno gen_save_cpu_state(ctx, true); 1245950b91beSRichard Henderson imm = tcg_constant_i32(B7_0); 1246fcf5ef2aSThomas Huth gen_helper_trapa(cpu_env, imm); 12476f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 1248fcf5ef2aSThomas Huth } 1249fcf5ef2aSThomas Huth return; 1250fcf5ef2aSThomas Huth case 0xc800: /* tst #imm,R0 */ 1251fcf5ef2aSThomas Huth { 1252fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1253fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, REG(0), B7_0); 1254fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0); 1255fcf5ef2aSThomas Huth } 1256fcf5ef2aSThomas Huth return; 1257fcf5ef2aSThomas Huth case 0xcc00: /* tst.b #imm,@(R0,GBR) */ 1258fcf5ef2aSThomas Huth { 1259fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1260fcf5ef2aSThomas Huth tcg_gen_add_i32(val, REG(0), cpu_gbr); 1261fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, val, ctx->memidx, MO_UB); 1262fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, val, B7_0); 1263fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0); 1264fcf5ef2aSThomas Huth } 1265fcf5ef2aSThomas Huth return; 1266fcf5ef2aSThomas Huth case 0xca00: /* xor #imm,R0 */ 1267fcf5ef2aSThomas Huth tcg_gen_xori_i32(REG(0), REG(0), B7_0); 1268fcf5ef2aSThomas Huth return; 1269fcf5ef2aSThomas Huth case 0xce00: /* xor.b #imm,@(R0,GBR) */ 1270fcf5ef2aSThomas Huth { 1271fcf5ef2aSThomas Huth TCGv addr, val; 1272fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1273fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(0), cpu_gbr); 1274fcf5ef2aSThomas Huth val = tcg_temp_new(); 1275fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB); 1276fcf5ef2aSThomas Huth tcg_gen_xori_i32(val, val, B7_0); 1277fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB); 1278fcf5ef2aSThomas Huth } 1279fcf5ef2aSThomas Huth return; 1280fcf5ef2aSThomas Huth } 1281fcf5ef2aSThomas Huth 1282fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf08f) { 1283fcf5ef2aSThomas Huth case 0x408e: /* ldc Rm,Rn_BANK */ 1284fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1285fcf5ef2aSThomas Huth tcg_gen_mov_i32(ALTREG(B6_4), REG(B11_8)); 1286fcf5ef2aSThomas Huth return; 1287fcf5ef2aSThomas Huth case 0x4087: /* ldc.l @Rm+,Rn_BANK */ 1288fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1289fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(ALTREG(B6_4), REG(B11_8), ctx->memidx, MO_TESL); 1290fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 1291fcf5ef2aSThomas Huth return; 1292fcf5ef2aSThomas Huth case 0x0082: /* stc Rm_BANK,Rn */ 1293fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1294fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), ALTREG(B6_4)); 1295fcf5ef2aSThomas Huth return; 1296fcf5ef2aSThomas Huth case 0x4083: /* stc.l Rm_BANK,@-Rn */ 1297fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1298fcf5ef2aSThomas Huth { 1299fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1300fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 1301fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(ALTREG(B6_4), addr, ctx->memidx, MO_TEUL); 1302fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 1303fcf5ef2aSThomas Huth } 1304fcf5ef2aSThomas Huth return; 1305fcf5ef2aSThomas Huth } 1306fcf5ef2aSThomas Huth 1307fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf0ff) { 1308fcf5ef2aSThomas Huth case 0x0023: /* braf Rn */ 1309fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 13106f1c2af6SRichard Henderson 
tcg_gen_addi_i32(cpu_delayed_pc, REG(B11_8), ctx->base.pc_next + 4); 1311ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT; 1312fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1313fcf5ef2aSThomas Huth return; 1314fcf5ef2aSThomas Huth case 0x0003: /* bsrf Rn */ 1315fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 13166f1c2af6SRichard Henderson tcg_gen_movi_i32(cpu_pr, ctx->base.pc_next + 4); 1317fcf5ef2aSThomas Huth tcg_gen_add_i32(cpu_delayed_pc, REG(B11_8), cpu_pr); 1318ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT; 1319fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1320fcf5ef2aSThomas Huth return; 1321fcf5ef2aSThomas Huth case 0x4015: /* cmp/pl Rn */ 1322fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_GT, cpu_sr_t, REG(B11_8), 0); 1323fcf5ef2aSThomas Huth return; 1324fcf5ef2aSThomas Huth case 0x4011: /* cmp/pz Rn */ 1325fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_GE, cpu_sr_t, REG(B11_8), 0); 1326fcf5ef2aSThomas Huth return; 1327fcf5ef2aSThomas Huth case 0x4010: /* dt Rn */ 1328fcf5ef2aSThomas Huth tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 1); 1329fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, REG(B11_8), 0); 1330fcf5ef2aSThomas Huth return; 1331fcf5ef2aSThomas Huth case 0x402b: /* jmp @Rn */ 1332fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1333fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8)); 1334ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT; 1335fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1336fcf5ef2aSThomas Huth return; 1337fcf5ef2aSThomas Huth case 0x400b: /* jsr @Rn */ 1338fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 13396f1c2af6SRichard Henderson tcg_gen_movi_i32(cpu_pr, ctx->base.pc_next + 4); 1340fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8)); 1341ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT; 1342fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1343fcf5ef2aSThomas Huth return; 1344fcf5ef2aSThomas Huth case 0x400e: /* ldc Rm,SR */ 1345fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1346fcf5ef2aSThomas Huth { 1347fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1348fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, REG(B11_8), 0x700083f3); 1349fcf5ef2aSThomas Huth gen_write_sr(val); 13506f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 1351fcf5ef2aSThomas Huth } 1352fcf5ef2aSThomas Huth return; 1353fcf5ef2aSThomas Huth case 0x4007: /* ldc.l @Rm+,SR */ 1354fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1355fcf5ef2aSThomas Huth { 1356fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1357fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, REG(B11_8), ctx->memidx, MO_TESL); 1358fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, val, 0x700083f3); 1359fcf5ef2aSThomas Huth gen_write_sr(val); 1360fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 13616f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 1362fcf5ef2aSThomas Huth } 1363fcf5ef2aSThomas Huth return; 1364fcf5ef2aSThomas Huth case 0x0002: /* stc SR,Rn */ 1365fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1366fcf5ef2aSThomas Huth gen_read_sr(REG(B11_8)); 1367fcf5ef2aSThomas Huth return; 1368fcf5ef2aSThomas Huth case 0x4003: /* stc SR,@-Rn */ 1369fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1370fcf5ef2aSThomas Huth { 1371fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1372fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1373fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 1374fcf5ef2aSThomas Huth gen_read_sr(val); 1375fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_TEUL); 
1376fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 1377fcf5ef2aSThomas Huth } 1378fcf5ef2aSThomas Huth return; 1379fcf5ef2aSThomas Huth #define LD(reg,ldnum,ldpnum,prechk) \ 1380fcf5ef2aSThomas Huth case ldnum: \ 1381fcf5ef2aSThomas Huth prechk \ 1382fcf5ef2aSThomas Huth tcg_gen_mov_i32 (cpu_##reg, REG(B11_8)); \ 1383fcf5ef2aSThomas Huth return; \ 1384fcf5ef2aSThomas Huth case ldpnum: \ 1385fcf5ef2aSThomas Huth prechk \ 1386fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(cpu_##reg, REG(B11_8), ctx->memidx, MO_TESL); \ 1387fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); \ 1388fcf5ef2aSThomas Huth return; 1389fcf5ef2aSThomas Huth #define ST(reg,stnum,stpnum,prechk) \ 1390fcf5ef2aSThomas Huth case stnum: \ 1391fcf5ef2aSThomas Huth prechk \ 1392fcf5ef2aSThomas Huth tcg_gen_mov_i32 (REG(B11_8), cpu_##reg); \ 1393fcf5ef2aSThomas Huth return; \ 1394fcf5ef2aSThomas Huth case stpnum: \ 1395fcf5ef2aSThomas Huth prechk \ 1396fcf5ef2aSThomas Huth { \ 1397fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); \ 1398fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); \ 1399fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(cpu_##reg, addr, ctx->memidx, MO_TEUL); \ 1400fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); \ 1401fcf5ef2aSThomas Huth } \ 1402fcf5ef2aSThomas Huth return; 1403fcf5ef2aSThomas Huth #define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk) \ 1404fcf5ef2aSThomas Huth LD(reg,ldnum,ldpnum,prechk) \ 1405fcf5ef2aSThomas Huth ST(reg,stnum,stpnum,prechk) 1406fcf5ef2aSThomas Huth LDST(gbr, 0x401e, 0x4017, 0x0012, 0x4013, {}) 1407fcf5ef2aSThomas Huth LDST(vbr, 0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED) 1408fcf5ef2aSThomas Huth LDST(ssr, 0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED) 1409fcf5ef2aSThomas Huth LDST(spc, 0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED) 1410fcf5ef2aSThomas Huth ST(sgr, 0x003a, 0x4032, CHECK_PRIVILEGED) 1411ccae24d4SRichard Henderson LD(sgr, 0x403a, 0x4036, CHECK_PRIVILEGED CHECK_SH4A) 1412fcf5ef2aSThomas Huth LDST(dbr, 0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED) 1413fcf5ef2aSThomas Huth LDST(mach, 0x400a, 0x4006, 0x000a, 0x4002, {}) 1414fcf5ef2aSThomas Huth LDST(macl, 0x401a, 0x4016, 0x001a, 0x4012, {}) 1415fcf5ef2aSThomas Huth LDST(pr, 0x402a, 0x4026, 0x002a, 0x4022, {}) 1416fcf5ef2aSThomas Huth LDST(fpul, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED}) 1417fcf5ef2aSThomas Huth case 0x406a: /* lds Rm,FPSCR */ 1418fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1419fcf5ef2aSThomas Huth gen_helper_ld_fpscr(cpu_env, REG(B11_8)); 14206f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 1421fcf5ef2aSThomas Huth return; 1422fcf5ef2aSThomas Huth case 0x4066: /* lds.l @Rm+,FPSCR */ 1423fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1424fcf5ef2aSThomas Huth { 1425fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1426fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(addr, REG(B11_8), ctx->memidx, MO_TESL); 1427fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 1428fcf5ef2aSThomas Huth gen_helper_ld_fpscr(cpu_env, addr); 14296f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 1430fcf5ef2aSThomas Huth } 1431fcf5ef2aSThomas Huth return; 1432fcf5ef2aSThomas Huth case 0x006a: /* sts FPSCR,Rn */ 1433fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1434fcf5ef2aSThomas Huth tcg_gen_andi_i32(REG(B11_8), cpu_fpscr, 0x003fffff); 1435fcf5ef2aSThomas Huth return; 1436fcf5ef2aSThomas Huth case 0x4062: /* sts FPSCR,@-Rn */ 1437fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1438fcf5ef2aSThomas Huth { 1439fcf5ef2aSThomas Huth TCGv addr, val; 1440fcf5ef2aSThomas Huth val = 
tcg_temp_new(); 1441fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, cpu_fpscr, 0x003fffff); 1442fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1443fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 1444fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_TEUL); 1445fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 1446fcf5ef2aSThomas Huth } 1447fcf5ef2aSThomas Huth return; 1448fcf5ef2aSThomas Huth case 0x00c3: /* movca.l R0,@Rm */ 1449fcf5ef2aSThomas Huth { 1450fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1451fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, REG(B11_8), ctx->memidx, MO_TEUL); 1452fcf5ef2aSThomas Huth gen_helper_movcal(cpu_env, REG(B11_8), val); 1453fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), REG(B11_8), ctx->memidx, MO_TEUL); 1454fcf5ef2aSThomas Huth } 1455fcf5ef2aSThomas Huth ctx->has_movcal = 1; 1456fcf5ef2aSThomas Huth return; 1457143021b2SAurelien Jarno case 0x40a9: /* movua.l @Rm,R0 */ 1458ccae24d4SRichard Henderson CHECK_SH4A 1459143021b2SAurelien Jarno /* Load non-boundary-aligned data */ 146034257c21SAurelien Jarno tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, 146134257c21SAurelien Jarno MO_TEUL | MO_UNALN); 1462fcf5ef2aSThomas Huth return; 1463143021b2SAurelien Jarno case 0x40e9: /* movua.l @Rm+,R0 */ 1464ccae24d4SRichard Henderson CHECK_SH4A 1465143021b2SAurelien Jarno /* Load non-boundary-aligned data */ 146634257c21SAurelien Jarno tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, 146734257c21SAurelien Jarno MO_TEUL | MO_UNALN); 1468fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 1469fcf5ef2aSThomas Huth return; 1470fcf5ef2aSThomas Huth case 0x0029: /* movt Rn */ 1471fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), cpu_sr_t); 1472fcf5ef2aSThomas Huth return; 1473fcf5ef2aSThomas Huth case 0x0073: 1474fcf5ef2aSThomas Huth /* MOVCO.L 1475f85da308SRichard Henderson * LDST -> T 1476f85da308SRichard Henderson * If (T == 1) R0 -> (Rn) 1477f85da308SRichard Henderson * 0 -> LDST 1478f85da308SRichard Henderson * 1479f85da308SRichard Henderson * The above description doesn't work in a parallel context. 1480f85da308SRichard Henderson * Since we currently support no smp boards, this implies user-mode. 1481f85da308SRichard Henderson * But we can still support the official mechanism while user-mode 1482f85da308SRichard Henderson * is single-threaded. 
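 *
 * For illustration only (not part of this file's logic; register choice
 * arbitrary), guest code normally uses the movli.l/movco.l pair in a
 * retry loop along these lines:
 *
 *     0: movli.l @r4,r0     ! load-linked
 *        add     #1,r0      ! modify the value
 *        movco.l r0,@r4     ! store-conditional, T=1 on success
 *        bf      0b         ! retry while the store keeps failing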
*/ 1483ccae24d4SRichard Henderson CHECK_SH4A 1484ccae24d4SRichard Henderson { 1485f85da308SRichard Henderson TCGLabel *fail = gen_new_label(); 1486f85da308SRichard Henderson TCGLabel *done = gen_new_label(); 1487f85da308SRichard Henderson 14886f1c2af6SRichard Henderson if ((tb_cflags(ctx->base.tb) & CF_PARALLEL)) { 1489f85da308SRichard Henderson TCGv tmp; 1490f85da308SRichard Henderson 1491f85da308SRichard Henderson tcg_gen_brcond_i32(TCG_COND_NE, REG(B11_8), 1492f85da308SRichard Henderson cpu_lock_addr, fail); 1493f85da308SRichard Henderson tmp = tcg_temp_new(); 1494f85da308SRichard Henderson tcg_gen_atomic_cmpxchg_i32(tmp, REG(B11_8), cpu_lock_value, 1495f85da308SRichard Henderson REG(0), ctx->memidx, MO_TEUL); 1496f85da308SRichard Henderson tcg_gen_setcond_i32(TCG_COND_EQ, cpu_sr_t, tmp, cpu_lock_value); 1497f85da308SRichard Henderson } else { 1498f85da308SRichard Henderson tcg_gen_brcondi_i32(TCG_COND_EQ, cpu_lock_addr, -1, fail); 1499fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), REG(B11_8), ctx->memidx, MO_TEUL); 1500f85da308SRichard Henderson tcg_gen_movi_i32(cpu_sr_t, 1); 1501ccae24d4SRichard Henderson } 1502f85da308SRichard Henderson tcg_gen_br(done); 1503f85da308SRichard Henderson 1504f85da308SRichard Henderson gen_set_label(fail); 1505f85da308SRichard Henderson tcg_gen_movi_i32(cpu_sr_t, 0); 1506f85da308SRichard Henderson 1507f85da308SRichard Henderson gen_set_label(done); 1508f85da308SRichard Henderson tcg_gen_movi_i32(cpu_lock_addr, -1); 1509f85da308SRichard Henderson } 1510f85da308SRichard Henderson return; 1511fcf5ef2aSThomas Huth case 0x0063: 1512fcf5ef2aSThomas Huth /* MOVLI.L @Rm,R0 1513f85da308SRichard Henderson * 1 -> LDST 1514f85da308SRichard Henderson * (Rm) -> R0 1515f85da308SRichard Henderson * When interrupt/exception 1516f85da308SRichard Henderson * occurred 0 -> LDST 1517f85da308SRichard Henderson * 1518f85da308SRichard Henderson * In a parallel context, we must also save the loaded value 1519f85da308SRichard Henderson * for use with the cmpxchg that we'll use with movco.l. */ 1520ccae24d4SRichard Henderson CHECK_SH4A 15216f1c2af6SRichard Henderson if ((tb_cflags(ctx->base.tb) & CF_PARALLEL)) { 1522f85da308SRichard Henderson TCGv tmp = tcg_temp_new(); 1523f85da308SRichard Henderson tcg_gen_mov_i32(tmp, REG(B11_8)); 1524fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, MO_TESL); 1525f85da308SRichard Henderson tcg_gen_mov_i32(cpu_lock_value, REG(0)); 1526f85da308SRichard Henderson tcg_gen_mov_i32(cpu_lock_addr, tmp); 1527f85da308SRichard Henderson } else { 1528f85da308SRichard Henderson tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, MO_TESL); 1529f85da308SRichard Henderson tcg_gen_movi_i32(cpu_lock_addr, 0); 1530f85da308SRichard Henderson } 1531fcf5ef2aSThomas Huth return; 1532fcf5ef2aSThomas Huth case 0x0093: /* ocbi @Rn */ 1533fcf5ef2aSThomas Huth { 1534fcf5ef2aSThomas Huth gen_helper_ocbi(cpu_env, REG(B11_8)); 1535fcf5ef2aSThomas Huth } 1536fcf5ef2aSThomas Huth return; 1537fcf5ef2aSThomas Huth case 0x00a3: /* ocbp @Rn */ 1538fcf5ef2aSThomas Huth case 0x00b3: /* ocbwb @Rn */ 1539fcf5ef2aSThomas Huth /* These instructions are supposed to do nothing in case of 1540fcf5ef2aSThomas Huth a cache miss. Given that we only partially emulate caches 1541fcf5ef2aSThomas Huth it is safe to simply ignore them. 
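         For reference, ocbp purges the operand cache block (write back,
         then invalidate) and ocbwb only writes it back; with no dirty-line
         state modelled here, both reduce to no-ops.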
*/ 1542fcf5ef2aSThomas Huth return; 1543fcf5ef2aSThomas Huth case 0x0083: /* pref @Rn */ 1544fcf5ef2aSThomas Huth return; 1545fcf5ef2aSThomas Huth case 0x00d3: /* prefi @Rn */ 1546ccae24d4SRichard Henderson CHECK_SH4A 1547fcf5ef2aSThomas Huth return; 1548fcf5ef2aSThomas Huth case 0x00e3: /* icbi @Rn */ 1549ccae24d4SRichard Henderson CHECK_SH4A 1550fcf5ef2aSThomas Huth return; 1551fcf5ef2aSThomas Huth case 0x00ab: /* synco */ 1552ccae24d4SRichard Henderson CHECK_SH4A 1553aa351317SAurelien Jarno tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 1554fcf5ef2aSThomas Huth return; 1555fcf5ef2aSThomas Huth case 0x4024: /* rotcl Rn */ 1556fcf5ef2aSThomas Huth { 1557fcf5ef2aSThomas Huth TCGv tmp = tcg_temp_new(); 1558fcf5ef2aSThomas Huth tcg_gen_mov_i32(tmp, cpu_sr_t); 1559fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_t, REG(B11_8), 31); 1560fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1); 1561fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), REG(B11_8), tmp); 1562fcf5ef2aSThomas Huth } 1563fcf5ef2aSThomas Huth return; 1564fcf5ef2aSThomas Huth case 0x4025: /* rotcr Rn */ 1565fcf5ef2aSThomas Huth { 1566fcf5ef2aSThomas Huth TCGv tmp = tcg_temp_new(); 1567fcf5ef2aSThomas Huth tcg_gen_shli_i32(tmp, cpu_sr_t, 31); 1568fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1569fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1); 1570fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), REG(B11_8), tmp); 1571fcf5ef2aSThomas Huth } 1572fcf5ef2aSThomas Huth return; 1573fcf5ef2aSThomas Huth case 0x4004: /* rotl Rn */ 1574fcf5ef2aSThomas Huth tcg_gen_rotli_i32(REG(B11_8), REG(B11_8), 1); 1575fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1576fcf5ef2aSThomas Huth return; 1577fcf5ef2aSThomas Huth case 0x4005: /* rotr Rn */ 1578fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1579fcf5ef2aSThomas Huth tcg_gen_rotri_i32(REG(B11_8), REG(B11_8), 1); 1580fcf5ef2aSThomas Huth return; 1581fcf5ef2aSThomas Huth case 0x4000: /* shll Rn */ 1582fcf5ef2aSThomas Huth case 0x4020: /* shal Rn */ 1583fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_t, REG(B11_8), 31); 1584fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1); 1585fcf5ef2aSThomas Huth return; 1586fcf5ef2aSThomas Huth case 0x4021: /* shar Rn */ 1587fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1588fcf5ef2aSThomas Huth tcg_gen_sari_i32(REG(B11_8), REG(B11_8), 1); 1589fcf5ef2aSThomas Huth return; 1590fcf5ef2aSThomas Huth case 0x4001: /* shlr Rn */ 1591fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1592fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1); 1593fcf5ef2aSThomas Huth return; 1594fcf5ef2aSThomas Huth case 0x4008: /* shll2 Rn */ 1595fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 2); 1596fcf5ef2aSThomas Huth return; 1597fcf5ef2aSThomas Huth case 0x4018: /* shll8 Rn */ 1598fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 8); 1599fcf5ef2aSThomas Huth return; 1600fcf5ef2aSThomas Huth case 0x4028: /* shll16 Rn */ 1601fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 16); 1602fcf5ef2aSThomas Huth return; 1603fcf5ef2aSThomas Huth case 0x4009: /* shlr2 Rn */ 1604fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 2); 1605fcf5ef2aSThomas Huth return; 1606fcf5ef2aSThomas Huth case 0x4019: /* shlr8 Rn */ 1607fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 8); 1608fcf5ef2aSThomas Huth return; 1609fcf5ef2aSThomas Huth case 0x4029: /* shlr16 Rn */ 1610fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8),
REG(B11_8), 16); 1611fcf5ef2aSThomas Huth return; 1612fcf5ef2aSThomas Huth case 0x401b: /* tas.b @Rn */ 1613d3c2b2b3SRichard Henderson tcg_gen_atomic_fetch_or_i32(cpu_sr_t, REG(B11_8), 1614d3c2b2b3SRichard Henderson tcg_constant_i32(0x80), ctx->memidx, MO_UB); 1615d3c2b2b3SRichard Henderson tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, cpu_sr_t, 0); 1616fcf5ef2aSThomas Huth return; 1617fcf5ef2aSThomas Huth case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */ 1618fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 16197c9f7038SRichard Henderson tcg_gen_mov_i32(FREG(B11_8), cpu_fpul); 1620fcf5ef2aSThomas Huth return; 1621fcf5ef2aSThomas Huth case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */ 1622fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 16237c9f7038SRichard Henderson tcg_gen_mov_i32(cpu_fpul, FREG(B11_8)); 1624fcf5ef2aSThomas Huth return; 1625fcf5ef2aSThomas Huth case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */ 1626fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1627a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 1628fcf5ef2aSThomas Huth TCGv_i64 fp; 162993dc9c89SRichard Henderson if (ctx->opcode & 0x0100) { 163093dc9c89SRichard Henderson goto do_illegal; 163193dc9c89SRichard Henderson } 1632fcf5ef2aSThomas Huth fp = tcg_temp_new_i64(); 1633fcf5ef2aSThomas Huth gen_helper_float_DT(fp, cpu_env, cpu_fpul); 16341e0b21d8SRichard Henderson gen_store_fpr64(ctx, fp, B11_8); 1635fcf5ef2aSThomas Huth } 1636fcf5ef2aSThomas Huth else { 16377c9f7038SRichard Henderson gen_helper_float_FT(FREG(B11_8), cpu_env, cpu_fpul); 1638fcf5ef2aSThomas Huth } 1639fcf5ef2aSThomas Huth return; 1640fcf5ef2aSThomas Huth case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */ 1641fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1642a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 1643fcf5ef2aSThomas Huth TCGv_i64 fp; 164493dc9c89SRichard Henderson if (ctx->opcode & 0x0100) { 164593dc9c89SRichard Henderson goto do_illegal; 164693dc9c89SRichard Henderson } 1647fcf5ef2aSThomas Huth fp = tcg_temp_new_i64(); 16481e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp, B11_8); 1649fcf5ef2aSThomas Huth gen_helper_ftrc_DT(cpu_fpul, cpu_env, fp); 1650fcf5ef2aSThomas Huth } 1651fcf5ef2aSThomas Huth else { 16527c9f7038SRichard Henderson gen_helper_ftrc_FT(cpu_fpul, cpu_env, FREG(B11_8)); 1653fcf5ef2aSThomas Huth } 1654fcf5ef2aSThomas Huth return; 1655fcf5ef2aSThomas Huth case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */ 1656fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 16577c9f7038SRichard Henderson tcg_gen_xori_i32(FREG(B11_8), FREG(B11_8), 0x80000000); 1658fcf5ef2aSThomas Huth return; 165957f5c1b0SAurelien Jarno case 0xf05d: /* fabs FRn/DRn - FPCSR: Nothing */ 1660fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 16617c9f7038SRichard Henderson tcg_gen_andi_i32(FREG(B11_8), FREG(B11_8), 0x7fffffff); 1662fcf5ef2aSThomas Huth return; 1663fcf5ef2aSThomas Huth case 0xf06d: /* fsqrt FRn */ 1664fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1665a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 166693dc9c89SRichard Henderson if (ctx->opcode & 0x0100) { 166793dc9c89SRichard Henderson goto do_illegal; 166893dc9c89SRichard Henderson } 1669fcf5ef2aSThomas Huth TCGv_i64 fp = tcg_temp_new_i64(); 16701e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp, B11_8); 1671fcf5ef2aSThomas Huth gen_helper_fsqrt_DT(fp, cpu_env, fp); 16721e0b21d8SRichard Henderson gen_store_fpr64(ctx, fp, B11_8); 1673fcf5ef2aSThomas Huth } else { 16747c9f7038SRichard Henderson gen_helper_fsqrt_FT(FREG(B11_8), cpu_env, FREG(B11_8)); 1675fcf5ef2aSThomas Huth } 1676fcf5ef2aSThomas Huth 
return; 1677fcf5ef2aSThomas Huth case 0xf07d: /* fsrra FRn */ 1678fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 167911b7aa23SRichard Henderson CHECK_FPSCR_PR_0 168011b7aa23SRichard Henderson gen_helper_fsrra_FT(FREG(B11_8), cpu_env, FREG(B11_8)); 1681fcf5ef2aSThomas Huth break; 1682fcf5ef2aSThomas Huth case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */ 1683fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 16847e9f7ca8SRichard Henderson CHECK_FPSCR_PR_0 16857c9f7038SRichard Henderson tcg_gen_movi_i32(FREG(B11_8), 0); 1686fcf5ef2aSThomas Huth return; 1687fcf5ef2aSThomas Huth case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */ 1688fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 16897e9f7ca8SRichard Henderson CHECK_FPSCR_PR_0 16907c9f7038SRichard Henderson tcg_gen_movi_i32(FREG(B11_8), 0x3f800000); 1691fcf5ef2aSThomas Huth return; 1692fcf5ef2aSThomas Huth case 0xf0ad: /* fcnvsd FPUL,DRn */ 1693fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1694fcf5ef2aSThomas Huth { 1695fcf5ef2aSThomas Huth TCGv_i64 fp = tcg_temp_new_i64(); 1696fcf5ef2aSThomas Huth gen_helper_fcnvsd_FT_DT(fp, cpu_env, cpu_fpul); 16971e0b21d8SRichard Henderson gen_store_fpr64(ctx, fp, B11_8); 1698fcf5ef2aSThomas Huth } 1699fcf5ef2aSThomas Huth return; 1700fcf5ef2aSThomas Huth case 0xf0bd: /* fcnvds DRn,FPUL */ 1701fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1702fcf5ef2aSThomas Huth { 1703fcf5ef2aSThomas Huth TCGv_i64 fp = tcg_temp_new_i64(); 17041e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp, B11_8); 1705fcf5ef2aSThomas Huth gen_helper_fcnvds_DT_FT(cpu_fpul, cpu_env, fp); 1706fcf5ef2aSThomas Huth } 1707fcf5ef2aSThomas Huth return; 1708fcf5ef2aSThomas Huth case 0xf0ed: /* fipr FVm,FVn */ 1709fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17107e9f7ca8SRichard Henderson CHECK_FPSCR_PR_1 17117e9f7ca8SRichard Henderson { 1712950b91beSRichard Henderson TCGv m = tcg_constant_i32((ctx->opcode >> 8) & 3); 1713950b91beSRichard Henderson TCGv n = tcg_constant_i32((ctx->opcode >> 10) & 3); 1714fcf5ef2aSThomas Huth gen_helper_fipr(cpu_env, m, n); 1715fcf5ef2aSThomas Huth return; 1716fcf5ef2aSThomas Huth } 1717fcf5ef2aSThomas Huth break; 1718fcf5ef2aSThomas Huth case 0xf0fd: /* ftrv XMTRX,FVn */ 1719fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17207e9f7ca8SRichard Henderson CHECK_FPSCR_PR_1 17217e9f7ca8SRichard Henderson { 17227e9f7ca8SRichard Henderson if ((ctx->opcode & 0x0300) != 0x0100) { 17237e9f7ca8SRichard Henderson goto do_illegal; 17247e9f7ca8SRichard Henderson } 1725950b91beSRichard Henderson TCGv n = tcg_constant_i32((ctx->opcode >> 10) & 3); 1726fcf5ef2aSThomas Huth gen_helper_ftrv(cpu_env, n); 1727fcf5ef2aSThomas Huth return; 1728fcf5ef2aSThomas Huth } 1729fcf5ef2aSThomas Huth break; 1730fcf5ef2aSThomas Huth } 1731fcf5ef2aSThomas Huth #if 0 1732fcf5ef2aSThomas Huth fprintf(stderr, "unknown instruction 0x%04x at pc 0x%08x\n", 17336f1c2af6SRichard Henderson ctx->opcode, ctx->base.pc_next); 1734fcf5ef2aSThomas Huth fflush(stderr); 1735fcf5ef2aSThomas Huth #endif 17366b98213dSRichard Henderson do_illegal: 1737ab419fd8SRichard Henderson if (ctx->envflags & TB_FLAG_DELAY_SLOT_MASK) { 1738dec16c6eSRichard Henderson do_illegal_slot: 1739dec16c6eSRichard Henderson gen_save_cpu_state(ctx, true); 1740fcf5ef2aSThomas Huth gen_helper_raise_slot_illegal_instruction(cpu_env); 1741fcf5ef2aSThomas Huth } else { 1742dec16c6eSRichard Henderson gen_save_cpu_state(ctx, true); 1743fcf5ef2aSThomas Huth gen_helper_raise_illegal_instruction(cpu_env); 1744fcf5ef2aSThomas Huth } 17456f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 1746dec4f042SRichard Henderson return; 1747dec4f042SRichard Henderson 
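    /*
     * Reached from CHECK_FPU_ENABLED when SR.FD is set: raise the FPU
     * disable exception, using the slot variant when the offending insn
     * sits in a delay slot.
     */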
1748dec4f042SRichard Henderson do_fpu_disabled: 1749dec4f042SRichard Henderson gen_save_cpu_state(ctx, true); 1750ab419fd8SRichard Henderson if (ctx->envflags & TB_FLAG_DELAY_SLOT_MASK) { 1751dec4f042SRichard Henderson gen_helper_raise_slot_fpu_disable(cpu_env); 1752dec4f042SRichard Henderson } else { 1753dec4f042SRichard Henderson gen_helper_raise_fpu_disable(cpu_env); 1754dec4f042SRichard Henderson } 17556f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 1756dec4f042SRichard Henderson return; 1757fcf5ef2aSThomas Huth } 1758fcf5ef2aSThomas Huth 1759fcf5ef2aSThomas Huth static void decode_opc(DisasContext * ctx) 1760fcf5ef2aSThomas Huth { 1761a6215749SAurelien Jarno uint32_t old_flags = ctx->envflags; 1762fcf5ef2aSThomas Huth 1763fcf5ef2aSThomas Huth _decode_opc(ctx); 1764fcf5ef2aSThomas Huth 1765ab419fd8SRichard Henderson if (old_flags & TB_FLAG_DELAY_SLOT_MASK) { 1766fcf5ef2aSThomas Huth /* go out of the delay slot */ 1767ab419fd8SRichard Henderson ctx->envflags &= ~TB_FLAG_DELAY_SLOT_MASK; 17684bfa602bSRichard Henderson 17694bfa602bSRichard Henderson /* When in an exclusive region, we must continue to the end 17704bfa602bSRichard Henderson for conditional branches. */ 1771ab419fd8SRichard Henderson if (ctx->tbflags & TB_FLAG_GUSA_EXCLUSIVE 1772ab419fd8SRichard Henderson && old_flags & TB_FLAG_DELAY_SLOT_COND) { 17734bfa602bSRichard Henderson gen_delayed_conditional_jump(ctx); 17744bfa602bSRichard Henderson return; 17754bfa602bSRichard Henderson } 17764bfa602bSRichard Henderson /* Otherwise this is probably an invalid gUSA region. 17774bfa602bSRichard Henderson Drop the GUSA bits so the next TB doesn't see them. */ 1778ab419fd8SRichard Henderson ctx->envflags &= ~TB_FLAG_GUSA_MASK; 17794bfa602bSRichard Henderson 1780ac9707eaSAurelien Jarno tcg_gen_movi_i32(cpu_flags, ctx->envflags); 1781ab419fd8SRichard Henderson if (old_flags & TB_FLAG_DELAY_SLOT_COND) { 1782fcf5ef2aSThomas Huth gen_delayed_conditional_jump(ctx); 1783be53081aSAurelien Jarno } else { 1784fcf5ef2aSThomas Huth gen_jump(ctx); 1785fcf5ef2aSThomas Huth } 17864bfa602bSRichard Henderson } 17874bfa602bSRichard Henderson } 1788fcf5ef2aSThomas Huth 17894bfa602bSRichard Henderson #ifdef CONFIG_USER_ONLY 17904bfa602bSRichard Henderson /* For uniprocessors, SH4 uses optimistic restartable atomic sequences. 17914bfa602bSRichard Henderson Upon an interrupt, a real kernel would simply notice magic values in 17924bfa602bSRichard Henderson the registers and reset the PC to the start of the sequence. 17934bfa602bSRichard Henderson 17944bfa602bSRichard Henderson For QEMU, we cannot do this in quite the same way. Instead, we notice 17954bfa602bSRichard Henderson the normal start of such a sequence (mov #-x,r15). While we can handle 17964bfa602bSRichard Henderson any sequence via cpu_exec_step_atomic, we can recognize the "normal" 17974bfa602bSRichard Henderson sequences and transform them into atomic operations as seen by the host. 
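
   As an illustration only (register numbers arbitrary, not taken from a
   real binary), an atomic add emitted by gcc/glibc looks roughly like:

       mova   1f,r0          ! r0 = address of the end of the region
       mov    r15,r1         ! save the stack pointer
       mov    #(0f-1f),r15   ! r15 = -(length of the region)
    0: mov.l  @r4,r2         ! load
       add    r5,r2          ! operate
       mov.l  r2,@r4         ! store
    1: mov    r1,r15         ! leave the region

   decode_gusa() below recognizes the load/op/store body between the two
   labels and replaces it with a single host atomic operation.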
17984bfa602bSRichard Henderson */ 1799be0e3d7aSRichard Henderson static void decode_gusa(DisasContext *ctx, CPUSH4State *env) 18004bfa602bSRichard Henderson { 1801d6a6cffdSRichard Henderson uint16_t insns[5]; 1802d6a6cffdSRichard Henderson int ld_adr, ld_dst, ld_mop; 1803d6a6cffdSRichard Henderson int op_dst, op_src, op_opc; 1804d6a6cffdSRichard Henderson int mv_src, mt_dst, st_src, st_mop; 1805d6a6cffdSRichard Henderson TCGv op_arg; 18066f1c2af6SRichard Henderson uint32_t pc = ctx->base.pc_next; 18076f1c2af6SRichard Henderson uint32_t pc_end = ctx->base.tb->cs_base; 18084bfa602bSRichard Henderson int max_insns = (pc_end - pc) / 2; 1809d6a6cffdSRichard Henderson int i; 18104bfa602bSRichard Henderson 1811d6a6cffdSRichard Henderson /* The state machine below will consume only a few insns. 1812d6a6cffdSRichard Henderson If there are more than that in a region, fail now. */ 1813d6a6cffdSRichard Henderson if (max_insns > ARRAY_SIZE(insns)) { 1814d6a6cffdSRichard Henderson goto fail; 1815d6a6cffdSRichard Henderson } 1816d6a6cffdSRichard Henderson 1817d6a6cffdSRichard Henderson /* Read all of the insns for the region. */ 1818d6a6cffdSRichard Henderson for (i = 0; i < max_insns; ++i) { 18194e116893SIlya Leoshkevich insns[i] = translator_lduw(env, &ctx->base, pc + i * 2); 1820d6a6cffdSRichard Henderson } 1821d6a6cffdSRichard Henderson 1822d6a6cffdSRichard Henderson ld_adr = ld_dst = ld_mop = -1; 1823d6a6cffdSRichard Henderson mv_src = -1; 1824d6a6cffdSRichard Henderson op_dst = op_src = op_opc = -1; 1825d6a6cffdSRichard Henderson mt_dst = -1; 1826d6a6cffdSRichard Henderson st_src = st_mop = -1; 1827f764718dSRichard Henderson op_arg = NULL; 1828d6a6cffdSRichard Henderson i = 0; 1829d6a6cffdSRichard Henderson 1830d6a6cffdSRichard Henderson #define NEXT_INSN \ 1831d6a6cffdSRichard Henderson do { if (i >= max_insns) goto fail; ctx->opcode = insns[i++]; } while (0) 1832d6a6cffdSRichard Henderson 1833d6a6cffdSRichard Henderson /* 1834d6a6cffdSRichard Henderson * Expect a load to begin the region. 1835d6a6cffdSRichard Henderson */ 1836d6a6cffdSRichard Henderson NEXT_INSN; 1837d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) { 1838d6a6cffdSRichard Henderson case 0x6000: /* mov.b @Rm,Rn */ 1839d6a6cffdSRichard Henderson ld_mop = MO_SB; 1840d6a6cffdSRichard Henderson break; 1841d6a6cffdSRichard Henderson case 0x6001: /* mov.w @Rm,Rn */ 1842d6a6cffdSRichard Henderson ld_mop = MO_TESW; 1843d6a6cffdSRichard Henderson break; 1844d6a6cffdSRichard Henderson case 0x6002: /* mov.l @Rm,Rn */ 1845d6a6cffdSRichard Henderson ld_mop = MO_TESL; 1846d6a6cffdSRichard Henderson break; 1847d6a6cffdSRichard Henderson default: 1848d6a6cffdSRichard Henderson goto fail; 1849d6a6cffdSRichard Henderson } 1850d6a6cffdSRichard Henderson ld_adr = B7_4; 1851d6a6cffdSRichard Henderson ld_dst = B11_8; 1852d6a6cffdSRichard Henderson if (ld_adr == ld_dst) { 1853d6a6cffdSRichard Henderson goto fail; 1854d6a6cffdSRichard Henderson } 1855d6a6cffdSRichard Henderson /* Unless we see a mov, any two-operand operation must use ld_dst. */ 1856d6a6cffdSRichard Henderson op_dst = ld_dst; 1857d6a6cffdSRichard Henderson 1858d6a6cffdSRichard Henderson /* 1859d6a6cffdSRichard Henderson * Expect an optional register move. 
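 * E.g. "mov r2,r3" (illustrative registers) either saves the value just
 * loaded for later consumption, or copies in a second operand so that the
 * loaded register is not clobbered by the operation that follows.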
1860d6a6cffdSRichard Henderson */ 1861d6a6cffdSRichard Henderson NEXT_INSN; 1862d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) { 1863d6a6cffdSRichard Henderson case 0x6003: /* mov Rm,Rn */ 186402b8e735SPhilippe Mathieu-Daudé /* 186523b5d9faSLichang Zhao * Here we want to recognize ld_dst being saved for later consumption, 186602b8e735SPhilippe Mathieu-Daudé * or for another input register being copied so that ld_dst need not 186702b8e735SPhilippe Mathieu-Daudé * be clobbered during the operation. 186802b8e735SPhilippe Mathieu-Daudé */ 1869d6a6cffdSRichard Henderson op_dst = B11_8; 1870d6a6cffdSRichard Henderson mv_src = B7_4; 1871d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 1872d6a6cffdSRichard Henderson /* Overwriting the load output. */ 1873d6a6cffdSRichard Henderson goto fail; 1874d6a6cffdSRichard Henderson } 1875d6a6cffdSRichard Henderson if (mv_src != ld_dst) { 1876d6a6cffdSRichard Henderson /* Copying a new input; constrain op_src to match the load. */ 1877d6a6cffdSRichard Henderson op_src = ld_dst; 1878d6a6cffdSRichard Henderson } 1879d6a6cffdSRichard Henderson break; 1880d6a6cffdSRichard Henderson 1881d6a6cffdSRichard Henderson default: 1882d6a6cffdSRichard Henderson /* Put back and re-examine as operation. */ 1883d6a6cffdSRichard Henderson --i; 1884d6a6cffdSRichard Henderson } 1885d6a6cffdSRichard Henderson 1886d6a6cffdSRichard Henderson /* 1887d6a6cffdSRichard Henderson * Expect the operation. 1888d6a6cffdSRichard Henderson */ 1889d6a6cffdSRichard Henderson NEXT_INSN; 1890d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) { 1891d6a6cffdSRichard Henderson case 0x300c: /* add Rm,Rn */ 1892d6a6cffdSRichard Henderson op_opc = INDEX_op_add_i32; 1893d6a6cffdSRichard Henderson goto do_reg_op; 1894d6a6cffdSRichard Henderson case 0x2009: /* and Rm,Rn */ 1895d6a6cffdSRichard Henderson op_opc = INDEX_op_and_i32; 1896d6a6cffdSRichard Henderson goto do_reg_op; 1897d6a6cffdSRichard Henderson case 0x200a: /* xor Rm,Rn */ 1898d6a6cffdSRichard Henderson op_opc = INDEX_op_xor_i32; 1899d6a6cffdSRichard Henderson goto do_reg_op; 1900d6a6cffdSRichard Henderson case 0x200b: /* or Rm,Rn */ 1901d6a6cffdSRichard Henderson op_opc = INDEX_op_or_i32; 1902d6a6cffdSRichard Henderson do_reg_op: 1903d6a6cffdSRichard Henderson /* The operation register should be as expected, and the 1904d6a6cffdSRichard Henderson other input cannot depend on the load. */ 1905d6a6cffdSRichard Henderson if (op_dst != B11_8) { 1906d6a6cffdSRichard Henderson goto fail; 1907d6a6cffdSRichard Henderson } 1908d6a6cffdSRichard Henderson if (op_src < 0) { 1909d6a6cffdSRichard Henderson /* Unconstrainted input. */ 1910d6a6cffdSRichard Henderson op_src = B7_4; 1911d6a6cffdSRichard Henderson } else if (op_src == B7_4) { 1912d6a6cffdSRichard Henderson /* Constrained input matched load. All operations are 1913d6a6cffdSRichard Henderson commutative; "swap" them by "moving" the load output 1914d6a6cffdSRichard Henderson to the (implicit) first argument and the move source 1915d6a6cffdSRichard Henderson to the (explicit) second argument. 
*/ 1916d6a6cffdSRichard Henderson op_src = mv_src; 1917d6a6cffdSRichard Henderson } else { 1918d6a6cffdSRichard Henderson goto fail; 1919d6a6cffdSRichard Henderson } 1920d6a6cffdSRichard Henderson op_arg = REG(op_src); 1921d6a6cffdSRichard Henderson break; 1922d6a6cffdSRichard Henderson 1923d6a6cffdSRichard Henderson case 0x6007: /* not Rm,Rn */ 1924d6a6cffdSRichard Henderson if (ld_dst != B7_4 || mv_src >= 0) { 1925d6a6cffdSRichard Henderson goto fail; 1926d6a6cffdSRichard Henderson } 1927d6a6cffdSRichard Henderson op_dst = B11_8; 1928d6a6cffdSRichard Henderson op_opc = INDEX_op_xor_i32; 1929950b91beSRichard Henderson op_arg = tcg_constant_i32(-1); 1930d6a6cffdSRichard Henderson break; 1931d6a6cffdSRichard Henderson 1932d6a6cffdSRichard Henderson case 0x7000 ... 0x700f: /* add #imm,Rn */ 1933d6a6cffdSRichard Henderson if (op_dst != B11_8 || mv_src >= 0) { 1934d6a6cffdSRichard Henderson goto fail; 1935d6a6cffdSRichard Henderson } 1936d6a6cffdSRichard Henderson op_opc = INDEX_op_add_i32; 1937950b91beSRichard Henderson op_arg = tcg_constant_i32(B7_0s); 1938d6a6cffdSRichard Henderson break; 1939d6a6cffdSRichard Henderson 1940d6a6cffdSRichard Henderson case 0x3000: /* cmp/eq Rm,Rn */ 1941d6a6cffdSRichard Henderson /* Looking for the middle of a compare-and-swap sequence, 1942d6a6cffdSRichard Henderson beginning with the compare. Operands can be either order, 1943d6a6cffdSRichard Henderson but with only one overlapping the load. */ 1944d6a6cffdSRichard Henderson if ((ld_dst == B11_8) + (ld_dst == B7_4) != 1 || mv_src >= 0) { 1945d6a6cffdSRichard Henderson goto fail; 1946d6a6cffdSRichard Henderson } 1947d6a6cffdSRichard Henderson op_opc = INDEX_op_setcond_i32; /* placeholder */ 1948d6a6cffdSRichard Henderson op_src = (ld_dst == B11_8 ? B7_4 : B11_8); 1949d6a6cffdSRichard Henderson op_arg = REG(op_src); 1950d6a6cffdSRichard Henderson 1951d6a6cffdSRichard Henderson NEXT_INSN; 1952d6a6cffdSRichard Henderson switch (ctx->opcode & 0xff00) { 1953d6a6cffdSRichard Henderson case 0x8b00: /* bf label */ 1954d6a6cffdSRichard Henderson case 0x8f00: /* bf/s label */ 1955d6a6cffdSRichard Henderson if (pc + (i + 1 + B7_0s) * 2 != pc_end) { 1956d6a6cffdSRichard Henderson goto fail; 1957d6a6cffdSRichard Henderson } 1958d6a6cffdSRichard Henderson if ((ctx->opcode & 0xff00) == 0x8b00) { /* bf label */ 1959d6a6cffdSRichard Henderson break; 1960d6a6cffdSRichard Henderson } 1961d6a6cffdSRichard Henderson /* We're looking to unconditionally modify Rn with the 1962d6a6cffdSRichard Henderson result of the comparison, within the delay slot of 1963d6a6cffdSRichard Henderson the branch. This is used by older gcc. */ 1964d6a6cffdSRichard Henderson NEXT_INSN; 1965d6a6cffdSRichard Henderson if ((ctx->opcode & 0xf0ff) == 0x0029) { /* movt Rn */ 1966d6a6cffdSRichard Henderson mt_dst = B11_8; 1967d6a6cffdSRichard Henderson } else { 1968d6a6cffdSRichard Henderson goto fail; 1969d6a6cffdSRichard Henderson } 1970d6a6cffdSRichard Henderson break; 1971d6a6cffdSRichard Henderson 1972d6a6cffdSRichard Henderson default: 1973d6a6cffdSRichard Henderson goto fail; 1974d6a6cffdSRichard Henderson } 1975d6a6cffdSRichard Henderson break; 1976d6a6cffdSRichard Henderson 1977d6a6cffdSRichard Henderson case 0x2008: /* tst Rm,Rn */ 1978d6a6cffdSRichard Henderson /* Looking for a compare-and-swap against zero. 
*/ 1979d6a6cffdSRichard Henderson if (ld_dst != B11_8 || ld_dst != B7_4 || mv_src >= 0) { 1980d6a6cffdSRichard Henderson goto fail; 1981d6a6cffdSRichard Henderson } 1982d6a6cffdSRichard Henderson op_opc = INDEX_op_setcond_i32; 1983950b91beSRichard Henderson op_arg = tcg_constant_i32(0); 1984d6a6cffdSRichard Henderson 1985d6a6cffdSRichard Henderson NEXT_INSN; 1986d6a6cffdSRichard Henderson if ((ctx->opcode & 0xff00) != 0x8900 /* bt label */ 1987d6a6cffdSRichard Henderson || pc + (i + 1 + B7_0s) * 2 != pc_end) { 1988d6a6cffdSRichard Henderson goto fail; 1989d6a6cffdSRichard Henderson } 1990d6a6cffdSRichard Henderson break; 1991d6a6cffdSRichard Henderson 1992d6a6cffdSRichard Henderson default: 1993d6a6cffdSRichard Henderson /* Put back and re-examine as store. */ 1994d6a6cffdSRichard Henderson --i; 1995d6a6cffdSRichard Henderson } 1996d6a6cffdSRichard Henderson 1997d6a6cffdSRichard Henderson /* 1998d6a6cffdSRichard Henderson * Expect the store. 1999d6a6cffdSRichard Henderson */ 2000d6a6cffdSRichard Henderson /* The store must be the last insn. */ 2001d6a6cffdSRichard Henderson if (i != max_insns - 1) { 2002d6a6cffdSRichard Henderson goto fail; 2003d6a6cffdSRichard Henderson } 2004d6a6cffdSRichard Henderson NEXT_INSN; 2005d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) { 2006d6a6cffdSRichard Henderson case 0x2000: /* mov.b Rm,@Rn */ 2007d6a6cffdSRichard Henderson st_mop = MO_UB; 2008d6a6cffdSRichard Henderson break; 2009d6a6cffdSRichard Henderson case 0x2001: /* mov.w Rm,@Rn */ 2010d6a6cffdSRichard Henderson st_mop = MO_UW; 2011d6a6cffdSRichard Henderson break; 2012d6a6cffdSRichard Henderson case 0x2002: /* mov.l Rm,@Rn */ 2013d6a6cffdSRichard Henderson st_mop = MO_UL; 2014d6a6cffdSRichard Henderson break; 2015d6a6cffdSRichard Henderson default: 2016d6a6cffdSRichard Henderson goto fail; 2017d6a6cffdSRichard Henderson } 2018d6a6cffdSRichard Henderson /* The store must match the load. */ 2019d6a6cffdSRichard Henderson if (ld_adr != B11_8 || st_mop != (ld_mop & MO_SIZE)) { 2020d6a6cffdSRichard Henderson goto fail; 2021d6a6cffdSRichard Henderson } 2022d6a6cffdSRichard Henderson st_src = B7_4; 2023d6a6cffdSRichard Henderson 2024d6a6cffdSRichard Henderson #undef NEXT_INSN 2025d6a6cffdSRichard Henderson 2026d6a6cffdSRichard Henderson /* 2027d6a6cffdSRichard Henderson * Emit the operation. 2028d6a6cffdSRichard Henderson */ 2029d6a6cffdSRichard Henderson switch (op_opc) { 2030d6a6cffdSRichard Henderson case -1: 2031d6a6cffdSRichard Henderson /* No operation found. Look for exchange pattern. 
*/ 2032d6a6cffdSRichard Henderson if (st_src == ld_dst || mv_src >= 0) { 2033d6a6cffdSRichard Henderson goto fail; 2034d6a6cffdSRichard Henderson } 2035d6a6cffdSRichard Henderson tcg_gen_atomic_xchg_i32(REG(ld_dst), REG(ld_adr), REG(st_src), 2036d6a6cffdSRichard Henderson ctx->memidx, ld_mop); 2037d6a6cffdSRichard Henderson break; 2038d6a6cffdSRichard Henderson 2039d6a6cffdSRichard Henderson case INDEX_op_add_i32: 2040d6a6cffdSRichard Henderson if (op_dst != st_src) { 2041d6a6cffdSRichard Henderson goto fail; 2042d6a6cffdSRichard Henderson } 2043d6a6cffdSRichard Henderson if (op_dst == ld_dst && st_mop == MO_UL) { 2044d6a6cffdSRichard Henderson tcg_gen_atomic_add_fetch_i32(REG(ld_dst), REG(ld_adr), 2045d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2046d6a6cffdSRichard Henderson } else { 2047d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_add_i32(REG(ld_dst), REG(ld_adr), 2048d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2049d6a6cffdSRichard Henderson if (op_dst != ld_dst) { 2050d6a6cffdSRichard Henderson /* Note that mop sizes < 4 cannot use add_fetch 2051d6a6cffdSRichard Henderson because it won't carry into the higher bits. */ 2052d6a6cffdSRichard Henderson tcg_gen_add_i32(REG(op_dst), REG(ld_dst), op_arg); 2053d6a6cffdSRichard Henderson } 2054d6a6cffdSRichard Henderson } 2055d6a6cffdSRichard Henderson break; 2056d6a6cffdSRichard Henderson 2057d6a6cffdSRichard Henderson case INDEX_op_and_i32: 2058d6a6cffdSRichard Henderson if (op_dst != st_src) { 2059d6a6cffdSRichard Henderson goto fail; 2060d6a6cffdSRichard Henderson } 2061d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 2062d6a6cffdSRichard Henderson tcg_gen_atomic_and_fetch_i32(REG(ld_dst), REG(ld_adr), 2063d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2064d6a6cffdSRichard Henderson } else { 2065d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_and_i32(REG(ld_dst), REG(ld_adr), 2066d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2067d6a6cffdSRichard Henderson tcg_gen_and_i32(REG(op_dst), REG(ld_dst), op_arg); 2068d6a6cffdSRichard Henderson } 2069d6a6cffdSRichard Henderson break; 2070d6a6cffdSRichard Henderson 2071d6a6cffdSRichard Henderson case INDEX_op_or_i32: 2072d6a6cffdSRichard Henderson if (op_dst != st_src) { 2073d6a6cffdSRichard Henderson goto fail; 2074d6a6cffdSRichard Henderson } 2075d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 2076d6a6cffdSRichard Henderson tcg_gen_atomic_or_fetch_i32(REG(ld_dst), REG(ld_adr), 2077d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2078d6a6cffdSRichard Henderson } else { 2079d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_or_i32(REG(ld_dst), REG(ld_adr), 2080d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2081d6a6cffdSRichard Henderson tcg_gen_or_i32(REG(op_dst), REG(ld_dst), op_arg); 2082d6a6cffdSRichard Henderson } 2083d6a6cffdSRichard Henderson break; 2084d6a6cffdSRichard Henderson 2085d6a6cffdSRichard Henderson case INDEX_op_xor_i32: 2086d6a6cffdSRichard Henderson if (op_dst != st_src) { 2087d6a6cffdSRichard Henderson goto fail; 2088d6a6cffdSRichard Henderson } 2089d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 2090d6a6cffdSRichard Henderson tcg_gen_atomic_xor_fetch_i32(REG(ld_dst), REG(ld_adr), 2091d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2092d6a6cffdSRichard Henderson } else { 2093d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_xor_i32(REG(ld_dst), REG(ld_adr), 2094d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2095d6a6cffdSRichard Henderson tcg_gen_xor_i32(REG(op_dst), REG(ld_dst), op_arg); 
2096d6a6cffdSRichard Henderson } 2097d6a6cffdSRichard Henderson break; 2098d6a6cffdSRichard Henderson 2099d6a6cffdSRichard Henderson case INDEX_op_setcond_i32: 2100d6a6cffdSRichard Henderson if (st_src == ld_dst) { 2101d6a6cffdSRichard Henderson goto fail; 2102d6a6cffdSRichard Henderson } 2103d6a6cffdSRichard Henderson tcg_gen_atomic_cmpxchg_i32(REG(ld_dst), REG(ld_adr), op_arg, 2104d6a6cffdSRichard Henderson REG(st_src), ctx->memidx, ld_mop); 2105d6a6cffdSRichard Henderson tcg_gen_setcond_i32(TCG_COND_EQ, cpu_sr_t, REG(ld_dst), op_arg); 2106d6a6cffdSRichard Henderson if (mt_dst >= 0) { 2107d6a6cffdSRichard Henderson tcg_gen_mov_i32(REG(mt_dst), cpu_sr_t); 2108d6a6cffdSRichard Henderson } 2109d6a6cffdSRichard Henderson break; 2110d6a6cffdSRichard Henderson 2111d6a6cffdSRichard Henderson default: 2112d6a6cffdSRichard Henderson g_assert_not_reached(); 2113d6a6cffdSRichard Henderson } 2114d6a6cffdSRichard Henderson 2115d6a6cffdSRichard Henderson /* The entire region has been translated. */ 2116ab419fd8SRichard Henderson ctx->envflags &= ~TB_FLAG_GUSA_MASK; 21176f1c2af6SRichard Henderson ctx->base.pc_next = pc_end; 2118be0e3d7aSRichard Henderson ctx->base.num_insns += max_insns - 1; 2119be0e3d7aSRichard Henderson return; 2120d6a6cffdSRichard Henderson 2121d6a6cffdSRichard Henderson fail: 21224bfa602bSRichard Henderson qemu_log_mask(LOG_UNIMP, "Unrecognized gUSA sequence %08x-%08x\n", 21234bfa602bSRichard Henderson pc, pc_end); 21244bfa602bSRichard Henderson 21254bfa602bSRichard Henderson /* Restart with the EXCLUSIVE bit set, within a TB run via 21264bfa602bSRichard Henderson cpu_exec_step_atomic holding the exclusive lock. */ 2127ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_GUSA_EXCLUSIVE; 21284bfa602bSRichard Henderson gen_save_cpu_state(ctx, false); 21294bfa602bSRichard Henderson gen_helper_exclusive(cpu_env); 21306f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 21314bfa602bSRichard Henderson 21324bfa602bSRichard Henderson /* We're not executing an instruction, but we must report one for the 21334bfa602bSRichard Henderson purposes of accounting within the TB. We might as well report the 21346f1c2af6SRichard Henderson entire region consumed via ctx->base.pc_next so that it's immediately 21356f1c2af6SRichard Henderson available in the disassembly dump. */ 21366f1c2af6SRichard Henderson ctx->base.pc_next = pc_end; 2137be0e3d7aSRichard Henderson ctx->base.num_insns += max_insns - 1; 21384bfa602bSRichard Henderson } 21394bfa602bSRichard Henderson #endif 21404bfa602bSRichard Henderson 2141fd1b3d38SEmilio G. Cota static void sh4_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs) 2142fcf5ef2aSThomas Huth { 2143fd1b3d38SEmilio G. Cota DisasContext *ctx = container_of(dcbase, DisasContext, base); 21449c489ea6SLluís Vilanova CPUSH4State *env = cs->env_ptr; 2145be0e3d7aSRichard Henderson uint32_t tbflags; 2146fd1b3d38SEmilio G. Cota int bound; 2147fcf5ef2aSThomas Huth 2148be0e3d7aSRichard Henderson ctx->tbflags = tbflags = ctx->base.tb->flags; 2149be0e3d7aSRichard Henderson ctx->envflags = tbflags & TB_FLAG_ENVFLAGS_MASK; 2150be0e3d7aSRichard Henderson ctx->memidx = (tbflags & (1u << SR_MD)) == 0 ? 1 : 0; 2151fcf5ef2aSThomas Huth /* We don't know if the delayed pc came from a dynamic or static branch, 2152fcf5ef2aSThomas Huth so assume it is a dynamic branch. */ 2153fd1b3d38SEmilio G. Cota ctx->delayed_pc = -1; /* use delayed pc from env pointer */ 2154fd1b3d38SEmilio G. 
Cota ctx->features = env->features; 2155be0e3d7aSRichard Henderson ctx->has_movcal = (tbflags & TB_FLAG_PENDING_MOVCA); 2156be0e3d7aSRichard Henderson ctx->gbank = ((tbflags & (1 << SR_MD)) && 2157be0e3d7aSRichard Henderson (tbflags & (1 << SR_RB))) * 0x10; 2158be0e3d7aSRichard Henderson ctx->fbank = tbflags & FPSCR_FR ? 0x10 : 0; 2159be0e3d7aSRichard Henderson 2160ab419fd8SRichard Henderson #ifdef CONFIG_USER_ONLY 2161ab419fd8SRichard Henderson if (tbflags & TB_FLAG_GUSA_MASK) { 2162ab419fd8SRichard Henderson /* In gUSA exclusive region. */ 2163be0e3d7aSRichard Henderson uint32_t pc = ctx->base.pc_next; 2164be0e3d7aSRichard Henderson uint32_t pc_end = ctx->base.tb->cs_base; 2165ab419fd8SRichard Henderson int backup = sextract32(ctx->tbflags, TB_FLAG_GUSA_SHIFT, 8); 2166be0e3d7aSRichard Henderson int max_insns = (pc_end - pc) / 2; 2167be0e3d7aSRichard Henderson 2168be0e3d7aSRichard Henderson if (pc != pc_end + backup || max_insns < 2) { 2169be0e3d7aSRichard Henderson /* This is a malformed gUSA region. Don't do anything special, 2170be0e3d7aSRichard Henderson since the interpreter is likely to get confused. */ 2171ab419fd8SRichard Henderson ctx->envflags &= ~TB_FLAG_GUSA_MASK; 2172ab419fd8SRichard Henderson } else if (tbflags & TB_FLAG_GUSA_EXCLUSIVE) { 2173be0e3d7aSRichard Henderson /* Regardless of single-stepping or the end of the page, 2174be0e3d7aSRichard Henderson we must complete execution of the gUSA region while 2175be0e3d7aSRichard Henderson holding the exclusive lock. */ 2176be0e3d7aSRichard Henderson ctx->base.max_insns = max_insns; 2177be0e3d7aSRichard Henderson return; 2178be0e3d7aSRichard Henderson } 2179be0e3d7aSRichard Henderson } 2180ab419fd8SRichard Henderson #endif 21814448a836SRichard Henderson 21824448a836SRichard Henderson /* Since the ISA is fixed-width, we can bound by the number 21834448a836SRichard Henderson of instructions remaining on the page. */ 2184fd1b3d38SEmilio G. Cota bound = -(ctx->base.pc_next | TARGET_PAGE_MASK) / 2; 2185fd1b3d38SEmilio G. Cota ctx->base.max_insns = MIN(ctx->base.max_insns, bound); 2186fcf5ef2aSThomas Huth } 2187fcf5ef2aSThomas Huth 2188fd1b3d38SEmilio G. Cota static void sh4_tr_tb_start(DisasContextBase *dcbase, CPUState *cs) 2189fd1b3d38SEmilio G. Cota { 2190fd1b3d38SEmilio G. Cota } 21914bfa602bSRichard Henderson 2192fd1b3d38SEmilio G. Cota static void sh4_tr_insn_start(DisasContextBase *dcbase, CPUState *cs) 2193fd1b3d38SEmilio G. Cota { 2194fd1b3d38SEmilio G. Cota DisasContext *ctx = container_of(dcbase, DisasContext, base); 2195fcf5ef2aSThomas Huth 2196fd1b3d38SEmilio G. Cota tcg_gen_insn_start(ctx->base.pc_next, ctx->envflags); 2197fd1b3d38SEmilio G. Cota } 2198fd1b3d38SEmilio G. Cota 2199fd1b3d38SEmilio G. Cota static void sh4_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs) 2200fd1b3d38SEmilio G. Cota { 2201fd1b3d38SEmilio G. Cota CPUSH4State *env = cs->env_ptr; 2202fd1b3d38SEmilio G. Cota DisasContext *ctx = container_of(dcbase, DisasContext, base); 2203fd1b3d38SEmilio G. Cota 2204be0e3d7aSRichard Henderson #ifdef CONFIG_USER_ONLY 2205ab419fd8SRichard Henderson if (unlikely(ctx->envflags & TB_FLAG_GUSA_MASK) 2206ab419fd8SRichard Henderson && !(ctx->envflags & TB_FLAG_GUSA_EXCLUSIVE)) { 2207be0e3d7aSRichard Henderson /* We're in an gUSA region, and we have not already fallen 2208be0e3d7aSRichard Henderson back on using an exclusive region. Attempt to parse the 2209be0e3d7aSRichard Henderson region into a single supported atomic operation. 
Failure 2210be0e3d7aSRichard Henderson is handled within the parser by raising an exception to 2211be0e3d7aSRichard Henderson retry using an exclusive region. */ 2212be0e3d7aSRichard Henderson decode_gusa(ctx, env); 2213be0e3d7aSRichard Henderson return; 2214be0e3d7aSRichard Henderson } 2215be0e3d7aSRichard Henderson #endif 2216be0e3d7aSRichard Henderson 22174e116893SIlya Leoshkevich ctx->opcode = translator_lduw(env, &ctx->base, ctx->base.pc_next); 2218fd1b3d38SEmilio G. Cota decode_opc(ctx); 2219fd1b3d38SEmilio G. Cota ctx->base.pc_next += 2; 2220fcf5ef2aSThomas Huth } 2221fcf5ef2aSThomas Huth 2222fd1b3d38SEmilio G. Cota static void sh4_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs) 2223fd1b3d38SEmilio G. Cota { 2224fd1b3d38SEmilio G. Cota DisasContext *ctx = container_of(dcbase, DisasContext, base); 22254bfa602bSRichard Henderson 2226ab419fd8SRichard Henderson if (ctx->tbflags & TB_FLAG_GUSA_EXCLUSIVE) { 22274bfa602bSRichard Henderson /* Ending the region of exclusivity. Clear the bits. */ 2228ab419fd8SRichard Henderson ctx->envflags &= ~TB_FLAG_GUSA_MASK; 22294bfa602bSRichard Henderson } 22304bfa602bSRichard Henderson 2231fd1b3d38SEmilio G. Cota switch (ctx->base.is_jmp) { 22324834871bSRichard Henderson case DISAS_STOP: 2233fd1b3d38SEmilio G. Cota gen_save_cpu_state(ctx, true); 223407ea28b4SRichard Henderson tcg_gen_exit_tb(NULL, 0); 22350fc37a8bSAurelien Jarno break; 22364834871bSRichard Henderson case DISAS_NEXT: 2237fd1b3d38SEmilio G. Cota case DISAS_TOO_MANY: 2238fd1b3d38SEmilio G. Cota gen_save_cpu_state(ctx, false); 2239fd1b3d38SEmilio G. Cota gen_goto_tb(ctx, 0, ctx->base.pc_next); 2240fcf5ef2aSThomas Huth break; 22414834871bSRichard Henderson case DISAS_NORETURN: 2242fcf5ef2aSThomas Huth break; 22434834871bSRichard Henderson default: 22444834871bSRichard Henderson g_assert_not_reached(); 2245fcf5ef2aSThomas Huth } 2246fcf5ef2aSThomas Huth } 2247fd1b3d38SEmilio G. Cota 22488eb806a7SRichard Henderson static void sh4_tr_disas_log(const DisasContextBase *dcbase, 22498eb806a7SRichard Henderson CPUState *cs, FILE *logfile) 2250fd1b3d38SEmilio G. Cota { 22518eb806a7SRichard Henderson fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first)); 22528eb806a7SRichard Henderson target_disas(logfile, cs, dcbase->pc_first, dcbase->tb->size); 2253fd1b3d38SEmilio G. Cota } 2254fd1b3d38SEmilio G. Cota 2255fd1b3d38SEmilio G. Cota static const TranslatorOps sh4_tr_ops = { 2256fd1b3d38SEmilio G. Cota .init_disas_context = sh4_tr_init_disas_context, 2257fd1b3d38SEmilio G. Cota .tb_start = sh4_tr_tb_start, 2258fd1b3d38SEmilio G. Cota .insn_start = sh4_tr_insn_start, 2259fd1b3d38SEmilio G. Cota .translate_insn = sh4_tr_translate_insn, 2260fd1b3d38SEmilio G. Cota .tb_stop = sh4_tr_tb_stop, 2261fd1b3d38SEmilio G. Cota .disas_log = sh4_tr_disas_log, 2262fd1b3d38SEmilio G. Cota }; 2263fd1b3d38SEmilio G. Cota 2264597f9b2dSRichard Henderson void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns, 2265306c8721SRichard Henderson target_ulong pc, void *host_pc) 2266fd1b3d38SEmilio G. Cota { 2267fd1b3d38SEmilio G. Cota DisasContext ctx; 2268fd1b3d38SEmilio G. Cota 2269306c8721SRichard Henderson translator_loop(cs, tb, max_insns, pc, host_pc, &sh4_tr_ops, &ctx.base); 2270fcf5ef2aSThomas Huth } 2271