1fcf5ef2aSThomas Huth /* 2fcf5ef2aSThomas Huth * SH4 translation 3fcf5ef2aSThomas Huth * 4fcf5ef2aSThomas Huth * Copyright (c) 2005 Samuel Tardieu 5fcf5ef2aSThomas Huth * 6fcf5ef2aSThomas Huth * This library is free software; you can redistribute it and/or 7fcf5ef2aSThomas Huth * modify it under the terms of the GNU Lesser General Public 8fcf5ef2aSThomas Huth * License as published by the Free Software Foundation; either 96faf2b6cSThomas Huth * version 2.1 of the License, or (at your option) any later version. 10fcf5ef2aSThomas Huth * 11fcf5ef2aSThomas Huth * This library is distributed in the hope that it will be useful, 12fcf5ef2aSThomas Huth * but WITHOUT ANY WARRANTY; without even the implied warranty of 13fcf5ef2aSThomas Huth * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 14fcf5ef2aSThomas Huth * Lesser General Public License for more details. 15fcf5ef2aSThomas Huth * 16fcf5ef2aSThomas Huth * You should have received a copy of the GNU Lesser General Public 17fcf5ef2aSThomas Huth * License along with this library; if not, see <http://www.gnu.org/licenses/>. 18fcf5ef2aSThomas Huth */ 19fcf5ef2aSThomas Huth 20fcf5ef2aSThomas Huth #define DEBUG_DISAS 21fcf5ef2aSThomas Huth 22fcf5ef2aSThomas Huth #include "qemu/osdep.h" 23fcf5ef2aSThomas Huth #include "cpu.h" 24fcf5ef2aSThomas Huth #include "disas/disas.h" 25fcf5ef2aSThomas Huth #include "exec/exec-all.h" 26dcb32f1dSPhilippe Mathieu-Daudé #include "tcg/tcg-op.h" 27fcf5ef2aSThomas Huth #include "exec/cpu_ldst.h" 28fcf5ef2aSThomas Huth #include "exec/helper-proto.h" 29fcf5ef2aSThomas Huth #include "exec/helper-gen.h" 304834871bSRichard Henderson #include "exec/translator.h" 31fcf5ef2aSThomas Huth #include "exec/log.h" 3290c84c56SMarkus Armbruster #include "qemu/qemu-print.h" 33fcf5ef2aSThomas Huth 34fcf5ef2aSThomas Huth 35fcf5ef2aSThomas Huth typedef struct DisasContext { 366f1c2af6SRichard Henderson DisasContextBase base; 376f1c2af6SRichard Henderson 38a6215749SAurelien Jarno uint32_t tbflags; /* should stay unmodified during the TB translation */ 39a6215749SAurelien Jarno uint32_t envflags; /* should stay in sync with env->flags using TCG ops */ 40fcf5ef2aSThomas Huth int memidx; 413a3bb8d2SRichard Henderson int gbank; 425c13bad9SRichard Henderson int fbank; 43fcf5ef2aSThomas Huth uint32_t delayed_pc; 44fcf5ef2aSThomas Huth uint32_t features; 456f1c2af6SRichard Henderson 466f1c2af6SRichard Henderson uint16_t opcode; 476f1c2af6SRichard Henderson 486f1c2af6SRichard Henderson bool has_movcal; 49fcf5ef2aSThomas Huth } DisasContext; 50fcf5ef2aSThomas Huth 51fcf5ef2aSThomas Huth #if defined(CONFIG_USER_ONLY) 52fcf5ef2aSThomas Huth #define IS_USER(ctx) 1 534da06fb3SRichard Henderson #define UNALIGN(C) (ctx->tbflags & TB_FLAG_UNALIGN ? MO_UNALN : MO_ALIGN) 54fcf5ef2aSThomas Huth #else 55a6215749SAurelien Jarno #define IS_USER(ctx) (!(ctx->tbflags & (1u << SR_MD))) 564da06fb3SRichard Henderson #define UNALIGN(C) 0 57fcf5ef2aSThomas Huth #endif 58fcf5ef2aSThomas Huth 596f1c2af6SRichard Henderson /* Target-specific values for ctx->base.is_jmp. */ 604834871bSRichard Henderson /* We want to exit back to the cpu loop for some reason. 614834871bSRichard Henderson Usually this is to recognize interrupts immediately. 
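   It is also what rte and the FPSCR-changing insns (frchg, fschg, fpchg) below use, since the tb flags the following code must be translated with have changed.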
*/ 624834871bSRichard Henderson #define DISAS_STOP DISAS_TARGET_0 63fcf5ef2aSThomas Huth 64fcf5ef2aSThomas Huth /* global register indexes */ 653a3bb8d2SRichard Henderson static TCGv cpu_gregs[32]; 66fcf5ef2aSThomas Huth static TCGv cpu_sr, cpu_sr_m, cpu_sr_q, cpu_sr_t; 67fcf5ef2aSThomas Huth static TCGv cpu_pc, cpu_ssr, cpu_spc, cpu_gbr; 68fcf5ef2aSThomas Huth static TCGv cpu_vbr, cpu_sgr, cpu_dbr, cpu_mach, cpu_macl; 69f85da308SRichard Henderson static TCGv cpu_pr, cpu_fpscr, cpu_fpul; 70f85da308SRichard Henderson static TCGv cpu_lock_addr, cpu_lock_value; 71fcf5ef2aSThomas Huth static TCGv cpu_fregs[32]; 72fcf5ef2aSThomas Huth 73fcf5ef2aSThomas Huth /* internal register indexes */ 7447b9f4d5SAurelien Jarno static TCGv cpu_flags, cpu_delayed_pc, cpu_delayed_cond; 75fcf5ef2aSThomas Huth 76fcf5ef2aSThomas Huth #include "exec/gen-icount.h" 77fcf5ef2aSThomas Huth 78fcf5ef2aSThomas Huth void sh4_translate_init(void) 79fcf5ef2aSThomas Huth { 80fcf5ef2aSThomas Huth int i; 81fcf5ef2aSThomas Huth static const char * const gregnames[24] = { 82fcf5ef2aSThomas Huth "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0", 83fcf5ef2aSThomas Huth "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0", 84fcf5ef2aSThomas Huth "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15", 85fcf5ef2aSThomas Huth "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1", 86fcf5ef2aSThomas Huth "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1" 87fcf5ef2aSThomas Huth }; 88fcf5ef2aSThomas Huth static const char * const fregnames[32] = { 89fcf5ef2aSThomas Huth "FPR0_BANK0", "FPR1_BANK0", "FPR2_BANK0", "FPR3_BANK0", 90fcf5ef2aSThomas Huth "FPR4_BANK0", "FPR5_BANK0", "FPR6_BANK0", "FPR7_BANK0", 91fcf5ef2aSThomas Huth "FPR8_BANK0", "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0", 92fcf5ef2aSThomas Huth "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0", 93fcf5ef2aSThomas Huth "FPR0_BANK1", "FPR1_BANK1", "FPR2_BANK1", "FPR3_BANK1", 94fcf5ef2aSThomas Huth "FPR4_BANK1", "FPR5_BANK1", "FPR6_BANK1", "FPR7_BANK1", 95fcf5ef2aSThomas Huth "FPR8_BANK1", "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1", 96fcf5ef2aSThomas Huth "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1", 97fcf5ef2aSThomas Huth }; 98fcf5ef2aSThomas Huth 993a3bb8d2SRichard Henderson for (i = 0; i < 24; i++) { 100fcf5ef2aSThomas Huth cpu_gregs[i] = tcg_global_mem_new_i32(cpu_env, 101fcf5ef2aSThomas Huth offsetof(CPUSH4State, gregs[i]), 102fcf5ef2aSThomas Huth gregnames[i]); 1033a3bb8d2SRichard Henderson } 1043a3bb8d2SRichard Henderson memcpy(cpu_gregs + 24, cpu_gregs + 8, 8 * sizeof(TCGv)); 105fcf5ef2aSThomas Huth 106fcf5ef2aSThomas Huth cpu_pc = tcg_global_mem_new_i32(cpu_env, 107fcf5ef2aSThomas Huth offsetof(CPUSH4State, pc), "PC"); 108fcf5ef2aSThomas Huth cpu_sr = tcg_global_mem_new_i32(cpu_env, 109fcf5ef2aSThomas Huth offsetof(CPUSH4State, sr), "SR"); 110fcf5ef2aSThomas Huth cpu_sr_m = tcg_global_mem_new_i32(cpu_env, 111fcf5ef2aSThomas Huth offsetof(CPUSH4State, sr_m), "SR_M"); 112fcf5ef2aSThomas Huth cpu_sr_q = tcg_global_mem_new_i32(cpu_env, 113fcf5ef2aSThomas Huth offsetof(CPUSH4State, sr_q), "SR_Q"); 114fcf5ef2aSThomas Huth cpu_sr_t = tcg_global_mem_new_i32(cpu_env, 115fcf5ef2aSThomas Huth offsetof(CPUSH4State, sr_t), "SR_T"); 116fcf5ef2aSThomas Huth cpu_ssr = tcg_global_mem_new_i32(cpu_env, 117fcf5ef2aSThomas Huth offsetof(CPUSH4State, ssr), "SSR"); 118fcf5ef2aSThomas Huth cpu_spc = tcg_global_mem_new_i32(cpu_env, 119fcf5ef2aSThomas Huth offsetof(CPUSH4State, spc), "SPC"); 120fcf5ef2aSThomas Huth cpu_gbr = tcg_global_mem_new_i32(cpu_env, 121fcf5ef2aSThomas Huth 
offsetof(CPUSH4State, gbr), "GBR"); 122fcf5ef2aSThomas Huth cpu_vbr = tcg_global_mem_new_i32(cpu_env, 123fcf5ef2aSThomas Huth offsetof(CPUSH4State, vbr), "VBR"); 124fcf5ef2aSThomas Huth cpu_sgr = tcg_global_mem_new_i32(cpu_env, 125fcf5ef2aSThomas Huth offsetof(CPUSH4State, sgr), "SGR"); 126fcf5ef2aSThomas Huth cpu_dbr = tcg_global_mem_new_i32(cpu_env, 127fcf5ef2aSThomas Huth offsetof(CPUSH4State, dbr), "DBR"); 128fcf5ef2aSThomas Huth cpu_mach = tcg_global_mem_new_i32(cpu_env, 129fcf5ef2aSThomas Huth offsetof(CPUSH4State, mach), "MACH"); 130fcf5ef2aSThomas Huth cpu_macl = tcg_global_mem_new_i32(cpu_env, 131fcf5ef2aSThomas Huth offsetof(CPUSH4State, macl), "MACL"); 132fcf5ef2aSThomas Huth cpu_pr = tcg_global_mem_new_i32(cpu_env, 133fcf5ef2aSThomas Huth offsetof(CPUSH4State, pr), "PR"); 134fcf5ef2aSThomas Huth cpu_fpscr = tcg_global_mem_new_i32(cpu_env, 135fcf5ef2aSThomas Huth offsetof(CPUSH4State, fpscr), "FPSCR"); 136fcf5ef2aSThomas Huth cpu_fpul = tcg_global_mem_new_i32(cpu_env, 137fcf5ef2aSThomas Huth offsetof(CPUSH4State, fpul), "FPUL"); 138fcf5ef2aSThomas Huth 139fcf5ef2aSThomas Huth cpu_flags = tcg_global_mem_new_i32(cpu_env, 140fcf5ef2aSThomas Huth offsetof(CPUSH4State, flags), "_flags_"); 141fcf5ef2aSThomas Huth cpu_delayed_pc = tcg_global_mem_new_i32(cpu_env, 142fcf5ef2aSThomas Huth offsetof(CPUSH4State, delayed_pc), 143fcf5ef2aSThomas Huth "_delayed_pc_"); 14447b9f4d5SAurelien Jarno cpu_delayed_cond = tcg_global_mem_new_i32(cpu_env, 14547b9f4d5SAurelien Jarno offsetof(CPUSH4State, 14647b9f4d5SAurelien Jarno delayed_cond), 14747b9f4d5SAurelien Jarno "_delayed_cond_"); 148f85da308SRichard Henderson cpu_lock_addr = tcg_global_mem_new_i32(cpu_env, 149f85da308SRichard Henderson offsetof(CPUSH4State, lock_addr), 150f85da308SRichard Henderson "_lock_addr_"); 151f85da308SRichard Henderson cpu_lock_value = tcg_global_mem_new_i32(cpu_env, 152f85da308SRichard Henderson offsetof(CPUSH4State, lock_value), 153f85da308SRichard Henderson "_lock_value_"); 154fcf5ef2aSThomas Huth 155fcf5ef2aSThomas Huth for (i = 0; i < 32; i++) 156fcf5ef2aSThomas Huth cpu_fregs[i] = tcg_global_mem_new_i32(cpu_env, 157fcf5ef2aSThomas Huth offsetof(CPUSH4State, fregs[i]), 158fcf5ef2aSThomas Huth fregnames[i]); 159fcf5ef2aSThomas Huth } 160fcf5ef2aSThomas Huth 16190c84c56SMarkus Armbruster void superh_cpu_dump_state(CPUState *cs, FILE *f, int flags) 162fcf5ef2aSThomas Huth { 163fcf5ef2aSThomas Huth SuperHCPU *cpu = SUPERH_CPU(cs); 164fcf5ef2aSThomas Huth CPUSH4State *env = &cpu->env; 165fcf5ef2aSThomas Huth int i; 16690c84c56SMarkus Armbruster 16790c84c56SMarkus Armbruster qemu_fprintf(f, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n", 168fcf5ef2aSThomas Huth env->pc, cpu_read_sr(env), env->pr, env->fpscr); 16990c84c56SMarkus Armbruster qemu_fprintf(f, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n", 170fcf5ef2aSThomas Huth env->spc, env->ssr, env->gbr, env->vbr); 17190c84c56SMarkus Armbruster qemu_fprintf(f, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n", 172fcf5ef2aSThomas Huth env->sgr, env->dbr, env->delayed_pc, env->fpul); 173fcf5ef2aSThomas Huth for (i = 0; i < 24; i += 4) { 17490c84c56SMarkus Armbruster qemu_printf("r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n", 175fcf5ef2aSThomas Huth i, env->gregs[i], i + 1, env->gregs[i + 1], 176fcf5ef2aSThomas Huth i + 2, env->gregs[i + 2], i + 3, env->gregs[i + 3]); 177fcf5ef2aSThomas Huth } 178*ab419fd8SRichard Henderson if (env->flags & TB_FLAG_DELAY_SLOT) { 17990c84c56SMarkus Armbruster qemu_printf("in delay slot (delayed_pc=0x%08x)\n", 180fcf5ef2aSThomas Huth 
env->delayed_pc); 181*ab419fd8SRichard Henderson } else if (env->flags & TB_FLAG_DELAY_SLOT_COND) { 18290c84c56SMarkus Armbruster qemu_printf("in conditional delay slot (delayed_pc=0x%08x)\n", 183fcf5ef2aSThomas Huth env->delayed_pc); 184*ab419fd8SRichard Henderson } else if (env->flags & TB_FLAG_DELAY_SLOT_RTE) { 18590c84c56SMarkus Armbruster qemu_fprintf(f, "in rte delay slot (delayed_pc=0x%08x)\n", 186be53081aSAurelien Jarno env->delayed_pc); 187fcf5ef2aSThomas Huth } 188fcf5ef2aSThomas Huth } 189fcf5ef2aSThomas Huth 190fcf5ef2aSThomas Huth static void gen_read_sr(TCGv dst) 191fcf5ef2aSThomas Huth { 192fcf5ef2aSThomas Huth TCGv t0 = tcg_temp_new(); 193fcf5ef2aSThomas Huth tcg_gen_shli_i32(t0, cpu_sr_q, SR_Q); 194fcf5ef2aSThomas Huth tcg_gen_or_i32(dst, dst, t0); 195fcf5ef2aSThomas Huth tcg_gen_shli_i32(t0, cpu_sr_m, SR_M); 196fcf5ef2aSThomas Huth tcg_gen_or_i32(dst, dst, t0); 197fcf5ef2aSThomas Huth tcg_gen_shli_i32(t0, cpu_sr_t, SR_T); 198fcf5ef2aSThomas Huth tcg_gen_or_i32(dst, cpu_sr, t0); 199fcf5ef2aSThomas Huth tcg_temp_free_i32(t0); 200fcf5ef2aSThomas Huth } 201fcf5ef2aSThomas Huth 202fcf5ef2aSThomas Huth static void gen_write_sr(TCGv src) 203fcf5ef2aSThomas Huth { 204fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr, src, 205fcf5ef2aSThomas Huth ~((1u << SR_Q) | (1u << SR_M) | (1u << SR_T))); 206a380f9dbSAurelien Jarno tcg_gen_extract_i32(cpu_sr_q, src, SR_Q, 1); 207a380f9dbSAurelien Jarno tcg_gen_extract_i32(cpu_sr_m, src, SR_M, 1); 208a380f9dbSAurelien Jarno tcg_gen_extract_i32(cpu_sr_t, src, SR_T, 1); 209fcf5ef2aSThomas Huth } 210fcf5ef2aSThomas Huth 211ac9707eaSAurelien Jarno static inline void gen_save_cpu_state(DisasContext *ctx, bool save_pc) 212ac9707eaSAurelien Jarno { 213ac9707eaSAurelien Jarno if (save_pc) { 2146f1c2af6SRichard Henderson tcg_gen_movi_i32(cpu_pc, ctx->base.pc_next); 215ac9707eaSAurelien Jarno } 216ac9707eaSAurelien Jarno if (ctx->delayed_pc != (uint32_t) -1) { 217ac9707eaSAurelien Jarno tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc); 218ac9707eaSAurelien Jarno } 219e1933d14SRichard Henderson if ((ctx->tbflags & TB_FLAG_ENVFLAGS_MASK) != ctx->envflags) { 220ac9707eaSAurelien Jarno tcg_gen_movi_i32(cpu_flags, ctx->envflags); 221ac9707eaSAurelien Jarno } 222ac9707eaSAurelien Jarno } 223ac9707eaSAurelien Jarno 224ec2eb22eSRichard Henderson static inline bool use_exit_tb(DisasContext *ctx) 225ec2eb22eSRichard Henderson { 226*ab419fd8SRichard Henderson return (ctx->tbflags & TB_FLAG_GUSA_EXCLUSIVE) != 0; 227ec2eb22eSRichard Henderson } 228ec2eb22eSRichard Henderson 2293f1e2098SRichard Henderson static bool use_goto_tb(DisasContext *ctx, target_ulong dest) 230fcf5ef2aSThomas Huth { 2313f1e2098SRichard Henderson if (use_exit_tb(ctx)) { 2324bfa602bSRichard Henderson return false; 2334bfa602bSRichard Henderson } 2343f1e2098SRichard Henderson return translator_use_goto_tb(&ctx->base, dest); 235fcf5ef2aSThomas Huth } 236fcf5ef2aSThomas Huth 237fcf5ef2aSThomas Huth static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest) 238fcf5ef2aSThomas Huth { 239fcf5ef2aSThomas Huth if (use_goto_tb(ctx, dest)) { 240fcf5ef2aSThomas Huth tcg_gen_goto_tb(n); 241fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_pc, dest); 24207ea28b4SRichard Henderson tcg_gen_exit_tb(ctx->base.tb, n); 243fcf5ef2aSThomas Huth } else { 244fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_pc, dest); 24552df5adcSRichard Henderson if (use_exit_tb(ctx)) { 24607ea28b4SRichard Henderson tcg_gen_exit_tb(NULL, 0); 247ec2eb22eSRichard Henderson } else { 2487f11636dSEmilio G. 
Cota tcg_gen_lookup_and_goto_ptr(); 249ec2eb22eSRichard Henderson } 250fcf5ef2aSThomas Huth } 2516f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 252fcf5ef2aSThomas Huth } 253fcf5ef2aSThomas Huth 254fcf5ef2aSThomas Huth static void gen_jump(DisasContext * ctx) 255fcf5ef2aSThomas Huth { 256ec2eb22eSRichard Henderson if (ctx->delayed_pc == -1) { 257fcf5ef2aSThomas Huth /* Target is not statically known, it comes necessarily from a 258fcf5ef2aSThomas Huth delayed jump, as immediate jumps are conditional jumps */ 259fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_pc, cpu_delayed_pc); 260ac9707eaSAurelien Jarno tcg_gen_discard_i32(cpu_delayed_pc); 26152df5adcSRichard Henderson if (use_exit_tb(ctx)) { 26207ea28b4SRichard Henderson tcg_gen_exit_tb(NULL, 0); 263fcf5ef2aSThomas Huth } else { 2647f11636dSEmilio G. Cota tcg_gen_lookup_and_goto_ptr(); 265ec2eb22eSRichard Henderson } 2666f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 267ec2eb22eSRichard Henderson } else { 268fcf5ef2aSThomas Huth gen_goto_tb(ctx, 0, ctx->delayed_pc); 269fcf5ef2aSThomas Huth } 270fcf5ef2aSThomas Huth } 271fcf5ef2aSThomas Huth 272fcf5ef2aSThomas Huth /* Immediate conditional jump (bt or bf) */ 2734bfa602bSRichard Henderson static void gen_conditional_jump(DisasContext *ctx, target_ulong dest, 2744bfa602bSRichard Henderson bool jump_if_true) 275fcf5ef2aSThomas Huth { 276fcf5ef2aSThomas Huth TCGLabel *l1 = gen_new_label(); 2774bfa602bSRichard Henderson TCGCond cond_not_taken = jump_if_true ? TCG_COND_EQ : TCG_COND_NE; 2784bfa602bSRichard Henderson 279*ab419fd8SRichard Henderson if (ctx->tbflags & TB_FLAG_GUSA_EXCLUSIVE) { 2804bfa602bSRichard Henderson /* When in an exclusive region, we must continue to the end. 2814bfa602bSRichard Henderson Therefore, exit the region on a taken branch, but otherwise 2824bfa602bSRichard Henderson fall through to the next instruction. */ 2834bfa602bSRichard Henderson tcg_gen_brcondi_i32(cond_not_taken, cpu_sr_t, 0, l1); 284*ab419fd8SRichard Henderson tcg_gen_movi_i32(cpu_flags, ctx->envflags & ~TB_FLAG_GUSA_MASK); 2854bfa602bSRichard Henderson /* Note that this won't actually use a goto_tb opcode because we 2864bfa602bSRichard Henderson disallow it in use_goto_tb, but it handles exit + singlestep.
*/ 2874bfa602bSRichard Henderson gen_goto_tb(ctx, 0, dest); 288fcf5ef2aSThomas Huth gen_set_label(l1); 2895b38d026SLaurent Vivier ctx->base.is_jmp = DISAS_NEXT; 2904bfa602bSRichard Henderson return; 2914bfa602bSRichard Henderson } 2924bfa602bSRichard Henderson 2934bfa602bSRichard Henderson gen_save_cpu_state(ctx, false); 2944bfa602bSRichard Henderson tcg_gen_brcondi_i32(cond_not_taken, cpu_sr_t, 0, l1); 2954bfa602bSRichard Henderson gen_goto_tb(ctx, 0, dest); 2964bfa602bSRichard Henderson gen_set_label(l1); 2976f1c2af6SRichard Henderson gen_goto_tb(ctx, 1, ctx->base.pc_next + 2); 2986f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 299fcf5ef2aSThomas Huth } 300fcf5ef2aSThomas Huth 301fcf5ef2aSThomas Huth /* Delayed conditional jump (bt or bf) */ 302fcf5ef2aSThomas Huth static void gen_delayed_conditional_jump(DisasContext * ctx) 303fcf5ef2aSThomas Huth { 3044bfa602bSRichard Henderson TCGLabel *l1 = gen_new_label(); 3054bfa602bSRichard Henderson TCGv ds = tcg_temp_new(); 306fcf5ef2aSThomas Huth 30747b9f4d5SAurelien Jarno tcg_gen_mov_i32(ds, cpu_delayed_cond); 30847b9f4d5SAurelien Jarno tcg_gen_discard_i32(cpu_delayed_cond); 3094bfa602bSRichard Henderson 310*ab419fd8SRichard Henderson if (ctx->tbflags & TB_FLAG_GUSA_EXCLUSIVE) { 3114bfa602bSRichard Henderson /* When in an exclusive region, we must continue to the end. 3124bfa602bSRichard Henderson Therefore, exit the region on a taken branch, but otherwise 3134bfa602bSRichard Henderson fall through to the next instruction. */ 3144bfa602bSRichard Henderson tcg_gen_brcondi_i32(TCG_COND_EQ, ds, 0, l1); 3154bfa602bSRichard Henderson 3164bfa602bSRichard Henderson /* Leave the gUSA region. */ 317*ab419fd8SRichard Henderson tcg_gen_movi_i32(cpu_flags, ctx->envflags & ~TB_FLAG_GUSA_MASK); 3184bfa602bSRichard Henderson gen_jump(ctx); 3194bfa602bSRichard Henderson 3204bfa602bSRichard Henderson gen_set_label(l1); 3216f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NEXT; 3224bfa602bSRichard Henderson return; 3234bfa602bSRichard Henderson } 3244bfa602bSRichard Henderson 325fcf5ef2aSThomas Huth tcg_gen_brcondi_i32(TCG_COND_NE, ds, 0, l1); 3266f1c2af6SRichard Henderson gen_goto_tb(ctx, 1, ctx->base.pc_next + 2); 327fcf5ef2aSThomas Huth gen_set_label(l1); 328fcf5ef2aSThomas Huth gen_jump(ctx); 329fcf5ef2aSThomas Huth } 330fcf5ef2aSThomas Huth 331e5d8053eSRichard Henderson static inline void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg) 332fcf5ef2aSThomas Huth { 3331e0b21d8SRichard Henderson /* We have already signaled illegal instruction for odd Dr. */ 3341e0b21d8SRichard Henderson tcg_debug_assert((reg & 1) == 0); 3351e0b21d8SRichard Henderson reg ^= ctx->fbank; 336fcf5ef2aSThomas Huth tcg_gen_concat_i32_i64(t, cpu_fregs[reg + 1], cpu_fregs[reg]); 337fcf5ef2aSThomas Huth } 338fcf5ef2aSThomas Huth 339e5d8053eSRichard Henderson static inline void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg) 340fcf5ef2aSThomas Huth { 3411e0b21d8SRichard Henderson /* We have already signaled illegal instruction for odd Dr. 
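   (A DRn operand is the even/odd pair FPRn:FPRn+1, FPRn holding the high word, so only even register numbers can legitimately reach this point.)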
*/ 3421e0b21d8SRichard Henderson tcg_debug_assert((reg & 1) == 0); 3431e0b21d8SRichard Henderson reg ^= ctx->fbank; 34458d2a9aeSAurelien Jarno tcg_gen_extr_i64_i32(cpu_fregs[reg + 1], cpu_fregs[reg], t); 345fcf5ef2aSThomas Huth } 346fcf5ef2aSThomas Huth 347fcf5ef2aSThomas Huth #define B3_0 (ctx->opcode & 0xf) 348fcf5ef2aSThomas Huth #define B6_4 ((ctx->opcode >> 4) & 0x7) 349fcf5ef2aSThomas Huth #define B7_4 ((ctx->opcode >> 4) & 0xf) 350fcf5ef2aSThomas Huth #define B7_0 (ctx->opcode & 0xff) 351fcf5ef2aSThomas Huth #define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff)) 352fcf5ef2aSThomas Huth #define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \ 353fcf5ef2aSThomas Huth (ctx->opcode & 0xfff)) 354fcf5ef2aSThomas Huth #define B11_8 ((ctx->opcode >> 8) & 0xf) 355fcf5ef2aSThomas Huth #define B15_12 ((ctx->opcode >> 12) & 0xf) 356fcf5ef2aSThomas Huth 3573a3bb8d2SRichard Henderson #define REG(x) cpu_gregs[(x) ^ ctx->gbank] 3583a3bb8d2SRichard Henderson #define ALTREG(x) cpu_gregs[(x) ^ ctx->gbank ^ 0x10] 3595c13bad9SRichard Henderson #define FREG(x) cpu_fregs[(x) ^ ctx->fbank] 360fcf5ef2aSThomas Huth 361fcf5ef2aSThomas Huth #define XHACK(x) ((((x) & 1 ) << 4) | ((x) & 0xe)) 362fcf5ef2aSThomas Huth 363fcf5ef2aSThomas Huth #define CHECK_NOT_DELAY_SLOT \ 364*ab419fd8SRichard Henderson if (ctx->envflags & TB_FLAG_DELAY_SLOT_MASK) { \ 365dec16c6eSRichard Henderson goto do_illegal_slot; \ 366fcf5ef2aSThomas Huth } 367fcf5ef2aSThomas Huth 368fcf5ef2aSThomas Huth #define CHECK_PRIVILEGED \ 369fcf5ef2aSThomas Huth if (IS_USER(ctx)) { \ 3706b98213dSRichard Henderson goto do_illegal; \ 371fcf5ef2aSThomas Huth } 372fcf5ef2aSThomas Huth 373fcf5ef2aSThomas Huth #define CHECK_FPU_ENABLED \ 374a6215749SAurelien Jarno if (ctx->tbflags & (1u << SR_FD)) { \ 375dec4f042SRichard Henderson goto do_fpu_disabled; \ 376fcf5ef2aSThomas Huth } 377fcf5ef2aSThomas Huth 3787e9f7ca8SRichard Henderson #define CHECK_FPSCR_PR_0 \ 3797e9f7ca8SRichard Henderson if (ctx->tbflags & FPSCR_PR) { \ 3807e9f7ca8SRichard Henderson goto do_illegal; \ 3817e9f7ca8SRichard Henderson } 3827e9f7ca8SRichard Henderson 3837e9f7ca8SRichard Henderson #define CHECK_FPSCR_PR_1 \ 3847e9f7ca8SRichard Henderson if (!(ctx->tbflags & FPSCR_PR)) { \ 3857e9f7ca8SRichard Henderson goto do_illegal; \ 3867e9f7ca8SRichard Henderson } 3877e9f7ca8SRichard Henderson 388ccae24d4SRichard Henderson #define CHECK_SH4A \ 389ccae24d4SRichard Henderson if (!(ctx->features & SH_FEATURE_SH4A)) { \ 390ccae24d4SRichard Henderson goto do_illegal; \ 391ccae24d4SRichard Henderson } 392ccae24d4SRichard Henderson 393fcf5ef2aSThomas Huth static void _decode_opc(DisasContext * ctx) 394fcf5ef2aSThomas Huth { 395fcf5ef2aSThomas Huth /* This code tries to make movcal emulation sufficiently 396fcf5ef2aSThomas Huth accurate for Linux purposes. This instruction writes 397fcf5ef2aSThomas Huth memory, and prior to that, always allocates a cache line. 398fcf5ef2aSThomas Huth It is used in two contexts: 399fcf5ef2aSThomas Huth - in memcpy, where data is copied in blocks, the first write 400fcf5ef2aSThomas Huth to a block uses movca.l for performance. 401fcf5ef2aSThomas Huth - in arch/sh/mm/cache-sh4.c, movcal.l + ocbi combination is used 402fcf5ef2aSThomas Huth to flush the cache. Here, the data written by movcal.l is never 403fcf5ef2aSThomas Huth written to memory, and the data written is just bogus.
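   (Purely illustrative, not copied from the kernel: that flush path amounts to a back-to-back pair such as "movca.l r0,@rN; ocbi @rN", so the ocbi must make the line's previous memory contents visible again.)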
404fcf5ef2aSThomas Huth 405fcf5ef2aSThomas Huth To simulate this, we simulate movcal.l, we store the value to memory, 406fcf5ef2aSThomas Huth but we also remember the previous content. If we see ocbi, we check 407fcf5ef2aSThomas Huth if movcal.l for that address was done previously. If so, the write should 408fcf5ef2aSThomas Huth not have hit the memory, so we restore the previous content. 409fcf5ef2aSThomas Huth When we see an instruction that is neither movca.l 410fcf5ef2aSThomas Huth nor ocbi, the previous content is discarded. 411fcf5ef2aSThomas Huth 412fcf5ef2aSThomas Huth To optimize, we only try to flush stores when we're at the start of 413fcf5ef2aSThomas Huth TB, or if we already saw movca.l in this TB and did not flush stores 414fcf5ef2aSThomas Huth yet. */ 415fcf5ef2aSThomas Huth if (ctx->has_movcal) 416fcf5ef2aSThomas Huth { 417fcf5ef2aSThomas Huth int opcode = ctx->opcode & 0xf0ff; 418fcf5ef2aSThomas Huth if (opcode != 0x0093 /* ocbi */ 419fcf5ef2aSThomas Huth && opcode != 0x00c3 /* movca.l */) 420fcf5ef2aSThomas Huth { 421fcf5ef2aSThomas Huth gen_helper_discard_movcal_backup(cpu_env); 422fcf5ef2aSThomas Huth ctx->has_movcal = 0; 423fcf5ef2aSThomas Huth } 424fcf5ef2aSThomas Huth } 425fcf5ef2aSThomas Huth 426fcf5ef2aSThomas Huth #if 0 427fcf5ef2aSThomas Huth fprintf(stderr, "Translating opcode 0x%04x\n", ctx->opcode); 428fcf5ef2aSThomas Huth #endif 429fcf5ef2aSThomas Huth 430fcf5ef2aSThomas Huth switch (ctx->opcode) { 431fcf5ef2aSThomas Huth case 0x0019: /* div0u */ 432fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_sr_m, 0); 433fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_sr_q, 0); 434fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_sr_t, 0); 435fcf5ef2aSThomas Huth return; 436fcf5ef2aSThomas Huth case 0x000b: /* rts */ 437fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 438fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_delayed_pc, cpu_pr); 439*ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT; 440fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 441fcf5ef2aSThomas Huth return; 442fcf5ef2aSThomas Huth case 0x0028: /* clrmac */ 443fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_mach, 0); 444fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_macl, 0); 445fcf5ef2aSThomas Huth return; 446fcf5ef2aSThomas Huth case 0x0048: /* clrs */ 447fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr, cpu_sr, ~(1u << SR_S)); 448fcf5ef2aSThomas Huth return; 449fcf5ef2aSThomas Huth case 0x0008: /* clrt */ 450fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_sr_t, 0); 451fcf5ef2aSThomas Huth return; 452fcf5ef2aSThomas Huth case 0x0038: /* ldtlb */ 453fcf5ef2aSThomas Huth CHECK_PRIVILEGED 454fcf5ef2aSThomas Huth gen_helper_ldtlb(cpu_env); 455fcf5ef2aSThomas Huth return; 456fcf5ef2aSThomas Huth case 0x002b: /* rte */ 457fcf5ef2aSThomas Huth CHECK_PRIVILEGED 458fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 459fcf5ef2aSThomas Huth gen_write_sr(cpu_ssr); 460fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_delayed_pc, cpu_spc); 461*ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT_RTE; 462fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 4636f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 464fcf5ef2aSThomas Huth return; 465fcf5ef2aSThomas Huth case 0x0058: /* sets */ 466fcf5ef2aSThomas Huth tcg_gen_ori_i32(cpu_sr, cpu_sr, (1u << SR_S)); 467fcf5ef2aSThomas Huth return; 468fcf5ef2aSThomas Huth case 0x0018: /* sett */ 469fcf5ef2aSThomas Huth tcg_gen_movi_i32(cpu_sr_t, 1); 470fcf5ef2aSThomas Huth return; 471fcf5ef2aSThomas Huth case 0xfbfd: /* frchg */ 47261dedf2aSRichard Henderson CHECK_FPSCR_PR_0 473fcf5ef2aSThomas Huth 
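/* frchg toggles FPSCR.FR and thereby swaps the two FP register banks; ctx->fbank was derived from tbflags at translation time, so stop the TB and let the next one be translated with the new bank selection. */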
tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_FR); 4746f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 475fcf5ef2aSThomas Huth return; 476fcf5ef2aSThomas Huth case 0xf3fd: /* fschg */ 47761dedf2aSRichard Henderson CHECK_FPSCR_PR_0 478fcf5ef2aSThomas Huth tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_SZ); 4796f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 480fcf5ef2aSThomas Huth return; 481907759f9SRichard Henderson case 0xf7fd: /* fpchg */ 482907759f9SRichard Henderson CHECK_SH4A 483907759f9SRichard Henderson tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_PR); 4846f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 485907759f9SRichard Henderson return; 486fcf5ef2aSThomas Huth case 0x0009: /* nop */ 487fcf5ef2aSThomas Huth return; 488fcf5ef2aSThomas Huth case 0x001b: /* sleep */ 489fcf5ef2aSThomas Huth CHECK_PRIVILEGED 4906f1c2af6SRichard Henderson tcg_gen_movi_i32(cpu_pc, ctx->base.pc_next + 2); 491fcf5ef2aSThomas Huth gen_helper_sleep(cpu_env); 492fcf5ef2aSThomas Huth return; 493fcf5ef2aSThomas Huth } 494fcf5ef2aSThomas Huth 495fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf000) { 496fcf5ef2aSThomas Huth case 0x1000: /* mov.l Rm,@(disp,Rn) */ 497fcf5ef2aSThomas Huth { 498fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 499fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B11_8), B3_0 * 4); 5004da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, 5014da06fb3SRichard Henderson MO_TEUL | UNALIGN(ctx)); 502fcf5ef2aSThomas Huth tcg_temp_free(addr); 503fcf5ef2aSThomas Huth } 504fcf5ef2aSThomas Huth return; 505fcf5ef2aSThomas Huth case 0x5000: /* mov.l @(disp,Rm),Rn */ 506fcf5ef2aSThomas Huth { 507fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 508fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 4); 5094da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, 5104da06fb3SRichard Henderson MO_TESL | UNALIGN(ctx)); 511fcf5ef2aSThomas Huth tcg_temp_free(addr); 512fcf5ef2aSThomas Huth } 513fcf5ef2aSThomas Huth return; 514fcf5ef2aSThomas Huth case 0xe000: /* mov #imm,Rn */ 5154bfa602bSRichard Henderson #ifdef CONFIG_USER_ONLY 516*ab419fd8SRichard Henderson /* 517*ab419fd8SRichard Henderson * Detect the start of a gUSA region (mov #-n, r15). 518*ab419fd8SRichard Henderson * If so, update envflags and end the TB. This will allow us 519*ab419fd8SRichard Henderson * to see the end of the region (stored in R0) in the next TB. 
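 * A typical region (illustrative only, roughly what the pre-SH4A Linux
 * atomic helpers emit; the register choices are arbitrary) looks like:
 *     mova   1f, r0      ! r0 = end of the region (label 1)
 *     mov    r15, r1     ! save the stack pointer
 *     mov    #-6, r15    ! enter gUSA: r15 = -(region size in bytes)
 *     mov.l  @r2, r3
 *     add    r4, r3
 *     mov.l  r3, @r2
 *  1: mov    r1, r15     ! leave the region, restoring r15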
520*ab419fd8SRichard Henderson */ 5216f1c2af6SRichard Henderson if (B11_8 == 15 && B7_0s < 0 && 5226f1c2af6SRichard Henderson (tb_cflags(ctx->base.tb) & CF_PARALLEL)) { 523*ab419fd8SRichard Henderson ctx->envflags = 524*ab419fd8SRichard Henderson deposit32(ctx->envflags, TB_FLAG_GUSA_SHIFT, 8, B7_0s); 5256f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 5264bfa602bSRichard Henderson } 5274bfa602bSRichard Henderson #endif 528fcf5ef2aSThomas Huth tcg_gen_movi_i32(REG(B11_8), B7_0s); 529fcf5ef2aSThomas Huth return; 530fcf5ef2aSThomas Huth case 0x9000: /* mov.w @(disp,PC),Rn */ 531fcf5ef2aSThomas Huth { 5326f1c2af6SRichard Henderson TCGv addr = tcg_const_i32(ctx->base.pc_next + 4 + B7_0 * 2); 533fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESW); 534fcf5ef2aSThomas Huth tcg_temp_free(addr); 535fcf5ef2aSThomas Huth } 536fcf5ef2aSThomas Huth return; 537fcf5ef2aSThomas Huth case 0xd000: /* mov.l @(disp,PC),Rn */ 538fcf5ef2aSThomas Huth { 5396f1c2af6SRichard Henderson TCGv addr = tcg_const_i32((ctx->base.pc_next + 4 + B7_0 * 4) & ~3); 540fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESL); 541fcf5ef2aSThomas Huth tcg_temp_free(addr); 542fcf5ef2aSThomas Huth } 543fcf5ef2aSThomas Huth return; 544fcf5ef2aSThomas Huth case 0x7000: /* add #imm,Rn */ 545fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), B7_0s); 546fcf5ef2aSThomas Huth return; 547fcf5ef2aSThomas Huth case 0xa000: /* bra disp */ 548fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 5496f1c2af6SRichard Henderson ctx->delayed_pc = ctx->base.pc_next + 4 + B11_0s * 2; 550*ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT; 551fcf5ef2aSThomas Huth return; 552fcf5ef2aSThomas Huth case 0xb000: /* bsr disp */ 553fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 5546f1c2af6SRichard Henderson tcg_gen_movi_i32(cpu_pr, ctx->base.pc_next + 4); 5556f1c2af6SRichard Henderson ctx->delayed_pc = ctx->base.pc_next + 4 + B11_0s * 2; 556*ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT; 557fcf5ef2aSThomas Huth return; 558fcf5ef2aSThomas Huth } 559fcf5ef2aSThomas Huth 560fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf00f) { 561fcf5ef2aSThomas Huth case 0x6003: /* mov Rm,Rn */ 562fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), REG(B7_4)); 563fcf5ef2aSThomas Huth return; 564fcf5ef2aSThomas Huth case 0x2000: /* mov.b Rm,@Rn */ 565fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, MO_UB); 566fcf5ef2aSThomas Huth return; 567fcf5ef2aSThomas Huth case 0x2001: /* mov.w Rm,@Rn */ 5684da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, 5694da06fb3SRichard Henderson MO_TEUW | UNALIGN(ctx)); 570fcf5ef2aSThomas Huth return; 571fcf5ef2aSThomas Huth case 0x2002: /* mov.l Rm,@Rn */ 5724da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, 5734da06fb3SRichard Henderson MO_TEUL | UNALIGN(ctx)); 574fcf5ef2aSThomas Huth return; 575fcf5ef2aSThomas Huth case 0x6000: /* mov.b @Rm,Rn */ 576fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_SB); 577fcf5ef2aSThomas Huth return; 578fcf5ef2aSThomas Huth case 0x6001: /* mov.w @Rm,Rn */ 5794da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, 5804da06fb3SRichard Henderson MO_TESW | UNALIGN(ctx)); 581fcf5ef2aSThomas Huth return; 582fcf5ef2aSThomas Huth case 0x6002: /* mov.l @Rm,Rn */ 5834da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, 5844da06fb3SRichard Henderson MO_TESL | 
UNALIGN(ctx)); 585fcf5ef2aSThomas Huth return; 586fcf5ef2aSThomas Huth case 0x2004: /* mov.b Rm,@-Rn */ 587fcf5ef2aSThomas Huth { 588fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 589fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 1); 590fcf5ef2aSThomas Huth /* might cause re-execution */ 591fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_UB); 592fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); /* modify register status */ 593fcf5ef2aSThomas Huth tcg_temp_free(addr); 594fcf5ef2aSThomas Huth } 595fcf5ef2aSThomas Huth return; 596fcf5ef2aSThomas Huth case 0x2005: /* mov.w Rm,@-Rn */ 597fcf5ef2aSThomas Huth { 598fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 599fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 2); 6004da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, 6014da06fb3SRichard Henderson MO_TEUW | UNALIGN(ctx)); 602fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 603fcf5ef2aSThomas Huth tcg_temp_free(addr); 604fcf5ef2aSThomas Huth } 605fcf5ef2aSThomas Huth return; 606fcf5ef2aSThomas Huth case 0x2006: /* mov.l Rm,@-Rn */ 607fcf5ef2aSThomas Huth { 608fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 609fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 6104da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, 6114da06fb3SRichard Henderson MO_TEUL | UNALIGN(ctx)); 612fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 613e691e0edSPhilippe Mathieu-Daudé tcg_temp_free(addr); 614fcf5ef2aSThomas Huth } 615fcf5ef2aSThomas Huth return; 616fcf5ef2aSThomas Huth case 0x6004: /* mov.b @Rm+,Rn */ 617fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_SB); 618fcf5ef2aSThomas Huth if ( B11_8 != B7_4 ) 619fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 1); 620fcf5ef2aSThomas Huth return; 621fcf5ef2aSThomas Huth case 0x6005: /* mov.w @Rm+,Rn */ 6224da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, 6234da06fb3SRichard Henderson MO_TESW | UNALIGN(ctx)); 624fcf5ef2aSThomas Huth if ( B11_8 != B7_4 ) 625fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2); 626fcf5ef2aSThomas Huth return; 627fcf5ef2aSThomas Huth case 0x6006: /* mov.l @Rm+,Rn */ 6284da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, 6294da06fb3SRichard Henderson MO_TESL | UNALIGN(ctx)); 630fcf5ef2aSThomas Huth if ( B11_8 != B7_4 ) 631fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4); 632fcf5ef2aSThomas Huth return; 633fcf5ef2aSThomas Huth case 0x0004: /* mov.b Rm,@(R0,Rn) */ 634fcf5ef2aSThomas Huth { 635fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 636fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B11_8), REG(0)); 637fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_UB); 638fcf5ef2aSThomas Huth tcg_temp_free(addr); 639fcf5ef2aSThomas Huth } 640fcf5ef2aSThomas Huth return; 641fcf5ef2aSThomas Huth case 0x0005: /* mov.w Rm,@(R0,Rn) */ 642fcf5ef2aSThomas Huth { 643fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 644fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B11_8), REG(0)); 6454da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, 6464da06fb3SRichard Henderson MO_TEUW | UNALIGN(ctx)); 647fcf5ef2aSThomas Huth tcg_temp_free(addr); 648fcf5ef2aSThomas Huth } 649fcf5ef2aSThomas Huth return; 650fcf5ef2aSThomas Huth case 0x0006: /* mov.l Rm,@(R0,Rn) */ 651fcf5ef2aSThomas Huth { 652fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 653fcf5ef2aSThomas Huth 
tcg_gen_add_i32(addr, REG(B11_8), REG(0)); 6544da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, 6554da06fb3SRichard Henderson MO_TEUL | UNALIGN(ctx)); 656fcf5ef2aSThomas Huth tcg_temp_free(addr); 657fcf5ef2aSThomas Huth } 658fcf5ef2aSThomas Huth return; 659fcf5ef2aSThomas Huth case 0x000c: /* mov.b @(R0,Rm),Rn */ 660fcf5ef2aSThomas Huth { 661fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 662fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B7_4), REG(0)); 663fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_SB); 664fcf5ef2aSThomas Huth tcg_temp_free(addr); 665fcf5ef2aSThomas Huth } 666fcf5ef2aSThomas Huth return; 667fcf5ef2aSThomas Huth case 0x000d: /* mov.w @(R0,Rm),Rn */ 668fcf5ef2aSThomas Huth { 669fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 670fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B7_4), REG(0)); 6714da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, 6724da06fb3SRichard Henderson MO_TESW | UNALIGN(ctx)); 673fcf5ef2aSThomas Huth tcg_temp_free(addr); 674fcf5ef2aSThomas Huth } 675fcf5ef2aSThomas Huth return; 676fcf5ef2aSThomas Huth case 0x000e: /* mov.l @(R0,Rm),Rn */ 677fcf5ef2aSThomas Huth { 678fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 679fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B7_4), REG(0)); 6804da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, 6814da06fb3SRichard Henderson MO_TESL | UNALIGN(ctx)); 682fcf5ef2aSThomas Huth tcg_temp_free(addr); 683fcf5ef2aSThomas Huth } 684fcf5ef2aSThomas Huth return; 685fcf5ef2aSThomas Huth case 0x6008: /* swap.b Rm,Rn */ 686fcf5ef2aSThomas Huth { 6873c254ab8SLadi Prosek TCGv low = tcg_temp_new(); 688b983a0e1SRichard Henderson tcg_gen_bswap16_i32(low, REG(B7_4), 0); 689fcf5ef2aSThomas Huth tcg_gen_deposit_i32(REG(B11_8), REG(B7_4), low, 0, 16); 690fcf5ef2aSThomas Huth tcg_temp_free(low); 691fcf5ef2aSThomas Huth } 692fcf5ef2aSThomas Huth return; 693fcf5ef2aSThomas Huth case 0x6009: /* swap.w Rm,Rn */ 694fcf5ef2aSThomas Huth tcg_gen_rotli_i32(REG(B11_8), REG(B7_4), 16); 695fcf5ef2aSThomas Huth return; 696fcf5ef2aSThomas Huth case 0x200d: /* xtrct Rm,Rn */ 697fcf5ef2aSThomas Huth { 698fcf5ef2aSThomas Huth TCGv high, low; 699fcf5ef2aSThomas Huth high = tcg_temp_new(); 700fcf5ef2aSThomas Huth tcg_gen_shli_i32(high, REG(B7_4), 16); 701fcf5ef2aSThomas Huth low = tcg_temp_new(); 702fcf5ef2aSThomas Huth tcg_gen_shri_i32(low, REG(B11_8), 16); 703fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), high, low); 704fcf5ef2aSThomas Huth tcg_temp_free(low); 705fcf5ef2aSThomas Huth tcg_temp_free(high); 706fcf5ef2aSThomas Huth } 707fcf5ef2aSThomas Huth return; 708fcf5ef2aSThomas Huth case 0x300c: /* add Rm,Rn */ 709fcf5ef2aSThomas Huth tcg_gen_add_i32(REG(B11_8), REG(B11_8), REG(B7_4)); 710fcf5ef2aSThomas Huth return; 711fcf5ef2aSThomas Huth case 0x300e: /* addc Rm,Rn */ 712fcf5ef2aSThomas Huth { 713fcf5ef2aSThomas Huth TCGv t0, t1; 714fcf5ef2aSThomas Huth t0 = tcg_const_tl(0); 715fcf5ef2aSThomas Huth t1 = tcg_temp_new(); 716fcf5ef2aSThomas Huth tcg_gen_add2_i32(t1, cpu_sr_t, cpu_sr_t, t0, REG(B7_4), t0); 717fcf5ef2aSThomas Huth tcg_gen_add2_i32(REG(B11_8), cpu_sr_t, 718fcf5ef2aSThomas Huth REG(B11_8), t0, t1, cpu_sr_t); 719fcf5ef2aSThomas Huth tcg_temp_free(t0); 720fcf5ef2aSThomas Huth tcg_temp_free(t1); 721fcf5ef2aSThomas Huth } 722fcf5ef2aSThomas Huth return; 723fcf5ef2aSThomas Huth case 0x300f: /* addv Rm,Rn */ 724fcf5ef2aSThomas Huth { 725fcf5ef2aSThomas Huth TCGv t0, t1, t2; 726fcf5ef2aSThomas Huth t0 = tcg_temp_new(); 727fcf5ef2aSThomas 
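/* Signed overflow: T is set when both operands have the same sign and the sum's sign differs, i.e. T = ((sum ^ Rn) & ~(Rm ^ Rn)) >> 31, which is what the xor/andc sequence below computes. */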
Huth tcg_gen_add_i32(t0, REG(B7_4), REG(B11_8)); 728fcf5ef2aSThomas Huth t1 = tcg_temp_new(); 729fcf5ef2aSThomas Huth tcg_gen_xor_i32(t1, t0, REG(B11_8)); 730fcf5ef2aSThomas Huth t2 = tcg_temp_new(); 731fcf5ef2aSThomas Huth tcg_gen_xor_i32(t2, REG(B7_4), REG(B11_8)); 732fcf5ef2aSThomas Huth tcg_gen_andc_i32(cpu_sr_t, t1, t2); 733fcf5ef2aSThomas Huth tcg_temp_free(t2); 734fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_t, cpu_sr_t, 31); 735fcf5ef2aSThomas Huth tcg_temp_free(t1); 736fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B7_4), t0); 737fcf5ef2aSThomas Huth tcg_temp_free(t0); 738fcf5ef2aSThomas Huth } 739fcf5ef2aSThomas Huth return; 740fcf5ef2aSThomas Huth case 0x2009: /* and Rm,Rn */ 741fcf5ef2aSThomas Huth tcg_gen_and_i32(REG(B11_8), REG(B11_8), REG(B7_4)); 742fcf5ef2aSThomas Huth return; 743fcf5ef2aSThomas Huth case 0x3000: /* cmp/eq Rm,Rn */ 744fcf5ef2aSThomas Huth tcg_gen_setcond_i32(TCG_COND_EQ, cpu_sr_t, REG(B11_8), REG(B7_4)); 745fcf5ef2aSThomas Huth return; 746fcf5ef2aSThomas Huth case 0x3003: /* cmp/ge Rm,Rn */ 747fcf5ef2aSThomas Huth tcg_gen_setcond_i32(TCG_COND_GE, cpu_sr_t, REG(B11_8), REG(B7_4)); 748fcf5ef2aSThomas Huth return; 749fcf5ef2aSThomas Huth case 0x3007: /* cmp/gt Rm,Rn */ 750fcf5ef2aSThomas Huth tcg_gen_setcond_i32(TCG_COND_GT, cpu_sr_t, REG(B11_8), REG(B7_4)); 751fcf5ef2aSThomas Huth return; 752fcf5ef2aSThomas Huth case 0x3006: /* cmp/hi Rm,Rn */ 753fcf5ef2aSThomas Huth tcg_gen_setcond_i32(TCG_COND_GTU, cpu_sr_t, REG(B11_8), REG(B7_4)); 754fcf5ef2aSThomas Huth return; 755fcf5ef2aSThomas Huth case 0x3002: /* cmp/hs Rm,Rn */ 756fcf5ef2aSThomas Huth tcg_gen_setcond_i32(TCG_COND_GEU, cpu_sr_t, REG(B11_8), REG(B7_4)); 757fcf5ef2aSThomas Huth return; 758fcf5ef2aSThomas Huth case 0x200c: /* cmp/str Rm,Rn */ 759fcf5ef2aSThomas Huth { 760fcf5ef2aSThomas Huth TCGv cmp1 = tcg_temp_new(); 761fcf5ef2aSThomas Huth TCGv cmp2 = tcg_temp_new(); 762fcf5ef2aSThomas Huth tcg_gen_xor_i32(cmp2, REG(B7_4), REG(B11_8)); 763fcf5ef2aSThomas Huth tcg_gen_subi_i32(cmp1, cmp2, 0x01010101); 764fcf5ef2aSThomas Huth tcg_gen_andc_i32(cmp1, cmp1, cmp2); 765fcf5ef2aSThomas Huth tcg_gen_andi_i32(cmp1, cmp1, 0x80808080); 766fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_NE, cpu_sr_t, cmp1, 0); 767fcf5ef2aSThomas Huth tcg_temp_free(cmp2); 768fcf5ef2aSThomas Huth tcg_temp_free(cmp1); 769fcf5ef2aSThomas Huth } 770fcf5ef2aSThomas Huth return; 771fcf5ef2aSThomas Huth case 0x2007: /* div0s Rm,Rn */ 772fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_q, REG(B11_8), 31); /* SR_Q */ 773fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_m, REG(B7_4), 31); /* SR_M */ 774fcf5ef2aSThomas Huth tcg_gen_xor_i32(cpu_sr_t, cpu_sr_q, cpu_sr_m); /* SR_T */ 775fcf5ef2aSThomas Huth return; 776fcf5ef2aSThomas Huth case 0x3004: /* div1 Rm,Rn */ 777fcf5ef2aSThomas Huth { 778fcf5ef2aSThomas Huth TCGv t0 = tcg_temp_new(); 779fcf5ef2aSThomas Huth TCGv t1 = tcg_temp_new(); 780fcf5ef2aSThomas Huth TCGv t2 = tcg_temp_new(); 781fcf5ef2aSThomas Huth TCGv zero = tcg_const_i32(0); 782fcf5ef2aSThomas Huth 783fcf5ef2aSThomas Huth /* shift left arg1, saving the bit being pushed out and inserting 784fcf5ef2aSThomas Huth T on the right */ 785fcf5ef2aSThomas Huth tcg_gen_shri_i32(t0, REG(B11_8), 31); 786fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1); 787fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), REG(B11_8), cpu_sr_t); 788fcf5ef2aSThomas Huth 789fcf5ef2aSThomas Huth /* Add or subtract arg0 from arg1 depending if Q == M. 
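This is one step of non-restoring division: Q tracks the sign of the running remainder and M the sign of the divisor, so the divisor is subtracted when the two agree and added back when they differ.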
To avoid 790fcf5ef2aSThomas Huth using 64-bit temps, we compute arg0's high part from q ^ m, so 791fcf5ef2aSThomas Huth that it is 0x00000000 when adding the value or 0xffffffff when 792fcf5ef2aSThomas Huth subtracting it. */ 793fcf5ef2aSThomas Huth tcg_gen_xor_i32(t1, cpu_sr_q, cpu_sr_m); 794fcf5ef2aSThomas Huth tcg_gen_subi_i32(t1, t1, 1); 795fcf5ef2aSThomas Huth tcg_gen_neg_i32(t2, REG(B7_4)); 796fcf5ef2aSThomas Huth tcg_gen_movcond_i32(TCG_COND_EQ, t2, t1, zero, REG(B7_4), t2); 797fcf5ef2aSThomas Huth tcg_gen_add2_i32(REG(B11_8), t1, REG(B11_8), zero, t2, t1); 798fcf5ef2aSThomas Huth 799fcf5ef2aSThomas Huth /* compute T and Q depending on carry */ 800fcf5ef2aSThomas Huth tcg_gen_andi_i32(t1, t1, 1); 801fcf5ef2aSThomas Huth tcg_gen_xor_i32(t1, t1, t0); 802fcf5ef2aSThomas Huth tcg_gen_xori_i32(cpu_sr_t, t1, 1); 803fcf5ef2aSThomas Huth tcg_gen_xor_i32(cpu_sr_q, cpu_sr_m, t1); 804fcf5ef2aSThomas Huth 805fcf5ef2aSThomas Huth tcg_temp_free(zero); 806fcf5ef2aSThomas Huth tcg_temp_free(t2); 807fcf5ef2aSThomas Huth tcg_temp_free(t1); 808fcf5ef2aSThomas Huth tcg_temp_free(t0); 809fcf5ef2aSThomas Huth } 810fcf5ef2aSThomas Huth return; 811fcf5ef2aSThomas Huth case 0x300d: /* dmuls.l Rm,Rn */ 812fcf5ef2aSThomas Huth tcg_gen_muls2_i32(cpu_macl, cpu_mach, REG(B7_4), REG(B11_8)); 813fcf5ef2aSThomas Huth return; 814fcf5ef2aSThomas Huth case 0x3005: /* dmulu.l Rm,Rn */ 815fcf5ef2aSThomas Huth tcg_gen_mulu2_i32(cpu_macl, cpu_mach, REG(B7_4), REG(B11_8)); 816fcf5ef2aSThomas Huth return; 817fcf5ef2aSThomas Huth case 0x600e: /* exts.b Rm,Rn */ 818fcf5ef2aSThomas Huth tcg_gen_ext8s_i32(REG(B11_8), REG(B7_4)); 819fcf5ef2aSThomas Huth return; 820fcf5ef2aSThomas Huth case 0x600f: /* exts.w Rm,Rn */ 821fcf5ef2aSThomas Huth tcg_gen_ext16s_i32(REG(B11_8), REG(B7_4)); 822fcf5ef2aSThomas Huth return; 823fcf5ef2aSThomas Huth case 0x600c: /* extu.b Rm,Rn */ 824fcf5ef2aSThomas Huth tcg_gen_ext8u_i32(REG(B11_8), REG(B7_4)); 825fcf5ef2aSThomas Huth return; 826fcf5ef2aSThomas Huth case 0x600d: /* extu.w Rm,Rn */ 827fcf5ef2aSThomas Huth tcg_gen_ext16u_i32(REG(B11_8), REG(B7_4)); 828fcf5ef2aSThomas Huth return; 829fcf5ef2aSThomas Huth case 0x000f: /* mac.l @Rm+,@Rn+ */ 830fcf5ef2aSThomas Huth { 831fcf5ef2aSThomas Huth TCGv arg0, arg1; 832fcf5ef2aSThomas Huth arg0 = tcg_temp_new(); 833fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(arg0, REG(B7_4), ctx->memidx, MO_TESL); 834fcf5ef2aSThomas Huth arg1 = tcg_temp_new(); 835fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(arg1, REG(B11_8), ctx->memidx, MO_TESL); 836fcf5ef2aSThomas Huth gen_helper_macl(cpu_env, arg0, arg1); 837fcf5ef2aSThomas Huth tcg_temp_free(arg1); 838fcf5ef2aSThomas Huth tcg_temp_free(arg0); 839fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4); 840fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 841fcf5ef2aSThomas Huth } 842fcf5ef2aSThomas Huth return; 843fcf5ef2aSThomas Huth case 0x400f: /* mac.w @Rm+,@Rn+ */ 844fcf5ef2aSThomas Huth { 845fcf5ef2aSThomas Huth TCGv arg0, arg1; 846fcf5ef2aSThomas Huth arg0 = tcg_temp_new(); 847fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(arg0, REG(B7_4), ctx->memidx, MO_TESL); 848fcf5ef2aSThomas Huth arg1 = tcg_temp_new(); 849fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(arg1, REG(B11_8), ctx->memidx, MO_TESL); 850fcf5ef2aSThomas Huth gen_helper_macw(cpu_env, arg0, arg1); 851fcf5ef2aSThomas Huth tcg_temp_free(arg1); 852fcf5ef2aSThomas Huth tcg_temp_free(arg0); 853fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 2); 854fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2); 855fcf5ef2aSThomas Huth } 
856fcf5ef2aSThomas Huth return; 857fcf5ef2aSThomas Huth case 0x0007: /* mul.l Rm,Rn */ 858fcf5ef2aSThomas Huth tcg_gen_mul_i32(cpu_macl, REG(B7_4), REG(B11_8)); 859fcf5ef2aSThomas Huth return; 860fcf5ef2aSThomas Huth case 0x200f: /* muls.w Rm,Rn */ 861fcf5ef2aSThomas Huth { 862fcf5ef2aSThomas Huth TCGv arg0, arg1; 863fcf5ef2aSThomas Huth arg0 = tcg_temp_new(); 864fcf5ef2aSThomas Huth tcg_gen_ext16s_i32(arg0, REG(B7_4)); 865fcf5ef2aSThomas Huth arg1 = tcg_temp_new(); 866fcf5ef2aSThomas Huth tcg_gen_ext16s_i32(arg1, REG(B11_8)); 867fcf5ef2aSThomas Huth tcg_gen_mul_i32(cpu_macl, arg0, arg1); 868fcf5ef2aSThomas Huth tcg_temp_free(arg1); 869fcf5ef2aSThomas Huth tcg_temp_free(arg0); 870fcf5ef2aSThomas Huth } 871fcf5ef2aSThomas Huth return; 872fcf5ef2aSThomas Huth case 0x200e: /* mulu.w Rm,Rn */ 873fcf5ef2aSThomas Huth { 874fcf5ef2aSThomas Huth TCGv arg0, arg1; 875fcf5ef2aSThomas Huth arg0 = tcg_temp_new(); 876fcf5ef2aSThomas Huth tcg_gen_ext16u_i32(arg0, REG(B7_4)); 877fcf5ef2aSThomas Huth arg1 = tcg_temp_new(); 878fcf5ef2aSThomas Huth tcg_gen_ext16u_i32(arg1, REG(B11_8)); 879fcf5ef2aSThomas Huth tcg_gen_mul_i32(cpu_macl, arg0, arg1); 880fcf5ef2aSThomas Huth tcg_temp_free(arg1); 881fcf5ef2aSThomas Huth tcg_temp_free(arg0); 882fcf5ef2aSThomas Huth } 883fcf5ef2aSThomas Huth return; 884fcf5ef2aSThomas Huth case 0x600b: /* neg Rm,Rn */ 885fcf5ef2aSThomas Huth tcg_gen_neg_i32(REG(B11_8), REG(B7_4)); 886fcf5ef2aSThomas Huth return; 887fcf5ef2aSThomas Huth case 0x600a: /* negc Rm,Rn */ 888fcf5ef2aSThomas Huth { 889fcf5ef2aSThomas Huth TCGv t0 = tcg_const_i32(0); 890fcf5ef2aSThomas Huth tcg_gen_add2_i32(REG(B11_8), cpu_sr_t, 891fcf5ef2aSThomas Huth REG(B7_4), t0, cpu_sr_t, t0); 892fcf5ef2aSThomas Huth tcg_gen_sub2_i32(REG(B11_8), cpu_sr_t, 893fcf5ef2aSThomas Huth t0, t0, REG(B11_8), cpu_sr_t); 894fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, cpu_sr_t, 1); 895fcf5ef2aSThomas Huth tcg_temp_free(t0); 896fcf5ef2aSThomas Huth } 897fcf5ef2aSThomas Huth return; 898fcf5ef2aSThomas Huth case 0x6007: /* not Rm,Rn */ 899fcf5ef2aSThomas Huth tcg_gen_not_i32(REG(B11_8), REG(B7_4)); 900fcf5ef2aSThomas Huth return; 901fcf5ef2aSThomas Huth case 0x200b: /* or Rm,Rn */ 902fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), REG(B11_8), REG(B7_4)); 903fcf5ef2aSThomas Huth return; 904fcf5ef2aSThomas Huth case 0x400c: /* shad Rm,Rn */ 905fcf5ef2aSThomas Huth { 906fcf5ef2aSThomas Huth TCGv t0 = tcg_temp_new(); 907fcf5ef2aSThomas Huth TCGv t1 = tcg_temp_new(); 908fcf5ef2aSThomas Huth TCGv t2 = tcg_temp_new(); 909fcf5ef2aSThomas Huth 910fcf5ef2aSThomas Huth tcg_gen_andi_i32(t0, REG(B7_4), 0x1f); 911fcf5ef2aSThomas Huth 912fcf5ef2aSThomas Huth /* positive case: shift to the left */ 913fcf5ef2aSThomas Huth tcg_gen_shl_i32(t1, REG(B11_8), t0); 914fcf5ef2aSThomas Huth 915fcf5ef2aSThomas Huth /* negative case: shift to the right in two steps to 916fcf5ef2aSThomas Huth correctly handle the -32 case */ 917fcf5ef2aSThomas Huth tcg_gen_xori_i32(t0, t0, 0x1f); 918fcf5ef2aSThomas Huth tcg_gen_sar_i32(t2, REG(B11_8), t0); 919fcf5ef2aSThomas Huth tcg_gen_sari_i32(t2, t2, 1); 920fcf5ef2aSThomas Huth 921fcf5ef2aSThomas Huth /* select between the two cases */ 922fcf5ef2aSThomas Huth tcg_gen_movi_i32(t0, 0); 923fcf5ef2aSThomas Huth tcg_gen_movcond_i32(TCG_COND_GE, REG(B11_8), REG(B7_4), t0, t1, t2); 924fcf5ef2aSThomas Huth 925fcf5ef2aSThomas Huth tcg_temp_free(t0); 926fcf5ef2aSThomas Huth tcg_temp_free(t1); 927fcf5ef2aSThomas Huth tcg_temp_free(t2); 928fcf5ef2aSThomas Huth } 929fcf5ef2aSThomas Huth return; 930fcf5ef2aSThomas Huth case 0x400d: /* 
shld Rm,Rn */ 931fcf5ef2aSThomas Huth { 932fcf5ef2aSThomas Huth TCGv t0 = tcg_temp_new(); 933fcf5ef2aSThomas Huth TCGv t1 = tcg_temp_new(); 934fcf5ef2aSThomas Huth TCGv t2 = tcg_temp_new(); 935fcf5ef2aSThomas Huth 936fcf5ef2aSThomas Huth tcg_gen_andi_i32(t0, REG(B7_4), 0x1f); 937fcf5ef2aSThomas Huth 938fcf5ef2aSThomas Huth /* positive case: shift to the left */ 939fcf5ef2aSThomas Huth tcg_gen_shl_i32(t1, REG(B11_8), t0); 940fcf5ef2aSThomas Huth 941fcf5ef2aSThomas Huth /* negative case: shift to the right in two steps to 942fcf5ef2aSThomas Huth correctly handle the -32 case */ 943fcf5ef2aSThomas Huth tcg_gen_xori_i32(t0, t0, 0x1f); 944fcf5ef2aSThomas Huth tcg_gen_shr_i32(t2, REG(B11_8), t0); 945fcf5ef2aSThomas Huth tcg_gen_shri_i32(t2, t2, 1); 946fcf5ef2aSThomas Huth 947fcf5ef2aSThomas Huth /* select between the two cases */ 948fcf5ef2aSThomas Huth tcg_gen_movi_i32(t0, 0); 949fcf5ef2aSThomas Huth tcg_gen_movcond_i32(TCG_COND_GE, REG(B11_8), REG(B7_4), t0, t1, t2); 950fcf5ef2aSThomas Huth 951fcf5ef2aSThomas Huth tcg_temp_free(t0); 952fcf5ef2aSThomas Huth tcg_temp_free(t1); 953fcf5ef2aSThomas Huth tcg_temp_free(t2); 954fcf5ef2aSThomas Huth } 955fcf5ef2aSThomas Huth return; 956fcf5ef2aSThomas Huth case 0x3008: /* sub Rm,Rn */ 957fcf5ef2aSThomas Huth tcg_gen_sub_i32(REG(B11_8), REG(B11_8), REG(B7_4)); 958fcf5ef2aSThomas Huth return; 959fcf5ef2aSThomas Huth case 0x300a: /* subc Rm,Rn */ 960fcf5ef2aSThomas Huth { 961fcf5ef2aSThomas Huth TCGv t0, t1; 962fcf5ef2aSThomas Huth t0 = tcg_const_tl(0); 963fcf5ef2aSThomas Huth t1 = tcg_temp_new(); 964fcf5ef2aSThomas Huth tcg_gen_add2_i32(t1, cpu_sr_t, cpu_sr_t, t0, REG(B7_4), t0); 965fcf5ef2aSThomas Huth tcg_gen_sub2_i32(REG(B11_8), cpu_sr_t, 966fcf5ef2aSThomas Huth REG(B11_8), t0, t1, cpu_sr_t); 967fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, cpu_sr_t, 1); 968fcf5ef2aSThomas Huth tcg_temp_free(t0); 969fcf5ef2aSThomas Huth tcg_temp_free(t1); 970fcf5ef2aSThomas Huth } 971fcf5ef2aSThomas Huth return; 972fcf5ef2aSThomas Huth case 0x300b: /* subv Rm,Rn */ 973fcf5ef2aSThomas Huth { 974fcf5ef2aSThomas Huth TCGv t0, t1, t2; 975fcf5ef2aSThomas Huth t0 = tcg_temp_new(); 976fcf5ef2aSThomas Huth tcg_gen_sub_i32(t0, REG(B11_8), REG(B7_4)); 977fcf5ef2aSThomas Huth t1 = tcg_temp_new(); 978fcf5ef2aSThomas Huth tcg_gen_xor_i32(t1, t0, REG(B7_4)); 979fcf5ef2aSThomas Huth t2 = tcg_temp_new(); 980fcf5ef2aSThomas Huth tcg_gen_xor_i32(t2, REG(B11_8), REG(B7_4)); 981fcf5ef2aSThomas Huth tcg_gen_and_i32(t1, t1, t2); 982fcf5ef2aSThomas Huth tcg_temp_free(t2); 983fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_t, t1, 31); 984fcf5ef2aSThomas Huth tcg_temp_free(t1); 985fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), t0); 986fcf5ef2aSThomas Huth tcg_temp_free(t0); 987fcf5ef2aSThomas Huth } 988fcf5ef2aSThomas Huth return; 989fcf5ef2aSThomas Huth case 0x2008: /* tst Rm,Rn */ 990fcf5ef2aSThomas Huth { 991fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 992fcf5ef2aSThomas Huth tcg_gen_and_i32(val, REG(B7_4), REG(B11_8)); 993fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0); 994fcf5ef2aSThomas Huth tcg_temp_free(val); 995fcf5ef2aSThomas Huth } 996fcf5ef2aSThomas Huth return; 997fcf5ef2aSThomas Huth case 0x200a: /* xor Rm,Rn */ 998fcf5ef2aSThomas Huth tcg_gen_xor_i32(REG(B11_8), REG(B11_8), REG(B7_4)); 999fcf5ef2aSThomas Huth return; 1000fcf5ef2aSThomas Huth case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */ 1001fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1002a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 1003bdcb3739SRichard Henderson int xsrc = 
XHACK(B7_4); 1004bdcb3739SRichard Henderson int xdst = XHACK(B11_8); 1005bdcb3739SRichard Henderson tcg_gen_mov_i32(FREG(xdst), FREG(xsrc)); 1006bdcb3739SRichard Henderson tcg_gen_mov_i32(FREG(xdst + 1), FREG(xsrc + 1)); 1007fcf5ef2aSThomas Huth } else { 10087c9f7038SRichard Henderson tcg_gen_mov_i32(FREG(B11_8), FREG(B7_4)); 1009fcf5ef2aSThomas Huth } 1010fcf5ef2aSThomas Huth return; 1011fcf5ef2aSThomas Huth case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */ 1012fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1013a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 10144d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 10154d57fa50SRichard Henderson gen_load_fpr64(ctx, fp, XHACK(B7_4)); 1016fc313c64SFrédéric Pétrot tcg_gen_qemu_st_i64(fp, REG(B11_8), ctx->memidx, MO_TEUQ); 10174d57fa50SRichard Henderson tcg_temp_free_i64(fp); 1018fcf5ef2aSThomas Huth } else { 10197c9f7038SRichard Henderson tcg_gen_qemu_st_i32(FREG(B7_4), REG(B11_8), ctx->memidx, MO_TEUL); 1020fcf5ef2aSThomas Huth } 1021fcf5ef2aSThomas Huth return; 1022fcf5ef2aSThomas Huth case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */ 1023fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1024a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 10254d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 1026fc313c64SFrédéric Pétrot tcg_gen_qemu_ld_i64(fp, REG(B7_4), ctx->memidx, MO_TEUQ); 10274d57fa50SRichard Henderson gen_store_fpr64(ctx, fp, XHACK(B11_8)); 10284d57fa50SRichard Henderson tcg_temp_free_i64(fp); 1029fcf5ef2aSThomas Huth } else { 10307c9f7038SRichard Henderson tcg_gen_qemu_ld_i32(FREG(B11_8), REG(B7_4), ctx->memidx, MO_TEUL); 1031fcf5ef2aSThomas Huth } 1032fcf5ef2aSThomas Huth return; 1033fcf5ef2aSThomas Huth case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */ 1034fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1035a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 10364d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 1037fc313c64SFrédéric Pétrot tcg_gen_qemu_ld_i64(fp, REG(B7_4), ctx->memidx, MO_TEUQ); 10384d57fa50SRichard Henderson gen_store_fpr64(ctx, fp, XHACK(B11_8)); 10394d57fa50SRichard Henderson tcg_temp_free_i64(fp); 1040fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 8); 1041fcf5ef2aSThomas Huth } else { 10427c9f7038SRichard Henderson tcg_gen_qemu_ld_i32(FREG(B11_8), REG(B7_4), ctx->memidx, MO_TEUL); 1043fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4); 1044fcf5ef2aSThomas Huth } 1045fcf5ef2aSThomas Huth return; 1046fcf5ef2aSThomas Huth case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */ 1047fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 10484d57fa50SRichard Henderson { 1049fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new_i32(); 1050a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 10514d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 10524d57fa50SRichard Henderson gen_load_fpr64(ctx, fp, XHACK(B7_4)); 10534d57fa50SRichard Henderson tcg_gen_subi_i32(addr, REG(B11_8), 8); 1054fc313c64SFrédéric Pétrot tcg_gen_qemu_st_i64(fp, addr, ctx->memidx, MO_TEUQ); 10554d57fa50SRichard Henderson tcg_temp_free_i64(fp); 1056fcf5ef2aSThomas Huth } else { 10574d57fa50SRichard Henderson tcg_gen_subi_i32(addr, REG(B11_8), 4); 10587c9f7038SRichard Henderson tcg_gen_qemu_st_i32(FREG(B7_4), addr, ctx->memidx, MO_TEUL); 1059fcf5ef2aSThomas Huth } 1060fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 1061fcf5ef2aSThomas Huth tcg_temp_free(addr); 10624d57fa50SRichard Henderson } 1063fcf5ef2aSThomas Huth return; 1064fcf5ef2aSThomas Huth case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rm - 
FPSCR: Nothing */ 1065fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1066fcf5ef2aSThomas Huth { 1067fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new_i32(); 1068fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B7_4), REG(0)); 1069a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 10704d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 1071fc313c64SFrédéric Pétrot tcg_gen_qemu_ld_i64(fp, addr, ctx->memidx, MO_TEUQ); 10724d57fa50SRichard Henderson gen_store_fpr64(ctx, fp, XHACK(B11_8)); 10734d57fa50SRichard Henderson tcg_temp_free_i64(fp); 1074fcf5ef2aSThomas Huth } else { 10757c9f7038SRichard Henderson tcg_gen_qemu_ld_i32(FREG(B11_8), addr, ctx->memidx, MO_TEUL); 1076fcf5ef2aSThomas Huth } 1077fcf5ef2aSThomas Huth tcg_temp_free(addr); 1078fcf5ef2aSThomas Huth } 1079fcf5ef2aSThomas Huth return; 1080fcf5ef2aSThomas Huth case 0xf007: /* fmov {F,D,X}Rn,@(R0,Rn) - FPSCR: Nothing */ 1081fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1082fcf5ef2aSThomas Huth { 1083fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1084fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(B11_8), REG(0)); 1085a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_SZ) { 10864d57fa50SRichard Henderson TCGv_i64 fp = tcg_temp_new_i64(); 10874d57fa50SRichard Henderson gen_load_fpr64(ctx, fp, XHACK(B7_4)); 1088fc313c64SFrédéric Pétrot tcg_gen_qemu_st_i64(fp, addr, ctx->memidx, MO_TEUQ); 10894d57fa50SRichard Henderson tcg_temp_free_i64(fp); 1090fcf5ef2aSThomas Huth } else { 10917c9f7038SRichard Henderson tcg_gen_qemu_st_i32(FREG(B7_4), addr, ctx->memidx, MO_TEUL); 1092fcf5ef2aSThomas Huth } 1093fcf5ef2aSThomas Huth tcg_temp_free(addr); 1094fcf5ef2aSThomas Huth } 1095fcf5ef2aSThomas Huth return; 1096fcf5ef2aSThomas Huth case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */ 1097fcf5ef2aSThomas Huth case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */ 1098fcf5ef2aSThomas Huth case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */ 1099fcf5ef2aSThomas Huth case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */ 1100fcf5ef2aSThomas Huth case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */ 1101fcf5ef2aSThomas Huth case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */ 1102fcf5ef2aSThomas Huth { 1103fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1104a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 1105fcf5ef2aSThomas Huth TCGv_i64 fp0, fp1; 1106fcf5ef2aSThomas Huth 110793dc9c89SRichard Henderson if (ctx->opcode & 0x0110) { 110893dc9c89SRichard Henderson goto do_illegal; 110993dc9c89SRichard Henderson } 1110fcf5ef2aSThomas Huth fp0 = tcg_temp_new_i64(); 1111fcf5ef2aSThomas Huth fp1 = tcg_temp_new_i64(); 11121e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp0, B11_8); 11131e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp1, B7_4); 1114fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf00f) { 1115fcf5ef2aSThomas Huth case 0xf000: /* fadd Rm,Rn */ 1116fcf5ef2aSThomas Huth gen_helper_fadd_DT(fp0, cpu_env, fp0, fp1); 1117fcf5ef2aSThomas Huth break; 1118fcf5ef2aSThomas Huth case 0xf001: /* fsub Rm,Rn */ 1119fcf5ef2aSThomas Huth gen_helper_fsub_DT(fp0, cpu_env, fp0, fp1); 1120fcf5ef2aSThomas Huth break; 1121fcf5ef2aSThomas Huth case 0xf002: /* fmul Rm,Rn */ 1122fcf5ef2aSThomas Huth gen_helper_fmul_DT(fp0, cpu_env, fp0, fp1); 1123fcf5ef2aSThomas Huth break; 1124fcf5ef2aSThomas Huth case 0xf003: /* fdiv Rm,Rn */ 1125fcf5ef2aSThomas Huth gen_helper_fdiv_DT(fp0, cpu_env, fp0, fp1); 1126fcf5ef2aSThomas Huth break; 1127fcf5ef2aSThomas Huth case 0xf004: /* fcmp/eq Rm,Rn */ 
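/* The fcmp cases only produce T; they return here and skip the store of fp0 back to DRn that the arithmetic cases fall through to. */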
112892f1f83eSAurelien Jarno gen_helper_fcmp_eq_DT(cpu_sr_t, cpu_env, fp0, fp1); 1129fcf5ef2aSThomas Huth return; 1130fcf5ef2aSThomas Huth case 0xf005: /* fcmp/gt Rm,Rn */ 113192f1f83eSAurelien Jarno gen_helper_fcmp_gt_DT(cpu_sr_t, cpu_env, fp0, fp1); 1132fcf5ef2aSThomas Huth return; 1133fcf5ef2aSThomas Huth } 11341e0b21d8SRichard Henderson gen_store_fpr64(ctx, fp0, B11_8); 1135fcf5ef2aSThomas Huth tcg_temp_free_i64(fp0); 1136fcf5ef2aSThomas Huth tcg_temp_free_i64(fp1); 1137fcf5ef2aSThomas Huth } else { 1138fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf00f) { 1139fcf5ef2aSThomas Huth case 0xf000: /* fadd Rm,Rn */ 11407c9f7038SRichard Henderson gen_helper_fadd_FT(FREG(B11_8), cpu_env, 11417c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1142fcf5ef2aSThomas Huth break; 1143fcf5ef2aSThomas Huth case 0xf001: /* fsub Rm,Rn */ 11447c9f7038SRichard Henderson gen_helper_fsub_FT(FREG(B11_8), cpu_env, 11457c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1146fcf5ef2aSThomas Huth break; 1147fcf5ef2aSThomas Huth case 0xf002: /* fmul Rm,Rn */ 11487c9f7038SRichard Henderson gen_helper_fmul_FT(FREG(B11_8), cpu_env, 11497c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1150fcf5ef2aSThomas Huth break; 1151fcf5ef2aSThomas Huth case 0xf003: /* fdiv Rm,Rn */ 11527c9f7038SRichard Henderson gen_helper_fdiv_FT(FREG(B11_8), cpu_env, 11537c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1154fcf5ef2aSThomas Huth break; 1155fcf5ef2aSThomas Huth case 0xf004: /* fcmp/eq Rm,Rn */ 115692f1f83eSAurelien Jarno gen_helper_fcmp_eq_FT(cpu_sr_t, cpu_env, 11577c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1158fcf5ef2aSThomas Huth return; 1159fcf5ef2aSThomas Huth case 0xf005: /* fcmp/gt Rm,Rn */ 116092f1f83eSAurelien Jarno gen_helper_fcmp_gt_FT(cpu_sr_t, cpu_env, 11617c9f7038SRichard Henderson FREG(B11_8), FREG(B7_4)); 1162fcf5ef2aSThomas Huth return; 1163fcf5ef2aSThomas Huth } 1164fcf5ef2aSThomas Huth } 1165fcf5ef2aSThomas Huth } 1166fcf5ef2aSThomas Huth return; 1167fcf5ef2aSThomas Huth case 0xf00e: /* fmac FR0,RM,Rn */ 1168fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 11697e9f7ca8SRichard Henderson CHECK_FPSCR_PR_0 11707c9f7038SRichard Henderson gen_helper_fmac_FT(FREG(B11_8), cpu_env, 11717c9f7038SRichard Henderson FREG(0), FREG(B7_4), FREG(B11_8)); 1172fcf5ef2aSThomas Huth return; 1173fcf5ef2aSThomas Huth } 1174fcf5ef2aSThomas Huth 1175fcf5ef2aSThomas Huth switch (ctx->opcode & 0xff00) { 1176fcf5ef2aSThomas Huth case 0xc900: /* and #imm,R0 */ 1177fcf5ef2aSThomas Huth tcg_gen_andi_i32(REG(0), REG(0), B7_0); 1178fcf5ef2aSThomas Huth return; 1179fcf5ef2aSThomas Huth case 0xcd00: /* and.b #imm,@(R0,GBR) */ 1180fcf5ef2aSThomas Huth { 1181fcf5ef2aSThomas Huth TCGv addr, val; 1182fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1183fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(0), cpu_gbr); 1184fcf5ef2aSThomas Huth val = tcg_temp_new(); 1185fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB); 1186fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, val, B7_0); 1187fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB); 1188fcf5ef2aSThomas Huth tcg_temp_free(val); 1189fcf5ef2aSThomas Huth tcg_temp_free(addr); 1190fcf5ef2aSThomas Huth } 1191fcf5ef2aSThomas Huth return; 1192fcf5ef2aSThomas Huth case 0x8b00: /* bf label */ 1193fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 11946f1c2af6SRichard Henderson gen_conditional_jump(ctx, ctx->base.pc_next + 4 + B7_0s * 2, false); 1195fcf5ef2aSThomas Huth return; 1196fcf5ef2aSThomas Huth case 0x8f00: /* bf/s label */ 1197fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 
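/* bf/s: delayed conditional branch. The inverted T bit is latched in cpu_delayed_cond and the following insn executes in a conditional delay slot. */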
1198ac9707eaSAurelien Jarno tcg_gen_xori_i32(cpu_delayed_cond, cpu_sr_t, 1); 11996f1c2af6SRichard Henderson ctx->delayed_pc = ctx->base.pc_next + 4 + B7_0s * 2; 1200*ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT_COND; 1201fcf5ef2aSThomas Huth return; 1202fcf5ef2aSThomas Huth case 0x8900: /* bt label */ 1203fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 12046f1c2af6SRichard Henderson gen_conditional_jump(ctx, ctx->base.pc_next + 4 + B7_0s * 2, true); 1205fcf5ef2aSThomas Huth return; 1206fcf5ef2aSThomas Huth case 0x8d00: /* bt/s label */ 1207fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1208ac9707eaSAurelien Jarno tcg_gen_mov_i32(cpu_delayed_cond, cpu_sr_t); 12096f1c2af6SRichard Henderson ctx->delayed_pc = ctx->base.pc_next + 4 + B7_0s * 2; 1210*ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT_COND; 1211fcf5ef2aSThomas Huth return; 1212fcf5ef2aSThomas Huth case 0x8800: /* cmp/eq #imm,R0 */ 1213fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, REG(0), B7_0s); 1214fcf5ef2aSThomas Huth return; 1215fcf5ef2aSThomas Huth case 0xc400: /* mov.b @(disp,GBR),R0 */ 1216fcf5ef2aSThomas Huth { 1217fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1218fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0); 1219fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_SB); 1220fcf5ef2aSThomas Huth tcg_temp_free(addr); 1221fcf5ef2aSThomas Huth } 1222fcf5ef2aSThomas Huth return; 1223fcf5ef2aSThomas Huth case 0xc500: /* mov.w @(disp,GBR),R0 */ 1224fcf5ef2aSThomas Huth { 1225fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1226fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2); 1227fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_TESW); 1228fcf5ef2aSThomas Huth tcg_temp_free(addr); 1229fcf5ef2aSThomas Huth } 1230fcf5ef2aSThomas Huth return; 1231fcf5ef2aSThomas Huth case 0xc600: /* mov.l @(disp,GBR),R0 */ 1232fcf5ef2aSThomas Huth { 1233fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1234fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4); 1235fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_TESL); 1236fcf5ef2aSThomas Huth tcg_temp_free(addr); 1237fcf5ef2aSThomas Huth } 1238fcf5ef2aSThomas Huth return; 1239fcf5ef2aSThomas Huth case 0xc000: /* mov.b R0,@(disp,GBR) */ 1240fcf5ef2aSThomas Huth { 1241fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1242fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0); 1243fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_UB); 1244fcf5ef2aSThomas Huth tcg_temp_free(addr); 1245fcf5ef2aSThomas Huth } 1246fcf5ef2aSThomas Huth return; 1247fcf5ef2aSThomas Huth case 0xc100: /* mov.w R0,@(disp,GBR) */ 1248fcf5ef2aSThomas Huth { 1249fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1250fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2); 1251fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_TEUW); 1252fcf5ef2aSThomas Huth tcg_temp_free(addr); 1253fcf5ef2aSThomas Huth } 1254fcf5ef2aSThomas Huth return; 1255fcf5ef2aSThomas Huth case 0xc200: /* mov.l R0,@(disp,GBR) */ 1256fcf5ef2aSThomas Huth { 1257fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1258fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4); 1259fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_TEUL); 1260fcf5ef2aSThomas Huth tcg_temp_free(addr); 1261fcf5ef2aSThomas Huth } 1262fcf5ef2aSThomas Huth return; 1263fcf5ef2aSThomas Huth case 0x8000: /* mov.b R0,@(disp,Rn) */ 1264fcf5ef2aSThomas Huth { 1265fcf5ef2aSThomas Huth 
TCGv addr = tcg_temp_new(); 1266fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0); 1267fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_UB); 1268fcf5ef2aSThomas Huth tcg_temp_free(addr); 1269fcf5ef2aSThomas Huth } 1270fcf5ef2aSThomas Huth return; 1271fcf5ef2aSThomas Huth case 0x8100: /* mov.w R0,@(disp,Rn) */ 1272fcf5ef2aSThomas Huth { 1273fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1274fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2); 12754da06fb3SRichard Henderson tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, 12764da06fb3SRichard Henderson MO_TEUW | UNALIGN(ctx)); 1277fcf5ef2aSThomas Huth tcg_temp_free(addr); 1278fcf5ef2aSThomas Huth } 1279fcf5ef2aSThomas Huth return; 1280fcf5ef2aSThomas Huth case 0x8400: /* mov.b @(disp,Rn),R0 */ 1281fcf5ef2aSThomas Huth { 1282fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1283fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0); 1284fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_SB); 1285fcf5ef2aSThomas Huth tcg_temp_free(addr); 1286fcf5ef2aSThomas Huth } 1287fcf5ef2aSThomas Huth return; 1288fcf5ef2aSThomas Huth case 0x8500: /* mov.w @(disp,Rn),R0 */ 1289fcf5ef2aSThomas Huth { 1290fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1291fcf5ef2aSThomas Huth tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2); 12924da06fb3SRichard Henderson tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, 12934da06fb3SRichard Henderson MO_TESW | UNALIGN(ctx)); 1294fcf5ef2aSThomas Huth tcg_temp_free(addr); 1295fcf5ef2aSThomas Huth } 1296fcf5ef2aSThomas Huth return; 1297fcf5ef2aSThomas Huth case 0xc700: /* mova @(disp,PC),R0 */ 12986f1c2af6SRichard Henderson tcg_gen_movi_i32(REG(0), ((ctx->base.pc_next & 0xfffffffc) + 12996f1c2af6SRichard Henderson 4 + B7_0 * 4) & ~3); 1300fcf5ef2aSThomas Huth return; 1301fcf5ef2aSThomas Huth case 0xcb00: /* or #imm,R0 */ 1302fcf5ef2aSThomas Huth tcg_gen_ori_i32(REG(0), REG(0), B7_0); 1303fcf5ef2aSThomas Huth return; 1304fcf5ef2aSThomas Huth case 0xcf00: /* or.b #imm,@(R0,GBR) */ 1305fcf5ef2aSThomas Huth { 1306fcf5ef2aSThomas Huth TCGv addr, val; 1307fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1308fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(0), cpu_gbr); 1309fcf5ef2aSThomas Huth val = tcg_temp_new(); 1310fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB); 1311fcf5ef2aSThomas Huth tcg_gen_ori_i32(val, val, B7_0); 1312fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB); 1313fcf5ef2aSThomas Huth tcg_temp_free(val); 1314fcf5ef2aSThomas Huth tcg_temp_free(addr); 1315fcf5ef2aSThomas Huth } 1316fcf5ef2aSThomas Huth return; 1317fcf5ef2aSThomas Huth case 0xc300: /* trapa #imm */ 1318fcf5ef2aSThomas Huth { 1319fcf5ef2aSThomas Huth TCGv imm; 1320fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1321ac9707eaSAurelien Jarno gen_save_cpu_state(ctx, true); 1322fcf5ef2aSThomas Huth imm = tcg_const_i32(B7_0); 1323fcf5ef2aSThomas Huth gen_helper_trapa(cpu_env, imm); 1324fcf5ef2aSThomas Huth tcg_temp_free(imm); 13256f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 1326fcf5ef2aSThomas Huth } 1327fcf5ef2aSThomas Huth return; 1328fcf5ef2aSThomas Huth case 0xc800: /* tst #imm,R0 */ 1329fcf5ef2aSThomas Huth { 1330fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1331fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, REG(0), B7_0); 1332fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0); 1333fcf5ef2aSThomas Huth tcg_temp_free(val); 1334fcf5ef2aSThomas Huth } 1335fcf5ef2aSThomas Huth return; 1336fcf5ef2aSThomas Huth case 0xcc00: /* 
tst.b #imm,@(R0,GBR) */ 1337fcf5ef2aSThomas Huth { 1338fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1339fcf5ef2aSThomas Huth tcg_gen_add_i32(val, REG(0), cpu_gbr); 1340fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, val, ctx->memidx, MO_UB); 1341fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, val, B7_0); 1342fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0); 1343fcf5ef2aSThomas Huth tcg_temp_free(val); 1344fcf5ef2aSThomas Huth } 1345fcf5ef2aSThomas Huth return; 1346fcf5ef2aSThomas Huth case 0xca00: /* xor #imm,R0 */ 1347fcf5ef2aSThomas Huth tcg_gen_xori_i32(REG(0), REG(0), B7_0); 1348fcf5ef2aSThomas Huth return; 1349fcf5ef2aSThomas Huth case 0xce00: /* xor.b #imm,@(R0,GBR) */ 1350fcf5ef2aSThomas Huth { 1351fcf5ef2aSThomas Huth TCGv addr, val; 1352fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1353fcf5ef2aSThomas Huth tcg_gen_add_i32(addr, REG(0), cpu_gbr); 1354fcf5ef2aSThomas Huth val = tcg_temp_new(); 1355fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB); 1356fcf5ef2aSThomas Huth tcg_gen_xori_i32(val, val, B7_0); 1357fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB); 1358fcf5ef2aSThomas Huth tcg_temp_free(val); 1359fcf5ef2aSThomas Huth tcg_temp_free(addr); 1360fcf5ef2aSThomas Huth } 1361fcf5ef2aSThomas Huth return; 1362fcf5ef2aSThomas Huth } 1363fcf5ef2aSThomas Huth 1364fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf08f) { 1365fcf5ef2aSThomas Huth case 0x408e: /* ldc Rm,Rn_BANK */ 1366fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1367fcf5ef2aSThomas Huth tcg_gen_mov_i32(ALTREG(B6_4), REG(B11_8)); 1368fcf5ef2aSThomas Huth return; 1369fcf5ef2aSThomas Huth case 0x4087: /* ldc.l @Rm+,Rn_BANK */ 1370fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1371fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(ALTREG(B6_4), REG(B11_8), ctx->memidx, MO_TESL); 1372fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 1373fcf5ef2aSThomas Huth return; 1374fcf5ef2aSThomas Huth case 0x0082: /* stc Rm_BANK,Rn */ 1375fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1376fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), ALTREG(B6_4)); 1377fcf5ef2aSThomas Huth return; 1378fcf5ef2aSThomas Huth case 0x4083: /* stc.l Rm_BANK,@-Rn */ 1379fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1380fcf5ef2aSThomas Huth { 1381fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1382fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 1383fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(ALTREG(B6_4), addr, ctx->memidx, MO_TEUL); 1384fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 1385fcf5ef2aSThomas Huth tcg_temp_free(addr); 1386fcf5ef2aSThomas Huth } 1387fcf5ef2aSThomas Huth return; 1388fcf5ef2aSThomas Huth } 1389fcf5ef2aSThomas Huth 1390fcf5ef2aSThomas Huth switch (ctx->opcode & 0xf0ff) { 1391fcf5ef2aSThomas Huth case 0x0023: /* braf Rn */ 1392fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 13936f1c2af6SRichard Henderson tcg_gen_addi_i32(cpu_delayed_pc, REG(B11_8), ctx->base.pc_next + 4); 1394*ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT; 1395fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1396fcf5ef2aSThomas Huth return; 1397fcf5ef2aSThomas Huth case 0x0003: /* bsrf Rn */ 1398fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 13996f1c2af6SRichard Henderson tcg_gen_movi_i32(cpu_pr, ctx->base.pc_next + 4); 1400fcf5ef2aSThomas Huth tcg_gen_add_i32(cpu_delayed_pc, REG(B11_8), cpu_pr); 1401*ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT; 1402fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1403fcf5ef2aSThomas Huth return; 1404fcf5ef2aSThomas Huth case 0x4015: /* cmp/pl Rn */ 
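/* cmp/pl and cmp/pz are signed comparisons of Rn against zero and map directly onto setcond with GT and GE. */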
1405fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_GT, cpu_sr_t, REG(B11_8), 0); 1406fcf5ef2aSThomas Huth return; 1407fcf5ef2aSThomas Huth case 0x4011: /* cmp/pz Rn */ 1408fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_GE, cpu_sr_t, REG(B11_8), 0); 1409fcf5ef2aSThomas Huth return; 1410fcf5ef2aSThomas Huth case 0x4010: /* dt Rn */ 1411fcf5ef2aSThomas Huth tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 1); 1412fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, REG(B11_8), 0); 1413fcf5ef2aSThomas Huth return; 1414fcf5ef2aSThomas Huth case 0x402b: /* jmp @Rn */ 1415fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 1416fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8)); 1417*ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT; 1418fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1419fcf5ef2aSThomas Huth return; 1420fcf5ef2aSThomas Huth case 0x400b: /* jsr @Rn */ 1421fcf5ef2aSThomas Huth CHECK_NOT_DELAY_SLOT 14226f1c2af6SRichard Henderson tcg_gen_movi_i32(cpu_pr, ctx->base.pc_next + 4); 1423fcf5ef2aSThomas Huth tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8)); 1424*ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_DELAY_SLOT; 1425fcf5ef2aSThomas Huth ctx->delayed_pc = (uint32_t) - 1; 1426fcf5ef2aSThomas Huth return; 1427fcf5ef2aSThomas Huth case 0x400e: /* ldc Rm,SR */ 1428fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1429fcf5ef2aSThomas Huth { 1430fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1431fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, REG(B11_8), 0x700083f3); 1432fcf5ef2aSThomas Huth gen_write_sr(val); 1433fcf5ef2aSThomas Huth tcg_temp_free(val); 14346f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 1435fcf5ef2aSThomas Huth } 1436fcf5ef2aSThomas Huth return; 1437fcf5ef2aSThomas Huth case 0x4007: /* ldc.l @Rm+,SR */ 1438fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1439fcf5ef2aSThomas Huth { 1440fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1441fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, REG(B11_8), ctx->memidx, MO_TESL); 1442fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, val, 0x700083f3); 1443fcf5ef2aSThomas Huth gen_write_sr(val); 1444fcf5ef2aSThomas Huth tcg_temp_free(val); 1445fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 14466f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 1447fcf5ef2aSThomas Huth } 1448fcf5ef2aSThomas Huth return; 1449fcf5ef2aSThomas Huth case 0x0002: /* stc SR,Rn */ 1450fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1451fcf5ef2aSThomas Huth gen_read_sr(REG(B11_8)); 1452fcf5ef2aSThomas Huth return; 1453fcf5ef2aSThomas Huth case 0x4003: /* stc SR,@-Rn */ 1454fcf5ef2aSThomas Huth CHECK_PRIVILEGED 1455fcf5ef2aSThomas Huth { 1456fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1457fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1458fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 1459fcf5ef2aSThomas Huth gen_read_sr(val); 1460fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_TEUL); 1461fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 1462fcf5ef2aSThomas Huth tcg_temp_free(val); 1463fcf5ef2aSThomas Huth tcg_temp_free(addr); 1464fcf5ef2aSThomas Huth } 1465fcf5ef2aSThomas Huth return; 1466fcf5ef2aSThomas Huth #define LD(reg,ldnum,ldpnum,prechk) \ 1467fcf5ef2aSThomas Huth case ldnum: \ 1468fcf5ef2aSThomas Huth prechk \ 1469fcf5ef2aSThomas Huth tcg_gen_mov_i32 (cpu_##reg, REG(B11_8)); \ 1470fcf5ef2aSThomas Huth return; \ 1471fcf5ef2aSThomas Huth case ldpnum: \ 1472fcf5ef2aSThomas Huth prechk \ 1473fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(cpu_##reg, REG(B11_8), ctx->memidx, MO_TESL); \ 
1474fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); \ 1475fcf5ef2aSThomas Huth return; 1476fcf5ef2aSThomas Huth #define ST(reg,stnum,stpnum,prechk) \ 1477fcf5ef2aSThomas Huth case stnum: \ 1478fcf5ef2aSThomas Huth prechk \ 1479fcf5ef2aSThomas Huth tcg_gen_mov_i32 (REG(B11_8), cpu_##reg); \ 1480fcf5ef2aSThomas Huth return; \ 1481fcf5ef2aSThomas Huth case stpnum: \ 1482fcf5ef2aSThomas Huth prechk \ 1483fcf5ef2aSThomas Huth { \ 1484fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); \ 1485fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); \ 1486fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(cpu_##reg, addr, ctx->memidx, MO_TEUL); \ 1487fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); \ 1488fcf5ef2aSThomas Huth tcg_temp_free(addr); \ 1489fcf5ef2aSThomas Huth } \ 1490fcf5ef2aSThomas Huth return; 1491fcf5ef2aSThomas Huth #define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk) \ 1492fcf5ef2aSThomas Huth LD(reg,ldnum,ldpnum,prechk) \ 1493fcf5ef2aSThomas Huth ST(reg,stnum,stpnum,prechk) 1494fcf5ef2aSThomas Huth LDST(gbr, 0x401e, 0x4017, 0x0012, 0x4013, {}) 1495fcf5ef2aSThomas Huth LDST(vbr, 0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED) 1496fcf5ef2aSThomas Huth LDST(ssr, 0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED) 1497fcf5ef2aSThomas Huth LDST(spc, 0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED) 1498fcf5ef2aSThomas Huth ST(sgr, 0x003a, 0x4032, CHECK_PRIVILEGED) 1499ccae24d4SRichard Henderson LD(sgr, 0x403a, 0x4036, CHECK_PRIVILEGED CHECK_SH4A) 1500fcf5ef2aSThomas Huth LDST(dbr, 0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED) 1501fcf5ef2aSThomas Huth LDST(mach, 0x400a, 0x4006, 0x000a, 0x4002, {}) 1502fcf5ef2aSThomas Huth LDST(macl, 0x401a, 0x4016, 0x001a, 0x4012, {}) 1503fcf5ef2aSThomas Huth LDST(pr, 0x402a, 0x4026, 0x002a, 0x4022, {}) 1504fcf5ef2aSThomas Huth LDST(fpul, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED}) 1505fcf5ef2aSThomas Huth case 0x406a: /* lds Rm,FPSCR */ 1506fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1507fcf5ef2aSThomas Huth gen_helper_ld_fpscr(cpu_env, REG(B11_8)); 15086f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 1509fcf5ef2aSThomas Huth return; 1510fcf5ef2aSThomas Huth case 0x4066: /* lds.l @Rm+,FPSCR */ 1511fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1512fcf5ef2aSThomas Huth { 1513fcf5ef2aSThomas Huth TCGv addr = tcg_temp_new(); 1514fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(addr, REG(B11_8), ctx->memidx, MO_TESL); 1515fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 1516fcf5ef2aSThomas Huth gen_helper_ld_fpscr(cpu_env, addr); 1517fcf5ef2aSThomas Huth tcg_temp_free(addr); 15186f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_STOP; 1519fcf5ef2aSThomas Huth } 1520fcf5ef2aSThomas Huth return; 1521fcf5ef2aSThomas Huth case 0x006a: /* sts FPSCR,Rn */ 1522fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1523fcf5ef2aSThomas Huth tcg_gen_andi_i32(REG(B11_8), cpu_fpscr, 0x003fffff); 1524fcf5ef2aSThomas Huth return; 1525fcf5ef2aSThomas Huth case 0x4062: /* sts FPSCR,@-Rn */ 1526fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1527fcf5ef2aSThomas Huth { 1528fcf5ef2aSThomas Huth TCGv addr, val; 1529fcf5ef2aSThomas Huth val = tcg_temp_new(); 1530fcf5ef2aSThomas Huth tcg_gen_andi_i32(val, cpu_fpscr, 0x003fffff); 1531fcf5ef2aSThomas Huth addr = tcg_temp_new(); 1532fcf5ef2aSThomas Huth tcg_gen_subi_i32(addr, REG(B11_8), 4); 1533fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_TEUL); 1534fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), addr); 1535fcf5ef2aSThomas Huth tcg_temp_free(addr); 1536fcf5ef2aSThomas Huth tcg_temp_free(val); 
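/* As in the sts FPSCR,Rn case above, only the architecturally defined FPSCR bits are stored (mask 0x003fffff). */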
1537fcf5ef2aSThomas Huth } 1538fcf5ef2aSThomas Huth return; 1539fcf5ef2aSThomas Huth case 0x00c3: /* movca.l R0,@Rm */ 1540fcf5ef2aSThomas Huth { 1541fcf5ef2aSThomas Huth TCGv val = tcg_temp_new(); 1542fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(val, REG(B11_8), ctx->memidx, MO_TEUL); 1543fcf5ef2aSThomas Huth gen_helper_movcal(cpu_env, REG(B11_8), val); 1544fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), REG(B11_8), ctx->memidx, MO_TEUL); 1545e691e0edSPhilippe Mathieu-Daudé tcg_temp_free(val); 1546fcf5ef2aSThomas Huth } 1547fcf5ef2aSThomas Huth ctx->has_movcal = 1; 1548fcf5ef2aSThomas Huth return; 1549143021b2SAurelien Jarno case 0x40a9: /* movua.l @Rm,R0 */ 1550ccae24d4SRichard Henderson CHECK_SH4A 1551143021b2SAurelien Jarno /* Load non-boundary-aligned data */ 155234257c21SAurelien Jarno tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, 155334257c21SAurelien Jarno MO_TEUL | MO_UNALN); 1554fcf5ef2aSThomas Huth return; 1555143021b2SAurelien Jarno case 0x40e9: /* movua.l @Rm+,R0 */ 1556ccae24d4SRichard Henderson CHECK_SH4A 1557143021b2SAurelien Jarno /* Load non-boundary-aligned data */ 155834257c21SAurelien Jarno tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, 155934257c21SAurelien Jarno MO_TEUL | MO_UNALN); 1560fcf5ef2aSThomas Huth tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); 1561fcf5ef2aSThomas Huth return; 1562fcf5ef2aSThomas Huth case 0x0029: /* movt Rn */ 1563fcf5ef2aSThomas Huth tcg_gen_mov_i32(REG(B11_8), cpu_sr_t); 1564fcf5ef2aSThomas Huth return; 1565fcf5ef2aSThomas Huth case 0x0073: 1566fcf5ef2aSThomas Huth /* MOVCO.L 1567f85da308SRichard Henderson * LDST -> T 1568f85da308SRichard Henderson * If (T == 1) R0 -> (Rn) 1569f85da308SRichard Henderson * 0 -> LDST 1570f85da308SRichard Henderson * 1571f85da308SRichard Henderson * The above description doesn't work in a parallel context. 1572f85da308SRichard Henderson * Since we currently support no smp boards, this implies user-mode. 1573f85da308SRichard Henderson * But we can still support the official mechanism while user-mode 1574f85da308SRichard Henderson * is single-threaded. 
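 *
 * Concretely, in the code below: with CF_PARALLEL the store is implemented
 * as an atomic cmpxchg against the value that movli.l recorded in
 * cpu_lock_value; otherwise a plain store guarded by the cpu_lock_addr
 * check is sufficient.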
*/ 1575ccae24d4SRichard Henderson CHECK_SH4A 1576ccae24d4SRichard Henderson { 1577f85da308SRichard Henderson TCGLabel *fail = gen_new_label(); 1578f85da308SRichard Henderson TCGLabel *done = gen_new_label(); 1579f85da308SRichard Henderson 15806f1c2af6SRichard Henderson if ((tb_cflags(ctx->base.tb) & CF_PARALLEL)) { 1581f85da308SRichard Henderson TCGv tmp; 1582f85da308SRichard Henderson 1583f85da308SRichard Henderson tcg_gen_brcond_i32(TCG_COND_NE, REG(B11_8), 1584f85da308SRichard Henderson cpu_lock_addr, fail); 1585f85da308SRichard Henderson tmp = tcg_temp_new(); 1586f85da308SRichard Henderson tcg_gen_atomic_cmpxchg_i32(tmp, REG(B11_8), cpu_lock_value, 1587f85da308SRichard Henderson REG(0), ctx->memidx, MO_TEUL); 1588f85da308SRichard Henderson tcg_gen_setcond_i32(TCG_COND_EQ, cpu_sr_t, tmp, cpu_lock_value); 1589f85da308SRichard Henderson tcg_temp_free(tmp); 1590f85da308SRichard Henderson } else { 1591f85da308SRichard Henderson tcg_gen_brcondi_i32(TCG_COND_EQ, cpu_lock_addr, -1, fail); 1592fcf5ef2aSThomas Huth tcg_gen_qemu_st_i32(REG(0), REG(B11_8), ctx->memidx, MO_TEUL); 1593f85da308SRichard Henderson tcg_gen_movi_i32(cpu_sr_t, 1); 1594ccae24d4SRichard Henderson } 1595f85da308SRichard Henderson tcg_gen_br(done); 1596f85da308SRichard Henderson 1597f85da308SRichard Henderson gen_set_label(fail); 1598f85da308SRichard Henderson tcg_gen_movi_i32(cpu_sr_t, 0); 1599f85da308SRichard Henderson 1600f85da308SRichard Henderson gen_set_label(done); 1601f85da308SRichard Henderson tcg_gen_movi_i32(cpu_lock_addr, -1); 1602f85da308SRichard Henderson } 1603f85da308SRichard Henderson return; 1604fcf5ef2aSThomas Huth case 0x0063: 1605fcf5ef2aSThomas Huth /* MOVLI.L @Rm,R0 1606f85da308SRichard Henderson * 1 -> LDST 1607f85da308SRichard Henderson * (Rm) -> R0 1608f85da308SRichard Henderson * When interrupt/exception 1609f85da308SRichard Henderson * occurred 0 -> LDST 1610f85da308SRichard Henderson * 1611f85da308SRichard Henderson * In a parallel context, we must also save the loaded value 1612f85da308SRichard Henderson * for use with the cmpxchg that we'll use with movco.l. */ 1613ccae24d4SRichard Henderson CHECK_SH4A 16146f1c2af6SRichard Henderson if ((tb_cflags(ctx->base.tb) & CF_PARALLEL)) { 1615f85da308SRichard Henderson TCGv tmp = tcg_temp_new(); 1616f85da308SRichard Henderson tcg_gen_mov_i32(tmp, REG(B11_8)); 1617fcf5ef2aSThomas Huth tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, MO_TESL); 1618f85da308SRichard Henderson tcg_gen_mov_i32(cpu_lock_value, REG(0)); 1619f85da308SRichard Henderson tcg_gen_mov_i32(cpu_lock_addr, tmp); 1620f85da308SRichard Henderson tcg_temp_free(tmp); 1621f85da308SRichard Henderson } else { 1622f85da308SRichard Henderson tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, MO_TESL); 1623f85da308SRichard Henderson tcg_gen_movi_i32(cpu_lock_addr, 0); 1624f85da308SRichard Henderson } 1625fcf5ef2aSThomas Huth return; 1626fcf5ef2aSThomas Huth case 0x0093: /* ocbi @Rn */ 1627fcf5ef2aSThomas Huth { 1628fcf5ef2aSThomas Huth gen_helper_ocbi(cpu_env, REG(B11_8)); 1629fcf5ef2aSThomas Huth } 1630fcf5ef2aSThomas Huth return; 1631fcf5ef2aSThomas Huth case 0x00a3: /* ocbp @Rn */ 1632fcf5ef2aSThomas Huth case 0x00b3: /* ocbwb @Rn */ 1633fcf5ef2aSThomas Huth /* These instructions are supposed to do nothing in case of 1634fcf5ef2aSThomas Huth a cache miss. Given that we only partially emulate caches 1635fcf5ef2aSThomas Huth it is safe to simply ignore them. 
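   (ocbp writes back and invalidates a cache block, ocbwb only writes it
   back; without a cache model neither has a guest-visible effect.)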
*/ 1636fcf5ef2aSThomas Huth return; 1637fcf5ef2aSThomas Huth case 0x0083: /* pref @Rn */ 1638fcf5ef2aSThomas Huth return; 1639fcf5ef2aSThomas Huth case 0x00d3: /* prefi @Rn */ 1640ccae24d4SRichard Henderson CHECK_SH4A 1641fcf5ef2aSThomas Huth return; 1642fcf5ef2aSThomas Huth case 0x00e3: /* icbi @Rn */ 1643ccae24d4SRichard Henderson CHECK_SH4A 1644fcf5ef2aSThomas Huth return; 1645fcf5ef2aSThomas Huth case 0x00ab: /* synco */ 1646ccae24d4SRichard Henderson CHECK_SH4A 1647aa351317SAurelien Jarno tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 1648fcf5ef2aSThomas Huth return; 1649fcf5ef2aSThomas Huth case 0x4024: /* rotcl Rn */ 1650fcf5ef2aSThomas Huth { 1651fcf5ef2aSThomas Huth TCGv tmp = tcg_temp_new(); 1652fcf5ef2aSThomas Huth tcg_gen_mov_i32(tmp, cpu_sr_t); 1653fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_t, REG(B11_8), 31); 1654fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1); 1655fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), REG(B11_8), tmp); 1656fcf5ef2aSThomas Huth tcg_temp_free(tmp); 1657fcf5ef2aSThomas Huth } 1658fcf5ef2aSThomas Huth return; 1659fcf5ef2aSThomas Huth case 0x4025: /* rotcr Rn */ 1660fcf5ef2aSThomas Huth { 1661fcf5ef2aSThomas Huth TCGv tmp = tcg_temp_new(); 1662fcf5ef2aSThomas Huth tcg_gen_shli_i32(tmp, cpu_sr_t, 31); 1663fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1664fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1); 1665fcf5ef2aSThomas Huth tcg_gen_or_i32(REG(B11_8), REG(B11_8), tmp); 1666fcf5ef2aSThomas Huth tcg_temp_free(tmp); 1667fcf5ef2aSThomas Huth } 1668fcf5ef2aSThomas Huth return; 1669fcf5ef2aSThomas Huth case 0x4004: /* rotl Rn */ 1670fcf5ef2aSThomas Huth tcg_gen_rotli_i32(REG(B11_8), REG(B11_8), 1); 1671fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1672fcf5ef2aSThomas Huth return; 1673fcf5ef2aSThomas Huth case 0x4005: /* rotr Rn */ 1674fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1675fcf5ef2aSThomas Huth tcg_gen_rotri_i32(REG(B11_8), REG(B11_8), 1); 1676fcf5ef2aSThomas Huth return; 1677fcf5ef2aSThomas Huth case 0x4000: /* shll Rn */ 1678fcf5ef2aSThomas Huth case 0x4020: /* shal Rn */ 1679fcf5ef2aSThomas Huth tcg_gen_shri_i32(cpu_sr_t, REG(B11_8), 31); 1680fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1); 1681fcf5ef2aSThomas Huth return; 1682fcf5ef2aSThomas Huth case 0x4021: /* shar Rn */ 1683fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1684fcf5ef2aSThomas Huth tcg_gen_sari_i32(REG(B11_8), REG(B11_8), 1); 1685fcf5ef2aSThomas Huth return; 1686fcf5ef2aSThomas Huth case 0x4001: /* shlr Rn */ 1687fcf5ef2aSThomas Huth tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1); 1688fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1); 1689fcf5ef2aSThomas Huth return; 1690fcf5ef2aSThomas Huth case 0x4008: /* shll2 Rn */ 1691fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 2); 1692fcf5ef2aSThomas Huth return; 1693fcf5ef2aSThomas Huth case 0x4018: /* shll8 Rn */ 1694fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 8); 1695fcf5ef2aSThomas Huth return; 1696fcf5ef2aSThomas Huth case 0x4028: /* shll16 Rn */ 1697fcf5ef2aSThomas Huth tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 16); 1698fcf5ef2aSThomas Huth return; 1699fcf5ef2aSThomas Huth case 0x4009: /* shlr2 Rn */ 1700fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 2); 1701fcf5ef2aSThomas Huth return; 1702fcf5ef2aSThomas Huth case 0x4019: /* shlr8 Rn */ 1703fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 8); 1704fcf5ef2aSThomas Huth return; 1705fcf5ef2aSThomas 
Huth case 0x4029: /* shlr16 Rn */ 1706fcf5ef2aSThomas Huth tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 16); 1707fcf5ef2aSThomas Huth return; 1708fcf5ef2aSThomas Huth case 0x401b: /* tas.b @Rn */ 1709fcf5ef2aSThomas Huth { 1710cb32f179SAurelien Jarno TCGv val = tcg_const_i32(0x80); 1711cb32f179SAurelien Jarno tcg_gen_atomic_fetch_or_i32(val, REG(B11_8), val, 1712cb32f179SAurelien Jarno ctx->memidx, MO_UB); 1713fcf5ef2aSThomas Huth tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0); 1714fcf5ef2aSThomas Huth tcg_temp_free(val); 1715fcf5ef2aSThomas Huth } 1716fcf5ef2aSThomas Huth return; 1717fcf5ef2aSThomas Huth case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */ 1718fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17197c9f7038SRichard Henderson tcg_gen_mov_i32(FREG(B11_8), cpu_fpul); 1720fcf5ef2aSThomas Huth return; 1721fcf5ef2aSThomas Huth case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */ 1722fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17237c9f7038SRichard Henderson tcg_gen_mov_i32(cpu_fpul, FREG(B11_8)); 1724fcf5ef2aSThomas Huth return; 1725fcf5ef2aSThomas Huth case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */ 1726fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1727a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 1728fcf5ef2aSThomas Huth TCGv_i64 fp; 172993dc9c89SRichard Henderson if (ctx->opcode & 0x0100) { 173093dc9c89SRichard Henderson goto do_illegal; 173193dc9c89SRichard Henderson } 1732fcf5ef2aSThomas Huth fp = tcg_temp_new_i64(); 1733fcf5ef2aSThomas Huth gen_helper_float_DT(fp, cpu_env, cpu_fpul); 17341e0b21d8SRichard Henderson gen_store_fpr64(ctx, fp, B11_8); 1735fcf5ef2aSThomas Huth tcg_temp_free_i64(fp); 1736fcf5ef2aSThomas Huth } 1737fcf5ef2aSThomas Huth else { 17387c9f7038SRichard Henderson gen_helper_float_FT(FREG(B11_8), cpu_env, cpu_fpul); 1739fcf5ef2aSThomas Huth } 1740fcf5ef2aSThomas Huth return; 1741fcf5ef2aSThomas Huth case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */ 1742fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1743a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 1744fcf5ef2aSThomas Huth TCGv_i64 fp; 174593dc9c89SRichard Henderson if (ctx->opcode & 0x0100) { 174693dc9c89SRichard Henderson goto do_illegal; 174793dc9c89SRichard Henderson } 1748fcf5ef2aSThomas Huth fp = tcg_temp_new_i64(); 17491e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp, B11_8); 1750fcf5ef2aSThomas Huth gen_helper_ftrc_DT(cpu_fpul, cpu_env, fp); 1751fcf5ef2aSThomas Huth tcg_temp_free_i64(fp); 1752fcf5ef2aSThomas Huth } 1753fcf5ef2aSThomas Huth else { 17547c9f7038SRichard Henderson gen_helper_ftrc_FT(cpu_fpul, cpu_env, FREG(B11_8)); 1755fcf5ef2aSThomas Huth } 1756fcf5ef2aSThomas Huth return; 1757fcf5ef2aSThomas Huth case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */ 1758fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17597c9f7038SRichard Henderson tcg_gen_xori_i32(FREG(B11_8), FREG(B11_8), 0x80000000); 1760fcf5ef2aSThomas Huth return; 176157f5c1b0SAurelien Jarno case 0xf05d: /* fabs FRn/DRn - FPCSR: Nothing */ 1762fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17637c9f7038SRichard Henderson tcg_gen_andi_i32(FREG(B11_8), FREG(B11_8), 0x7fffffff); 1764fcf5ef2aSThomas Huth return; 1765fcf5ef2aSThomas Huth case 0xf06d: /* fsqrt FRn */ 1766fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1767a6215749SAurelien Jarno if (ctx->tbflags & FPSCR_PR) { 176893dc9c89SRichard Henderson if (ctx->opcode & 0x0100) { 176993dc9c89SRichard Henderson goto do_illegal; 177093dc9c89SRichard Henderson } 1771fcf5ef2aSThomas Huth TCGv_i64 fp = tcg_temp_new_i64(); 17721e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp, B11_8); 
1773fcf5ef2aSThomas Huth gen_helper_fsqrt_DT(fp, cpu_env, fp); 17741e0b21d8SRichard Henderson gen_store_fpr64(ctx, fp, B11_8); 1775fcf5ef2aSThomas Huth tcg_temp_free_i64(fp); 1776fcf5ef2aSThomas Huth } else { 17777c9f7038SRichard Henderson gen_helper_fsqrt_FT(FREG(B11_8), cpu_env, FREG(B11_8)); 1778fcf5ef2aSThomas Huth } 1779fcf5ef2aSThomas Huth return; 1780fcf5ef2aSThomas Huth case 0xf07d: /* fsrra FRn */ 1781fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 178211b7aa23SRichard Henderson CHECK_FPSCR_PR_0 178311b7aa23SRichard Henderson gen_helper_fsrra_FT(FREG(B11_8), cpu_env, FREG(B11_8)); 1784fcf5ef2aSThomas Huth break; 1785fcf5ef2aSThomas Huth case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */ 1786fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17877e9f7ca8SRichard Henderson CHECK_FPSCR_PR_0 17887c9f7038SRichard Henderson tcg_gen_movi_i32(FREG(B11_8), 0); 1789fcf5ef2aSThomas Huth return; 1790fcf5ef2aSThomas Huth case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */ 1791fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 17927e9f7ca8SRichard Henderson CHECK_FPSCR_PR_0 17937c9f7038SRichard Henderson tcg_gen_movi_i32(FREG(B11_8), 0x3f800000); 1794fcf5ef2aSThomas Huth return; 1795fcf5ef2aSThomas Huth case 0xf0ad: /* fcnvsd FPUL,DRn */ 1796fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1797fcf5ef2aSThomas Huth { 1798fcf5ef2aSThomas Huth TCGv_i64 fp = tcg_temp_new_i64(); 1799fcf5ef2aSThomas Huth gen_helper_fcnvsd_FT_DT(fp, cpu_env, cpu_fpul); 18001e0b21d8SRichard Henderson gen_store_fpr64(ctx, fp, B11_8); 1801fcf5ef2aSThomas Huth tcg_temp_free_i64(fp); 1802fcf5ef2aSThomas Huth } 1803fcf5ef2aSThomas Huth return; 1804fcf5ef2aSThomas Huth case 0xf0bd: /* fcnvds DRn,FPUL */ 1805fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 1806fcf5ef2aSThomas Huth { 1807fcf5ef2aSThomas Huth TCGv_i64 fp = tcg_temp_new_i64(); 18081e0b21d8SRichard Henderson gen_load_fpr64(ctx, fp, B11_8); 1809fcf5ef2aSThomas Huth gen_helper_fcnvds_DT_FT(cpu_fpul, cpu_env, fp); 1810fcf5ef2aSThomas Huth tcg_temp_free_i64(fp); 1811fcf5ef2aSThomas Huth } 1812fcf5ef2aSThomas Huth return; 1813fcf5ef2aSThomas Huth case 0xf0ed: /* fipr FVm,FVn */ 1814fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 18157e9f7ca8SRichard Henderson CHECK_FPSCR_PR_1 18167e9f7ca8SRichard Henderson { 18177e9f7ca8SRichard Henderson TCGv m = tcg_const_i32((ctx->opcode >> 8) & 3); 18187e9f7ca8SRichard Henderson TCGv n = tcg_const_i32((ctx->opcode >> 10) & 3); 1819fcf5ef2aSThomas Huth gen_helper_fipr(cpu_env, m, n); 1820fcf5ef2aSThomas Huth tcg_temp_free(m); 1821fcf5ef2aSThomas Huth tcg_temp_free(n); 1822fcf5ef2aSThomas Huth return; 1823fcf5ef2aSThomas Huth } 1824fcf5ef2aSThomas Huth break; 1825fcf5ef2aSThomas Huth case 0xf0fd: /* ftrv XMTRX,FVn */ 1826fcf5ef2aSThomas Huth CHECK_FPU_ENABLED 18277e9f7ca8SRichard Henderson CHECK_FPSCR_PR_1 18287e9f7ca8SRichard Henderson { 18297e9f7ca8SRichard Henderson if ((ctx->opcode & 0x0300) != 0x0100) { 18307e9f7ca8SRichard Henderson goto do_illegal; 18317e9f7ca8SRichard Henderson } 18327e9f7ca8SRichard Henderson TCGv n = tcg_const_i32((ctx->opcode >> 10) & 3); 1833fcf5ef2aSThomas Huth gen_helper_ftrv(cpu_env, n); 1834fcf5ef2aSThomas Huth tcg_temp_free(n); 1835fcf5ef2aSThomas Huth return; 1836fcf5ef2aSThomas Huth } 1837fcf5ef2aSThomas Huth break; 1838fcf5ef2aSThomas Huth } 1839fcf5ef2aSThomas Huth #if 0 1840fcf5ef2aSThomas Huth fprintf(stderr, "unknown instruction 0x%04x at pc 0x%08x\n", 18416f1c2af6SRichard Henderson ctx->opcode, ctx->base.pc_next); 1842fcf5ef2aSThomas Huth fflush(stderr); 1843fcf5ef2aSThomas Huth #endif 18446b98213dSRichard Henderson do_illegal: 1845*ab419fd8SRichard Henderson if 
(ctx->envflags & TB_FLAG_DELAY_SLOT_MASK) { 1846dec16c6eSRichard Henderson do_illegal_slot: 1847dec16c6eSRichard Henderson gen_save_cpu_state(ctx, true); 1848fcf5ef2aSThomas Huth gen_helper_raise_slot_illegal_instruction(cpu_env); 1849fcf5ef2aSThomas Huth } else { 1850dec16c6eSRichard Henderson gen_save_cpu_state(ctx, true); 1851fcf5ef2aSThomas Huth gen_helper_raise_illegal_instruction(cpu_env); 1852fcf5ef2aSThomas Huth } 18536f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 1854dec4f042SRichard Henderson return; 1855dec4f042SRichard Henderson 1856dec4f042SRichard Henderson do_fpu_disabled: 1857dec4f042SRichard Henderson gen_save_cpu_state(ctx, true); 1858*ab419fd8SRichard Henderson if (ctx->envflags & TB_FLAG_DELAY_SLOT_MASK) { 1859dec4f042SRichard Henderson gen_helper_raise_slot_fpu_disable(cpu_env); 1860dec4f042SRichard Henderson } else { 1861dec4f042SRichard Henderson gen_helper_raise_fpu_disable(cpu_env); 1862dec4f042SRichard Henderson } 18636f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 1864dec4f042SRichard Henderson return; 1865fcf5ef2aSThomas Huth } 1866fcf5ef2aSThomas Huth 1867fcf5ef2aSThomas Huth static void decode_opc(DisasContext * ctx) 1868fcf5ef2aSThomas Huth { 1869a6215749SAurelien Jarno uint32_t old_flags = ctx->envflags; 1870fcf5ef2aSThomas Huth 1871fcf5ef2aSThomas Huth _decode_opc(ctx); 1872fcf5ef2aSThomas Huth 1873*ab419fd8SRichard Henderson if (old_flags & TB_FLAG_DELAY_SLOT_MASK) { 1874fcf5ef2aSThomas Huth /* go out of the delay slot */ 1875*ab419fd8SRichard Henderson ctx->envflags &= ~TB_FLAG_DELAY_SLOT_MASK; 18764bfa602bSRichard Henderson 18774bfa602bSRichard Henderson /* When in an exclusive region, we must continue to the end 18784bfa602bSRichard Henderson for conditional branches. */ 1879*ab419fd8SRichard Henderson if (ctx->tbflags & TB_FLAG_GUSA_EXCLUSIVE 1880*ab419fd8SRichard Henderson && old_flags & TB_FLAG_DELAY_SLOT_COND) { 18814bfa602bSRichard Henderson gen_delayed_conditional_jump(ctx); 18824bfa602bSRichard Henderson return; 18834bfa602bSRichard Henderson } 18844bfa602bSRichard Henderson /* Otherwise this is probably an invalid gUSA region. 18854bfa602bSRichard Henderson Drop the GUSA bits so the next TB doesn't see them. */ 1886*ab419fd8SRichard Henderson ctx->envflags &= ~TB_FLAG_GUSA_MASK; 18874bfa602bSRichard Henderson 1888ac9707eaSAurelien Jarno tcg_gen_movi_i32(cpu_flags, ctx->envflags); 1889*ab419fd8SRichard Henderson if (old_flags & TB_FLAG_DELAY_SLOT_COND) { 1890fcf5ef2aSThomas Huth gen_delayed_conditional_jump(ctx); 1891be53081aSAurelien Jarno } else { 1892fcf5ef2aSThomas Huth gen_jump(ctx); 1893fcf5ef2aSThomas Huth } 18944bfa602bSRichard Henderson } 18954bfa602bSRichard Henderson } 1896fcf5ef2aSThomas Huth 18974bfa602bSRichard Henderson #ifdef CONFIG_USER_ONLY 18984bfa602bSRichard Henderson /* For uniprocessors, SH4 uses optimistic restartable atomic sequences. 18994bfa602bSRichard Henderson Upon an interrupt, a real kernel would simply notice magic values in 19004bfa602bSRichard Henderson the registers and reset the PC to the start of the sequence. 19014bfa602bSRichard Henderson 19024bfa602bSRichard Henderson For QEMU, we cannot do this in quite the same way. Instead, we notice 19034bfa602bSRichard Henderson the normal start of such a sequence (mov #-x,r15). While we can handle 19044bfa602bSRichard Henderson any sequence via cpu_exec_step_atomic, we can recognize the "normal" 19054bfa602bSRichard Henderson sequences and transform them into atomic operations as seen by the host. 
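
   As an illustration (not a complete gUSA region), the core of such a
   sequence might be:
       mov.l  @r4,r0
       add    #1,r0
       mov.l  r0,@r4
   which decode_gusa() below converts into a single host atomic add.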
19064bfa602bSRichard Henderson */ 1907be0e3d7aSRichard Henderson static void decode_gusa(DisasContext *ctx, CPUSH4State *env) 19084bfa602bSRichard Henderson { 1909d6a6cffdSRichard Henderson uint16_t insns[5]; 1910d6a6cffdSRichard Henderson int ld_adr, ld_dst, ld_mop; 1911d6a6cffdSRichard Henderson int op_dst, op_src, op_opc; 1912d6a6cffdSRichard Henderson int mv_src, mt_dst, st_src, st_mop; 1913d6a6cffdSRichard Henderson TCGv op_arg; 19146f1c2af6SRichard Henderson uint32_t pc = ctx->base.pc_next; 19156f1c2af6SRichard Henderson uint32_t pc_end = ctx->base.tb->cs_base; 19164bfa602bSRichard Henderson int max_insns = (pc_end - pc) / 2; 1917d6a6cffdSRichard Henderson int i; 19184bfa602bSRichard Henderson 1919d6a6cffdSRichard Henderson /* The state machine below will consume only a few insns. 1920d6a6cffdSRichard Henderson If there are more than that in a region, fail now. */ 1921d6a6cffdSRichard Henderson if (max_insns > ARRAY_SIZE(insns)) { 1922d6a6cffdSRichard Henderson goto fail; 1923d6a6cffdSRichard Henderson } 1924d6a6cffdSRichard Henderson 1925d6a6cffdSRichard Henderson /* Read all of the insns for the region. */ 1926d6a6cffdSRichard Henderson for (i = 0; i < max_insns; ++i) { 19274e116893SIlya Leoshkevich insns[i] = translator_lduw(env, &ctx->base, pc + i * 2); 1928d6a6cffdSRichard Henderson } 1929d6a6cffdSRichard Henderson 1930d6a6cffdSRichard Henderson ld_adr = ld_dst = ld_mop = -1; 1931d6a6cffdSRichard Henderson mv_src = -1; 1932d6a6cffdSRichard Henderson op_dst = op_src = op_opc = -1; 1933d6a6cffdSRichard Henderson mt_dst = -1; 1934d6a6cffdSRichard Henderson st_src = st_mop = -1; 1935f764718dSRichard Henderson op_arg = NULL; 1936d6a6cffdSRichard Henderson i = 0; 1937d6a6cffdSRichard Henderson 1938d6a6cffdSRichard Henderson #define NEXT_INSN \ 1939d6a6cffdSRichard Henderson do { if (i >= max_insns) goto fail; ctx->opcode = insns[i++]; } while (0) 1940d6a6cffdSRichard Henderson 1941d6a6cffdSRichard Henderson /* 1942d6a6cffdSRichard Henderson * Expect a load to begin the region. 1943d6a6cffdSRichard Henderson */ 1944d6a6cffdSRichard Henderson NEXT_INSN; 1945d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) { 1946d6a6cffdSRichard Henderson case 0x6000: /* mov.b @Rm,Rn */ 1947d6a6cffdSRichard Henderson ld_mop = MO_SB; 1948d6a6cffdSRichard Henderson break; 1949d6a6cffdSRichard Henderson case 0x6001: /* mov.w @Rm,Rn */ 1950d6a6cffdSRichard Henderson ld_mop = MO_TESW; 1951d6a6cffdSRichard Henderson break; 1952d6a6cffdSRichard Henderson case 0x6002: /* mov.l @Rm,Rn */ 1953d6a6cffdSRichard Henderson ld_mop = MO_TESL; 1954d6a6cffdSRichard Henderson break; 1955d6a6cffdSRichard Henderson default: 1956d6a6cffdSRichard Henderson goto fail; 1957d6a6cffdSRichard Henderson } 1958d6a6cffdSRichard Henderson ld_adr = B7_4; 1959d6a6cffdSRichard Henderson ld_dst = B11_8; 1960d6a6cffdSRichard Henderson if (ld_adr == ld_dst) { 1961d6a6cffdSRichard Henderson goto fail; 1962d6a6cffdSRichard Henderson } 1963d6a6cffdSRichard Henderson /* Unless we see a mov, any two-operand operation must use ld_dst. */ 1964d6a6cffdSRichard Henderson op_dst = ld_dst; 1965d6a6cffdSRichard Henderson 1966d6a6cffdSRichard Henderson /* 1967d6a6cffdSRichard Henderson * Expect an optional register move. 
1968d6a6cffdSRichard Henderson */ 1969d6a6cffdSRichard Henderson NEXT_INSN; 1970d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) { 1971d6a6cffdSRichard Henderson case 0x6003: /* mov Rm,Rn */ 197202b8e735SPhilippe Mathieu-Daudé /* 197323b5d9faSLichang Zhao * Here we want to recognize ld_dst being saved for later consumption, 197402b8e735SPhilippe Mathieu-Daudé * or for another input register being copied so that ld_dst need not 197502b8e735SPhilippe Mathieu-Daudé * be clobbered during the operation. 197602b8e735SPhilippe Mathieu-Daudé */ 1977d6a6cffdSRichard Henderson op_dst = B11_8; 1978d6a6cffdSRichard Henderson mv_src = B7_4; 1979d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 1980d6a6cffdSRichard Henderson /* Overwriting the load output. */ 1981d6a6cffdSRichard Henderson goto fail; 1982d6a6cffdSRichard Henderson } 1983d6a6cffdSRichard Henderson if (mv_src != ld_dst) { 1984d6a6cffdSRichard Henderson /* Copying a new input; constrain op_src to match the load. */ 1985d6a6cffdSRichard Henderson op_src = ld_dst; 1986d6a6cffdSRichard Henderson } 1987d6a6cffdSRichard Henderson break; 1988d6a6cffdSRichard Henderson 1989d6a6cffdSRichard Henderson default: 1990d6a6cffdSRichard Henderson /* Put back and re-examine as operation. */ 1991d6a6cffdSRichard Henderson --i; 1992d6a6cffdSRichard Henderson } 1993d6a6cffdSRichard Henderson 1994d6a6cffdSRichard Henderson /* 1995d6a6cffdSRichard Henderson * Expect the operation. 1996d6a6cffdSRichard Henderson */ 1997d6a6cffdSRichard Henderson NEXT_INSN; 1998d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) { 1999d6a6cffdSRichard Henderson case 0x300c: /* add Rm,Rn */ 2000d6a6cffdSRichard Henderson op_opc = INDEX_op_add_i32; 2001d6a6cffdSRichard Henderson goto do_reg_op; 2002d6a6cffdSRichard Henderson case 0x2009: /* and Rm,Rn */ 2003d6a6cffdSRichard Henderson op_opc = INDEX_op_and_i32; 2004d6a6cffdSRichard Henderson goto do_reg_op; 2005d6a6cffdSRichard Henderson case 0x200a: /* xor Rm,Rn */ 2006d6a6cffdSRichard Henderson op_opc = INDEX_op_xor_i32; 2007d6a6cffdSRichard Henderson goto do_reg_op; 2008d6a6cffdSRichard Henderson case 0x200b: /* or Rm,Rn */ 2009d6a6cffdSRichard Henderson op_opc = INDEX_op_or_i32; 2010d6a6cffdSRichard Henderson do_reg_op: 2011d6a6cffdSRichard Henderson /* The operation register should be as expected, and the 2012d6a6cffdSRichard Henderson other input cannot depend on the load. */ 2013d6a6cffdSRichard Henderson if (op_dst != B11_8) { 2014d6a6cffdSRichard Henderson goto fail; 2015d6a6cffdSRichard Henderson } 2016d6a6cffdSRichard Henderson if (op_src < 0) { 2017d6a6cffdSRichard Henderson /* Unconstrainted input. */ 2018d6a6cffdSRichard Henderson op_src = B7_4; 2019d6a6cffdSRichard Henderson } else if (op_src == B7_4) { 2020d6a6cffdSRichard Henderson /* Constrained input matched load. All operations are 2021d6a6cffdSRichard Henderson commutative; "swap" them by "moving" the load output 2022d6a6cffdSRichard Henderson to the (implicit) first argument and the move source 2023d6a6cffdSRichard Henderson to the (explicit) second argument. 
*/ 2024d6a6cffdSRichard Henderson op_src = mv_src; 2025d6a6cffdSRichard Henderson } else { 2026d6a6cffdSRichard Henderson goto fail; 2027d6a6cffdSRichard Henderson } 2028d6a6cffdSRichard Henderson op_arg = REG(op_src); 2029d6a6cffdSRichard Henderson break; 2030d6a6cffdSRichard Henderson 2031d6a6cffdSRichard Henderson case 0x6007: /* not Rm,Rn */ 2032d6a6cffdSRichard Henderson if (ld_dst != B7_4 || mv_src >= 0) { 2033d6a6cffdSRichard Henderson goto fail; 2034d6a6cffdSRichard Henderson } 2035d6a6cffdSRichard Henderson op_dst = B11_8; 2036d6a6cffdSRichard Henderson op_opc = INDEX_op_xor_i32; 2037d6a6cffdSRichard Henderson op_arg = tcg_const_i32(-1); 2038d6a6cffdSRichard Henderson break; 2039d6a6cffdSRichard Henderson 2040d6a6cffdSRichard Henderson case 0x7000 ... 0x700f: /* add #imm,Rn */ 2041d6a6cffdSRichard Henderson if (op_dst != B11_8 || mv_src >= 0) { 2042d6a6cffdSRichard Henderson goto fail; 2043d6a6cffdSRichard Henderson } 2044d6a6cffdSRichard Henderson op_opc = INDEX_op_add_i32; 2045d6a6cffdSRichard Henderson op_arg = tcg_const_i32(B7_0s); 2046d6a6cffdSRichard Henderson break; 2047d6a6cffdSRichard Henderson 2048d6a6cffdSRichard Henderson case 0x3000: /* cmp/eq Rm,Rn */ 2049d6a6cffdSRichard Henderson /* Looking for the middle of a compare-and-swap sequence, 2050d6a6cffdSRichard Henderson beginning with the compare. Operands can be either order, 2051d6a6cffdSRichard Henderson but with only one overlapping the load. */ 2052d6a6cffdSRichard Henderson if ((ld_dst == B11_8) + (ld_dst == B7_4) != 1 || mv_src >= 0) { 2053d6a6cffdSRichard Henderson goto fail; 2054d6a6cffdSRichard Henderson } 2055d6a6cffdSRichard Henderson op_opc = INDEX_op_setcond_i32; /* placeholder */ 2056d6a6cffdSRichard Henderson op_src = (ld_dst == B11_8 ? B7_4 : B11_8); 2057d6a6cffdSRichard Henderson op_arg = REG(op_src); 2058d6a6cffdSRichard Henderson 2059d6a6cffdSRichard Henderson NEXT_INSN; 2060d6a6cffdSRichard Henderson switch (ctx->opcode & 0xff00) { 2061d6a6cffdSRichard Henderson case 0x8b00: /* bf label */ 2062d6a6cffdSRichard Henderson case 0x8f00: /* bf/s label */ 2063d6a6cffdSRichard Henderson if (pc + (i + 1 + B7_0s) * 2 != pc_end) { 2064d6a6cffdSRichard Henderson goto fail; 2065d6a6cffdSRichard Henderson } 2066d6a6cffdSRichard Henderson if ((ctx->opcode & 0xff00) == 0x8b00) { /* bf label */ 2067d6a6cffdSRichard Henderson break; 2068d6a6cffdSRichard Henderson } 2069d6a6cffdSRichard Henderson /* We're looking to unconditionally modify Rn with the 2070d6a6cffdSRichard Henderson result of the comparison, within the delay slot of 2071d6a6cffdSRichard Henderson the branch. This is used by older gcc. */ 2072d6a6cffdSRichard Henderson NEXT_INSN; 2073d6a6cffdSRichard Henderson if ((ctx->opcode & 0xf0ff) == 0x0029) { /* movt Rn */ 2074d6a6cffdSRichard Henderson mt_dst = B11_8; 2075d6a6cffdSRichard Henderson } else { 2076d6a6cffdSRichard Henderson goto fail; 2077d6a6cffdSRichard Henderson } 2078d6a6cffdSRichard Henderson break; 2079d6a6cffdSRichard Henderson 2080d6a6cffdSRichard Henderson default: 2081d6a6cffdSRichard Henderson goto fail; 2082d6a6cffdSRichard Henderson } 2083d6a6cffdSRichard Henderson break; 2084d6a6cffdSRichard Henderson 2085d6a6cffdSRichard Henderson case 0x2008: /* tst Rm,Rn */ 2086d6a6cffdSRichard Henderson /* Looking for a compare-and-swap against zero. 
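           tst Rm,Rn sets T when (Rm & Rn) == 0, so both operands must name the
           loaded register for this to test the loaded value itself.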
*/ 2087d6a6cffdSRichard Henderson if (ld_dst != B11_8 || ld_dst != B7_4 || mv_src >= 0) { 2088d6a6cffdSRichard Henderson goto fail; 2089d6a6cffdSRichard Henderson } 2090d6a6cffdSRichard Henderson op_opc = INDEX_op_setcond_i32; 2091d6a6cffdSRichard Henderson op_arg = tcg_const_i32(0); 2092d6a6cffdSRichard Henderson 2093d6a6cffdSRichard Henderson NEXT_INSN; 2094d6a6cffdSRichard Henderson if ((ctx->opcode & 0xff00) != 0x8900 /* bt label */ 2095d6a6cffdSRichard Henderson || pc + (i + 1 + B7_0s) * 2 != pc_end) { 2096d6a6cffdSRichard Henderson goto fail; 2097d6a6cffdSRichard Henderson } 2098d6a6cffdSRichard Henderson break; 2099d6a6cffdSRichard Henderson 2100d6a6cffdSRichard Henderson default: 2101d6a6cffdSRichard Henderson /* Put back and re-examine as store. */ 2102d6a6cffdSRichard Henderson --i; 2103d6a6cffdSRichard Henderson } 2104d6a6cffdSRichard Henderson 2105d6a6cffdSRichard Henderson /* 2106d6a6cffdSRichard Henderson * Expect the store. 2107d6a6cffdSRichard Henderson */ 2108d6a6cffdSRichard Henderson /* The store must be the last insn. */ 2109d6a6cffdSRichard Henderson if (i != max_insns - 1) { 2110d6a6cffdSRichard Henderson goto fail; 2111d6a6cffdSRichard Henderson } 2112d6a6cffdSRichard Henderson NEXT_INSN; 2113d6a6cffdSRichard Henderson switch (ctx->opcode & 0xf00f) { 2114d6a6cffdSRichard Henderson case 0x2000: /* mov.b Rm,@Rn */ 2115d6a6cffdSRichard Henderson st_mop = MO_UB; 2116d6a6cffdSRichard Henderson break; 2117d6a6cffdSRichard Henderson case 0x2001: /* mov.w Rm,@Rn */ 2118d6a6cffdSRichard Henderson st_mop = MO_UW; 2119d6a6cffdSRichard Henderson break; 2120d6a6cffdSRichard Henderson case 0x2002: /* mov.l Rm,@Rn */ 2121d6a6cffdSRichard Henderson st_mop = MO_UL; 2122d6a6cffdSRichard Henderson break; 2123d6a6cffdSRichard Henderson default: 2124d6a6cffdSRichard Henderson goto fail; 2125d6a6cffdSRichard Henderson } 2126d6a6cffdSRichard Henderson /* The store must match the load. */ 2127d6a6cffdSRichard Henderson if (ld_adr != B11_8 || st_mop != (ld_mop & MO_SIZE)) { 2128d6a6cffdSRichard Henderson goto fail; 2129d6a6cffdSRichard Henderson } 2130d6a6cffdSRichard Henderson st_src = B7_4; 2131d6a6cffdSRichard Henderson 2132d6a6cffdSRichard Henderson #undef NEXT_INSN 2133d6a6cffdSRichard Henderson 2134d6a6cffdSRichard Henderson /* 2135d6a6cffdSRichard Henderson * Emit the operation. 2136d6a6cffdSRichard Henderson */ 2137d6a6cffdSRichard Henderson switch (op_opc) { 2138d6a6cffdSRichard Henderson case -1: 2139d6a6cffdSRichard Henderson /* No operation found. Look for exchange pattern. 
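           A bare load/store pair with no ALU operation in between is mapped
           onto the atomic exchange emitted below.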
*/ 2140d6a6cffdSRichard Henderson if (st_src == ld_dst || mv_src >= 0) { 2141d6a6cffdSRichard Henderson goto fail; 2142d6a6cffdSRichard Henderson } 2143d6a6cffdSRichard Henderson tcg_gen_atomic_xchg_i32(REG(ld_dst), REG(ld_adr), REG(st_src), 2144d6a6cffdSRichard Henderson ctx->memidx, ld_mop); 2145d6a6cffdSRichard Henderson break; 2146d6a6cffdSRichard Henderson 2147d6a6cffdSRichard Henderson case INDEX_op_add_i32: 2148d6a6cffdSRichard Henderson if (op_dst != st_src) { 2149d6a6cffdSRichard Henderson goto fail; 2150d6a6cffdSRichard Henderson } 2151d6a6cffdSRichard Henderson if (op_dst == ld_dst && st_mop == MO_UL) { 2152d6a6cffdSRichard Henderson tcg_gen_atomic_add_fetch_i32(REG(ld_dst), REG(ld_adr), 2153d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2154d6a6cffdSRichard Henderson } else { 2155d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_add_i32(REG(ld_dst), REG(ld_adr), 2156d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2157d6a6cffdSRichard Henderson if (op_dst != ld_dst) { 2158d6a6cffdSRichard Henderson /* Note that mop sizes < 4 cannot use add_fetch 2159d6a6cffdSRichard Henderson because it won't carry into the higher bits. */ 2160d6a6cffdSRichard Henderson tcg_gen_add_i32(REG(op_dst), REG(ld_dst), op_arg); 2161d6a6cffdSRichard Henderson } 2162d6a6cffdSRichard Henderson } 2163d6a6cffdSRichard Henderson break; 2164d6a6cffdSRichard Henderson 2165d6a6cffdSRichard Henderson case INDEX_op_and_i32: 2166d6a6cffdSRichard Henderson if (op_dst != st_src) { 2167d6a6cffdSRichard Henderson goto fail; 2168d6a6cffdSRichard Henderson } 2169d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 2170d6a6cffdSRichard Henderson tcg_gen_atomic_and_fetch_i32(REG(ld_dst), REG(ld_adr), 2171d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2172d6a6cffdSRichard Henderson } else { 2173d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_and_i32(REG(ld_dst), REG(ld_adr), 2174d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2175d6a6cffdSRichard Henderson tcg_gen_and_i32(REG(op_dst), REG(ld_dst), op_arg); 2176d6a6cffdSRichard Henderson } 2177d6a6cffdSRichard Henderson break; 2178d6a6cffdSRichard Henderson 2179d6a6cffdSRichard Henderson case INDEX_op_or_i32: 2180d6a6cffdSRichard Henderson if (op_dst != st_src) { 2181d6a6cffdSRichard Henderson goto fail; 2182d6a6cffdSRichard Henderson } 2183d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 2184d6a6cffdSRichard Henderson tcg_gen_atomic_or_fetch_i32(REG(ld_dst), REG(ld_adr), 2185d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2186d6a6cffdSRichard Henderson } else { 2187d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_or_i32(REG(ld_dst), REG(ld_adr), 2188d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2189d6a6cffdSRichard Henderson tcg_gen_or_i32(REG(op_dst), REG(ld_dst), op_arg); 2190d6a6cffdSRichard Henderson } 2191d6a6cffdSRichard Henderson break; 2192d6a6cffdSRichard Henderson 2193d6a6cffdSRichard Henderson case INDEX_op_xor_i32: 2194d6a6cffdSRichard Henderson if (op_dst != st_src) { 2195d6a6cffdSRichard Henderson goto fail; 2196d6a6cffdSRichard Henderson } 2197d6a6cffdSRichard Henderson if (op_dst == ld_dst) { 2198d6a6cffdSRichard Henderson tcg_gen_atomic_xor_fetch_i32(REG(ld_dst), REG(ld_adr), 2199d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2200d6a6cffdSRichard Henderson } else { 2201d6a6cffdSRichard Henderson tcg_gen_atomic_fetch_xor_i32(REG(ld_dst), REG(ld_adr), 2202d6a6cffdSRichard Henderson op_arg, ctx->memidx, ld_mop); 2203d6a6cffdSRichard Henderson tcg_gen_xor_i32(REG(op_dst), REG(ld_dst), op_arg); 
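/* ld_dst holds the pre-operation memory value returned by fetch_xor;
                   op_dst receives the recomputed result. */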
2204d6a6cffdSRichard Henderson } 2205d6a6cffdSRichard Henderson break; 2206d6a6cffdSRichard Henderson 2207d6a6cffdSRichard Henderson case INDEX_op_setcond_i32: 2208d6a6cffdSRichard Henderson if (st_src == ld_dst) { 2209d6a6cffdSRichard Henderson goto fail; 2210d6a6cffdSRichard Henderson } 2211d6a6cffdSRichard Henderson tcg_gen_atomic_cmpxchg_i32(REG(ld_dst), REG(ld_adr), op_arg, 2212d6a6cffdSRichard Henderson REG(st_src), ctx->memidx, ld_mop); 2213d6a6cffdSRichard Henderson tcg_gen_setcond_i32(TCG_COND_EQ, cpu_sr_t, REG(ld_dst), op_arg); 2214d6a6cffdSRichard Henderson if (mt_dst >= 0) { 2215d6a6cffdSRichard Henderson tcg_gen_mov_i32(REG(mt_dst), cpu_sr_t); 2216d6a6cffdSRichard Henderson } 2217d6a6cffdSRichard Henderson break; 2218d6a6cffdSRichard Henderson 2219d6a6cffdSRichard Henderson default: 2220d6a6cffdSRichard Henderson g_assert_not_reached(); 2221d6a6cffdSRichard Henderson } 2222d6a6cffdSRichard Henderson 2223d6a6cffdSRichard Henderson /* If op_src is not a valid register, then op_arg was a constant. */ 2224f764718dSRichard Henderson if (op_src < 0 && op_arg) { 2225d6a6cffdSRichard Henderson tcg_temp_free_i32(op_arg); 2226d6a6cffdSRichard Henderson } 2227d6a6cffdSRichard Henderson 2228d6a6cffdSRichard Henderson /* The entire region has been translated. */ 2229*ab419fd8SRichard Henderson ctx->envflags &= ~TB_FLAG_GUSA_MASK; 22306f1c2af6SRichard Henderson ctx->base.pc_next = pc_end; 2231be0e3d7aSRichard Henderson ctx->base.num_insns += max_insns - 1; 2232be0e3d7aSRichard Henderson return; 2233d6a6cffdSRichard Henderson 2234d6a6cffdSRichard Henderson fail: 22354bfa602bSRichard Henderson qemu_log_mask(LOG_UNIMP, "Unrecognized gUSA sequence %08x-%08x\n", 22364bfa602bSRichard Henderson pc, pc_end); 22374bfa602bSRichard Henderson 22384bfa602bSRichard Henderson /* Restart with the EXCLUSIVE bit set, within a TB run via 22394bfa602bSRichard Henderson cpu_exec_step_atomic holding the exclusive lock. */ 2240*ab419fd8SRichard Henderson ctx->envflags |= TB_FLAG_GUSA_EXCLUSIVE; 22414bfa602bSRichard Henderson gen_save_cpu_state(ctx, false); 22424bfa602bSRichard Henderson gen_helper_exclusive(cpu_env); 22436f1c2af6SRichard Henderson ctx->base.is_jmp = DISAS_NORETURN; 22444bfa602bSRichard Henderson 22454bfa602bSRichard Henderson /* We're not executing an instruction, but we must report one for the 22464bfa602bSRichard Henderson purposes of accounting within the TB. We might as well report the 22476f1c2af6SRichard Henderson entire region consumed via ctx->base.pc_next so that it's immediately 22486f1c2af6SRichard Henderson available in the disassembly dump. */ 22496f1c2af6SRichard Henderson ctx->base.pc_next = pc_end; 2250be0e3d7aSRichard Henderson ctx->base.num_insns += max_insns - 1; 22514bfa602bSRichard Henderson } 22524bfa602bSRichard Henderson #endif 22534bfa602bSRichard Henderson 2254fd1b3d38SEmilio G. Cota static void sh4_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs) 2255fcf5ef2aSThomas Huth { 2256fd1b3d38SEmilio G. Cota DisasContext *ctx = container_of(dcbase, DisasContext, base); 22579c489ea6SLluís Vilanova CPUSH4State *env = cs->env_ptr; 2258be0e3d7aSRichard Henderson uint32_t tbflags; 2259fd1b3d38SEmilio G. Cota int bound; 2260fcf5ef2aSThomas Huth 2261be0e3d7aSRichard Henderson ctx->tbflags = tbflags = ctx->base.tb->flags; 2262be0e3d7aSRichard Henderson ctx->envflags = tbflags & TB_FLAG_ENVFLAGS_MASK; 2263be0e3d7aSRichard Henderson ctx->memidx = (tbflags & (1u << SR_MD)) == 0 ? 
static void sh4_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUSH4State *env = cs->env_ptr;
    uint32_t tbflags;
    int bound;

    ctx->tbflags = tbflags = ctx->base.tb->flags;
    ctx->envflags = tbflags & TB_FLAG_ENVFLAGS_MASK;
    ctx->memidx = (tbflags & (1u << SR_MD)) == 0 ? 1 : 0;
    /* We don't know if the delayed pc came from a dynamic or static branch,
       so assume it is a dynamic branch. */
    ctx->delayed_pc = -1; /* use delayed pc from env pointer */
    ctx->features = env->features;
    ctx->has_movcal = (tbflags & TB_FLAG_PENDING_MOVCA);
    ctx->gbank = ((tbflags & (1 << SR_MD)) &&
                  (tbflags & (1 << SR_RB))) * 0x10;
    ctx->fbank = tbflags & FPSCR_FR ? 0x10 : 0;

#ifdef CONFIG_USER_ONLY
    if (tbflags & TB_FLAG_GUSA_MASK) {
        /* In gUSA exclusive region. */
        uint32_t pc = ctx->base.pc_next;
        uint32_t pc_end = ctx->base.tb->cs_base;
        int backup = sextract32(ctx->tbflags, TB_FLAG_GUSA_SHIFT, 8);
        int max_insns = (pc_end - pc) / 2;

        if (pc != pc_end + backup || max_insns < 2) {
            /* This is a malformed gUSA region.  Don't do anything special,
               since the interpreter is likely to get confused. */
            ctx->envflags &= ~TB_FLAG_GUSA_MASK;
        } else if (tbflags & TB_FLAG_GUSA_EXCLUSIVE) {
            /* Regardless of single-stepping or the end of the page,
               we must complete execution of the gUSA region while
               holding the exclusive lock. */
            ctx->base.max_insns = max_insns;
            return;
        }
    }
#endif

    /* Since the ISA is fixed-width, we can bound by the number
       of instructions remaining on the page. */
    bound = -(ctx->base.pc_next | TARGET_PAGE_MASK) / 2;
    ctx->base.max_insns = MIN(ctx->base.max_insns, bound);
}

static void sh4_tr_tb_start(DisasContextBase *dcbase, CPUState *cs)
{
}

static void sh4_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    tcg_gen_insn_start(ctx->base.pc_next, ctx->envflags);
}

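/* The (pc, envflags) pair recorded by tcg_gen_insn_start() above is the
   same data that restore_state_to_opc() at the end of this file receives
   as data[0] and data[1] when an exception unwinds from the middle of a
   translated block. */
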
static void sh4_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    CPUSH4State *env = cs->env_ptr;
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

#ifdef CONFIG_USER_ONLY
    if (unlikely(ctx->envflags & TB_FLAG_GUSA_MASK)
        && !(ctx->envflags & TB_FLAG_GUSA_EXCLUSIVE)) {
        /* We're in a gUSA region, and we have not already fallen
           back on using an exclusive region.  Attempt to parse the
           region into a single supported atomic operation.  Failure
           is handled within the parser by raising an exception to
           retry using an exclusive region. */
        decode_gusa(ctx, env);
        return;
    }
#endif

    ctx->opcode = translator_lduw(env, &ctx->base, ctx->base.pc_next);
    decode_opc(ctx);
    ctx->base.pc_next += 2;
}

static void sh4_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    if (ctx->tbflags & TB_FLAG_GUSA_EXCLUSIVE) {
        /* Ending the region of exclusivity.  Clear the bits. */
        ctx->envflags &= ~TB_FLAG_GUSA_MASK;
    }

    switch (ctx->base.is_jmp) {
    case DISAS_STOP:
        gen_save_cpu_state(ctx, true);
        tcg_gen_exit_tb(NULL, 0);
        break;
    case DISAS_NEXT:
    case DISAS_TOO_MANY:
        gen_save_cpu_state(ctx, false);
        gen_goto_tb(ctx, 0, ctx->base.pc_next);
        break;
    case DISAS_NORETURN:
        break;
    default:
        g_assert_not_reached();
    }
}

static void sh4_tr_disas_log(const DisasContextBase *dcbase,
                             CPUState *cs, FILE *logfile)
{
    fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
    target_disas(logfile, cs, dcbase->pc_first, dcbase->tb->size);
}

static const TranslatorOps sh4_tr_ops = {
    .init_disas_context = sh4_tr_init_disas_context,
    .tb_start = sh4_tr_tb_start,
    .insn_start = sh4_tr_insn_start,
    .translate_insn = sh4_tr_translate_insn,
    .tb_stop = sh4_tr_tb_stop,
    .disas_log = sh4_tr_disas_log,
};

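/* translator_loop() invokes the hooks above in the usual order:
   init_disas_context and tb_start once per block, then insn_start followed
   by translate_insn for each guest instruction until max_insns is reached
   or base.is_jmp leaves DISAS_NEXT, and finally tb_stop.  disas_log is
   only used when "-d in_asm" logging is enabled. */
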
void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns,
                           target_ulong pc, void *host_pc)
{
    DisasContext ctx;

    translator_loop(cs, tb, max_insns, pc, host_pc, &sh4_tr_ops, &ctx.base);
}

void restore_state_to_opc(CPUSH4State *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->pc = data[0];
    env->flags = data[1];
    /* Theoretically delayed_pc should also be restored. In practice the
       branch instruction is re-executed after exception, so the delayed
       branch target will be recomputed. */
}