xref: /openbmc/qemu/target/m68k/translate.c (revision a4f9d9a4)
1 /*
2  *  m68k translation
3  *
4  *  Copyright (c) 2005-2007 CodeSourcery
5  *  Written by Paul Brook
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "exec/exec-all.h"
24 #include "tcg/tcg-op.h"
25 #include "qemu/log.h"
26 #include "qemu/qemu-print.h"
27 #include "exec/translator.h"
28 #include "exec/helper-proto.h"
29 #include "exec/helper-gen.h"
30 #include "exec/log.h"
31 #include "fpu/softfloat.h"
32 #include "semihosting/semihost.h"
33 
34 #define HELPER_H "helper.h"
35 #include "exec/helper-info.c.inc"
36 #undef  HELPER_H
37 
38 //#define DEBUG_DISPATCH 1
39 
40 #define DEFO32(name, offset) static TCGv QREG_##name;
41 #define DEFO64(name, offset) static TCGv_i64 QREG_##name;
42 #include "qregs.h.inc"
43 #undef DEFO32
44 #undef DEFO64
45 
46 static TCGv_i32 cpu_halted;
47 static TCGv_i32 cpu_exception_index;
48 
49 static char cpu_reg_names[2 * 8 * 3 + 5 * 4];
50 static TCGv cpu_dregs[8];
51 static TCGv cpu_aregs[8];
52 static TCGv_i64 cpu_macc[4];
53 
54 #define REG(insn, pos)  (((insn) >> (pos)) & 7)
55 #define DREG(insn, pos) cpu_dregs[REG(insn, pos)]
56 #define AREG(insn, pos) get_areg(s, REG(insn, pos))
57 #define MACREG(acc)     cpu_macc[acc]
58 #define QREG_SP         get_areg(s, 7)
59 
60 static TCGv NULL_QREG;
61 #define IS_NULL_QREG(t) (t == NULL_QREG)
62 /* Used to distinguish stores from bad addressing modes.  */
63 static TCGv store_dummy;
64 
m68k_tcg_init(void)65 void m68k_tcg_init(void)
66 {
67     char *p;
68     int i;
69 
70 #define DEFO32(name, offset) \
71     QREG_##name = tcg_global_mem_new_i32(tcg_env, \
72         offsetof(CPUM68KState, offset), #name);
73 #define DEFO64(name, offset) \
74     QREG_##name = tcg_global_mem_new_i64(tcg_env, \
75         offsetof(CPUM68KState, offset), #name);
76 #include "qregs.h.inc"
77 #undef DEFO32
78 #undef DEFO64
79 
80     cpu_halted = tcg_global_mem_new_i32(tcg_env,
81                                         -offsetof(M68kCPU, env) +
82                                         offsetof(CPUState, halted), "HALTED");
83     cpu_exception_index = tcg_global_mem_new_i32(tcg_env,
84                                                  -offsetof(M68kCPU, env) +
85                                                  offsetof(CPUState, exception_index),
86                                                  "EXCEPTION");
87 
88     p = cpu_reg_names;
89     for (i = 0; i < 8; i++) {
90         sprintf(p, "D%d", i);
91         cpu_dregs[i] = tcg_global_mem_new(tcg_env,
92                                           offsetof(CPUM68KState, dregs[i]), p);
93         p += 3;
94         sprintf(p, "A%d", i);
95         cpu_aregs[i] = tcg_global_mem_new(tcg_env,
96                                           offsetof(CPUM68KState, aregs[i]), p);
97         p += 3;
98     }
99     for (i = 0; i < 4; i++) {
100         sprintf(p, "ACC%d", i);
101         cpu_macc[i] = tcg_global_mem_new_i64(tcg_env,
102                                          offsetof(CPUM68KState, macc[i]), p);
103         p += 5;
104     }
105 
106     NULL_QREG = tcg_global_mem_new(tcg_env, -4, "NULL");
107     store_dummy = tcg_global_mem_new(tcg_env, -8, "NULL");
108 }
109 
110 /* internal defines */
111 typedef struct DisasContext {
112     DisasContextBase base;
113     CPUM68KState *env;
114     target_ulong pc;
115     target_ulong pc_prev;
116     CCOp cc_op; /* Current CC operation */
117     int cc_op_synced;
118     TCGv_i64 mactmp;
119     int done_mac;
120     int writeback_mask;
121     TCGv writeback[8];
122     bool ss_active;
123 } DisasContext;
124 
get_areg(DisasContext * s,unsigned regno)125 static TCGv get_areg(DisasContext *s, unsigned regno)
126 {
127     if (s->writeback_mask & (1 << regno)) {
128         return s->writeback[regno];
129     } else {
130         return cpu_aregs[regno];
131     }
132 }
133 
delay_set_areg(DisasContext * s,unsigned regno,TCGv val,bool give_temp)134 static void delay_set_areg(DisasContext *s, unsigned regno,
135                            TCGv val, bool give_temp)
136 {
137     if (s->writeback_mask & (1 << regno)) {
138         if (give_temp) {
139             s->writeback[regno] = val;
140         } else {
141             tcg_gen_mov_i32(s->writeback[regno], val);
142         }
143     } else {
144         s->writeback_mask |= 1 << regno;
145         if (give_temp) {
146             s->writeback[regno] = val;
147         } else {
148             TCGv tmp = tcg_temp_new();
149             s->writeback[regno] = tmp;
150             tcg_gen_mov_i32(tmp, val);
151         }
152     }
153 }
154 
do_writebacks(DisasContext * s)155 static void do_writebacks(DisasContext *s)
156 {
157     unsigned mask = s->writeback_mask;
158     if (mask) {
159         s->writeback_mask = 0;
160         do {
161             unsigned regno = ctz32(mask);
162             tcg_gen_mov_i32(cpu_aregs[regno], s->writeback[regno]);
163             mask &= mask - 1;
164         } while (mask);
165     }
166 }
167 
168 /* is_jmp field values */
169 #define DISAS_JUMP      DISAS_TARGET_0 /* only pc was modified dynamically */
170 #define DISAS_EXIT      DISAS_TARGET_1 /* cpu state was modified dynamically */
171 
172 #if defined(CONFIG_USER_ONLY)
173 #define IS_USER(s) 1
174 #else
175 #define IS_USER(s)   (!(s->base.tb->flags & TB_FLAGS_MSR_S))
176 #define SFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_SFC_S) ? \
177                       MMU_KERNEL_IDX : MMU_USER_IDX)
178 #define DFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_DFC_S) ? \
179                       MMU_KERNEL_IDX : MMU_USER_IDX)
180 #endif
181 
182 typedef void (*disas_proc)(CPUM68KState *env, DisasContext *s, uint16_t insn);
183 
184 #ifdef DEBUG_DISPATCH
185 #define DISAS_INSN(name)                                                \
186     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
187                                   uint16_t insn);                       \
188     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
189                              uint16_t insn)                             \
190     {                                                                   \
191         qemu_log("Dispatch " #name "\n");                               \
192         real_disas_##name(env, s, insn);                                \
193     }                                                                   \
194     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
195                                   uint16_t insn)
196 #else
197 #define DISAS_INSN(name)                                                \
198     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
199                              uint16_t insn)
200 #endif
201 
202 static const uint8_t cc_op_live[CC_OP_NB] = {
203     [CC_OP_DYNAMIC] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
204     [CC_OP_FLAGS] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
205     [CC_OP_ADDB ... CC_OP_ADDL] = CCF_X | CCF_N | CCF_V,
206     [CC_OP_SUBB ... CC_OP_SUBL] = CCF_X | CCF_N | CCF_V,
207     [CC_OP_CMPB ... CC_OP_CMPL] = CCF_X | CCF_N | CCF_V,
208     [CC_OP_LOGIC] = CCF_X | CCF_N
209 };
210 
set_cc_op(DisasContext * s,CCOp op)211 static void set_cc_op(DisasContext *s, CCOp op)
212 {
213     CCOp old_op = s->cc_op;
214     int dead;
215 
216     if (old_op == op) {
217         return;
218     }
219     s->cc_op = op;
220     s->cc_op_synced = 0;
221 
222     /*
223      * Discard CC computation that will no longer be used.
224      * Note that X and N are never dead.
225      */
226     dead = cc_op_live[old_op] & ~cc_op_live[op];
227     if (dead & CCF_C) {
228         tcg_gen_discard_i32(QREG_CC_C);
229     }
230     if (dead & CCF_Z) {
231         tcg_gen_discard_i32(QREG_CC_Z);
232     }
233     if (dead & CCF_V) {
234         tcg_gen_discard_i32(QREG_CC_V);
235     }
236 }
237 
238 /* Update the CPU env CC_OP state.  */
update_cc_op(DisasContext * s)239 static void update_cc_op(DisasContext *s)
240 {
241     if (!s->cc_op_synced) {
242         s->cc_op_synced = 1;
243         tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
244     }
245 }
246 
247 /* Generate a jump to an immediate address.  */
gen_jmp_im(DisasContext * s,uint32_t dest)248 static void gen_jmp_im(DisasContext *s, uint32_t dest)
249 {
250     update_cc_op(s);
251     tcg_gen_movi_i32(QREG_PC, dest);
252     s->base.is_jmp = DISAS_JUMP;
253 }
254 
255 /* Generate a jump to the address in qreg DEST.  */
gen_jmp(DisasContext * s,TCGv dest)256 static void gen_jmp(DisasContext *s, TCGv dest)
257 {
258     update_cc_op(s);
259     tcg_gen_mov_i32(QREG_PC, dest);
260     s->base.is_jmp = DISAS_JUMP;
261 }
262 
gen_raise_exception(int nr)263 static void gen_raise_exception(int nr)
264 {
265     gen_helper_raise_exception(tcg_env, tcg_constant_i32(nr));
266 }
267 
gen_raise_exception_format2(DisasContext * s,int nr,target_ulong this_pc)268 static void gen_raise_exception_format2(DisasContext *s, int nr,
269                                         target_ulong this_pc)
270 {
271     /*
272      * Pass the address of the insn to the exception handler,
273      * for recording in the Format $2 (6-word) stack frame.
274      * Re-use mmu.ar for the purpose, since that's only valid
275      * after tlb_fill.
276      */
277     tcg_gen_st_i32(tcg_constant_i32(this_pc), tcg_env,
278                    offsetof(CPUM68KState, mmu.ar));
279     gen_raise_exception(nr);
280     s->base.is_jmp = DISAS_NORETURN;
281 }
282 
gen_exception(DisasContext * s,uint32_t dest,int nr)283 static void gen_exception(DisasContext *s, uint32_t dest, int nr)
284 {
285     update_cc_op(s);
286     tcg_gen_movi_i32(QREG_PC, dest);
287 
288     gen_raise_exception(nr);
289 
290     s->base.is_jmp = DISAS_NORETURN;
291 }
292 
gen_addr_fault(DisasContext * s)293 static inline void gen_addr_fault(DisasContext *s)
294 {
295     gen_exception(s, s->base.pc_next, EXCP_ADDRESS);
296 }
297 
298 /*
299  * Generate a load from the specified address.  Narrow values are
300  *  sign extended to full register width.
301  */
gen_load(DisasContext * s,int opsize,TCGv addr,int sign,int index)302 static inline TCGv gen_load(DisasContext *s, int opsize, TCGv addr,
303                             int sign, int index)
304 {
305     TCGv tmp = tcg_temp_new_i32();
306 
307     switch (opsize) {
308     case OS_BYTE:
309     case OS_WORD:
310     case OS_LONG:
311         tcg_gen_qemu_ld_tl(tmp, addr, index,
312                            opsize | (sign ? MO_SIGN : 0) | MO_TE);
313         break;
314     default:
315         g_assert_not_reached();
316     }
317     return tmp;
318 }
319 
320 /* Generate a store.  */
gen_store(DisasContext * s,int opsize,TCGv addr,TCGv val,int index)321 static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val,
322                              int index)
323 {
324     switch (opsize) {
325     case OS_BYTE:
326     case OS_WORD:
327     case OS_LONG:
328         tcg_gen_qemu_st_tl(val, addr, index, opsize | MO_TE);
329         break;
330     default:
331         g_assert_not_reached();
332     }
333 }
334 
335 typedef enum {
336     EA_STORE,
337     EA_LOADU,
338     EA_LOADS
339 } ea_what;
340 
341 /*
342  * Generate an unsigned load if VAL is 0 a signed load if val is -1,
343  * otherwise generate a store.
344  */
gen_ldst(DisasContext * s,int opsize,TCGv addr,TCGv val,ea_what what,int index)345 static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
346                      ea_what what, int index)
347 {
348     if (what == EA_STORE) {
349         gen_store(s, opsize, addr, val, index);
350         return store_dummy;
351     } else {
352         return gen_load(s, opsize, addr, what == EA_LOADS, index);
353     }
354 }
355 
356 /* Read a 16-bit immediate constant */
read_im16(CPUM68KState * env,DisasContext * s)357 static inline uint16_t read_im16(CPUM68KState *env, DisasContext *s)
358 {
359     uint16_t im;
360     im = translator_lduw(env, &s->base, s->pc);
361     s->pc += 2;
362     return im;
363 }
364 
365 /* Read an 8-bit immediate constant */
read_im8(CPUM68KState * env,DisasContext * s)366 static inline uint8_t read_im8(CPUM68KState *env, DisasContext *s)
367 {
368     return read_im16(env, s);
369 }
370 
371 /* Read a 32-bit immediate constant.  */
read_im32(CPUM68KState * env,DisasContext * s)372 static inline uint32_t read_im32(CPUM68KState *env, DisasContext *s)
373 {
374     uint32_t im;
375     im = read_im16(env, s) << 16;
376     im |= 0xffff & read_im16(env, s);
377     return im;
378 }
379 
380 /* Read a 64-bit immediate constant.  */
read_im64(CPUM68KState * env,DisasContext * s)381 static inline uint64_t read_im64(CPUM68KState *env, DisasContext *s)
382 {
383     uint64_t im;
384     im = (uint64_t)read_im32(env, s) << 32;
385     im |= (uint64_t)read_im32(env, s);
386     return im;
387 }
388 
389 /* Calculate and address index.  */
gen_addr_index(DisasContext * s,uint16_t ext,TCGv tmp)390 static TCGv gen_addr_index(DisasContext *s, uint16_t ext, TCGv tmp)
391 {
392     TCGv add;
393     int scale;
394 
395     add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
396     if ((ext & 0x800) == 0) {
397         tcg_gen_ext16s_i32(tmp, add);
398         add = tmp;
399     }
400     scale = (ext >> 9) & 3;
401     if (scale != 0) {
402         tcg_gen_shli_i32(tmp, add, scale);
403         add = tmp;
404     }
405     return add;
406 }
407 
408 /*
409  * Handle a base + index + displacement effective address.
410  * A NULL_QREG base means pc-relative.
411  */
gen_lea_indexed(CPUM68KState * env,DisasContext * s,TCGv base)412 static TCGv gen_lea_indexed(CPUM68KState *env, DisasContext *s, TCGv base)
413 {
414     uint32_t offset;
415     uint16_t ext;
416     TCGv add;
417     TCGv tmp;
418     uint32_t bd, od;
419 
420     offset = s->pc;
421     ext = read_im16(env, s);
422 
423     if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
424         return NULL_QREG;
425 
426     if (m68k_feature(s->env, M68K_FEATURE_M68K) &&
427         !m68k_feature(s->env, M68K_FEATURE_SCALED_INDEX)) {
428         ext &= ~(3 << 9);
429     }
430 
431     if (ext & 0x100) {
432         /* full extension word format */
433         if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
434             return NULL_QREG;
435 
436         if ((ext & 0x30) > 0x10) {
437             /* base displacement */
438             if ((ext & 0x30) == 0x20) {
439                 bd = (int16_t)read_im16(env, s);
440             } else {
441                 bd = read_im32(env, s);
442             }
443         } else {
444             bd = 0;
445         }
446         tmp = tcg_temp_new();
447         if ((ext & 0x44) == 0) {
448             /* pre-index */
449             add = gen_addr_index(s, ext, tmp);
450         } else {
451             add = NULL_QREG;
452         }
453         if ((ext & 0x80) == 0) {
454             /* base not suppressed */
455             if (IS_NULL_QREG(base)) {
456                 base = tcg_constant_i32(offset + bd);
457                 bd = 0;
458             }
459             if (!IS_NULL_QREG(add)) {
460                 tcg_gen_add_i32(tmp, add, base);
461                 add = tmp;
462             } else {
463                 add = base;
464             }
465         }
466         if (!IS_NULL_QREG(add)) {
467             if (bd != 0) {
468                 tcg_gen_addi_i32(tmp, add, bd);
469                 add = tmp;
470             }
471         } else {
472             add = tcg_constant_i32(bd);
473         }
474         if ((ext & 3) != 0) {
475             /* memory indirect */
476             base = gen_load(s, OS_LONG, add, 0, IS_USER(s));
477             if ((ext & 0x44) == 4) {
478                 add = gen_addr_index(s, ext, tmp);
479                 tcg_gen_add_i32(tmp, add, base);
480                 add = tmp;
481             } else {
482                 add = base;
483             }
484             if ((ext & 3) > 1) {
485                 /* outer displacement */
486                 if ((ext & 3) == 2) {
487                     od = (int16_t)read_im16(env, s);
488                 } else {
489                     od = read_im32(env, s);
490                 }
491             } else {
492                 od = 0;
493             }
494             if (od != 0) {
495                 tcg_gen_addi_i32(tmp, add, od);
496                 add = tmp;
497             }
498         }
499     } else {
500         /* brief extension word format */
501         tmp = tcg_temp_new();
502         add = gen_addr_index(s, ext, tmp);
503         if (!IS_NULL_QREG(base)) {
504             tcg_gen_add_i32(tmp, add, base);
505             if ((int8_t)ext)
506                 tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
507         } else {
508             tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
509         }
510         add = tmp;
511     }
512     return add;
513 }
514 
515 /* Sign or zero extend a value.  */
516 
gen_ext(TCGv res,TCGv val,int opsize,int sign)517 static inline void gen_ext(TCGv res, TCGv val, int opsize, int sign)
518 {
519     switch (opsize) {
520     case OS_BYTE:
521     case OS_WORD:
522     case OS_LONG:
523         tcg_gen_ext_i32(res, val, opsize | (sign ? MO_SIGN : 0));
524         break;
525     default:
526         g_assert_not_reached();
527     }
528 }
529 
530 /* Evaluate all the CC flags.  */
531 
gen_flush_flags(DisasContext * s)532 static void gen_flush_flags(DisasContext *s)
533 {
534     TCGv t0, t1;
535 
536     switch (s->cc_op) {
537     case CC_OP_FLAGS:
538         return;
539 
540     case CC_OP_ADDB:
541     case CC_OP_ADDW:
542     case CC_OP_ADDL:
543         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
544         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
545         /* Compute signed overflow for addition.  */
546         t0 = tcg_temp_new();
547         t1 = tcg_temp_new();
548         tcg_gen_sub_i32(t0, QREG_CC_N, QREG_CC_V);
549         gen_ext(t0, t0, s->cc_op - CC_OP_ADDB, 1);
550         tcg_gen_xor_i32(t1, QREG_CC_N, QREG_CC_V);
551         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
552         tcg_gen_andc_i32(QREG_CC_V, t1, QREG_CC_V);
553         break;
554 
555     case CC_OP_SUBB:
556     case CC_OP_SUBW:
557     case CC_OP_SUBL:
558         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
559         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
560         /* Compute signed overflow for subtraction.  */
561         t0 = tcg_temp_new();
562         t1 = tcg_temp_new();
563         tcg_gen_add_i32(t0, QREG_CC_N, QREG_CC_V);
564         gen_ext(t0, t0, s->cc_op - CC_OP_SUBB, 1);
565         tcg_gen_xor_i32(t1, QREG_CC_N, t0);
566         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
567         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t1);
568         break;
569 
570     case CC_OP_CMPB:
571     case CC_OP_CMPW:
572     case CC_OP_CMPL:
573         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_C, QREG_CC_N, QREG_CC_V);
574         tcg_gen_sub_i32(QREG_CC_Z, QREG_CC_N, QREG_CC_V);
575         gen_ext(QREG_CC_Z, QREG_CC_Z, s->cc_op - CC_OP_CMPB, 1);
576         /* Compute signed overflow for subtraction.  */
577         t0 = tcg_temp_new();
578         tcg_gen_xor_i32(t0, QREG_CC_Z, QREG_CC_N);
579         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, QREG_CC_N);
580         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t0);
581         tcg_gen_mov_i32(QREG_CC_N, QREG_CC_Z);
582         break;
583 
584     case CC_OP_LOGIC:
585         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
586         tcg_gen_movi_i32(QREG_CC_C, 0);
587         tcg_gen_movi_i32(QREG_CC_V, 0);
588         break;
589 
590     case CC_OP_DYNAMIC:
591         gen_helper_flush_flags(tcg_env, QREG_CC_OP);
592         s->cc_op_synced = 1;
593         break;
594 
595     default:
596         gen_helper_flush_flags(tcg_env, tcg_constant_i32(s->cc_op));
597         s->cc_op_synced = 1;
598         break;
599     }
600 
601     /* Note that flush_flags also assigned to env->cc_op.  */
602     s->cc_op = CC_OP_FLAGS;
603 }
604 
gen_extend(DisasContext * s,TCGv val,int opsize,int sign)605 static inline TCGv gen_extend(DisasContext *s, TCGv val, int opsize, int sign)
606 {
607     TCGv tmp;
608 
609     if (opsize == OS_LONG) {
610         tmp = val;
611     } else {
612         tmp = tcg_temp_new();
613         gen_ext(tmp, val, opsize, sign);
614     }
615 
616     return tmp;
617 }
618 
gen_logic_cc(DisasContext * s,TCGv val,int opsize)619 static void gen_logic_cc(DisasContext *s, TCGv val, int opsize)
620 {
621     gen_ext(QREG_CC_N, val, opsize, 1);
622     set_cc_op(s, CC_OP_LOGIC);
623 }
624 
gen_update_cc_cmp(DisasContext * s,TCGv dest,TCGv src,int opsize)625 static void gen_update_cc_cmp(DisasContext *s, TCGv dest, TCGv src, int opsize)
626 {
627     tcg_gen_mov_i32(QREG_CC_N, dest);
628     tcg_gen_mov_i32(QREG_CC_V, src);
629     set_cc_op(s, CC_OP_CMPB + opsize);
630 }
631 
gen_update_cc_add(TCGv dest,TCGv src,int opsize)632 static void gen_update_cc_add(TCGv dest, TCGv src, int opsize)
633 {
634     gen_ext(QREG_CC_N, dest, opsize, 1);
635     tcg_gen_mov_i32(QREG_CC_V, src);
636 }
637 
opsize_bytes(int opsize)638 static inline int opsize_bytes(int opsize)
639 {
640     switch (opsize) {
641     case OS_BYTE: return 1;
642     case OS_WORD: return 2;
643     case OS_LONG: return 4;
644     case OS_SINGLE: return 4;
645     case OS_DOUBLE: return 8;
646     case OS_EXTENDED: return 12;
647     case OS_PACKED: return 12;
648     default:
649         g_assert_not_reached();
650     }
651 }
652 
insn_opsize(int insn)653 static inline int insn_opsize(int insn)
654 {
655     switch ((insn >> 6) & 3) {
656     case 0: return OS_BYTE;
657     case 1: return OS_WORD;
658     case 2: return OS_LONG;
659     default:
660         g_assert_not_reached();
661     }
662 }
663 
ext_opsize(int ext,int pos)664 static inline int ext_opsize(int ext, int pos)
665 {
666     switch ((ext >> pos) & 7) {
667     case 0: return OS_LONG;
668     case 1: return OS_SINGLE;
669     case 2: return OS_EXTENDED;
670     case 3: return OS_PACKED;
671     case 4: return OS_WORD;
672     case 5: return OS_DOUBLE;
673     case 6: return OS_BYTE;
674     default:
675         g_assert_not_reached();
676     }
677 }
678 
679 /*
680  * Assign value to a register.  If the width is less than the register width
681  * only the low part of the register is set.
682  */
gen_partset_reg(int opsize,TCGv reg,TCGv val)683 static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
684 {
685     switch (opsize) {
686     case OS_BYTE:
687         tcg_gen_deposit_i32(reg, reg, val, 0, 8);
688         break;
689     case OS_WORD:
690         tcg_gen_deposit_i32(reg, reg, val, 0, 16);
691         break;
692     case OS_LONG:
693     case OS_SINGLE:
694         tcg_gen_mov_i32(reg, val);
695         break;
696     default:
697         g_assert_not_reached();
698     }
699 }
700 
701 /*
702  * Generate code for an "effective address".  Does not adjust the base
703  * register for autoincrement addressing modes.
704  */
gen_lea_mode(CPUM68KState * env,DisasContext * s,int mode,int reg0,int opsize)705 static TCGv gen_lea_mode(CPUM68KState *env, DisasContext *s,
706                          int mode, int reg0, int opsize)
707 {
708     TCGv reg;
709     TCGv tmp;
710     uint16_t ext;
711     uint32_t offset;
712 
713     switch (mode) {
714     case 0: /* Data register direct.  */
715     case 1: /* Address register direct.  */
716         return NULL_QREG;
717     case 3: /* Indirect postincrement.  */
718         if (opsize == OS_UNSIZED) {
719             return NULL_QREG;
720         }
721         /* fallthru */
722     case 2: /* Indirect register */
723         tmp = tcg_temp_new();
724         tcg_gen_mov_i32(tmp, get_areg(s, reg0));
725         return tmp;
726     case 4: /* Indirect predecrememnt.  */
727         if (opsize == OS_UNSIZED) {
728             return NULL_QREG;
729         }
730         reg = get_areg(s, reg0);
731         tmp = tcg_temp_new();
732         if (reg0 == 7 && opsize == OS_BYTE &&
733             m68k_feature(s->env, M68K_FEATURE_M68K)) {
734             tcg_gen_subi_i32(tmp, reg, 2);
735         } else {
736             tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
737         }
738         return tmp;
739     case 5: /* Indirect displacement.  */
740         reg = get_areg(s, reg0);
741         tmp = tcg_temp_new();
742         ext = read_im16(env, s);
743         tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
744         return tmp;
745     case 6: /* Indirect index + displacement.  */
746         reg = get_areg(s, reg0);
747         return gen_lea_indexed(env, s, reg);
748     case 7: /* Other */
749         switch (reg0) {
750         case 0: /* Absolute short.  */
751             offset = (int16_t)read_im16(env, s);
752             break;
753         case 1: /* Absolute long.  */
754             offset = read_im32(env, s);
755             break;
756         case 2: /* pc displacement  */
757             offset = s->pc;
758             offset += (int16_t)read_im16(env, s);
759             break;
760         case 3: /* pc index+displacement.  */
761             return gen_lea_indexed(env, s, NULL_QREG);
762         case 4: /* Immediate.  */
763         default:
764             return NULL_QREG;
765         }
766         tmp = tcg_temp_new();
767         tcg_gen_movi_i32(tmp, offset);
768         return tmp;
769     }
770     /* Should never happen.  */
771     return NULL_QREG;
772 }
773 
gen_lea(CPUM68KState * env,DisasContext * s,uint16_t insn,int opsize)774 static TCGv gen_lea(CPUM68KState *env, DisasContext *s, uint16_t insn,
775                     int opsize)
776 {
777     int mode = extract32(insn, 3, 3);
778     int reg0 = REG(insn, 0);
779     return gen_lea_mode(env, s, mode, reg0, opsize);
780 }
781 
782 /*
783  * Generate code to load/store a value from/into an EA.  If WHAT > 0 this is
784  * a write otherwise it is a read (0 == sign extend, -1 == zero extend).
785  * ADDRP is non-null for readwrite operands.
786  */
gen_ea_mode(CPUM68KState * env,DisasContext * s,int mode,int reg0,int opsize,TCGv val,TCGv * addrp,ea_what what,int index)787 static TCGv gen_ea_mode(CPUM68KState *env, DisasContext *s, int mode, int reg0,
788                         int opsize, TCGv val, TCGv *addrp, ea_what what,
789                         int index)
790 {
791     TCGv reg, tmp, result;
792     int32_t offset;
793 
794     switch (mode) {
795     case 0: /* Data register direct.  */
796         reg = cpu_dregs[reg0];
797         if (what == EA_STORE) {
798             gen_partset_reg(opsize, reg, val);
799             return store_dummy;
800         } else {
801             return gen_extend(s, reg, opsize, what == EA_LOADS);
802         }
803     case 1: /* Address register direct.  */
804         reg = get_areg(s, reg0);
805         if (what == EA_STORE) {
806             tcg_gen_mov_i32(reg, val);
807             return store_dummy;
808         } else {
809             return gen_extend(s, reg, opsize, what == EA_LOADS);
810         }
811     case 2: /* Indirect register */
812         reg = get_areg(s, reg0);
813         return gen_ldst(s, opsize, reg, val, what, index);
814     case 3: /* Indirect postincrement.  */
815         reg = get_areg(s, reg0);
816         result = gen_ldst(s, opsize, reg, val, what, index);
817         if (what == EA_STORE || !addrp) {
818             tmp = tcg_temp_new();
819             if (reg0 == 7 && opsize == OS_BYTE &&
820                 m68k_feature(s->env, M68K_FEATURE_M68K)) {
821                 tcg_gen_addi_i32(tmp, reg, 2);
822             } else {
823                 tcg_gen_addi_i32(tmp, reg, opsize_bytes(opsize));
824             }
825             delay_set_areg(s, reg0, tmp, true);
826         }
827         return result;
828     case 4: /* Indirect predecrememnt.  */
829         if (addrp && what == EA_STORE) {
830             tmp = *addrp;
831         } else {
832             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
833             if (IS_NULL_QREG(tmp)) {
834                 return tmp;
835             }
836             if (addrp) {
837                 *addrp = tmp;
838             }
839         }
840         result = gen_ldst(s, opsize, tmp, val, what, index);
841         if (what == EA_STORE || !addrp) {
842             delay_set_areg(s, reg0, tmp, false);
843         }
844         return result;
845     case 5: /* Indirect displacement.  */
846     case 6: /* Indirect index + displacement.  */
847     do_indirect:
848         if (addrp && what == EA_STORE) {
849             tmp = *addrp;
850         } else {
851             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
852             if (IS_NULL_QREG(tmp)) {
853                 return tmp;
854             }
855             if (addrp) {
856                 *addrp = tmp;
857             }
858         }
859         return gen_ldst(s, opsize, tmp, val, what, index);
860     case 7: /* Other */
861         switch (reg0) {
862         case 0: /* Absolute short.  */
863         case 1: /* Absolute long.  */
864         case 2: /* pc displacement  */
865         case 3: /* pc index+displacement.  */
866             goto do_indirect;
867         case 4: /* Immediate.  */
868             /* Sign extend values for consistency.  */
869             switch (opsize) {
870             case OS_BYTE:
871                 if (what == EA_LOADS) {
872                     offset = (int8_t)read_im8(env, s);
873                 } else {
874                     offset = read_im8(env, s);
875                 }
876                 break;
877             case OS_WORD:
878                 if (what == EA_LOADS) {
879                     offset = (int16_t)read_im16(env, s);
880                 } else {
881                     offset = read_im16(env, s);
882                 }
883                 break;
884             case OS_LONG:
885                 offset = read_im32(env, s);
886                 break;
887             default:
888                 g_assert_not_reached();
889             }
890             return tcg_constant_i32(offset);
891         default:
892             return NULL_QREG;
893         }
894     }
895     /* Should never happen.  */
896     return NULL_QREG;
897 }
898 
gen_ea(CPUM68KState * env,DisasContext * s,uint16_t insn,int opsize,TCGv val,TCGv * addrp,ea_what what,int index)899 static TCGv gen_ea(CPUM68KState *env, DisasContext *s, uint16_t insn,
900                    int opsize, TCGv val, TCGv *addrp, ea_what what, int index)
901 {
902     int mode = extract32(insn, 3, 3);
903     int reg0 = REG(insn, 0);
904     return gen_ea_mode(env, s, mode, reg0, opsize, val, addrp, what, index);
905 }
906 
gen_fp_ptr(int freg)907 static TCGv_ptr gen_fp_ptr(int freg)
908 {
909     TCGv_ptr fp = tcg_temp_new_ptr();
910     tcg_gen_addi_ptr(fp, tcg_env, offsetof(CPUM68KState, fregs[freg]));
911     return fp;
912 }
913 
gen_fp_result_ptr(void)914 static TCGv_ptr gen_fp_result_ptr(void)
915 {
916     TCGv_ptr fp = tcg_temp_new_ptr();
917     tcg_gen_addi_ptr(fp, tcg_env, offsetof(CPUM68KState, fp_result));
918     return fp;
919 }
920 
gen_fp_move(TCGv_ptr dest,TCGv_ptr src)921 static void gen_fp_move(TCGv_ptr dest, TCGv_ptr src)
922 {
923     TCGv t32;
924     TCGv_i64 t64;
925 
926     t32 = tcg_temp_new();
927     tcg_gen_ld16u_i32(t32, src, offsetof(FPReg, l.upper));
928     tcg_gen_st16_i32(t32, dest, offsetof(FPReg, l.upper));
929 
930     t64 = tcg_temp_new_i64();
931     tcg_gen_ld_i64(t64, src, offsetof(FPReg, l.lower));
932     tcg_gen_st_i64(t64, dest, offsetof(FPReg, l.lower));
933 }
934 
gen_load_fp(DisasContext * s,int opsize,TCGv addr,TCGv_ptr fp,int index)935 static void gen_load_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
936                         int index)
937 {
938     TCGv tmp;
939     TCGv_i64 t64;
940 
941     t64 = tcg_temp_new_i64();
942     tmp = tcg_temp_new();
943     switch (opsize) {
944     case OS_BYTE:
945     case OS_WORD:
946     case OS_LONG:
947         tcg_gen_qemu_ld_tl(tmp, addr, index, opsize | MO_SIGN | MO_TE);
948         gen_helper_exts32(tcg_env, fp, tmp);
949         break;
950     case OS_SINGLE:
951         tcg_gen_qemu_ld_tl(tmp, addr, index, MO_TEUL);
952         gen_helper_extf32(tcg_env, fp, tmp);
953         break;
954     case OS_DOUBLE:
955         tcg_gen_qemu_ld_i64(t64, addr, index, MO_TEUQ);
956         gen_helper_extf64(tcg_env, fp, t64);
957         break;
958     case OS_EXTENDED:
959         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
960             gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
961             break;
962         }
963         tcg_gen_qemu_ld_i32(tmp, addr, index, MO_TEUL);
964         tcg_gen_shri_i32(tmp, tmp, 16);
965         tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
966         tcg_gen_addi_i32(tmp, addr, 4);
967         tcg_gen_qemu_ld_i64(t64, tmp, index, MO_TEUQ);
968         tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
969         break;
970     case OS_PACKED:
971         /*
972          * unimplemented data type on 68040/ColdFire
973          * FIXME if needed for another FPU
974          */
975         gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
976         break;
977     default:
978         g_assert_not_reached();
979     }
980 }
981 
gen_store_fp(DisasContext * s,int opsize,TCGv addr,TCGv_ptr fp,int index)982 static void gen_store_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
983                          int index)
984 {
985     TCGv tmp;
986     TCGv_i64 t64;
987 
988     t64 = tcg_temp_new_i64();
989     tmp = tcg_temp_new();
990     switch (opsize) {
991     case OS_BYTE:
992     case OS_WORD:
993     case OS_LONG:
994         gen_helper_reds32(tmp, tcg_env, fp);
995         tcg_gen_qemu_st_tl(tmp, addr, index, opsize | MO_TE);
996         break;
997     case OS_SINGLE:
998         gen_helper_redf32(tmp, tcg_env, fp);
999         tcg_gen_qemu_st_tl(tmp, addr, index, MO_TEUL);
1000         break;
1001     case OS_DOUBLE:
1002         gen_helper_redf64(t64, tcg_env, fp);
1003         tcg_gen_qemu_st_i64(t64, addr, index, MO_TEUQ);
1004         break;
1005     case OS_EXTENDED:
1006         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1007             gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1008             break;
1009         }
1010         tcg_gen_ld16u_i32(tmp, fp, offsetof(FPReg, l.upper));
1011         tcg_gen_shli_i32(tmp, tmp, 16);
1012         tcg_gen_qemu_st_i32(tmp, addr, index, MO_TEUL);
1013         tcg_gen_addi_i32(tmp, addr, 4);
1014         tcg_gen_ld_i64(t64, fp, offsetof(FPReg, l.lower));
1015         tcg_gen_qemu_st_i64(t64, tmp, index, MO_TEUQ);
1016         break;
1017     case OS_PACKED:
1018         /*
1019          * unimplemented data type on 68040/ColdFire
1020          * FIXME if needed for another FPU
1021          */
1022         gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1023         break;
1024     default:
1025         g_assert_not_reached();
1026     }
1027 }
1028 
gen_ldst_fp(DisasContext * s,int opsize,TCGv addr,TCGv_ptr fp,ea_what what,int index)1029 static void gen_ldst_fp(DisasContext *s, int opsize, TCGv addr,
1030                         TCGv_ptr fp, ea_what what, int index)
1031 {
1032     if (what == EA_STORE) {
1033         gen_store_fp(s, opsize, addr, fp, index);
1034     } else {
1035         gen_load_fp(s, opsize, addr, fp, index);
1036     }
1037 }
1038 
gen_ea_mode_fp(CPUM68KState * env,DisasContext * s,int mode,int reg0,int opsize,TCGv_ptr fp,ea_what what,int index)1039 static int gen_ea_mode_fp(CPUM68KState *env, DisasContext *s, int mode,
1040                           int reg0, int opsize, TCGv_ptr fp, ea_what what,
1041                           int index)
1042 {
1043     TCGv reg, addr, tmp;
1044     TCGv_i64 t64;
1045 
1046     switch (mode) {
1047     case 0: /* Data register direct.  */
1048         reg = cpu_dregs[reg0];
1049         if (what == EA_STORE) {
1050             switch (opsize) {
1051             case OS_BYTE:
1052             case OS_WORD:
1053             case OS_LONG:
1054                 gen_helper_reds32(reg, tcg_env, fp);
1055                 break;
1056             case OS_SINGLE:
1057                 gen_helper_redf32(reg, tcg_env, fp);
1058                 break;
1059             default:
1060                 g_assert_not_reached();
1061             }
1062         } else {
1063             tmp = tcg_temp_new();
1064             switch (opsize) {
1065             case OS_BYTE:
1066             case OS_WORD:
1067             case OS_LONG:
1068                 tcg_gen_ext_i32(tmp, reg, opsize | MO_SIGN);
1069                 gen_helper_exts32(tcg_env, fp, tmp);
1070                 break;
1071             case OS_SINGLE:
1072                 gen_helper_extf32(tcg_env, fp, reg);
1073                 break;
1074             default:
1075                 g_assert_not_reached();
1076             }
1077         }
1078         return 0;
1079     case 1: /* Address register direct.  */
1080         return -1;
1081     case 2: /* Indirect register */
1082         addr = get_areg(s, reg0);
1083         gen_ldst_fp(s, opsize, addr, fp, what, index);
1084         return 0;
1085     case 3: /* Indirect postincrement.  */
1086         addr = cpu_aregs[reg0];
1087         gen_ldst_fp(s, opsize, addr, fp, what, index);
1088         tcg_gen_addi_i32(addr, addr, opsize_bytes(opsize));
1089         return 0;
1090     case 4: /* Indirect predecrememnt.  */
1091         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1092         if (IS_NULL_QREG(addr)) {
1093             return -1;
1094         }
1095         gen_ldst_fp(s, opsize, addr, fp, what, index);
1096         tcg_gen_mov_i32(cpu_aregs[reg0], addr);
1097         return 0;
1098     case 5: /* Indirect displacement.  */
1099     case 6: /* Indirect index + displacement.  */
1100     do_indirect:
1101         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1102         if (IS_NULL_QREG(addr)) {
1103             return -1;
1104         }
1105         gen_ldst_fp(s, opsize, addr, fp, what, index);
1106         return 0;
1107     case 7: /* Other */
1108         switch (reg0) {
1109         case 0: /* Absolute short.  */
1110         case 1: /* Absolute long.  */
1111         case 2: /* pc displacement  */
1112         case 3: /* pc index+displacement.  */
1113             goto do_indirect;
1114         case 4: /* Immediate.  */
1115             if (what == EA_STORE) {
1116                 return -1;
1117             }
1118             switch (opsize) {
1119             case OS_BYTE:
1120                 tmp = tcg_constant_i32((int8_t)read_im8(env, s));
1121                 gen_helper_exts32(tcg_env, fp, tmp);
1122                 break;
1123             case OS_WORD:
1124                 tmp = tcg_constant_i32((int16_t)read_im16(env, s));
1125                 gen_helper_exts32(tcg_env, fp, tmp);
1126                 break;
1127             case OS_LONG:
1128                 tmp = tcg_constant_i32(read_im32(env, s));
1129                 gen_helper_exts32(tcg_env, fp, tmp);
1130                 break;
1131             case OS_SINGLE:
1132                 tmp = tcg_constant_i32(read_im32(env, s));
1133                 gen_helper_extf32(tcg_env, fp, tmp);
1134                 break;
1135             case OS_DOUBLE:
1136                 t64 = tcg_constant_i64(read_im64(env, s));
1137                 gen_helper_extf64(tcg_env, fp, t64);
1138                 break;
1139             case OS_EXTENDED:
1140                 if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1141                     gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1142                     break;
1143                 }
1144                 tmp = tcg_constant_i32(read_im32(env, s) >> 16);
1145                 tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1146                 t64 = tcg_constant_i64(read_im64(env, s));
1147                 tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1148                 break;
1149             case OS_PACKED:
1150                 /*
1151                  * unimplemented data type on 68040/ColdFire
1152                  * FIXME if needed for another FPU
1153                  */
1154                 gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1155                 break;
1156             default:
1157                 g_assert_not_reached();
1158             }
1159             return 0;
1160         default:
1161             return -1;
1162         }
1163     }
1164     return -1;
1165 }
1166 
gen_ea_fp(CPUM68KState * env,DisasContext * s,uint16_t insn,int opsize,TCGv_ptr fp,ea_what what,int index)1167 static int gen_ea_fp(CPUM68KState *env, DisasContext *s, uint16_t insn,
1168                        int opsize, TCGv_ptr fp, ea_what what, int index)
1169 {
1170     int mode = extract32(insn, 3, 3);
1171     int reg0 = REG(insn, 0);
1172     return gen_ea_mode_fp(env, s, mode, reg0, opsize, fp, what, index);
1173 }
1174 
1175 typedef struct {
1176     TCGCond tcond;
1177     TCGv v1;
1178     TCGv v2;
1179 } DisasCompare;
1180 
gen_cc_cond(DisasCompare * c,DisasContext * s,int cond)1181 static void gen_cc_cond(DisasCompare *c, DisasContext *s, int cond)
1182 {
1183     TCGv tmp, tmp2;
1184     TCGCond tcond;
1185     CCOp op = s->cc_op;
1186 
1187     /* The CC_OP_CMP form can handle most normal comparisons directly.  */
1188     if (op == CC_OP_CMPB || op == CC_OP_CMPW || op == CC_OP_CMPL) {
1189         c->v1 = QREG_CC_N;
1190         c->v2 = QREG_CC_V;
1191         switch (cond) {
1192         case 2: /* HI */
1193         case 3: /* LS */
1194             tcond = TCG_COND_LEU;
1195             goto done;
1196         case 4: /* CC */
1197         case 5: /* CS */
1198             tcond = TCG_COND_LTU;
1199             goto done;
1200         case 6: /* NE */
1201         case 7: /* EQ */
1202             tcond = TCG_COND_EQ;
1203             goto done;
1204         case 10: /* PL */
1205         case 11: /* MI */
1206             c->v2 = tcg_constant_i32(0);
1207             c->v1 = tmp = tcg_temp_new();
1208             tcg_gen_sub_i32(tmp, QREG_CC_N, QREG_CC_V);
1209             gen_ext(tmp, tmp, op - CC_OP_CMPB, 1);
1210             /* fallthru */
1211         case 12: /* GE */
1212         case 13: /* LT */
1213             tcond = TCG_COND_LT;
1214             goto done;
1215         case 14: /* GT */
1216         case 15: /* LE */
1217             tcond = TCG_COND_LE;
1218             goto done;
1219         }
1220     }
1221 
1222     c->v2 = tcg_constant_i32(0);
1223 
1224     switch (cond) {
1225     case 0: /* T */
1226     case 1: /* F */
1227         c->v1 = c->v2;
1228         tcond = TCG_COND_NEVER;
1229         goto done;
1230     case 14: /* GT (!(Z || (N ^ V))) */
1231     case 15: /* LE (Z || (N ^ V)) */
1232         /*
1233          * Logic operations clear V, which simplifies LE to (Z || N),
1234          * and since Z and N are co-located, this becomes a normal
1235          * comparison vs N.
1236          */
1237         if (op == CC_OP_LOGIC) {
1238             c->v1 = QREG_CC_N;
1239             tcond = TCG_COND_LE;
1240             goto done;
1241         }
1242         break;
1243     case 12: /* GE (!(N ^ V)) */
1244     case 13: /* LT (N ^ V) */
1245         /* Logic operations clear V, which simplifies this to N.  */
1246         if (op != CC_OP_LOGIC) {
1247             break;
1248         }
1249         /* fallthru */
1250     case 10: /* PL (!N) */
1251     case 11: /* MI (N) */
1252         /* Several cases represent N normally.  */
1253         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1254             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1255             op == CC_OP_LOGIC) {
1256             c->v1 = QREG_CC_N;
1257             tcond = TCG_COND_LT;
1258             goto done;
1259         }
1260         break;
1261     case 6: /* NE (!Z) */
1262     case 7: /* EQ (Z) */
1263         /* Some cases fold Z into N.  */
1264         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1265             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1266             op == CC_OP_LOGIC) {
1267             tcond = TCG_COND_EQ;
1268             c->v1 = QREG_CC_N;
1269             goto done;
1270         }
1271         break;
1272     case 4: /* CC (!C) */
1273     case 5: /* CS (C) */
1274         /* Some cases fold C into X.  */
1275         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1276             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL) {
1277             tcond = TCG_COND_NE;
1278             c->v1 = QREG_CC_X;
1279             goto done;
1280         }
1281         /* fallthru */
1282     case 8: /* VC (!V) */
1283     case 9: /* VS (V) */
1284         /* Logic operations clear V and C.  */
1285         if (op == CC_OP_LOGIC) {
1286             tcond = TCG_COND_NEVER;
1287             c->v1 = c->v2;
1288             goto done;
1289         }
1290         break;
1291     }
1292 
1293     /* Otherwise, flush flag state to CC_OP_FLAGS.  */
1294     gen_flush_flags(s);
1295 
1296     switch (cond) {
1297     case 0: /* T */
1298     case 1: /* F */
1299     default:
1300         /* Invalid, or handled above.  */
1301         abort();
1302     case 2: /* HI (!C && !Z) -> !(C || Z)*/
1303     case 3: /* LS (C || Z) */
1304         c->v1 = tmp = tcg_temp_new();
1305         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1306         tcg_gen_or_i32(tmp, tmp, QREG_CC_C);
1307         tcond = TCG_COND_NE;
1308         break;
1309     case 4: /* CC (!C) */
1310     case 5: /* CS (C) */
1311         c->v1 = QREG_CC_C;
1312         tcond = TCG_COND_NE;
1313         break;
1314     case 6: /* NE (!Z) */
1315     case 7: /* EQ (Z) */
1316         c->v1 = QREG_CC_Z;
1317         tcond = TCG_COND_EQ;
1318         break;
1319     case 8: /* VC (!V) */
1320     case 9: /* VS (V) */
1321         c->v1 = QREG_CC_V;
1322         tcond = TCG_COND_LT;
1323         break;
1324     case 10: /* PL (!N) */
1325     case 11: /* MI (N) */
1326         c->v1 = QREG_CC_N;
1327         tcond = TCG_COND_LT;
1328         break;
1329     case 12: /* GE (!(N ^ V)) */
1330     case 13: /* LT (N ^ V) */
1331         c->v1 = tmp = tcg_temp_new();
1332         tcg_gen_xor_i32(tmp, QREG_CC_N, QREG_CC_V);
1333         tcond = TCG_COND_LT;
1334         break;
1335     case 14: /* GT (!(Z || (N ^ V))) */
1336     case 15: /* LE (Z || (N ^ V)) */
1337         c->v1 = tmp = tcg_temp_new();
1338         tcg_gen_negsetcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1339         tmp2 = tcg_temp_new();
1340         tcg_gen_xor_i32(tmp2, QREG_CC_N, QREG_CC_V);
1341         tcg_gen_or_i32(tmp, tmp, tmp2);
1342         tcond = TCG_COND_LT;
1343         break;
1344     }
1345 
1346  done:
1347     if ((cond & 1) == 0) {
1348         tcond = tcg_invert_cond(tcond);
1349     }
1350     c->tcond = tcond;
1351 }
1352 
gen_jmpcc(DisasContext * s,int cond,TCGLabel * l1)1353 static void gen_jmpcc(DisasContext *s, int cond, TCGLabel *l1)
1354 {
1355   DisasCompare c;
1356 
1357   gen_cc_cond(&c, s, cond);
1358   update_cc_op(s);
1359   tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
1360 }
1361 
1362 /* Force a TB lookup after an instruction that changes the CPU state.  */
gen_exit_tb(DisasContext * s)1363 static void gen_exit_tb(DisasContext *s)
1364 {
1365     update_cc_op(s);
1366     tcg_gen_movi_i32(QREG_PC, s->pc);
1367     s->base.is_jmp = DISAS_EXIT;
1368 }
1369 
1370 #define SRC_EA(env, result, opsize, op_sign, addrp) do {                \
1371         result = gen_ea(env, s, insn, opsize, NULL_QREG, addrp,         \
1372                         op_sign ? EA_LOADS : EA_LOADU, IS_USER(s));     \
1373         if (IS_NULL_QREG(result)) {                                     \
1374             gen_addr_fault(s);                                          \
1375             return;                                                     \
1376         }                                                               \
1377     } while (0)
1378 
1379 #define DEST_EA(env, insn, opsize, val, addrp) do {                     \
1380         TCGv ea_result = gen_ea(env, s, insn, opsize, val, addrp,       \
1381                                 EA_STORE, IS_USER(s));                  \
1382         if (IS_NULL_QREG(ea_result)) {                                  \
1383             gen_addr_fault(s);                                          \
1384             return;                                                     \
1385         }                                                               \
1386     } while (0)
1387 
1388 /* Generate a jump to an immediate address.  */
gen_jmp_tb(DisasContext * s,int n,target_ulong dest,target_ulong src)1389 static void gen_jmp_tb(DisasContext *s, int n, target_ulong dest,
1390                        target_ulong src)
1391 {
1392     if (unlikely(s->ss_active)) {
1393         update_cc_op(s);
1394         tcg_gen_movi_i32(QREG_PC, dest);
1395         gen_raise_exception_format2(s, EXCP_TRACE, src);
1396     } else if (translator_use_goto_tb(&s->base, dest)) {
1397         tcg_gen_goto_tb(n);
1398         tcg_gen_movi_i32(QREG_PC, dest);
1399         tcg_gen_exit_tb(s->base.tb, n);
1400     } else {
1401         gen_jmp_im(s, dest);
1402         tcg_gen_exit_tb(NULL, 0);
1403     }
1404     s->base.is_jmp = DISAS_NORETURN;
1405 }
1406 
1407 #ifndef CONFIG_USER_ONLY
semihosting_test(DisasContext * s)1408 static bool semihosting_test(DisasContext *s)
1409 {
1410     uint32_t test;
1411 
1412     if (!semihosting_enabled(IS_USER(s))) {
1413         return false;
1414     }
1415 
1416     /*
1417      * "The semihosting instruction is immediately preceded by a
1418      * nop aligned to a 4-byte boundary..."
1419      * The preceding 2-byte (aligned) nop plus the 2-byte halt/bkpt
1420      * means that we have advanced 4 bytes from the required nop.
1421      */
1422     if (s->pc % 4 != 0) {
1423         return false;
1424     }
1425     test = translator_lduw(s->env, &s->base, s->pc - 4);
1426     if (test != 0x4e71) {
1427         return false;
1428     }
1429     /* "... and followed by an invalid sentinel instruction movec %sp,0." */
1430     test = translator_ldl(s->env, &s->base, s->pc);
1431     if (test != 0x4e7bf000) {
1432         return false;
1433     }
1434 
1435     /* Consume the sentinel. */
1436     s->pc += 4;
1437     return true;
1438 }
1439 #endif /* !CONFIG_USER_ONLY */
1440 
DISAS_INSN(scc)1441 DISAS_INSN(scc)
1442 {
1443     DisasCompare c;
1444     int cond;
1445     TCGv tmp;
1446 
1447     cond = (insn >> 8) & 0xf;
1448     gen_cc_cond(&c, s, cond);
1449 
1450     tmp = tcg_temp_new();
1451     tcg_gen_negsetcond_i32(c.tcond, tmp, c.v1, c.v2);
1452 
1453     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
1454 }
1455 
DISAS_INSN(dbcc)1456 DISAS_INSN(dbcc)
1457 {
1458     TCGLabel *l1;
1459     TCGv reg;
1460     TCGv tmp;
1461     int16_t offset;
1462     uint32_t base;
1463 
1464     reg = DREG(insn, 0);
1465     base = s->pc;
1466     offset = (int16_t)read_im16(env, s);
1467     l1 = gen_new_label();
1468     gen_jmpcc(s, (insn >> 8) & 0xf, l1);
1469 
1470     tmp = tcg_temp_new();
1471     tcg_gen_ext16s_i32(tmp, reg);
1472     tcg_gen_addi_i32(tmp, tmp, -1);
1473     gen_partset_reg(OS_WORD, reg, tmp);
1474     tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, -1, l1);
1475     gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
1476     gen_set_label(l1);
1477     gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
1478 }
1479 
DISAS_INSN(undef_mac)1480 DISAS_INSN(undef_mac)
1481 {
1482     gen_exception(s, s->base.pc_next, EXCP_LINEA);
1483 }
1484 
DISAS_INSN(undef_fpu)1485 DISAS_INSN(undef_fpu)
1486 {
1487     gen_exception(s, s->base.pc_next, EXCP_LINEF);
1488 }
1489 
DISAS_INSN(undef)1490 DISAS_INSN(undef)
1491 {
1492     /*
1493      * ??? This is both instructions that are as yet unimplemented
1494      * for the 680x0 series, as well as those that are implemented
1495      * but actually illegal for CPU32 or pre-68020.
1496      */
1497     qemu_log_mask(LOG_UNIMP, "Illegal instruction: %04x @ %" VADDR_PRIx "\n",
1498                   insn, s->base.pc_next);
1499     gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
1500 }
1501 
DISAS_INSN(mulw)1502 DISAS_INSN(mulw)
1503 {
1504     TCGv reg;
1505     TCGv tmp;
1506     TCGv src;
1507     int sign;
1508 
1509     sign = (insn & 0x100) != 0;
1510     reg = DREG(insn, 9);
1511     tmp = tcg_temp_new();
1512     if (sign)
1513         tcg_gen_ext16s_i32(tmp, reg);
1514     else
1515         tcg_gen_ext16u_i32(tmp, reg);
1516     SRC_EA(env, src, OS_WORD, sign, NULL);
1517     tcg_gen_mul_i32(tmp, tmp, src);
1518     tcg_gen_mov_i32(reg, tmp);
1519     gen_logic_cc(s, tmp, OS_LONG);
1520 }
1521 
DISAS_INSN(divw)1522 DISAS_INSN(divw)
1523 {
1524     int sign;
1525     TCGv src;
1526     TCGv destr;
1527     TCGv ilen;
1528 
1529     /* divX.w <EA>,Dn    32/16 -> 16r:16q */
1530 
1531     sign = (insn & 0x100) != 0;
1532 
1533     /* dest.l / src.w */
1534 
1535     SRC_EA(env, src, OS_WORD, sign, NULL);
1536     destr = tcg_constant_i32(REG(insn, 9));
1537     ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1538     if (sign) {
1539         gen_helper_divsw(tcg_env, destr, src, ilen);
1540     } else {
1541         gen_helper_divuw(tcg_env, destr, src, ilen);
1542     }
1543 
1544     set_cc_op(s, CC_OP_FLAGS);
1545 }
1546 
DISAS_INSN(divl)1547 DISAS_INSN(divl)
1548 {
1549     TCGv num, reg, den, ilen;
1550     int sign;
1551     uint16_t ext;
1552 
1553     ext = read_im16(env, s);
1554 
1555     sign = (ext & 0x0800) != 0;
1556 
1557     if (ext & 0x400) {
1558         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
1559             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
1560             return;
1561         }
1562 
1563         /* divX.l <EA>, Dr:Dq    64/32 -> 32r:32q */
1564 
1565         SRC_EA(env, den, OS_LONG, 0, NULL);
1566         num = tcg_constant_i32(REG(ext, 12));
1567         reg = tcg_constant_i32(REG(ext, 0));
1568         ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1569         if (sign) {
1570             gen_helper_divsll(tcg_env, num, reg, den, ilen);
1571         } else {
1572             gen_helper_divull(tcg_env, num, reg, den, ilen);
1573         }
1574         set_cc_op(s, CC_OP_FLAGS);
1575         return;
1576     }
1577 
1578     /* divX.l <EA>, Dq        32/32 -> 32q     */
1579     /* divXl.l <EA>, Dr:Dq    32/32 -> 32r:32q */
1580 
1581     SRC_EA(env, den, OS_LONG, 0, NULL);
1582     num = tcg_constant_i32(REG(ext, 12));
1583     reg = tcg_constant_i32(REG(ext, 0));
1584     ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1585     if (sign) {
1586         gen_helper_divsl(tcg_env, num, reg, den, ilen);
1587     } else {
1588         gen_helper_divul(tcg_env, num, reg, den, ilen);
1589     }
1590 
1591     set_cc_op(s, CC_OP_FLAGS);
1592 }
1593 
bcd_add(TCGv dest,TCGv src)1594 static void bcd_add(TCGv dest, TCGv src)
1595 {
1596     TCGv t0, t1;
1597 
1598     /*
1599      * dest10 = dest10 + src10 + X
1600      *
1601      *        t1 = src
1602      *        t2 = t1 + 0x066
1603      *        t3 = t2 + dest + X
1604      *        t4 = t2 ^ dest
1605      *        t5 = t3 ^ t4
1606      *        t6 = ~t5 & 0x110
1607      *        t7 = (t6 >> 2) | (t6 >> 3)
1608      *        return t3 - t7
1609      */
1610 
1611     /*
1612      * t1 = (src + 0x066) + dest + X
1613      *    = result with some possible exceeding 0x6
1614      */
1615 
1616     t0 = tcg_temp_new();
1617     tcg_gen_addi_i32(t0, src, 0x066);
1618 
1619     t1 = tcg_temp_new();
1620     tcg_gen_add_i32(t1, t0, dest);
1621     tcg_gen_add_i32(t1, t1, QREG_CC_X);
1622 
1623     /* we will remove exceeding 0x6 where there is no carry */
1624 
1625     /*
1626      * t0 = (src + 0x0066) ^ dest
1627      *    = t1 without carries
1628      */
1629 
1630     tcg_gen_xor_i32(t0, t0, dest);
1631 
1632     /*
1633      * extract the carries
1634      * t0 = t0 ^ t1
1635      *    = only the carries
1636      */
1637 
1638     tcg_gen_xor_i32(t0, t0, t1);
1639 
1640     /*
1641      * generate 0x1 where there is no carry
1642      * and for each 0x10, generate a 0x6
1643      */
1644 
1645     tcg_gen_shri_i32(t0, t0, 3);
1646     tcg_gen_not_i32(t0, t0);
1647     tcg_gen_andi_i32(t0, t0, 0x22);
1648     tcg_gen_add_i32(dest, t0, t0);
1649     tcg_gen_add_i32(dest, dest, t0);
1650 
1651     /*
1652      * remove the exceeding 0x6
1653      * for digits that have not generated a carry
1654      */
1655 
1656     tcg_gen_sub_i32(dest, t1, dest);
1657 }
1658 
bcd_sub(TCGv dest,TCGv src)1659 static void bcd_sub(TCGv dest, TCGv src)
1660 {
1661     TCGv t0, t1, t2;
1662 
1663     /*
1664      *  dest10 = dest10 - src10 - X
1665      *         = bcd_add(dest + 1 - X, 0x199 - src)
1666      */
1667 
1668     /* t0 = 0x066 + (0x199 - src) */
1669 
1670     t0 = tcg_temp_new();
1671     tcg_gen_subfi_i32(t0, 0x1ff, src);
1672 
1673     /* t1 = t0 + dest + 1 - X*/
1674 
1675     t1 = tcg_temp_new();
1676     tcg_gen_add_i32(t1, t0, dest);
1677     tcg_gen_addi_i32(t1, t1, 1);
1678     tcg_gen_sub_i32(t1, t1, QREG_CC_X);
1679 
1680     /* t2 = t0 ^ dest */
1681 
1682     t2 = tcg_temp_new();
1683     tcg_gen_xor_i32(t2, t0, dest);
1684 
1685     /* t0 = t1 ^ t2 */
1686 
1687     tcg_gen_xor_i32(t0, t1, t2);
1688 
1689     /*
1690      * t2 = ~t0 & 0x110
1691      * t0 = (t2 >> 2) | (t2 >> 3)
1692      *
1693      * to fit on 8bit operands, changed in:
1694      *
1695      * t2 = ~(t0 >> 3) & 0x22
1696      * t0 = t2 + t2
1697      * t0 = t0 + t2
1698      */
1699 
1700     tcg_gen_shri_i32(t2, t0, 3);
1701     tcg_gen_not_i32(t2, t2);
1702     tcg_gen_andi_i32(t2, t2, 0x22);
1703     tcg_gen_add_i32(t0, t2, t2);
1704     tcg_gen_add_i32(t0, t0, t2);
1705 
1706     /* return t1 - t0 */
1707 
1708     tcg_gen_sub_i32(dest, t1, t0);
1709 }
1710 
bcd_flags(TCGv val)1711 static void bcd_flags(TCGv val)
1712 {
1713     tcg_gen_andi_i32(QREG_CC_C, val, 0x0ff);
1714     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_C);
1715 
1716     tcg_gen_extract_i32(QREG_CC_C, val, 8, 1);
1717 
1718     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
1719 }
1720 
DISAS_INSN(abcd_reg)1721 DISAS_INSN(abcd_reg)
1722 {
1723     TCGv src;
1724     TCGv dest;
1725 
1726     gen_flush_flags(s); /* !Z is sticky */
1727 
1728     src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
1729     dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
1730     bcd_add(dest, src);
1731     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1732 
1733     bcd_flags(dest);
1734 }
1735 
DISAS_INSN(abcd_mem)1736 DISAS_INSN(abcd_mem)
1737 {
1738     TCGv src, dest, addr;
1739 
1740     gen_flush_flags(s); /* !Z is sticky */
1741 
1742     /* Indirect pre-decrement load (mode 4) */
1743 
1744     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1745                       NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1746     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1747                        NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1748 
1749     bcd_add(dest, src);
1750 
1751     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1752                 EA_STORE, IS_USER(s));
1753 
1754     bcd_flags(dest);
1755 }
1756 
DISAS_INSN(sbcd_reg)1757 DISAS_INSN(sbcd_reg)
1758 {
1759     TCGv src, dest;
1760 
1761     gen_flush_flags(s); /* !Z is sticky */
1762 
1763     src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
1764     dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
1765 
1766     bcd_sub(dest, src);
1767 
1768     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1769 
1770     bcd_flags(dest);
1771 }
1772 
DISAS_INSN(sbcd_mem)1773 DISAS_INSN(sbcd_mem)
1774 {
1775     TCGv src, dest, addr;
1776 
1777     gen_flush_flags(s); /* !Z is sticky */
1778 
1779     /* Indirect pre-decrement load (mode 4) */
1780 
1781     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1782                       NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1783     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1784                        NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1785 
1786     bcd_sub(dest, src);
1787 
1788     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1789                 EA_STORE, IS_USER(s));
1790 
1791     bcd_flags(dest);
1792 }
1793 
DISAS_INSN(nbcd)1794 DISAS_INSN(nbcd)
1795 {
1796     TCGv src, dest;
1797     TCGv addr;
1798 
1799     gen_flush_flags(s); /* !Z is sticky */
1800 
1801     SRC_EA(env, src, OS_BYTE, 0, &addr);
1802 
1803     dest = tcg_temp_new();
1804     tcg_gen_movi_i32(dest, 0);
1805     bcd_sub(dest, src);
1806 
1807     DEST_EA(env, insn, OS_BYTE, dest, &addr);
1808 
1809     bcd_flags(dest);
1810 }
1811 
DISAS_INSN(addsub)1812 DISAS_INSN(addsub)
1813 {
1814     TCGv reg;
1815     TCGv dest;
1816     TCGv src;
1817     TCGv tmp;
1818     TCGv addr;
1819     int add;
1820     int opsize;
1821 
1822     add = (insn & 0x4000) != 0;
1823     opsize = insn_opsize(insn);
1824     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
1825     dest = tcg_temp_new();
1826     if (insn & 0x100) {
1827         SRC_EA(env, tmp, opsize, 1, &addr);
1828         src = reg;
1829     } else {
1830         tmp = reg;
1831         SRC_EA(env, src, opsize, 1, NULL);
1832     }
1833     if (add) {
1834         tcg_gen_add_i32(dest, tmp, src);
1835         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, src);
1836         set_cc_op(s, CC_OP_ADDB + opsize);
1837     } else {
1838         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, tmp, src);
1839         tcg_gen_sub_i32(dest, tmp, src);
1840         set_cc_op(s, CC_OP_SUBB + opsize);
1841     }
1842     gen_update_cc_add(dest, src, opsize);
1843     if (insn & 0x100) {
1844         DEST_EA(env, insn, opsize, dest, &addr);
1845     } else {
1846         gen_partset_reg(opsize, DREG(insn, 9), dest);
1847     }
1848 }
1849 
1850 /* Reverse the order of the bits in REG.  */
DISAS_INSN(bitrev)1851 DISAS_INSN(bitrev)
1852 {
1853     TCGv reg;
1854     reg = DREG(insn, 0);
1855     gen_helper_bitrev(reg, reg);
1856 }
1857 
DISAS_INSN(bitop_reg)1858 DISAS_INSN(bitop_reg)
1859 {
1860     int opsize;
1861     int op;
1862     TCGv src1;
1863     TCGv src2;
1864     TCGv tmp;
1865     TCGv addr;
1866     TCGv dest;
1867 
1868     if ((insn & 0x38) != 0)
1869         opsize = OS_BYTE;
1870     else
1871         opsize = OS_LONG;
1872     op = (insn >> 6) & 3;
1873     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
1874 
1875     gen_flush_flags(s);
1876     src2 = tcg_temp_new();
1877     if (opsize == OS_BYTE)
1878         tcg_gen_andi_i32(src2, DREG(insn, 9), 7);
1879     else
1880         tcg_gen_andi_i32(src2, DREG(insn, 9), 31);
1881 
1882     tmp = tcg_temp_new();
1883     tcg_gen_shl_i32(tmp, tcg_constant_i32(1), src2);
1884 
1885     tcg_gen_and_i32(QREG_CC_Z, src1, tmp);
1886 
1887     dest = tcg_temp_new();
1888     switch (op) {
1889     case 1: /* bchg */
1890         tcg_gen_xor_i32(dest, src1, tmp);
1891         break;
1892     case 2: /* bclr */
1893         tcg_gen_andc_i32(dest, src1, tmp);
1894         break;
1895     case 3: /* bset */
1896         tcg_gen_or_i32(dest, src1, tmp);
1897         break;
1898     default: /* btst */
1899         break;
1900     }
1901     if (op) {
1902         DEST_EA(env, insn, opsize, dest, &addr);
1903     }
1904 }
1905 
DISAS_INSN(sats)1906 DISAS_INSN(sats)
1907 {
1908     TCGv reg;
1909     reg = DREG(insn, 0);
1910     gen_flush_flags(s);
1911     gen_helper_sats(reg, reg, QREG_CC_V);
1912     gen_logic_cc(s, reg, OS_LONG);
1913 }
1914 
gen_push(DisasContext * s,TCGv val)1915 static void gen_push(DisasContext *s, TCGv val)
1916 {
1917     TCGv tmp;
1918 
1919     tmp = tcg_temp_new();
1920     tcg_gen_subi_i32(tmp, QREG_SP, 4);
1921     gen_store(s, OS_LONG, tmp, val, IS_USER(s));
1922     tcg_gen_mov_i32(QREG_SP, tmp);
1923 }
1924 
mreg(int reg)1925 static TCGv mreg(int reg)
1926 {
1927     if (reg < 8) {
1928         /* Dx */
1929         return cpu_dregs[reg];
1930     }
1931     /* Ax */
1932     return cpu_aregs[reg & 7];
1933 }
1934 
DISAS_INSN(movem)1935 DISAS_INSN(movem)
1936 {
1937     TCGv addr, incr, tmp, r[16];
1938     int is_load = (insn & 0x0400) != 0;
1939     int opsize = (insn & 0x40) != 0 ? OS_LONG : OS_WORD;
1940     uint16_t mask = read_im16(env, s);
1941     int mode = extract32(insn, 3, 3);
1942     int reg0 = REG(insn, 0);
1943     int i;
1944 
1945     tmp = cpu_aregs[reg0];
1946 
1947     switch (mode) {
1948     case 0: /* data register direct */
1949     case 1: /* addr register direct */
1950     do_addr_fault:
1951         gen_addr_fault(s);
1952         return;
1953 
1954     case 2: /* indirect */
1955         break;
1956 
1957     case 3: /* indirect post-increment */
1958         if (!is_load) {
1959             /* post-increment is not allowed */
1960             goto do_addr_fault;
1961         }
1962         break;
1963 
1964     case 4: /* indirect pre-decrement */
1965         if (is_load) {
1966             /* pre-decrement is not allowed */
1967             goto do_addr_fault;
1968         }
1969         /*
1970          * We want a bare copy of the address reg, without any pre-decrement
1971          * adjustment, as gen_lea would provide.
1972          */
1973         break;
1974 
1975     default:
1976         tmp = gen_lea_mode(env, s, mode, reg0, opsize);
1977         if (IS_NULL_QREG(tmp)) {
1978             goto do_addr_fault;
1979         }
1980         break;
1981     }
1982 
1983     addr = tcg_temp_new();
1984     tcg_gen_mov_i32(addr, tmp);
1985     incr = tcg_constant_i32(opsize_bytes(opsize));
1986 
1987     if (is_load) {
1988         /* memory to register */
1989         for (i = 0; i < 16; i++) {
1990             if (mask & (1 << i)) {
1991                 r[i] = gen_load(s, opsize, addr, 1, IS_USER(s));
1992                 tcg_gen_add_i32(addr, addr, incr);
1993             }
1994         }
1995         for (i = 0; i < 16; i++) {
1996             if (mask & (1 << i)) {
1997                 tcg_gen_mov_i32(mreg(i), r[i]);
1998             }
1999         }
2000         if (mode == 3) {
2001             /* post-increment: movem (An)+,X */
2002             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2003         }
2004     } else {
2005         /* register to memory */
2006         if (mode == 4) {
2007             /* pre-decrement: movem X,-(An) */
2008             for (i = 15; i >= 0; i--) {
2009                 if ((mask << i) & 0x8000) {
2010                     tcg_gen_sub_i32(addr, addr, incr);
2011                     if (reg0 + 8 == i &&
2012                         m68k_feature(s->env, M68K_FEATURE_EXT_FULL)) {
2013                         /*
2014                          * M68020+: if the addressing register is the
2015                          * register moved to memory, the value written
2016                          * is the initial value decremented by the size of
2017                          * the operation, regardless of how many actual
2018                          * stores have been performed until this point.
2019                          * M68000/M68010: the value is the initial value.
2020                          */
2021                         tmp = tcg_temp_new();
2022                         tcg_gen_sub_i32(tmp, cpu_aregs[reg0], incr);
2023                         gen_store(s, opsize, addr, tmp, IS_USER(s));
2024                     } else {
2025                         gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2026                     }
2027                 }
2028             }
2029             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2030         } else {
2031             for (i = 0; i < 16; i++) {
2032                 if (mask & (1 << i)) {
2033                     gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2034                     tcg_gen_add_i32(addr, addr, incr);
2035                 }
2036             }
2037         }
2038     }
2039 }
2040 
DISAS_INSN(movep)2041 DISAS_INSN(movep)
2042 {
2043     uint8_t i;
2044     int16_t displ;
2045     TCGv reg;
2046     TCGv addr;
2047     TCGv abuf;
2048     TCGv dbuf;
2049 
2050     displ = read_im16(env, s);
2051 
2052     addr = AREG(insn, 0);
2053     reg = DREG(insn, 9);
2054 
2055     abuf = tcg_temp_new();
2056     tcg_gen_addi_i32(abuf, addr, displ);
2057     dbuf = tcg_temp_new();
2058 
2059     if (insn & 0x40) {
2060         i = 4;
2061     } else {
2062         i = 2;
2063     }
2064 
2065     if (insn & 0x80) {
2066         for ( ; i > 0 ; i--) {
2067             tcg_gen_shri_i32(dbuf, reg, (i - 1) * 8);
2068             tcg_gen_qemu_st_i32(dbuf, abuf, IS_USER(s), MO_UB);
2069             if (i > 1) {
2070                 tcg_gen_addi_i32(abuf, abuf, 2);
2071             }
2072         }
2073     } else {
2074         for ( ; i > 0 ; i--) {
2075             tcg_gen_qemu_ld_tl(dbuf, abuf, IS_USER(s), MO_UB);
2076             tcg_gen_deposit_i32(reg, reg, dbuf, (i - 1) * 8, 8);
2077             if (i > 1) {
2078                 tcg_gen_addi_i32(abuf, abuf, 2);
2079             }
2080         }
2081     }
2082 }
2083 
DISAS_INSN(bitop_im)2084 DISAS_INSN(bitop_im)
2085 {
2086     int opsize;
2087     int op;
2088     TCGv src1;
2089     uint32_t mask;
2090     int bitnum;
2091     TCGv tmp;
2092     TCGv addr;
2093 
2094     if ((insn & 0x38) != 0)
2095         opsize = OS_BYTE;
2096     else
2097         opsize = OS_LONG;
2098     op = (insn >> 6) & 3;
2099 
2100     bitnum = read_im16(env, s);
2101     if (m68k_feature(s->env, M68K_FEATURE_M68K)) {
2102         if (bitnum & 0xfe00) {
2103             disas_undef(env, s, insn);
2104             return;
2105         }
2106     } else {
2107         if (bitnum & 0xff00) {
2108             disas_undef(env, s, insn);
2109             return;
2110         }
2111     }
2112 
2113     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
2114 
2115     gen_flush_flags(s);
2116     if (opsize == OS_BYTE)
2117         bitnum &= 7;
2118     else
2119         bitnum &= 31;
2120     mask = 1 << bitnum;
2121 
2122    tcg_gen_andi_i32(QREG_CC_Z, src1, mask);
2123 
2124     if (op) {
2125         tmp = tcg_temp_new();
2126         switch (op) {
2127         case 1: /* bchg */
2128             tcg_gen_xori_i32(tmp, src1, mask);
2129             break;
2130         case 2: /* bclr */
2131             tcg_gen_andi_i32(tmp, src1, ~mask);
2132             break;
2133         case 3: /* bset */
2134             tcg_gen_ori_i32(tmp, src1, mask);
2135             break;
2136         default: /* btst */
2137             break;
2138         }
2139         DEST_EA(env, insn, opsize, tmp, &addr);
2140     }
2141 }
2142 
gen_get_ccr(DisasContext * s)2143 static TCGv gen_get_ccr(DisasContext *s)
2144 {
2145     TCGv dest;
2146 
2147     update_cc_op(s);
2148     dest = tcg_temp_new();
2149     gen_helper_get_ccr(dest, tcg_env);
2150     return dest;
2151 }
2152 
gen_get_sr(DisasContext * s)2153 static TCGv gen_get_sr(DisasContext *s)
2154 {
2155     TCGv ccr;
2156     TCGv sr;
2157 
2158     ccr = gen_get_ccr(s);
2159     sr = tcg_temp_new();
2160     tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
2161     tcg_gen_or_i32(sr, sr, ccr);
2162     return sr;
2163 }
2164 
gen_set_sr_im(DisasContext * s,uint16_t val,int ccr_only)2165 static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
2166 {
2167     if (ccr_only) {
2168         tcg_gen_movi_i32(QREG_CC_C, val & CCF_C ? 1 : 0);
2169         tcg_gen_movi_i32(QREG_CC_V, val & CCF_V ? -1 : 0);
2170         tcg_gen_movi_i32(QREG_CC_Z, val & CCF_Z ? 0 : 1);
2171         tcg_gen_movi_i32(QREG_CC_N, val & CCF_N ? -1 : 0);
2172         tcg_gen_movi_i32(QREG_CC_X, val & CCF_X ? 1 : 0);
2173     } else {
2174         /* Must writeback before changing security state. */
2175         do_writebacks(s);
2176         gen_helper_set_sr(tcg_env, tcg_constant_i32(val));
2177     }
2178     set_cc_op(s, CC_OP_FLAGS);
2179 }
2180 
gen_set_sr(DisasContext * s,TCGv val,int ccr_only)2181 static void gen_set_sr(DisasContext *s, TCGv val, int ccr_only)
2182 {
2183     if (ccr_only) {
2184         gen_helper_set_ccr(tcg_env, val);
2185     } else {
2186         /* Must writeback before changing security state. */
2187         do_writebacks(s);
2188         gen_helper_set_sr(tcg_env, val);
2189     }
2190     set_cc_op(s, CC_OP_FLAGS);
2191 }
2192 
gen_move_to_sr(CPUM68KState * env,DisasContext * s,uint16_t insn,bool ccr_only)2193 static void gen_move_to_sr(CPUM68KState *env, DisasContext *s, uint16_t insn,
2194                            bool ccr_only)
2195 {
2196     if ((insn & 0x3f) == 0x3c) {
2197         uint16_t val;
2198         val = read_im16(env, s);
2199         gen_set_sr_im(s, val, ccr_only);
2200     } else {
2201         TCGv src;
2202         SRC_EA(env, src, OS_WORD, 0, NULL);
2203         gen_set_sr(s, src, ccr_only);
2204     }
2205 }
2206 
DISAS_INSN(arith_im)2207 DISAS_INSN(arith_im)
2208 {
2209     int op;
2210     TCGv im;
2211     TCGv src1;
2212     TCGv dest;
2213     TCGv addr;
2214     int opsize;
2215     bool with_SR = ((insn & 0x3f) == 0x3c);
2216 
2217     op = (insn >> 9) & 7;
2218     opsize = insn_opsize(insn);
2219     switch (opsize) {
2220     case OS_BYTE:
2221         im = tcg_constant_i32((int8_t)read_im8(env, s));
2222         break;
2223     case OS_WORD:
2224         im = tcg_constant_i32((int16_t)read_im16(env, s));
2225         break;
2226     case OS_LONG:
2227         im = tcg_constant_i32(read_im32(env, s));
2228         break;
2229     default:
2230         g_assert_not_reached();
2231     }
2232 
2233     if (with_SR) {
2234         /* SR/CCR can only be used with andi/eori/ori */
2235         if (op == 2 || op == 3 || op == 6) {
2236             disas_undef(env, s, insn);
2237             return;
2238         }
2239         switch (opsize) {
2240         case OS_BYTE:
2241             src1 = gen_get_ccr(s);
2242             break;
2243         case OS_WORD:
2244             if (IS_USER(s)) {
2245                 gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2246                 return;
2247             }
2248             src1 = gen_get_sr(s);
2249             break;
2250         default:
2251             /* OS_LONG; others already g_assert_not_reached.  */
2252             disas_undef(env, s, insn);
2253             return;
2254         }
2255     } else {
2256         SRC_EA(env, src1, opsize, 1, (op == 6) ? NULL : &addr);
2257     }
2258     dest = tcg_temp_new();
2259     switch (op) {
2260     case 0: /* ori */
2261         tcg_gen_or_i32(dest, src1, im);
2262         if (with_SR) {
2263             gen_set_sr(s, dest, opsize == OS_BYTE);
2264             gen_exit_tb(s);
2265         } else {
2266             DEST_EA(env, insn, opsize, dest, &addr);
2267             gen_logic_cc(s, dest, opsize);
2268         }
2269         break;
2270     case 1: /* andi */
2271         tcg_gen_and_i32(dest, src1, im);
2272         if (with_SR) {
2273             gen_set_sr(s, dest, opsize == OS_BYTE);
2274             gen_exit_tb(s);
2275         } else {
2276             DEST_EA(env, insn, opsize, dest, &addr);
2277             gen_logic_cc(s, dest, opsize);
2278         }
2279         break;
2280     case 2: /* subi */
2281         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, src1, im);
2282         tcg_gen_sub_i32(dest, src1, im);
2283         gen_update_cc_add(dest, im, opsize);
2284         set_cc_op(s, CC_OP_SUBB + opsize);
2285         DEST_EA(env, insn, opsize, dest, &addr);
2286         break;
2287     case 3: /* addi */
2288         tcg_gen_add_i32(dest, src1, im);
2289         gen_update_cc_add(dest, im, opsize);
2290         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, im);
2291         set_cc_op(s, CC_OP_ADDB + opsize);
2292         DEST_EA(env, insn, opsize, dest, &addr);
2293         break;
2294     case 5: /* eori */
2295         tcg_gen_xor_i32(dest, src1, im);
2296         if (with_SR) {
2297             gen_set_sr(s, dest, opsize == OS_BYTE);
2298             gen_exit_tb(s);
2299         } else {
2300             DEST_EA(env, insn, opsize, dest, &addr);
2301             gen_logic_cc(s, dest, opsize);
2302         }
2303         break;
2304     case 6: /* cmpi */
2305         gen_update_cc_cmp(s, src1, im, opsize);
2306         break;
2307     default:
2308         abort();
2309     }
2310 }
2311 
DISAS_INSN(cas)2312 DISAS_INSN(cas)
2313 {
2314     int opsize;
2315     TCGv addr;
2316     uint16_t ext;
2317     TCGv load;
2318     TCGv cmp;
2319     MemOp opc;
2320 
2321     switch ((insn >> 9) & 3) {
2322     case 1:
2323         opsize = OS_BYTE;
2324         opc = MO_SB;
2325         break;
2326     case 2:
2327         opsize = OS_WORD;
2328         opc = MO_TESW;
2329         break;
2330     case 3:
2331         opsize = OS_LONG;
2332         opc = MO_TESL;
2333         break;
2334     default:
2335         g_assert_not_reached();
2336     }
2337 
2338     ext = read_im16(env, s);
2339 
2340     /* cas Dc,Du,<EA> */
2341 
2342     addr = gen_lea(env, s, insn, opsize);
2343     if (IS_NULL_QREG(addr)) {
2344         gen_addr_fault(s);
2345         return;
2346     }
2347 
2348     cmp = gen_extend(s, DREG(ext, 0), opsize, 1);
2349 
2350     /*
2351      * if  <EA> == Dc then
2352      *     <EA> = Du
2353      *     Dc = <EA> (because <EA> == Dc)
2354      * else
2355      *     Dc = <EA>
2356      */
2357 
2358     load = tcg_temp_new();
2359     tcg_gen_atomic_cmpxchg_i32(load, addr, cmp, DREG(ext, 6),
2360                                IS_USER(s), opc);
2361     /* update flags before setting cmp to load */
2362     gen_update_cc_cmp(s, load, cmp, opsize);
2363     gen_partset_reg(opsize, DREG(ext, 0), load);
2364 
2365     switch (extract32(insn, 3, 3)) {
2366     case 3: /* Indirect postincrement.  */
2367         tcg_gen_addi_i32(AREG(insn, 0), addr, opsize_bytes(opsize));
2368         break;
2369     case 4: /* Indirect predecrememnt.  */
2370         tcg_gen_mov_i32(AREG(insn, 0), addr);
2371         break;
2372     }
2373 }
2374 
DISAS_INSN(cas2w)2375 DISAS_INSN(cas2w)
2376 {
2377     uint16_t ext1, ext2;
2378     TCGv addr1, addr2;
2379 
2380     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2381 
2382     ext1 = read_im16(env, s);
2383 
2384     if (ext1 & 0x8000) {
2385         /* Address Register */
2386         addr1 = AREG(ext1, 12);
2387     } else {
2388         /* Data Register */
2389         addr1 = DREG(ext1, 12);
2390     }
2391 
2392     ext2 = read_im16(env, s);
2393     if (ext2 & 0x8000) {
2394         /* Address Register */
2395         addr2 = AREG(ext2, 12);
2396     } else {
2397         /* Data Register */
2398         addr2 = DREG(ext2, 12);
2399     }
2400 
2401     /*
2402      * if (R1) == Dc1 && (R2) == Dc2 then
2403      *     (R1) = Du1
2404      *     (R2) = Du2
2405      * else
2406      *     Dc1 = (R1)
2407      *     Dc2 = (R2)
2408      */
2409 
2410     if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2411         gen_helper_exit_atomic(tcg_env);
2412     } else {
2413         TCGv regs = tcg_constant_i32(REG(ext2, 6) |
2414                                      (REG(ext1, 6) << 3) |
2415                                      (REG(ext2, 0) << 6) |
2416                                      (REG(ext1, 0) << 9));
2417         gen_helper_cas2w(tcg_env, regs, addr1, addr2);
2418     }
2419 
2420     /* Note that cas2w also assigned to env->cc_op.  */
2421     s->cc_op = CC_OP_CMPW;
2422     s->cc_op_synced = 1;
2423 }
2424 
DISAS_INSN(cas2l)2425 DISAS_INSN(cas2l)
2426 {
2427     uint16_t ext1, ext2;
2428     TCGv addr1, addr2, regs;
2429 
2430     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2431 
2432     ext1 = read_im16(env, s);
2433 
2434     if (ext1 & 0x8000) {
2435         /* Address Register */
2436         addr1 = AREG(ext1, 12);
2437     } else {
2438         /* Data Register */
2439         addr1 = DREG(ext1, 12);
2440     }
2441 
2442     ext2 = read_im16(env, s);
2443     if (ext2 & 0x8000) {
2444         /* Address Register */
2445         addr2 = AREG(ext2, 12);
2446     } else {
2447         /* Data Register */
2448         addr2 = DREG(ext2, 12);
2449     }
2450 
2451     /*
2452      * if (R1) == Dc1 && (R2) == Dc2 then
2453      *     (R1) = Du1
2454      *     (R2) = Du2
2455      * else
2456      *     Dc1 = (R1)
2457      *     Dc2 = (R2)
2458      */
2459 
2460     regs = tcg_constant_i32(REG(ext2, 6) |
2461                             (REG(ext1, 6) << 3) |
2462                             (REG(ext2, 0) << 6) |
2463                             (REG(ext1, 0) << 9));
2464     if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2465         gen_helper_cas2l_parallel(tcg_env, regs, addr1, addr2);
2466     } else {
2467         gen_helper_cas2l(tcg_env, regs, addr1, addr2);
2468     }
2469 
2470     /* Note that cas2l also assigned to env->cc_op.  */
2471     s->cc_op = CC_OP_CMPL;
2472     s->cc_op_synced = 1;
2473 }
2474 
DISAS_INSN(byterev)2475 DISAS_INSN(byterev)
2476 {
2477     TCGv reg;
2478 
2479     reg = DREG(insn, 0);
2480     tcg_gen_bswap32_i32(reg, reg);
2481 }
2482 
DISAS_INSN(move)2483 DISAS_INSN(move)
2484 {
2485     TCGv src;
2486     TCGv dest;
2487     int op;
2488     int opsize;
2489 
2490     switch (insn >> 12) {
2491     case 1: /* move.b */
2492         opsize = OS_BYTE;
2493         break;
2494     case 2: /* move.l */
2495         opsize = OS_LONG;
2496         break;
2497     case 3: /* move.w */
2498         opsize = OS_WORD;
2499         break;
2500     default:
2501         abort();
2502     }
2503     SRC_EA(env, src, opsize, 1, NULL);
2504     op = (insn >> 6) & 7;
2505     if (op == 1) {
2506         /* movea */
2507         /* The value will already have been sign extended.  */
2508         dest = AREG(insn, 9);
2509         tcg_gen_mov_i32(dest, src);
2510     } else {
2511         /* normal move */
2512         uint16_t dest_ea;
2513         dest_ea = ((insn >> 9) & 7) | (op << 3);
2514         DEST_EA(env, dest_ea, opsize, src, NULL);
2515         /* This will be correct because loads sign extend.  */
2516         gen_logic_cc(s, src, opsize);
2517     }
2518 }
2519 
DISAS_INSN(negx)2520 DISAS_INSN(negx)
2521 {
2522     TCGv z;
2523     TCGv src;
2524     TCGv addr;
2525     int opsize;
2526 
2527     opsize = insn_opsize(insn);
2528     SRC_EA(env, src, opsize, 1, &addr);
2529 
2530     gen_flush_flags(s); /* compute old Z */
2531 
2532     /*
2533      * Perform subtract with borrow.
2534      * (X, N) =  -(src + X);
2535      */
2536 
2537     z = tcg_constant_i32(0);
2538     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, z, QREG_CC_X, z);
2539     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, z, z, QREG_CC_N, QREG_CC_X);
2540     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
2541 
2542     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
2543 
2544     /*
2545      * Compute signed-overflow for negation.  The normal formula for
2546      * subtraction is (res ^ src) & (src ^ dest), but with dest==0
2547      * this simplifies to res & src.
2548      */
2549 
2550     tcg_gen_and_i32(QREG_CC_V, QREG_CC_N, src);
2551 
2552     /* Copy the rest of the results into place.  */
2553     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
2554     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
2555 
2556     set_cc_op(s, CC_OP_FLAGS);
2557 
2558     /* result is in QREG_CC_N */
2559 
2560     DEST_EA(env, insn, opsize, QREG_CC_N, &addr);
2561 }
2562 
DISAS_INSN(lea)2563 DISAS_INSN(lea)
2564 {
2565     TCGv reg;
2566     TCGv tmp;
2567 
2568     reg = AREG(insn, 9);
2569     tmp = gen_lea(env, s, insn, OS_LONG);
2570     if (IS_NULL_QREG(tmp)) {
2571         gen_addr_fault(s);
2572         return;
2573     }
2574     tcg_gen_mov_i32(reg, tmp);
2575 }
2576 
DISAS_INSN(clr)2577 DISAS_INSN(clr)
2578 {
2579     int opsize;
2580     TCGv zero;
2581 
2582     zero = tcg_constant_i32(0);
2583     opsize = insn_opsize(insn);
2584     DEST_EA(env, insn, opsize, zero, NULL);
2585     gen_logic_cc(s, zero, opsize);
2586 }
2587 
DISAS_INSN(move_from_ccr)2588 DISAS_INSN(move_from_ccr)
2589 {
2590     TCGv ccr;
2591 
2592     ccr = gen_get_ccr(s);
2593     DEST_EA(env, insn, OS_WORD, ccr, NULL);
2594 }
2595 
DISAS_INSN(neg)2596 DISAS_INSN(neg)
2597 {
2598     TCGv src1;
2599     TCGv dest;
2600     TCGv addr;
2601     int opsize;
2602 
2603     opsize = insn_opsize(insn);
2604     SRC_EA(env, src1, opsize, 1, &addr);
2605     dest = tcg_temp_new();
2606     tcg_gen_neg_i32(dest, src1);
2607     set_cc_op(s, CC_OP_SUBB + opsize);
2608     gen_update_cc_add(dest, src1, opsize);
2609     tcg_gen_setcondi_i32(TCG_COND_NE, QREG_CC_X, dest, 0);
2610     DEST_EA(env, insn, opsize, dest, &addr);
2611 }
2612 
DISAS_INSN(move_to_ccr)2613 DISAS_INSN(move_to_ccr)
2614 {
2615     gen_move_to_sr(env, s, insn, true);
2616 }
2617 
DISAS_INSN(not)2618 DISAS_INSN(not)
2619 {
2620     TCGv src1;
2621     TCGv dest;
2622     TCGv addr;
2623     int opsize;
2624 
2625     opsize = insn_opsize(insn);
2626     SRC_EA(env, src1, opsize, 1, &addr);
2627     dest = tcg_temp_new();
2628     tcg_gen_not_i32(dest, src1);
2629     DEST_EA(env, insn, opsize, dest, &addr);
2630     gen_logic_cc(s, dest, opsize);
2631 }
2632 
DISAS_INSN(swap)2633 DISAS_INSN(swap)
2634 {
2635     TCGv src1;
2636     TCGv src2;
2637     TCGv reg;
2638 
2639     src1 = tcg_temp_new();
2640     src2 = tcg_temp_new();
2641     reg = DREG(insn, 0);
2642     tcg_gen_shli_i32(src1, reg, 16);
2643     tcg_gen_shri_i32(src2, reg, 16);
2644     tcg_gen_or_i32(reg, src1, src2);
2645     gen_logic_cc(s, reg, OS_LONG);
2646 }
2647 
DISAS_INSN(bkpt)2648 DISAS_INSN(bkpt)
2649 {
2650 #if defined(CONFIG_USER_ONLY)
2651     gen_exception(s, s->base.pc_next, EXCP_DEBUG);
2652 #else
2653     /* BKPT #0 is the alternate semihosting instruction. */
2654     if ((insn & 7) == 0 && semihosting_test(s)) {
2655         gen_exception(s, s->pc, EXCP_SEMIHOSTING);
2656         return;
2657     }
2658     gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2659 #endif
2660 }
2661 
DISAS_INSN(pea)2662 DISAS_INSN(pea)
2663 {
2664     TCGv tmp;
2665 
2666     tmp = gen_lea(env, s, insn, OS_LONG);
2667     if (IS_NULL_QREG(tmp)) {
2668         gen_addr_fault(s);
2669         return;
2670     }
2671     gen_push(s, tmp);
2672 }
2673 
DISAS_INSN(ext)2674 DISAS_INSN(ext)
2675 {
2676     int op;
2677     TCGv reg;
2678     TCGv tmp;
2679 
2680     reg = DREG(insn, 0);
2681     op = (insn >> 6) & 7;
2682     tmp = tcg_temp_new();
2683     if (op == 3)
2684         tcg_gen_ext16s_i32(tmp, reg);
2685     else
2686         tcg_gen_ext8s_i32(tmp, reg);
2687     if (op == 2)
2688         gen_partset_reg(OS_WORD, reg, tmp);
2689     else
2690         tcg_gen_mov_i32(reg, tmp);
2691     gen_logic_cc(s, tmp, OS_LONG);
2692 }
2693 
DISAS_INSN(tst)2694 DISAS_INSN(tst)
2695 {
2696     int opsize;
2697     TCGv tmp;
2698 
2699     opsize = insn_opsize(insn);
2700     SRC_EA(env, tmp, opsize, 1, NULL);
2701     gen_logic_cc(s, tmp, opsize);
2702 }
2703 
DISAS_INSN(pulse)2704 DISAS_INSN(pulse)
2705 {
2706   /* Implemented as a NOP.  */
2707 }
2708 
DISAS_INSN(illegal)2709 DISAS_INSN(illegal)
2710 {
2711     gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2712 }
2713 
DISAS_INSN(tas)2714 DISAS_INSN(tas)
2715 {
2716     int mode = extract32(insn, 3, 3);
2717     int reg0 = REG(insn, 0);
2718 
2719     if (mode == 0) {
2720         /* data register direct */
2721         TCGv dest = cpu_dregs[reg0];
2722         gen_logic_cc(s, dest, OS_BYTE);
2723         tcg_gen_ori_tl(dest, dest, 0x80);
2724     } else {
2725         TCGv src1, addr;
2726 
2727         addr = gen_lea_mode(env, s, mode, reg0, OS_BYTE);
2728         if (IS_NULL_QREG(addr)) {
2729             gen_addr_fault(s);
2730             return;
2731         }
2732         src1 = tcg_temp_new();
2733         tcg_gen_atomic_fetch_or_tl(src1, addr, tcg_constant_tl(0x80),
2734                                    IS_USER(s), MO_SB);
2735         gen_logic_cc(s, src1, OS_BYTE);
2736 
2737         switch (mode) {
2738         case 3: /* Indirect postincrement.  */
2739             tcg_gen_addi_i32(AREG(insn, 0), addr, 1);
2740             break;
2741         case 4: /* Indirect predecrememnt.  */
2742             tcg_gen_mov_i32(AREG(insn, 0), addr);
2743             break;
2744         }
2745     }
2746 }
2747 
DISAS_INSN(mull)2748 DISAS_INSN(mull)
2749 {
2750     uint16_t ext;
2751     TCGv src1;
2752     int sign;
2753 
2754     ext = read_im16(env, s);
2755 
2756     sign = ext & 0x800;
2757 
2758     if (ext & 0x400) {
2759         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
2760             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2761             return;
2762         }
2763 
2764         SRC_EA(env, src1, OS_LONG, 0, NULL);
2765 
2766         if (sign) {
2767             tcg_gen_muls2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2768         } else {
2769             tcg_gen_mulu2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2770         }
2771         /* if Dl == Dh, 68040 returns low word */
2772         tcg_gen_mov_i32(DREG(ext, 0), QREG_CC_N);
2773         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_Z);
2774         tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N);
2775 
2776         tcg_gen_movi_i32(QREG_CC_V, 0);
2777         tcg_gen_movi_i32(QREG_CC_C, 0);
2778 
2779         set_cc_op(s, CC_OP_FLAGS);
2780         return;
2781     }
2782     SRC_EA(env, src1, OS_LONG, 0, NULL);
2783     if (m68k_feature(s->env, M68K_FEATURE_M68K)) {
2784         tcg_gen_movi_i32(QREG_CC_C, 0);
2785         if (sign) {
2786             tcg_gen_muls2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2787             /* QREG_CC_V is -(QREG_CC_V != (QREG_CC_N >> 31)) */
2788             tcg_gen_sari_i32(QREG_CC_Z, QREG_CC_N, 31);
2789             tcg_gen_negsetcond_i32(TCG_COND_NE, QREG_CC_V,
2790                                    QREG_CC_V, QREG_CC_Z);
2791         } else {
2792             tcg_gen_mulu2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2793             /* QREG_CC_V is -(QREG_CC_V != 0), use QREG_CC_C as 0 */
2794             tcg_gen_negsetcond_i32(TCG_COND_NE, QREG_CC_V,
2795                                    QREG_CC_V, QREG_CC_C);
2796         }
2797         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_N);
2798 
2799         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
2800 
2801         set_cc_op(s, CC_OP_FLAGS);
2802     } else {
2803         /*
2804          * The upper 32 bits of the product are discarded, so
2805          * muls.l and mulu.l are functionally equivalent.
2806          */
2807         tcg_gen_mul_i32(DREG(ext, 12), src1, DREG(ext, 12));
2808         gen_logic_cc(s, DREG(ext, 12), OS_LONG);
2809     }
2810 }
2811 
gen_link(DisasContext * s,uint16_t insn,int32_t offset)2812 static void gen_link(DisasContext *s, uint16_t insn, int32_t offset)
2813 {
2814     TCGv reg;
2815     TCGv tmp;
2816 
2817     reg = AREG(insn, 0);
2818     tmp = tcg_temp_new();
2819     tcg_gen_subi_i32(tmp, QREG_SP, 4);
2820     gen_store(s, OS_LONG, tmp, reg, IS_USER(s));
2821     if ((insn & 7) != 7) {
2822         tcg_gen_mov_i32(reg, tmp);
2823     }
2824     tcg_gen_addi_i32(QREG_SP, tmp, offset);
2825 }
2826 
DISAS_INSN(link)2827 DISAS_INSN(link)
2828 {
2829     int16_t offset;
2830 
2831     offset = read_im16(env, s);
2832     gen_link(s, insn, offset);
2833 }
2834 
DISAS_INSN(linkl)2835 DISAS_INSN(linkl)
2836 {
2837     int32_t offset;
2838 
2839     offset = read_im32(env, s);
2840     gen_link(s, insn, offset);
2841 }
2842 
DISAS_INSN(unlk)2843 DISAS_INSN(unlk)
2844 {
2845     TCGv src;
2846     TCGv reg;
2847     TCGv tmp;
2848 
2849     src = tcg_temp_new();
2850     reg = AREG(insn, 0);
2851     tcg_gen_mov_i32(src, reg);
2852     tmp = gen_load(s, OS_LONG, src, 0, IS_USER(s));
2853     tcg_gen_mov_i32(reg, tmp);
2854     tcg_gen_addi_i32(QREG_SP, src, 4);
2855 }
2856 
2857 #if !defined(CONFIG_USER_ONLY)
DISAS_INSN(reset)2858 DISAS_INSN(reset)
2859 {
2860     if (IS_USER(s)) {
2861         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2862         return;
2863     }
2864 
2865     gen_helper_reset(tcg_env);
2866 }
2867 #endif
2868 
DISAS_INSN(nop)2869 DISAS_INSN(nop)
2870 {
2871 }
2872 
DISAS_INSN(rtd)2873 DISAS_INSN(rtd)
2874 {
2875     TCGv tmp;
2876     int16_t offset = read_im16(env, s);
2877 
2878     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2879     tcg_gen_addi_i32(QREG_SP, QREG_SP, offset + 4);
2880     gen_jmp(s, tmp);
2881 }
2882 
DISAS_INSN(rtr)2883 DISAS_INSN(rtr)
2884 {
2885     TCGv tmp;
2886     TCGv ccr;
2887     TCGv sp;
2888 
2889     sp = tcg_temp_new();
2890     ccr = gen_load(s, OS_WORD, QREG_SP, 0, IS_USER(s));
2891     tcg_gen_addi_i32(sp, QREG_SP, 2);
2892     tmp = gen_load(s, OS_LONG, sp, 0, IS_USER(s));
2893     tcg_gen_addi_i32(QREG_SP, sp, 4);
2894 
2895     gen_set_sr(s, ccr, true);
2896 
2897     gen_jmp(s, tmp);
2898 }
2899 
DISAS_INSN(rts)2900 DISAS_INSN(rts)
2901 {
2902     TCGv tmp;
2903 
2904     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2905     tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
2906     gen_jmp(s, tmp);
2907 }
2908 
DISAS_INSN(jump)2909 DISAS_INSN(jump)
2910 {
2911     TCGv tmp;
2912 
2913     /*
2914      * Load the target address first to ensure correct exception
2915      * behavior.
2916      */
2917     tmp = gen_lea(env, s, insn, OS_LONG);
2918     if (IS_NULL_QREG(tmp)) {
2919         gen_addr_fault(s);
2920         return;
2921     }
2922     if ((insn & 0x40) == 0) {
2923         /* jsr */
2924         gen_push(s, tcg_constant_i32(s->pc));
2925     }
2926     gen_jmp(s, tmp);
2927 }
2928 
DISAS_INSN(addsubq)2929 DISAS_INSN(addsubq)
2930 {
2931     TCGv src;
2932     TCGv dest;
2933     TCGv val;
2934     int imm;
2935     TCGv addr;
2936     int opsize;
2937 
2938     if ((insn & 070) == 010) {
2939         /* Operation on address register is always long.  */
2940         opsize = OS_LONG;
2941     } else {
2942         opsize = insn_opsize(insn);
2943     }
2944     SRC_EA(env, src, opsize, 1, &addr);
2945     imm = (insn >> 9) & 7;
2946     if (imm == 0) {
2947         imm = 8;
2948     }
2949     val = tcg_constant_i32(imm);
2950     dest = tcg_temp_new();
2951     tcg_gen_mov_i32(dest, src);
2952     if ((insn & 0x38) == 0x08) {
2953         /*
2954          * Don't update condition codes if the destination is an
2955          * address register.
2956          */
2957         if (insn & 0x0100) {
2958             tcg_gen_sub_i32(dest, dest, val);
2959         } else {
2960             tcg_gen_add_i32(dest, dest, val);
2961         }
2962     } else {
2963         if (insn & 0x0100) {
2964             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
2965             tcg_gen_sub_i32(dest, dest, val);
2966             set_cc_op(s, CC_OP_SUBB + opsize);
2967         } else {
2968             tcg_gen_add_i32(dest, dest, val);
2969             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
2970             set_cc_op(s, CC_OP_ADDB + opsize);
2971         }
2972         gen_update_cc_add(dest, val, opsize);
2973     }
2974     DEST_EA(env, insn, opsize, dest, &addr);
2975 }
2976 
DISAS_INSN(branch)2977 DISAS_INSN(branch)
2978 {
2979     int32_t offset;
2980     uint32_t base;
2981     int op;
2982 
2983     base = s->pc;
2984     op = (insn >> 8) & 0xf;
2985     offset = (int8_t)insn;
2986     if (offset == 0) {
2987         offset = (int16_t)read_im16(env, s);
2988     } else if (offset == -1) {
2989         offset = read_im32(env, s);
2990     }
2991     if (op == 1) {
2992         /* bsr */
2993         gen_push(s, tcg_constant_i32(s->pc));
2994     }
2995     if (op > 1) {
2996         /* Bcc */
2997         TCGLabel *l1 = gen_new_label();
2998         gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
2999         gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
3000         gen_set_label(l1);
3001         gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
3002     } else {
3003         /* Unconditional branch.  */
3004         update_cc_op(s);
3005         gen_jmp_tb(s, 0, base + offset, s->base.pc_next);
3006     }
3007 }
3008 
DISAS_INSN(moveq)3009 DISAS_INSN(moveq)
3010 {
3011     tcg_gen_movi_i32(DREG(insn, 9), (int8_t)insn);
3012     gen_logic_cc(s, DREG(insn, 9), OS_LONG);
3013 }
3014 
DISAS_INSN(mvzs)3015 DISAS_INSN(mvzs)
3016 {
3017     int opsize;
3018     TCGv src;
3019     TCGv reg;
3020 
3021     if (insn & 0x40)
3022         opsize = OS_WORD;
3023     else
3024         opsize = OS_BYTE;
3025     SRC_EA(env, src, opsize, (insn & 0x80) == 0, NULL);
3026     reg = DREG(insn, 9);
3027     tcg_gen_mov_i32(reg, src);
3028     gen_logic_cc(s, src, opsize);
3029 }
3030 
DISAS_INSN(or)3031 DISAS_INSN(or)
3032 {
3033     TCGv reg;
3034     TCGv dest;
3035     TCGv src;
3036     TCGv addr;
3037     int opsize;
3038 
3039     opsize = insn_opsize(insn);
3040     reg = gen_extend(s, DREG(insn, 9), opsize, 0);
3041     dest = tcg_temp_new();
3042     if (insn & 0x100) {
3043         SRC_EA(env, src, opsize, 0, &addr);
3044         tcg_gen_or_i32(dest, src, reg);
3045         DEST_EA(env, insn, opsize, dest, &addr);
3046     } else {
3047         SRC_EA(env, src, opsize, 0, NULL);
3048         tcg_gen_or_i32(dest, src, reg);
3049         gen_partset_reg(opsize, DREG(insn, 9), dest);
3050     }
3051     gen_logic_cc(s, dest, opsize);
3052 }
3053 
DISAS_INSN(suba)3054 DISAS_INSN(suba)
3055 {
3056     TCGv src;
3057     TCGv reg;
3058 
3059     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3060     reg = AREG(insn, 9);
3061     tcg_gen_sub_i32(reg, reg, src);
3062 }
3063 
gen_subx(DisasContext * s,TCGv src,TCGv dest,int opsize)3064 static inline void gen_subx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3065 {
3066     TCGv tmp, zero;
3067 
3068     gen_flush_flags(s); /* compute old Z */
3069 
3070     /*
3071      * Perform subtract with borrow.
3072      * (X, N) = dest - (src + X);
3073      */
3074 
3075     zero = tcg_constant_i32(0);
3076     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, zero, QREG_CC_X, zero);
3077     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, dest, zero, QREG_CC_N, QREG_CC_X);
3078     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3079     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
3080 
3081     /* Compute signed-overflow for subtract.  */
3082 
3083     tmp = tcg_temp_new();
3084     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, dest);
3085     tcg_gen_xor_i32(tmp, dest, src);
3086     tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, tmp);
3087 
3088     /* Copy the rest of the results into place.  */
3089     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3090     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3091 
3092     set_cc_op(s, CC_OP_FLAGS);
3093 
3094     /* result is in QREG_CC_N */
3095 }
3096 
DISAS_INSN(subx_reg)3097 DISAS_INSN(subx_reg)
3098 {
3099     TCGv dest;
3100     TCGv src;
3101     int opsize;
3102 
3103     opsize = insn_opsize(insn);
3104 
3105     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3106     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3107 
3108     gen_subx(s, src, dest, opsize);
3109 
3110     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3111 }
3112 
DISAS_INSN(subx_mem)3113 DISAS_INSN(subx_mem)
3114 {
3115     TCGv src;
3116     TCGv addr_src;
3117     TCGv dest;
3118     TCGv addr_dest;
3119     int opsize;
3120 
3121     opsize = insn_opsize(insn);
3122 
3123     addr_src = AREG(insn, 0);
3124     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3125     src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3126 
3127     addr_dest = AREG(insn, 9);
3128     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3129     dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3130 
3131     gen_subx(s, src, dest, opsize);
3132 
3133     gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3134 }
3135 
DISAS_INSN(mov3q)3136 DISAS_INSN(mov3q)
3137 {
3138     TCGv src;
3139     int val;
3140 
3141     val = (insn >> 9) & 7;
3142     if (val == 0) {
3143         val = -1;
3144     }
3145     src = tcg_constant_i32(val);
3146     gen_logic_cc(s, src, OS_LONG);
3147     DEST_EA(env, insn, OS_LONG, src, NULL);
3148 }
3149 
DISAS_INSN(cmp)3150 DISAS_INSN(cmp)
3151 {
3152     TCGv src;
3153     TCGv reg;
3154     int opsize;
3155 
3156     opsize = insn_opsize(insn);
3157     SRC_EA(env, src, opsize, 1, NULL);
3158     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
3159     gen_update_cc_cmp(s, reg, src, opsize);
3160 }
3161 
DISAS_INSN(cmpa)3162 DISAS_INSN(cmpa)
3163 {
3164     int opsize;
3165     TCGv src;
3166     TCGv reg;
3167 
3168     if (insn & 0x100) {
3169         opsize = OS_LONG;
3170     } else {
3171         opsize = OS_WORD;
3172     }
3173     SRC_EA(env, src, opsize, 1, NULL);
3174     reg = AREG(insn, 9);
3175     gen_update_cc_cmp(s, reg, src, OS_LONG);
3176 }
3177 
DISAS_INSN(cmpm)3178 DISAS_INSN(cmpm)
3179 {
3180     int opsize = insn_opsize(insn);
3181     TCGv src, dst;
3182 
3183     /* Post-increment load (mode 3) from Ay.  */
3184     src = gen_ea_mode(env, s, 3, REG(insn, 0), opsize,
3185                       NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3186     /* Post-increment load (mode 3) from Ax.  */
3187     dst = gen_ea_mode(env, s, 3, REG(insn, 9), opsize,
3188                       NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3189 
3190     gen_update_cc_cmp(s, dst, src, opsize);
3191 }
3192 
DISAS_INSN(eor)3193 DISAS_INSN(eor)
3194 {
3195     TCGv src;
3196     TCGv dest;
3197     TCGv addr;
3198     int opsize;
3199 
3200     opsize = insn_opsize(insn);
3201 
3202     SRC_EA(env, src, opsize, 0, &addr);
3203     dest = tcg_temp_new();
3204     tcg_gen_xor_i32(dest, src, DREG(insn, 9));
3205     gen_logic_cc(s, dest, opsize);
3206     DEST_EA(env, insn, opsize, dest, &addr);
3207 }
3208 
do_exg(TCGv reg1,TCGv reg2)3209 static void do_exg(TCGv reg1, TCGv reg2)
3210 {
3211     TCGv temp = tcg_temp_new();
3212     tcg_gen_mov_i32(temp, reg1);
3213     tcg_gen_mov_i32(reg1, reg2);
3214     tcg_gen_mov_i32(reg2, temp);
3215 }
3216 
DISAS_INSN(exg_dd)3217 DISAS_INSN(exg_dd)
3218 {
3219     /* exchange Dx and Dy */
3220     do_exg(DREG(insn, 9), DREG(insn, 0));
3221 }
3222 
DISAS_INSN(exg_aa)3223 DISAS_INSN(exg_aa)
3224 {
3225     /* exchange Ax and Ay */
3226     do_exg(AREG(insn, 9), AREG(insn, 0));
3227 }
3228 
DISAS_INSN(exg_da)3229 DISAS_INSN(exg_da)
3230 {
3231     /* exchange Dx and Ay */
3232     do_exg(DREG(insn, 9), AREG(insn, 0));
3233 }
3234 
DISAS_INSN(and)3235 DISAS_INSN(and)
3236 {
3237     TCGv src;
3238     TCGv reg;
3239     TCGv dest;
3240     TCGv addr;
3241     int opsize;
3242 
3243     dest = tcg_temp_new();
3244 
3245     opsize = insn_opsize(insn);
3246     reg = DREG(insn, 9);
3247     if (insn & 0x100) {
3248         SRC_EA(env, src, opsize, 0, &addr);
3249         tcg_gen_and_i32(dest, src, reg);
3250         DEST_EA(env, insn, opsize, dest, &addr);
3251     } else {
3252         SRC_EA(env, src, opsize, 0, NULL);
3253         tcg_gen_and_i32(dest, src, reg);
3254         gen_partset_reg(opsize, reg, dest);
3255     }
3256     gen_logic_cc(s, dest, opsize);
3257 }
3258 
DISAS_INSN(adda)3259 DISAS_INSN(adda)
3260 {
3261     TCGv src;
3262     TCGv reg;
3263 
3264     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3265     reg = AREG(insn, 9);
3266     tcg_gen_add_i32(reg, reg, src);
3267 }
3268 
gen_addx(DisasContext * s,TCGv src,TCGv dest,int opsize)3269 static inline void gen_addx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3270 {
3271     TCGv tmp, zero;
3272 
3273     gen_flush_flags(s); /* compute old Z */
3274 
3275     /*
3276      * Perform addition with carry.
3277      * (X, N) = src + dest + X;
3278      */
3279 
3280     zero = tcg_constant_i32(0);
3281     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_X, zero, dest, zero);
3282     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_N, QREG_CC_X, src, zero);
3283     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3284 
3285     /* Compute signed-overflow for addition.  */
3286 
3287     tmp = tcg_temp_new();
3288     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3289     tcg_gen_xor_i32(tmp, dest, src);
3290     tcg_gen_andc_i32(QREG_CC_V, QREG_CC_V, tmp);
3291 
3292     /* Copy the rest of the results into place.  */
3293     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3294     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3295 
3296     set_cc_op(s, CC_OP_FLAGS);
3297 
3298     /* result is in QREG_CC_N */
3299 }
3300 
DISAS_INSN(addx_reg)3301 DISAS_INSN(addx_reg)
3302 {
3303     TCGv dest;
3304     TCGv src;
3305     int opsize;
3306 
3307     opsize = insn_opsize(insn);
3308 
3309     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3310     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3311 
3312     gen_addx(s, src, dest, opsize);
3313 
3314     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3315 }
3316 
DISAS_INSN(addx_mem)3317 DISAS_INSN(addx_mem)
3318 {
3319     TCGv src;
3320     TCGv addr_src;
3321     TCGv dest;
3322     TCGv addr_dest;
3323     int opsize;
3324 
3325     opsize = insn_opsize(insn);
3326 
3327     addr_src = AREG(insn, 0);
3328     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3329     src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3330 
3331     addr_dest = AREG(insn, 9);
3332     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3333     dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3334 
3335     gen_addx(s, src, dest, opsize);
3336 
3337     gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3338 }
3339 
shift_im(DisasContext * s,uint16_t insn,int opsize)3340 static inline void shift_im(DisasContext *s, uint16_t insn, int opsize)
3341 {
3342     int count = (insn >> 9) & 7;
3343     int logical = insn & 8;
3344     int left = insn & 0x100;
3345     int bits = opsize_bytes(opsize) * 8;
3346     TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
3347 
3348     if (count == 0) {
3349         count = 8;
3350     }
3351 
3352     tcg_gen_movi_i32(QREG_CC_V, 0);
3353     if (left) {
3354         tcg_gen_shri_i32(QREG_CC_C, reg, bits - count);
3355         tcg_gen_shli_i32(QREG_CC_N, reg, count);
3356 
3357         /*
3358          * Note that ColdFire always clears V (done above),
3359          * while M68000 sets if the most significant bit is changed at
3360          * any time during the shift operation.
3361          */
3362         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
3363             /* if shift count >= bits, V is (reg != 0) */
3364             if (count >= bits) {
3365                 tcg_gen_negsetcond_i32(TCG_COND_NE, QREG_CC_V, reg, QREG_CC_V);
3366             } else {
3367                 TCGv t0 = tcg_temp_new();
3368                 tcg_gen_sari_i32(QREG_CC_V, reg, bits - 1);
3369                 tcg_gen_sari_i32(t0, reg, bits - count - 1);
3370                 tcg_gen_negsetcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, t0);
3371             }
3372         }
3373     } else {
3374         tcg_gen_shri_i32(QREG_CC_C, reg, count - 1);
3375         if (logical) {
3376             tcg_gen_shri_i32(QREG_CC_N, reg, count);
3377         } else {
3378             tcg_gen_sari_i32(QREG_CC_N, reg, count);
3379         }
3380     }
3381 
3382     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3383     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3384     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3385     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3386 
3387     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3388     set_cc_op(s, CC_OP_FLAGS);
3389 }
3390 
shift_reg(DisasContext * s,uint16_t insn,int opsize)3391 static inline void shift_reg(DisasContext *s, uint16_t insn, int opsize)
3392 {
3393     int logical = insn & 8;
3394     int left = insn & 0x100;
3395     int bits = opsize_bytes(opsize) * 8;
3396     TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
3397     TCGv s32;
3398     TCGv_i64 t64, s64;
3399 
3400     t64 = tcg_temp_new_i64();
3401     s64 = tcg_temp_new_i64();
3402     s32 = tcg_temp_new();
3403 
3404     /*
3405      * Note that m68k truncates the shift count modulo 64, not 32.
3406      * In addition, a 64-bit shift makes it easy to find "the last
3407      * bit shifted out", for the carry flag.
3408      */
3409     tcg_gen_andi_i32(s32, DREG(insn, 9), 63);
3410     tcg_gen_extu_i32_i64(s64, s32);
3411     tcg_gen_extu_i32_i64(t64, reg);
3412 
3413     /* Optimistically set V=0.  Also used as a zero source below.  */
3414     tcg_gen_movi_i32(QREG_CC_V, 0);
3415     if (left) {
3416         tcg_gen_shl_i64(t64, t64, s64);
3417 
3418         if (opsize == OS_LONG) {
3419             tcg_gen_extr_i64_i32(QREG_CC_N, QREG_CC_C, t64);
3420             /* Note that C=0 if shift count is 0, and we get that for free.  */
3421         } else {
3422             TCGv zero = tcg_constant_i32(0);
3423             tcg_gen_extrl_i64_i32(QREG_CC_N, t64);
3424             tcg_gen_shri_i32(QREG_CC_C, QREG_CC_N, bits);
3425             tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3426                                 s32, zero, zero, QREG_CC_C);
3427         }
3428         tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3429 
3430         /* X = C, but only if the shift count was non-zero.  */
3431         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3432                             QREG_CC_C, QREG_CC_X);
3433 
3434         /*
3435          * M68000 sets V if the most significant bit is changed at
3436          * any time during the shift operation.  Do this via creating
3437          * an extension of the sign bit, comparing, and discarding
3438          * the bits below the sign bit.  I.e.
3439          *     int64_t s = (intN_t)reg;
3440          *     int64_t t = (int64_t)(intN_t)reg << count;
3441          *     V = ((s ^ t) & (-1 << (bits - 1))) != 0
3442          */
3443         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
3444             TCGv_i64 tt = tcg_constant_i64(32);
3445             /* if shift is greater than 32, use 32 */
3446             tcg_gen_movcond_i64(TCG_COND_GT, s64, s64, tt, tt, s64);
3447             /* Sign extend the input to 64 bits; re-do the shift.  */
3448             tcg_gen_ext_i32_i64(t64, reg);
3449             tcg_gen_shl_i64(s64, t64, s64);
3450             /* Clear all bits that are unchanged.  */
3451             tcg_gen_xor_i64(t64, t64, s64);
3452             /* Ignore the bits below the sign bit.  */
3453             tcg_gen_andi_i64(t64, t64, -1ULL << (bits - 1));
3454             /* If any bits remain set, we have overflow.  */
3455             tcg_gen_negsetcond_i64(TCG_COND_NE, t64, t64, tcg_constant_i64(0));
3456             tcg_gen_extrl_i64_i32(QREG_CC_V, t64);
3457         }
3458     } else {
3459         tcg_gen_shli_i64(t64, t64, 32);
3460         if (logical) {
3461             tcg_gen_shr_i64(t64, t64, s64);
3462         } else {
3463             tcg_gen_sar_i64(t64, t64, s64);
3464         }
3465         tcg_gen_extr_i64_i32(QREG_CC_C, QREG_CC_N, t64);
3466 
3467         /* Note that C=0 if shift count is 0, and we get that for free.  */
3468         tcg_gen_shri_i32(QREG_CC_C, QREG_CC_C, 31);
3469 
3470         /* X = C, but only if the shift count was non-zero.  */
3471         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3472                             QREG_CC_C, QREG_CC_X);
3473     }
3474     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3475     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3476 
3477     /* Write back the result.  */
3478     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3479     set_cc_op(s, CC_OP_FLAGS);
3480 }
3481 
DISAS_INSN(shift8_im)3482 DISAS_INSN(shift8_im)
3483 {
3484     shift_im(s, insn, OS_BYTE);
3485 }
3486 
DISAS_INSN(shift16_im)3487 DISAS_INSN(shift16_im)
3488 {
3489     shift_im(s, insn, OS_WORD);
3490 }
3491 
DISAS_INSN(shift_im)3492 DISAS_INSN(shift_im)
3493 {
3494     shift_im(s, insn, OS_LONG);
3495 }
3496 
DISAS_INSN(shift8_reg)3497 DISAS_INSN(shift8_reg)
3498 {
3499     shift_reg(s, insn, OS_BYTE);
3500 }
3501 
DISAS_INSN(shift16_reg)3502 DISAS_INSN(shift16_reg)
3503 {
3504     shift_reg(s, insn, OS_WORD);
3505 }
3506 
DISAS_INSN(shift_reg)3507 DISAS_INSN(shift_reg)
3508 {
3509     shift_reg(s, insn, OS_LONG);
3510 }
3511 
DISAS_INSN(shift_mem)3512 DISAS_INSN(shift_mem)
3513 {
3514     int logical = insn & 8;
3515     int left = insn & 0x100;
3516     TCGv src;
3517     TCGv addr;
3518 
3519     SRC_EA(env, src, OS_WORD, !logical, &addr);
3520     tcg_gen_movi_i32(QREG_CC_V, 0);
3521     if (left) {
3522         tcg_gen_shri_i32(QREG_CC_C, src, 15);
3523         tcg_gen_shli_i32(QREG_CC_N, src, 1);
3524 
3525         /*
3526          * Note that ColdFire always clears V,
3527          * while M68000 sets if the most significant bit is changed at
3528          * any time during the shift operation
3529          */
3530         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
3531             src = gen_extend(s, src, OS_WORD, 1);
3532             tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3533         }
3534     } else {
3535         tcg_gen_mov_i32(QREG_CC_C, src);
3536         if (logical) {
3537             tcg_gen_shri_i32(QREG_CC_N, src, 1);
3538         } else {
3539             tcg_gen_sari_i32(QREG_CC_N, src, 1);
3540         }
3541     }
3542 
3543     gen_ext(QREG_CC_N, QREG_CC_N, OS_WORD, 1);
3544     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3545     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3546     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3547 
3548     DEST_EA(env, insn, OS_WORD, QREG_CC_N, &addr);
3549     set_cc_op(s, CC_OP_FLAGS);
3550 }
3551 
rotate(TCGv reg,TCGv shift,int left,int size)3552 static void rotate(TCGv reg, TCGv shift, int left, int size)
3553 {
3554     switch (size) {
3555     case 8:
3556         /* Replicate the 8-bit input so that a 32-bit rotate works.  */
3557         tcg_gen_ext8u_i32(reg, reg);
3558         tcg_gen_muli_i32(reg, reg, 0x01010101);
3559         goto do_long;
3560     case 16:
3561         /* Replicate the 16-bit input so that a 32-bit rotate works.  */
3562         tcg_gen_deposit_i32(reg, reg, reg, 16, 16);
3563         goto do_long;
3564     do_long:
3565     default:
3566         if (left) {
3567             tcg_gen_rotl_i32(reg, reg, shift);
3568         } else {
3569             tcg_gen_rotr_i32(reg, reg, shift);
3570         }
3571     }
3572 
3573     /* compute flags */
3574 
3575     switch (size) {
3576     case 8:
3577         tcg_gen_ext8s_i32(reg, reg);
3578         break;
3579     case 16:
3580         tcg_gen_ext16s_i32(reg, reg);
3581         break;
3582     default:
3583         break;
3584     }
3585 
3586     /* QREG_CC_X is not affected */
3587 
3588     tcg_gen_mov_i32(QREG_CC_N, reg);
3589     tcg_gen_mov_i32(QREG_CC_Z, reg);
3590 
3591     if (left) {
3592         tcg_gen_andi_i32(QREG_CC_C, reg, 1);
3593     } else {
3594         tcg_gen_shri_i32(QREG_CC_C, reg, 31);
3595     }
3596 
3597     tcg_gen_movi_i32(QREG_CC_V, 0); /* always cleared */
3598 }
3599 
rotate_x_flags(TCGv reg,TCGv X,int size)3600 static void rotate_x_flags(TCGv reg, TCGv X, int size)
3601 {
3602     switch (size) {
3603     case 8:
3604         tcg_gen_ext8s_i32(reg, reg);
3605         break;
3606     case 16:
3607         tcg_gen_ext16s_i32(reg, reg);
3608         break;
3609     default:
3610         break;
3611     }
3612     tcg_gen_mov_i32(QREG_CC_N, reg);
3613     tcg_gen_mov_i32(QREG_CC_Z, reg);
3614     tcg_gen_mov_i32(QREG_CC_X, X);
3615     tcg_gen_mov_i32(QREG_CC_C, X);
3616     tcg_gen_movi_i32(QREG_CC_V, 0);
3617 }
3618 
3619 /* Result of rotate_x() is valid if 0 <= shift <= size */
rotate_x(TCGv reg,TCGv shift,int left,int size)3620 static TCGv rotate_x(TCGv reg, TCGv shift, int left, int size)
3621 {
3622     TCGv X, shl, shr, shx, sz, zero;
3623 
3624     sz = tcg_constant_i32(size);
3625 
3626     shr = tcg_temp_new();
3627     shl = tcg_temp_new();
3628     shx = tcg_temp_new();
3629     if (left) {
3630         tcg_gen_mov_i32(shl, shift);      /* shl = shift */
3631         tcg_gen_movi_i32(shr, size + 1);
3632         tcg_gen_sub_i32(shr, shr, shift); /* shr = size + 1 - shift */
3633         tcg_gen_subi_i32(shx, shift, 1);  /* shx = shift - 1 */
3634         /* shx = shx < 0 ? size : shx; */
3635         zero = tcg_constant_i32(0);
3636         tcg_gen_movcond_i32(TCG_COND_LT, shx, shx, zero, sz, shx);
3637     } else {
3638         tcg_gen_mov_i32(shr, shift);      /* shr = shift */
3639         tcg_gen_movi_i32(shl, size + 1);
3640         tcg_gen_sub_i32(shl, shl, shift); /* shl = size + 1 - shift */
3641         tcg_gen_sub_i32(shx, sz, shift); /* shx = size - shift */
3642     }
3643 
3644     /* reg = (reg << shl) | (reg >> shr) | (x << shx); */
3645 
3646     tcg_gen_shl_i32(shl, reg, shl);
3647     tcg_gen_shr_i32(shr, reg, shr);
3648     tcg_gen_or_i32(reg, shl, shr);
3649     tcg_gen_shl_i32(shx, QREG_CC_X, shx);
3650     tcg_gen_or_i32(reg, reg, shx);
3651 
3652     /* X = (reg >> size) & 1 */
3653 
3654     X = tcg_temp_new();
3655     tcg_gen_extract_i32(X, reg, size, 1);
3656 
3657     return X;
3658 }
3659 
3660 /* Result of rotate32_x() is valid if 0 <= shift < 33 */
rotate32_x(TCGv reg,TCGv shift,int left)3661 static TCGv rotate32_x(TCGv reg, TCGv shift, int left)
3662 {
3663     TCGv_i64 t0, shift64;
3664     TCGv X, lo, hi, zero;
3665 
3666     shift64 = tcg_temp_new_i64();
3667     tcg_gen_extu_i32_i64(shift64, shift);
3668 
3669     t0 = tcg_temp_new_i64();
3670 
3671     X = tcg_temp_new();
3672     lo = tcg_temp_new();
3673     hi = tcg_temp_new();
3674 
3675     if (left) {
3676         /* create [reg:X:..] */
3677 
3678         tcg_gen_shli_i32(lo, QREG_CC_X, 31);
3679         tcg_gen_concat_i32_i64(t0, lo, reg);
3680 
3681         /* rotate */
3682 
3683         tcg_gen_rotl_i64(t0, t0, shift64);
3684 
3685         /* result is [reg:..:reg:X] */
3686 
3687         tcg_gen_extr_i64_i32(lo, hi, t0);
3688         tcg_gen_andi_i32(X, lo, 1);
3689 
3690         tcg_gen_shri_i32(lo, lo, 1);
3691     } else {
3692         /* create [..:X:reg] */
3693 
3694         tcg_gen_concat_i32_i64(t0, reg, QREG_CC_X);
3695 
3696         tcg_gen_rotr_i64(t0, t0, shift64);
3697 
3698         /* result is value: [X:reg:..:reg] */
3699 
3700         tcg_gen_extr_i64_i32(lo, hi, t0);
3701 
3702         /* extract X */
3703 
3704         tcg_gen_shri_i32(X, hi, 31);
3705 
3706         /* extract result */
3707 
3708         tcg_gen_shli_i32(hi, hi, 1);
3709     }
3710     tcg_gen_or_i32(lo, lo, hi);
3711 
3712     /* if shift == 0, register and X are not affected */
3713 
3714     zero = tcg_constant_i32(0);
3715     tcg_gen_movcond_i32(TCG_COND_EQ, X, shift, zero, QREG_CC_X, X);
3716     tcg_gen_movcond_i32(TCG_COND_EQ, reg, shift, zero, reg, lo);
3717 
3718     return X;
3719 }
3720 
DISAS_INSN(rotate_im)3721 DISAS_INSN(rotate_im)
3722 {
3723     TCGv shift;
3724     int tmp;
3725     int left = (insn & 0x100);
3726 
3727     tmp = (insn >> 9) & 7;
3728     if (tmp == 0) {
3729         tmp = 8;
3730     }
3731 
3732     shift = tcg_constant_i32(tmp);
3733     if (insn & 8) {
3734         rotate(DREG(insn, 0), shift, left, 32);
3735     } else {
3736         TCGv X = rotate32_x(DREG(insn, 0), shift, left);
3737         rotate_x_flags(DREG(insn, 0), X, 32);
3738     }
3739 
3740     set_cc_op(s, CC_OP_FLAGS);
3741 }
3742 
DISAS_INSN(rotate8_im)3743 DISAS_INSN(rotate8_im)
3744 {
3745     int left = (insn & 0x100);
3746     TCGv reg;
3747     TCGv shift;
3748     int tmp;
3749 
3750     reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
3751 
3752     tmp = (insn >> 9) & 7;
3753     if (tmp == 0) {
3754         tmp = 8;
3755     }
3756 
3757     shift = tcg_constant_i32(tmp);
3758     if (insn & 8) {
3759         rotate(reg, shift, left, 8);
3760     } else {
3761         TCGv X = rotate_x(reg, shift, left, 8);
3762         rotate_x_flags(reg, X, 8);
3763     }
3764     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3765     set_cc_op(s, CC_OP_FLAGS);
3766 }
3767 
DISAS_INSN(rotate16_im)3768 DISAS_INSN(rotate16_im)
3769 {
3770     int left = (insn & 0x100);
3771     TCGv reg;
3772     TCGv shift;
3773     int tmp;
3774 
3775     reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
3776     tmp = (insn >> 9) & 7;
3777     if (tmp == 0) {
3778         tmp = 8;
3779     }
3780 
3781     shift = tcg_constant_i32(tmp);
3782     if (insn & 8) {
3783         rotate(reg, shift, left, 16);
3784     } else {
3785         TCGv X = rotate_x(reg, shift, left, 16);
3786         rotate_x_flags(reg, X, 16);
3787     }
3788     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3789     set_cc_op(s, CC_OP_FLAGS);
3790 }
3791 
DISAS_INSN(rotate_reg)3792 DISAS_INSN(rotate_reg)
3793 {
3794     TCGv reg;
3795     TCGv src;
3796     TCGv t0, t1;
3797     int left = (insn & 0x100);
3798 
3799     reg = DREG(insn, 0);
3800     src = DREG(insn, 9);
3801     /* shift in [0..63] */
3802     t0 = tcg_temp_new();
3803     tcg_gen_andi_i32(t0, src, 63);
3804     t1 = tcg_temp_new_i32();
3805     if (insn & 8) {
3806         tcg_gen_andi_i32(t1, src, 31);
3807         rotate(reg, t1, left, 32);
3808         /* if shift == 0, clear C */
3809         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3810                             t0, QREG_CC_V /* 0 */,
3811                             QREG_CC_V /* 0 */, QREG_CC_C);
3812     } else {
3813         TCGv X;
3814         /* modulo 33 */
3815         tcg_gen_movi_i32(t1, 33);
3816         tcg_gen_remu_i32(t1, t0, t1);
3817         X = rotate32_x(DREG(insn, 0), t1, left);
3818         rotate_x_flags(DREG(insn, 0), X, 32);
3819     }
3820     set_cc_op(s, CC_OP_FLAGS);
3821 }
3822 
DISAS_INSN(rotate8_reg)3823 DISAS_INSN(rotate8_reg)
3824 {
3825     TCGv reg;
3826     TCGv src;
3827     TCGv t0, t1;
3828     int left = (insn & 0x100);
3829 
3830     reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
3831     src = DREG(insn, 9);
3832     /* shift in [0..63] */
3833     t0 = tcg_temp_new_i32();
3834     tcg_gen_andi_i32(t0, src, 63);
3835     t1 = tcg_temp_new_i32();
3836     if (insn & 8) {
3837         tcg_gen_andi_i32(t1, src, 7);
3838         rotate(reg, t1, left, 8);
3839         /* if shift == 0, clear C */
3840         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3841                             t0, QREG_CC_V /* 0 */,
3842                             QREG_CC_V /* 0 */, QREG_CC_C);
3843     } else {
3844         TCGv X;
3845         /* modulo 9 */
3846         tcg_gen_movi_i32(t1, 9);
3847         tcg_gen_remu_i32(t1, t0, t1);
3848         X = rotate_x(reg, t1, left, 8);
3849         rotate_x_flags(reg, X, 8);
3850     }
3851     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3852     set_cc_op(s, CC_OP_FLAGS);
3853 }
3854 
DISAS_INSN(rotate16_reg)3855 DISAS_INSN(rotate16_reg)
3856 {
3857     TCGv reg;
3858     TCGv src;
3859     TCGv t0, t1;
3860     int left = (insn & 0x100);
3861 
3862     reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
3863     src = DREG(insn, 9);
3864     /* shift in [0..63] */
3865     t0 = tcg_temp_new_i32();
3866     tcg_gen_andi_i32(t0, src, 63);
3867     t1 = tcg_temp_new_i32();
3868     if (insn & 8) {
3869         tcg_gen_andi_i32(t1, src, 15);
3870         rotate(reg, t1, left, 16);
3871         /* if shift == 0, clear C */
3872         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3873                             t0, QREG_CC_V /* 0 */,
3874                             QREG_CC_V /* 0 */, QREG_CC_C);
3875     } else {
3876         TCGv X;
3877         /* modulo 17 */
3878         tcg_gen_movi_i32(t1, 17);
3879         tcg_gen_remu_i32(t1, t0, t1);
3880         X = rotate_x(reg, t1, left, 16);
3881         rotate_x_flags(reg, X, 16);
3882     }
3883     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3884     set_cc_op(s, CC_OP_FLAGS);
3885 }
3886 
DISAS_INSN(rotate_mem)3887 DISAS_INSN(rotate_mem)
3888 {
3889     TCGv src;
3890     TCGv addr;
3891     TCGv shift;
3892     int left = (insn & 0x100);
3893 
3894     SRC_EA(env, src, OS_WORD, 0, &addr);
3895 
3896     shift = tcg_constant_i32(1);
3897     if (insn & 0x0200) {
3898         rotate(src, shift, left, 16);
3899     } else {
3900         TCGv X = rotate_x(src, shift, left, 16);
3901         rotate_x_flags(src, X, 16);
3902     }
3903     DEST_EA(env, insn, OS_WORD, src, &addr);
3904     set_cc_op(s, CC_OP_FLAGS);
3905 }
3906 
DISAS_INSN(bfext_reg)3907 DISAS_INSN(bfext_reg)
3908 {
3909     int ext = read_im16(env, s);
3910     int is_sign = insn & 0x200;
3911     TCGv src = DREG(insn, 0);
3912     TCGv dst = DREG(ext, 12);
3913     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
3914     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
3915     int pos = 32 - ofs - len;        /* little bit-endian */
3916     TCGv tmp = tcg_temp_new();
3917     TCGv shift;
3918 
3919     /*
3920      * In general, we're going to rotate the field so that it's at the
3921      * top of the word and then right-shift by the complement of the
3922      * width to extend the field.
3923      */
3924     if (ext & 0x20) {
3925         /* Variable width.  */
3926         if (ext & 0x800) {
3927             /* Variable offset.  */
3928             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3929             tcg_gen_rotl_i32(tmp, src, tmp);
3930         } else {
3931             tcg_gen_rotli_i32(tmp, src, ofs);
3932         }
3933 
3934         shift = tcg_temp_new();
3935         tcg_gen_neg_i32(shift, DREG(ext, 0));
3936         tcg_gen_andi_i32(shift, shift, 31);
3937         tcg_gen_sar_i32(QREG_CC_N, tmp, shift);
3938         if (is_sign) {
3939             tcg_gen_mov_i32(dst, QREG_CC_N);
3940         } else {
3941             tcg_gen_shr_i32(dst, tmp, shift);
3942         }
3943     } else {
3944         /* Immediate width.  */
3945         if (ext & 0x800) {
3946             /* Variable offset */
3947             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3948             tcg_gen_rotl_i32(tmp, src, tmp);
3949             src = tmp;
3950             pos = 32 - len;
3951         } else {
3952             /*
3953              * Immediate offset.  If the field doesn't wrap around the
3954              * end of the word, rely on (s)extract completely.
3955              */
3956             if (pos < 0) {
3957                 tcg_gen_rotli_i32(tmp, src, ofs);
3958                 src = tmp;
3959                 pos = 32 - len;
3960             }
3961         }
3962 
3963         tcg_gen_sextract_i32(QREG_CC_N, src, pos, len);
3964         if (is_sign) {
3965             tcg_gen_mov_i32(dst, QREG_CC_N);
3966         } else {
3967             tcg_gen_extract_i32(dst, src, pos, len);
3968         }
3969     }
3970 
3971     set_cc_op(s, CC_OP_LOGIC);
3972 }
3973 
DISAS_INSN(bfext_mem)3974 DISAS_INSN(bfext_mem)
3975 {
3976     int ext = read_im16(env, s);
3977     int is_sign = insn & 0x200;
3978     TCGv dest = DREG(ext, 12);
3979     TCGv addr, len, ofs;
3980 
3981     addr = gen_lea(env, s, insn, OS_UNSIZED);
3982     if (IS_NULL_QREG(addr)) {
3983         gen_addr_fault(s);
3984         return;
3985     }
3986 
3987     if (ext & 0x20) {
3988         len = DREG(ext, 0);
3989     } else {
3990         len = tcg_constant_i32(extract32(ext, 0, 5));
3991     }
3992     if (ext & 0x800) {
3993         ofs = DREG(ext, 6);
3994     } else {
3995         ofs = tcg_constant_i32(extract32(ext, 6, 5));
3996     }
3997 
3998     if (is_sign) {
3999         gen_helper_bfexts_mem(dest, tcg_env, addr, ofs, len);
4000         tcg_gen_mov_i32(QREG_CC_N, dest);
4001     } else {
4002         TCGv_i64 tmp = tcg_temp_new_i64();
4003         gen_helper_bfextu_mem(tmp, tcg_env, addr, ofs, len);
4004         tcg_gen_extr_i64_i32(dest, QREG_CC_N, tmp);
4005     }
4006     set_cc_op(s, CC_OP_LOGIC);
4007 }
4008 
DISAS_INSN(bfop_reg)4009 DISAS_INSN(bfop_reg)
4010 {
4011     int ext = read_im16(env, s);
4012     TCGv src = DREG(insn, 0);
4013     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4014     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4015     TCGv mask, tofs = NULL, tlen = NULL;
4016     bool is_bfffo = (insn & 0x0f00) == 0x0d00;
4017 
4018     if ((ext & 0x820) == 0) {
4019         /* Immediate width and offset.  */
4020         uint32_t maski = 0x7fffffffu >> (len - 1);
4021         if (ofs + len <= 32) {
4022             tcg_gen_shli_i32(QREG_CC_N, src, ofs);
4023         } else {
4024             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4025         }
4026         tcg_gen_andi_i32(QREG_CC_N, QREG_CC_N, ~maski);
4027 
4028         mask = tcg_constant_i32(ror32(maski, ofs));
4029         if (is_bfffo) {
4030             tofs = tcg_constant_i32(ofs);
4031             tlen = tcg_constant_i32(len);
4032         }
4033     } else {
4034         TCGv tmp = tcg_temp_new();
4035 
4036         mask = tcg_temp_new();
4037         if (ext & 0x20) {
4038             /* Variable width */
4039             tcg_gen_subi_i32(tmp, DREG(ext, 0), 1);
4040             tcg_gen_andi_i32(tmp, tmp, 31);
4041             tcg_gen_shr_i32(mask, tcg_constant_i32(0x7fffffffu), tmp);
4042             if (is_bfffo) {
4043                 tlen = tcg_temp_new();
4044                 tcg_gen_addi_i32(tlen, tmp, 1);
4045             }
4046         } else {
4047             /* Immediate width */
4048             tcg_gen_movi_i32(mask, 0x7fffffffu >> (len - 1));
4049             if (is_bfffo) {
4050                 tlen = tcg_constant_i32(len);
4051             }
4052         }
4053 
4054         if (ext & 0x800) {
4055             /* Variable offset */
4056             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4057             tcg_gen_rotl_i32(QREG_CC_N, src, tmp);
4058             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4059             tcg_gen_rotr_i32(mask, mask, tmp);
4060             if (is_bfffo) {
4061                 tofs = tmp;
4062             }
4063         } else {
4064             /* Immediate offset (and variable width) */
4065             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4066             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4067             tcg_gen_rotri_i32(mask, mask, ofs);
4068             if (is_bfffo) {
4069                 tofs = tcg_constant_i32(ofs);
4070             }
4071         }
4072     }
4073     set_cc_op(s, CC_OP_LOGIC);
4074 
4075     switch (insn & 0x0f00) {
4076     case 0x0a00: /* bfchg */
4077         tcg_gen_eqv_i32(src, src, mask);
4078         break;
4079     case 0x0c00: /* bfclr */
4080         tcg_gen_and_i32(src, src, mask);
4081         break;
4082     case 0x0d00: /* bfffo */
4083         gen_helper_bfffo_reg(DREG(ext, 12), QREG_CC_N, tofs, tlen);
4084         break;
4085     case 0x0e00: /* bfset */
4086         tcg_gen_orc_i32(src, src, mask);
4087         break;
4088     case 0x0800: /* bftst */
4089         /* flags already set; no other work to do.  */
4090         break;
4091     default:
4092         g_assert_not_reached();
4093     }
4094 }
4095 
DISAS_INSN(bfop_mem)4096 DISAS_INSN(bfop_mem)
4097 {
4098     int ext = read_im16(env, s);
4099     TCGv addr, len, ofs;
4100     TCGv_i64 t64;
4101 
4102     addr = gen_lea(env, s, insn, OS_UNSIZED);
4103     if (IS_NULL_QREG(addr)) {
4104         gen_addr_fault(s);
4105         return;
4106     }
4107 
4108     if (ext & 0x20) {
4109         len = DREG(ext, 0);
4110     } else {
4111         len = tcg_constant_i32(extract32(ext, 0, 5));
4112     }
4113     if (ext & 0x800) {
4114         ofs = DREG(ext, 6);
4115     } else {
4116         ofs = tcg_constant_i32(extract32(ext, 6, 5));
4117     }
4118 
4119     switch (insn & 0x0f00) {
4120     case 0x0a00: /* bfchg */
4121         gen_helper_bfchg_mem(QREG_CC_N, tcg_env, addr, ofs, len);
4122         break;
4123     case 0x0c00: /* bfclr */
4124         gen_helper_bfclr_mem(QREG_CC_N, tcg_env, addr, ofs, len);
4125         break;
4126     case 0x0d00: /* bfffo */
4127         t64 = tcg_temp_new_i64();
4128         gen_helper_bfffo_mem(t64, tcg_env, addr, ofs, len);
4129         tcg_gen_extr_i64_i32(DREG(ext, 12), QREG_CC_N, t64);
4130         break;
4131     case 0x0e00: /* bfset */
4132         gen_helper_bfset_mem(QREG_CC_N, tcg_env, addr, ofs, len);
4133         break;
4134     case 0x0800: /* bftst */
4135         gen_helper_bfexts_mem(QREG_CC_N, tcg_env, addr, ofs, len);
4136         break;
4137     default:
4138         g_assert_not_reached();
4139     }
4140     set_cc_op(s, CC_OP_LOGIC);
4141 }
4142 
DISAS_INSN(bfins_reg)4143 DISAS_INSN(bfins_reg)
4144 {
4145     int ext = read_im16(env, s);
4146     TCGv dst = DREG(insn, 0);
4147     TCGv src = DREG(ext, 12);
4148     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4149     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4150     int pos = 32 - ofs - len;        /* little bit-endian */
4151     TCGv tmp;
4152 
4153     tmp = tcg_temp_new();
4154 
4155     if (ext & 0x20) {
4156         /* Variable width */
4157         tcg_gen_neg_i32(tmp, DREG(ext, 0));
4158         tcg_gen_andi_i32(tmp, tmp, 31);
4159         tcg_gen_shl_i32(QREG_CC_N, src, tmp);
4160     } else {
4161         /* Immediate width */
4162         tcg_gen_shli_i32(QREG_CC_N, src, 32 - len);
4163     }
4164     set_cc_op(s, CC_OP_LOGIC);
4165 
4166     /* Immediate width and offset */
4167     if ((ext & 0x820) == 0) {
4168         /* Check for suitability for deposit.  */
4169         if (pos >= 0) {
4170             tcg_gen_deposit_i32(dst, dst, src, pos, len);
4171         } else {
4172             uint32_t maski = -2U << (len - 1);
4173             uint32_t roti = (ofs + len) & 31;
4174             tcg_gen_andi_i32(tmp, src, ~maski);
4175             tcg_gen_rotri_i32(tmp, tmp, roti);
4176             tcg_gen_andi_i32(dst, dst, ror32(maski, roti));
4177             tcg_gen_or_i32(dst, dst, tmp);
4178         }
4179     } else {
4180         TCGv mask = tcg_temp_new();
4181         TCGv rot = tcg_temp_new();
4182 
4183         if (ext & 0x20) {
4184             /* Variable width */
4185             tcg_gen_subi_i32(rot, DREG(ext, 0), 1);
4186             tcg_gen_andi_i32(rot, rot, 31);
4187             tcg_gen_movi_i32(mask, -2);
4188             tcg_gen_shl_i32(mask, mask, rot);
4189             tcg_gen_mov_i32(rot, DREG(ext, 0));
4190             tcg_gen_andc_i32(tmp, src, mask);
4191         } else {
4192             /* Immediate width (variable offset) */
4193             uint32_t maski = -2U << (len - 1);
4194             tcg_gen_andi_i32(tmp, src, ~maski);
4195             tcg_gen_movi_i32(mask, maski);
4196             tcg_gen_movi_i32(rot, len & 31);
4197         }
4198         if (ext & 0x800) {
4199             /* Variable offset */
4200             tcg_gen_add_i32(rot, rot, DREG(ext, 6));
4201         } else {
4202             /* Immediate offset (variable width) */
4203             tcg_gen_addi_i32(rot, rot, ofs);
4204         }
4205         tcg_gen_andi_i32(rot, rot, 31);
4206         tcg_gen_rotr_i32(mask, mask, rot);
4207         tcg_gen_rotr_i32(tmp, tmp, rot);
4208         tcg_gen_and_i32(dst, dst, mask);
4209         tcg_gen_or_i32(dst, dst, tmp);
4210     }
4211 }
4212 
DISAS_INSN(bfins_mem)4213 DISAS_INSN(bfins_mem)
4214 {
4215     int ext = read_im16(env, s);
4216     TCGv src = DREG(ext, 12);
4217     TCGv addr, len, ofs;
4218 
4219     addr = gen_lea(env, s, insn, OS_UNSIZED);
4220     if (IS_NULL_QREG(addr)) {
4221         gen_addr_fault(s);
4222         return;
4223     }
4224 
4225     if (ext & 0x20) {
4226         len = DREG(ext, 0);
4227     } else {
4228         len = tcg_constant_i32(extract32(ext, 0, 5));
4229     }
4230     if (ext & 0x800) {
4231         ofs = DREG(ext, 6);
4232     } else {
4233         ofs = tcg_constant_i32(extract32(ext, 6, 5));
4234     }
4235 
4236     gen_helper_bfins_mem(QREG_CC_N, tcg_env, addr, src, ofs, len);
4237     set_cc_op(s, CC_OP_LOGIC);
4238 }
4239 
DISAS_INSN(ff1)4240 DISAS_INSN(ff1)
4241 {
4242     TCGv reg;
4243     reg = DREG(insn, 0);
4244     gen_logic_cc(s, reg, OS_LONG);
4245     gen_helper_ff1(reg, reg);
4246 }
4247 
DISAS_INSN(chk)4248 DISAS_INSN(chk)
4249 {
4250     TCGv src, reg;
4251     int opsize;
4252 
4253     switch ((insn >> 7) & 3) {
4254     case 3:
4255         opsize = OS_WORD;
4256         break;
4257     case 2:
4258         if (m68k_feature(env, M68K_FEATURE_CHK2)) {
4259             opsize = OS_LONG;
4260             break;
4261         }
4262         /* fallthru */
4263     default:
4264         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4265         return;
4266     }
4267     SRC_EA(env, src, opsize, 1, NULL);
4268     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
4269 
4270     gen_flush_flags(s);
4271     gen_helper_chk(tcg_env, reg, src);
4272 }
4273 
DISAS_INSN(chk2)4274 DISAS_INSN(chk2)
4275 {
4276     uint16_t ext;
4277     TCGv addr1, addr2, bound1, bound2, reg;
4278     int opsize;
4279 
4280     switch ((insn >> 9) & 3) {
4281     case 0:
4282         opsize = OS_BYTE;
4283         break;
4284     case 1:
4285         opsize = OS_WORD;
4286         break;
4287     case 2:
4288         opsize = OS_LONG;
4289         break;
4290     default:
4291         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4292         return;
4293     }
4294 
4295     ext = read_im16(env, s);
4296     if ((ext & 0x0800) == 0) {
4297         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4298         return;
4299     }
4300 
4301     addr1 = gen_lea(env, s, insn, OS_UNSIZED);
4302     addr2 = tcg_temp_new();
4303     tcg_gen_addi_i32(addr2, addr1, opsize_bytes(opsize));
4304 
4305     bound1 = gen_load(s, opsize, addr1, 1, IS_USER(s));
4306     bound2 = gen_load(s, opsize, addr2, 1, IS_USER(s));
4307 
4308     reg = tcg_temp_new();
4309     if (ext & 0x8000) {
4310         tcg_gen_mov_i32(reg, AREG(ext, 12));
4311     } else {
4312         gen_ext(reg, DREG(ext, 12), opsize, 1);
4313     }
4314 
4315     gen_flush_flags(s);
4316     gen_helper_chk2(tcg_env, reg, bound1, bound2);
4317 }
4318 
m68k_copy_line(TCGv dst,TCGv src,int index)4319 static void m68k_copy_line(TCGv dst, TCGv src, int index)
4320 {
4321     TCGv addr;
4322     TCGv_i64 t0, t1;
4323 
4324     addr = tcg_temp_new();
4325 
4326     t0 = tcg_temp_new_i64();
4327     t1 = tcg_temp_new_i64();
4328 
4329     tcg_gen_andi_i32(addr, src, ~15);
4330     tcg_gen_qemu_ld_i64(t0, addr, index, MO_TEUQ);
4331     tcg_gen_addi_i32(addr, addr, 8);
4332     tcg_gen_qemu_ld_i64(t1, addr, index, MO_TEUQ);
4333 
4334     tcg_gen_andi_i32(addr, dst, ~15);
4335     tcg_gen_qemu_st_i64(t0, addr, index, MO_TEUQ);
4336     tcg_gen_addi_i32(addr, addr, 8);
4337     tcg_gen_qemu_st_i64(t1, addr, index, MO_TEUQ);
4338 }
4339 
DISAS_INSN(move16_reg)4340 DISAS_INSN(move16_reg)
4341 {
4342     int index = IS_USER(s);
4343     TCGv tmp;
4344     uint16_t ext;
4345 
4346     ext = read_im16(env, s);
4347     if ((ext & (1 << 15)) == 0) {
4348         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4349     }
4350 
4351     m68k_copy_line(AREG(ext, 12), AREG(insn, 0), index);
4352 
4353     /* Ax can be Ay, so save Ay before incrementing Ax */
4354     tmp = tcg_temp_new();
4355     tcg_gen_mov_i32(tmp, AREG(ext, 12));
4356     tcg_gen_addi_i32(AREG(insn, 0), AREG(insn, 0), 16);
4357     tcg_gen_addi_i32(AREG(ext, 12), tmp, 16);
4358 }
4359 
DISAS_INSN(move16_mem)4360 DISAS_INSN(move16_mem)
4361 {
4362     int index = IS_USER(s);
4363     TCGv reg, addr;
4364 
4365     reg = AREG(insn, 0);
4366     addr = tcg_constant_i32(read_im32(env, s));
4367 
4368     if ((insn >> 3) & 1) {
4369         /* MOVE16 (xxx).L, (Ay) */
4370         m68k_copy_line(reg, addr, index);
4371     } else {
4372         /* MOVE16 (Ay), (xxx).L */
4373         m68k_copy_line(addr, reg, index);
4374     }
4375 
4376     if (((insn >> 3) & 2) == 0) {
4377         /* (Ay)+ */
4378         tcg_gen_addi_i32(reg, reg, 16);
4379     }
4380 }
4381 
DISAS_INSN(strldsr)4382 DISAS_INSN(strldsr)
4383 {
4384     uint16_t ext;
4385     uint32_t addr;
4386 
4387     addr = s->pc - 2;
4388     ext = read_im16(env, s);
4389     if (ext != 0x46FC) {
4390         gen_exception(s, addr, EXCP_ILLEGAL);
4391         return;
4392     }
4393     ext = read_im16(env, s);
4394     if (IS_USER(s) || (ext & SR_S) == 0) {
4395         gen_exception(s, addr, EXCP_PRIVILEGE);
4396         return;
4397     }
4398     gen_push(s, gen_get_sr(s));
4399     gen_set_sr_im(s, ext, 0);
4400     gen_exit_tb(s);
4401 }
4402 
DISAS_INSN(move_from_sr)4403 DISAS_INSN(move_from_sr)
4404 {
4405     TCGv sr;
4406 
4407     if (IS_USER(s) && m68k_feature(env, M68K_FEATURE_MOVEFROMSR_PRIV)) {
4408         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4409         return;
4410     }
4411     sr = gen_get_sr(s);
4412     DEST_EA(env, insn, OS_WORD, sr, NULL);
4413 }
4414 
4415 #if !defined(CONFIG_USER_ONLY)
DISAS_INSN(moves)4416 DISAS_INSN(moves)
4417 {
4418     int opsize;
4419     uint16_t ext;
4420     TCGv reg;
4421     TCGv addr;
4422     int extend;
4423 
4424     if (IS_USER(s)) {
4425         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4426         return;
4427     }
4428 
4429     ext = read_im16(env, s);
4430 
4431     opsize = insn_opsize(insn);
4432 
4433     if (ext & 0x8000) {
4434         /* address register */
4435         reg = AREG(ext, 12);
4436         extend = 1;
4437     } else {
4438         /* data register */
4439         reg = DREG(ext, 12);
4440         extend = 0;
4441     }
4442 
4443     addr = gen_lea(env, s, insn, opsize);
4444     if (IS_NULL_QREG(addr)) {
4445         gen_addr_fault(s);
4446         return;
4447     }
4448 
4449     if (ext & 0x0800) {
4450         /* from reg to ea */
4451         gen_store(s, opsize, addr, reg, DFC_INDEX(s));
4452     } else {
4453         /* from ea to reg */
4454         TCGv tmp = gen_load(s, opsize, addr, 0, SFC_INDEX(s));
4455         if (extend) {
4456             gen_ext(reg, tmp, opsize, 1);
4457         } else {
4458             gen_partset_reg(opsize, reg, tmp);
4459         }
4460     }
4461     switch (extract32(insn, 3, 3)) {
4462     case 3: /* Indirect postincrement.  */
4463         tcg_gen_addi_i32(AREG(insn, 0), addr,
4464                          REG(insn, 0) == 7 && opsize == OS_BYTE
4465                          ? 2
4466                          : opsize_bytes(opsize));
4467         break;
4468     case 4: /* Indirect predecrememnt.  */
4469         tcg_gen_mov_i32(AREG(insn, 0), addr);
4470         break;
4471     }
4472 }
4473 
DISAS_INSN(move_to_sr)4474 DISAS_INSN(move_to_sr)
4475 {
4476     if (IS_USER(s)) {
4477         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4478         return;
4479     }
4480     gen_move_to_sr(env, s, insn, false);
4481     gen_exit_tb(s);
4482 }
4483 
DISAS_INSN(move_from_usp)4484 DISAS_INSN(move_from_usp)
4485 {
4486     if (IS_USER(s)) {
4487         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4488         return;
4489     }
4490     tcg_gen_ld_i32(AREG(insn, 0), tcg_env,
4491                    offsetof(CPUM68KState, sp[M68K_USP]));
4492 }
4493 
DISAS_INSN(move_to_usp)4494 DISAS_INSN(move_to_usp)
4495 {
4496     if (IS_USER(s)) {
4497         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4498         return;
4499     }
4500     tcg_gen_st_i32(AREG(insn, 0), tcg_env,
4501                    offsetof(CPUM68KState, sp[M68K_USP]));
4502 }
4503 
DISAS_INSN(halt)4504 DISAS_INSN(halt)
4505 {
4506     if (IS_USER(s)) {
4507         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4508         return;
4509     }
4510     if (semihosting_test(s)) {
4511         gen_exception(s, s->pc, EXCP_SEMIHOSTING);
4512         return;
4513     }
4514     tcg_gen_movi_i32(cpu_halted, 1);
4515     gen_exception(s, s->pc, EXCP_HLT);
4516 }
4517 
DISAS_INSN(stop)4518 DISAS_INSN(stop)
4519 {
4520     uint16_t ext;
4521 
4522     if (IS_USER(s)) {
4523         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4524         return;
4525     }
4526 
4527     ext = read_im16(env, s);
4528 
4529     gen_set_sr_im(s, ext, 0);
4530     tcg_gen_movi_i32(cpu_halted, 1);
4531     gen_exception(s, s->pc, EXCP_HLT);
4532 }
4533 
DISAS_INSN(rte)4534 DISAS_INSN(rte)
4535 {
4536     if (IS_USER(s)) {
4537         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4538         return;
4539     }
4540     gen_exception(s, s->base.pc_next, EXCP_RTE);
4541 }
4542 
DISAS_INSN(cf_movec)4543 DISAS_INSN(cf_movec)
4544 {
4545     uint16_t ext;
4546     TCGv reg;
4547 
4548     if (IS_USER(s)) {
4549         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4550         return;
4551     }
4552 
4553     ext = read_im16(env, s);
4554 
4555     if (ext & 0x8000) {
4556         reg = AREG(ext, 12);
4557     } else {
4558         reg = DREG(ext, 12);
4559     }
4560     gen_helper_cf_movec_to(tcg_env, tcg_constant_i32(ext & 0xfff), reg);
4561     gen_exit_tb(s);
4562 }
4563 
DISAS_INSN(m68k_movec)4564 DISAS_INSN(m68k_movec)
4565 {
4566     uint16_t ext;
4567     TCGv reg, creg;
4568 
4569     if (IS_USER(s)) {
4570         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4571         return;
4572     }
4573 
4574     ext = read_im16(env, s);
4575 
4576     if (ext & 0x8000) {
4577         reg = AREG(ext, 12);
4578     } else {
4579         reg = DREG(ext, 12);
4580     }
4581     creg = tcg_constant_i32(ext & 0xfff);
4582     if (insn & 1) {
4583         gen_helper_m68k_movec_to(tcg_env, creg, reg);
4584     } else {
4585         gen_helper_m68k_movec_from(reg, tcg_env, creg);
4586     }
4587     gen_exit_tb(s);
4588 }
4589 
DISAS_INSN(intouch)4590 DISAS_INSN(intouch)
4591 {
4592     if (IS_USER(s)) {
4593         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4594         return;
4595     }
4596     /* ICache fetch.  Implement as no-op.  */
4597 }
4598 
DISAS_INSN(cpushl)4599 DISAS_INSN(cpushl)
4600 {
4601     if (IS_USER(s)) {
4602         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4603         return;
4604     }
4605     /* Cache push/invalidate.  Implement as no-op.  */
4606 }
4607 
DISAS_INSN(cpush)4608 DISAS_INSN(cpush)
4609 {
4610     if (IS_USER(s)) {
4611         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4612         return;
4613     }
4614     /* Cache push/invalidate.  Implement as no-op.  */
4615 }
4616 
DISAS_INSN(cinv)4617 DISAS_INSN(cinv)
4618 {
4619     if (IS_USER(s)) {
4620         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4621         return;
4622     }
4623     /* Invalidate cache line.  Implement as no-op.  */
4624 }
4625 
4626 #if !defined(CONFIG_USER_ONLY)
DISAS_INSN(pflush)4627 DISAS_INSN(pflush)
4628 {
4629     TCGv opmode;
4630 
4631     if (IS_USER(s)) {
4632         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4633         return;
4634     }
4635 
4636     opmode = tcg_constant_i32((insn >> 3) & 3);
4637     gen_helper_pflush(tcg_env, AREG(insn, 0), opmode);
4638 }
4639 
DISAS_INSN(ptest)4640 DISAS_INSN(ptest)
4641 {
4642     TCGv is_read;
4643 
4644     if (IS_USER(s)) {
4645         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4646         return;
4647     }
4648     is_read = tcg_constant_i32((insn >> 5) & 1);
4649     gen_helper_ptest(tcg_env, AREG(insn, 0), is_read);
4650 }
4651 #endif
4652 
DISAS_INSN(wddata)4653 DISAS_INSN(wddata)
4654 {
4655     gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4656 }
4657 
DISAS_INSN(wdebug)4658 DISAS_INSN(wdebug)
4659 {
4660     if (IS_USER(s)) {
4661         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4662         return;
4663     }
4664     /* TODO: Implement wdebug.  */
4665     cpu_abort(env_cpu(env), "WDEBUG not implemented");
4666 }
4667 #endif
4668 
DISAS_INSN(trap)4669 DISAS_INSN(trap)
4670 {
4671     gen_exception(s, s->pc, EXCP_TRAP0 + (insn & 0xf));
4672 }
4673 
do_trapcc(DisasContext * s,DisasCompare * c)4674 static void do_trapcc(DisasContext *s, DisasCompare *c)
4675 {
4676     if (c->tcond != TCG_COND_NEVER) {
4677         TCGLabel *over = NULL;
4678 
4679         update_cc_op(s);
4680 
4681         if (c->tcond != TCG_COND_ALWAYS) {
4682             /* Jump over if !c. */
4683             over = gen_new_label();
4684             tcg_gen_brcond_i32(tcg_invert_cond(c->tcond), c->v1, c->v2, over);
4685         }
4686 
4687         tcg_gen_movi_i32(QREG_PC, s->pc);
4688         gen_raise_exception_format2(s, EXCP_TRAPCC, s->base.pc_next);
4689 
4690         if (over != NULL) {
4691             gen_set_label(over);
4692             s->base.is_jmp = DISAS_NEXT;
4693         }
4694     }
4695 }
4696 
DISAS_INSN(trapcc)4697 DISAS_INSN(trapcc)
4698 {
4699     DisasCompare c;
4700 
4701     /* Consume and discard the immediate operand. */
4702     switch (extract32(insn, 0, 3)) {
4703     case 2: /* trapcc.w */
4704         (void)read_im16(env, s);
4705         break;
4706     case 3: /* trapcc.l */
4707         (void)read_im32(env, s);
4708         break;
4709     case 4: /* trapcc (no operand) */
4710         break;
4711     default:
4712         /* trapcc registered with only valid opmodes */
4713         g_assert_not_reached();
4714     }
4715 
4716     gen_cc_cond(&c, s, extract32(insn, 8, 4));
4717     do_trapcc(s, &c);
4718 }
4719 
DISAS_INSN(trapv)4720 DISAS_INSN(trapv)
4721 {
4722     DisasCompare c;
4723 
4724     gen_cc_cond(&c, s, 9); /* V set */
4725     do_trapcc(s, &c);
4726 }
4727 
gen_load_fcr(DisasContext * s,TCGv res,int reg)4728 static void gen_load_fcr(DisasContext *s, TCGv res, int reg)
4729 {
4730     switch (reg) {
4731     case M68K_FPIAR:
4732         tcg_gen_movi_i32(res, 0);
4733         break;
4734     case M68K_FPSR:
4735         gen_helper_get_fpsr(res, tcg_env);
4736         break;
4737     case M68K_FPCR:
4738         tcg_gen_ld_i32(res, tcg_env, offsetof(CPUM68KState, fpcr));
4739         break;
4740     }
4741 }
4742 
gen_store_fcr(DisasContext * s,TCGv val,int reg)4743 static void gen_store_fcr(DisasContext *s, TCGv val, int reg)
4744 {
4745     switch (reg) {
4746     case M68K_FPIAR:
4747         break;
4748     case M68K_FPSR:
4749         gen_helper_set_fpsr(tcg_env, val);
4750         break;
4751     case M68K_FPCR:
4752         gen_helper_set_fpcr(tcg_env, val);
4753         break;
4754     }
4755 }
4756 
gen_qemu_store_fcr(DisasContext * s,TCGv addr,int reg)4757 static void gen_qemu_store_fcr(DisasContext *s, TCGv addr, int reg)
4758 {
4759     int index = IS_USER(s);
4760     TCGv tmp;
4761 
4762     tmp = tcg_temp_new();
4763     gen_load_fcr(s, tmp, reg);
4764     tcg_gen_qemu_st_tl(tmp, addr, index, MO_TEUL);
4765 }
4766 
gen_qemu_load_fcr(DisasContext * s,TCGv addr,int reg)4767 static void gen_qemu_load_fcr(DisasContext *s, TCGv addr, int reg)
4768 {
4769     int index = IS_USER(s);
4770     TCGv tmp;
4771 
4772     tmp = tcg_temp_new();
4773     tcg_gen_qemu_ld_tl(tmp, addr, index, MO_TEUL);
4774     gen_store_fcr(s, tmp, reg);
4775 }
4776 
4777 
gen_op_fmove_fcr(CPUM68KState * env,DisasContext * s,uint32_t insn,uint32_t ext)4778 static void gen_op_fmove_fcr(CPUM68KState *env, DisasContext *s,
4779                              uint32_t insn, uint32_t ext)
4780 {
4781     int mask = (ext >> 10) & 7;
4782     int is_write = (ext >> 13) & 1;
4783     int mode = extract32(insn, 3, 3);
4784     int i;
4785     TCGv addr, tmp;
4786 
4787     switch (mode) {
4788     case 0: /* Dn */
4789         if (mask != M68K_FPIAR && mask != M68K_FPSR && mask != M68K_FPCR) {
4790             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4791             return;
4792         }
4793         if (is_write) {
4794             gen_load_fcr(s, DREG(insn, 0), mask);
4795         } else {
4796             gen_store_fcr(s, DREG(insn, 0), mask);
4797         }
4798         return;
4799     case 1: /* An, only with FPIAR */
4800         if (mask != M68K_FPIAR) {
4801             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4802             return;
4803         }
4804         if (is_write) {
4805             gen_load_fcr(s, AREG(insn, 0), mask);
4806         } else {
4807             gen_store_fcr(s, AREG(insn, 0), mask);
4808         }
4809         return;
4810     case 7: /* Immediate */
4811         if (REG(insn, 0) == 4) {
4812             if (is_write ||
4813                 (mask != M68K_FPIAR && mask != M68K_FPSR &&
4814                  mask != M68K_FPCR)) {
4815                 gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4816                 return;
4817             }
4818             tmp = tcg_constant_i32(read_im32(env, s));
4819             gen_store_fcr(s, tmp, mask);
4820             return;
4821         }
4822         break;
4823     default:
4824         break;
4825     }
4826 
4827     tmp = gen_lea(env, s, insn, OS_LONG);
4828     if (IS_NULL_QREG(tmp)) {
4829         gen_addr_fault(s);
4830         return;
4831     }
4832 
4833     addr = tcg_temp_new();
4834     tcg_gen_mov_i32(addr, tmp);
4835 
4836     /*
4837      * mask:
4838      *
4839      * 0b100 Floating-Point Control Register
4840      * 0b010 Floating-Point Status Register
4841      * 0b001 Floating-Point Instruction Address Register
4842      *
4843      */
4844 
4845     if (is_write && mode == 4) {
4846         for (i = 2; i >= 0; i--, mask >>= 1) {
4847             if (mask & 1) {
4848                 gen_qemu_store_fcr(s, addr, 1 << i);
4849                 if (mask != 1) {
4850                     tcg_gen_subi_i32(addr, addr, opsize_bytes(OS_LONG));
4851                 }
4852             }
4853        }
4854        tcg_gen_mov_i32(AREG(insn, 0), addr);
4855     } else {
4856         for (i = 0; i < 3; i++, mask >>= 1) {
4857             if (mask & 1) {
4858                 if (is_write) {
4859                     gen_qemu_store_fcr(s, addr, 1 << i);
4860                 } else {
4861                     gen_qemu_load_fcr(s, addr, 1 << i);
4862                 }
4863                 if (mask != 1 || mode == 3) {
4864                     tcg_gen_addi_i32(addr, addr, opsize_bytes(OS_LONG));
4865                 }
4866             }
4867         }
4868         if (mode == 3) {
4869             tcg_gen_mov_i32(AREG(insn, 0), addr);
4870         }
4871     }
4872 }
4873 
gen_op_fmovem(CPUM68KState * env,DisasContext * s,uint32_t insn,uint32_t ext)4874 static void gen_op_fmovem(CPUM68KState *env, DisasContext *s,
4875                           uint32_t insn, uint32_t ext)
4876 {
4877     int opsize;
4878     TCGv addr, tmp;
4879     int mode = (ext >> 11) & 0x3;
4880     int is_load = ((ext & 0x2000) == 0);
4881 
4882     if (m68k_feature(s->env, M68K_FEATURE_FPU)) {
4883         opsize = OS_EXTENDED;
4884     } else {
4885         opsize = OS_DOUBLE;  /* FIXME */
4886     }
4887 
4888     addr = gen_lea(env, s, insn, opsize);
4889     if (IS_NULL_QREG(addr)) {
4890         gen_addr_fault(s);
4891         return;
4892     }
4893 
4894     tmp = tcg_temp_new();
4895     if (mode & 0x1) {
4896         /* Dynamic register list */
4897         tcg_gen_ext8u_i32(tmp, DREG(ext, 4));
4898     } else {
4899         /* Static register list */
4900         tcg_gen_movi_i32(tmp, ext & 0xff);
4901     }
4902 
4903     if (!is_load && (mode & 2) == 0) {
4904         /*
4905          * predecrement addressing mode
4906          * only available to store register to memory
4907          */
4908         if (opsize == OS_EXTENDED) {
4909             gen_helper_fmovemx_st_predec(tmp, tcg_env, addr, tmp);
4910         } else {
4911             gen_helper_fmovemd_st_predec(tmp, tcg_env, addr, tmp);
4912         }
4913     } else {
4914         /* postincrement addressing mode */
4915         if (opsize == OS_EXTENDED) {
4916             if (is_load) {
4917                 gen_helper_fmovemx_ld_postinc(tmp, tcg_env, addr, tmp);
4918             } else {
4919                 gen_helper_fmovemx_st_postinc(tmp, tcg_env, addr, tmp);
4920             }
4921         } else {
4922             if (is_load) {
4923                 gen_helper_fmovemd_ld_postinc(tmp, tcg_env, addr, tmp);
4924             } else {
4925                 gen_helper_fmovemd_st_postinc(tmp, tcg_env, addr, tmp);
4926             }
4927         }
4928     }
4929     if ((insn & 070) == 030 || (insn & 070) == 040) {
4930         tcg_gen_mov_i32(AREG(insn, 0), tmp);
4931     }
4932 }
4933 
4934 /*
4935  * ??? FP exceptions are not implemented.  Most exceptions are deferred until
4936  * immediately before the next FP instruction is executed.
4937  */
DISAS_INSN(fpu)4938 DISAS_INSN(fpu)
4939 {
4940     uint16_t ext;
4941     int opmode;
4942     int opsize;
4943     TCGv_ptr cpu_src, cpu_dest;
4944 
4945     ext = read_im16(env, s);
4946     opmode = ext & 0x7f;
4947     switch ((ext >> 13) & 7) {
4948     case 0:
4949         break;
4950     case 1:
4951         goto undef;
4952     case 2:
4953         if (insn == 0xf200 && (ext & 0xfc00) == 0x5c00) {
4954             /* fmovecr */
4955             TCGv rom_offset = tcg_constant_i32(opmode);
4956             cpu_dest = gen_fp_ptr(REG(ext, 7));
4957             gen_helper_fconst(tcg_env, cpu_dest, rom_offset);
4958             return;
4959         }
4960         break;
4961     case 3: /* fmove out */
4962         cpu_src = gen_fp_ptr(REG(ext, 7));
4963         opsize = ext_opsize(ext, 10);
4964         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
4965                       EA_STORE, IS_USER(s)) == -1) {
4966             gen_addr_fault(s);
4967         }
4968         gen_helper_ftst(tcg_env, cpu_src);
4969         return;
4970     case 4: /* fmove to control register.  */
4971     case 5: /* fmove from control register.  */
4972         gen_op_fmove_fcr(env, s, insn, ext);
4973         return;
4974     case 6: /* fmovem */
4975     case 7:
4976         if ((ext & 0x1000) == 0 && !m68k_feature(s->env, M68K_FEATURE_FPU)) {
4977             goto undef;
4978         }
4979         gen_op_fmovem(env, s, insn, ext);
4980         return;
4981     }
4982     if (ext & (1 << 14)) {
4983         /* Source effective address.  */
4984         opsize = ext_opsize(ext, 10);
4985         cpu_src = gen_fp_result_ptr();
4986         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
4987                       EA_LOADS, IS_USER(s)) == -1) {
4988             gen_addr_fault(s);
4989             return;
4990         }
4991     } else {
4992         /* Source register.  */
4993         opsize = OS_EXTENDED;
4994         cpu_src = gen_fp_ptr(REG(ext, 10));
4995     }
4996     cpu_dest = gen_fp_ptr(REG(ext, 7));
4997     switch (opmode) {
4998     case 0: /* fmove */
4999         gen_fp_move(cpu_dest, cpu_src);
5000         break;
5001     case 0x40: /* fsmove */
5002         gen_helper_fsround(tcg_env, cpu_dest, cpu_src);
5003         break;
5004     case 0x44: /* fdmove */
5005         gen_helper_fdround(tcg_env, cpu_dest, cpu_src);
5006         break;
5007     case 1: /* fint */
5008         gen_helper_firound(tcg_env, cpu_dest, cpu_src);
5009         break;
5010     case 2: /* fsinh */
5011         gen_helper_fsinh(tcg_env, cpu_dest, cpu_src);
5012         break;
5013     case 3: /* fintrz */
5014         gen_helper_fitrunc(tcg_env, cpu_dest, cpu_src);
5015         break;
5016     case 4: /* fsqrt */
5017         gen_helper_fsqrt(tcg_env, cpu_dest, cpu_src);
5018         break;
5019     case 0x41: /* fssqrt */
5020         gen_helper_fssqrt(tcg_env, cpu_dest, cpu_src);
5021         break;
5022     case 0x45: /* fdsqrt */
5023         gen_helper_fdsqrt(tcg_env, cpu_dest, cpu_src);
5024         break;
5025     case 0x06: /* flognp1 */
5026         gen_helper_flognp1(tcg_env, cpu_dest, cpu_src);
5027         break;
5028     case 0x08: /* fetoxm1 */
5029         gen_helper_fetoxm1(tcg_env, cpu_dest, cpu_src);
5030         break;
5031     case 0x09: /* ftanh */
5032         gen_helper_ftanh(tcg_env, cpu_dest, cpu_src);
5033         break;
5034     case 0x0a: /* fatan */
5035         gen_helper_fatan(tcg_env, cpu_dest, cpu_src);
5036         break;
5037     case 0x0c: /* fasin */
5038         gen_helper_fasin(tcg_env, cpu_dest, cpu_src);
5039         break;
5040     case 0x0d: /* fatanh */
5041         gen_helper_fatanh(tcg_env, cpu_dest, cpu_src);
5042         break;
5043     case 0x0e: /* fsin */
5044         gen_helper_fsin(tcg_env, cpu_dest, cpu_src);
5045         break;
5046     case 0x0f: /* ftan */
5047         gen_helper_ftan(tcg_env, cpu_dest, cpu_src);
5048         break;
5049     case 0x10: /* fetox */
5050         gen_helper_fetox(tcg_env, cpu_dest, cpu_src);
5051         break;
5052     case 0x11: /* ftwotox */
5053         gen_helper_ftwotox(tcg_env, cpu_dest, cpu_src);
5054         break;
5055     case 0x12: /* ftentox */
5056         gen_helper_ftentox(tcg_env, cpu_dest, cpu_src);
5057         break;
5058     case 0x14: /* flogn */
5059         gen_helper_flogn(tcg_env, cpu_dest, cpu_src);
5060         break;
5061     case 0x15: /* flog10 */
5062         gen_helper_flog10(tcg_env, cpu_dest, cpu_src);
5063         break;
5064     case 0x16: /* flog2 */
5065         gen_helper_flog2(tcg_env, cpu_dest, cpu_src);
5066         break;
5067     case 0x18: /* fabs */
5068         gen_helper_fabs(tcg_env, cpu_dest, cpu_src);
5069         break;
5070     case 0x58: /* fsabs */
5071         gen_helper_fsabs(tcg_env, cpu_dest, cpu_src);
5072         break;
5073     case 0x5c: /* fdabs */
5074         gen_helper_fdabs(tcg_env, cpu_dest, cpu_src);
5075         break;
5076     case 0x19: /* fcosh */
5077         gen_helper_fcosh(tcg_env, cpu_dest, cpu_src);
5078         break;
5079     case 0x1a: /* fneg */
5080         gen_helper_fneg(tcg_env, cpu_dest, cpu_src);
5081         break;
5082     case 0x5a: /* fsneg */
5083         gen_helper_fsneg(tcg_env, cpu_dest, cpu_src);
5084         break;
5085     case 0x5e: /* fdneg */
5086         gen_helper_fdneg(tcg_env, cpu_dest, cpu_src);
5087         break;
5088     case 0x1c: /* facos */
5089         gen_helper_facos(tcg_env, cpu_dest, cpu_src);
5090         break;
5091     case 0x1d: /* fcos */
5092         gen_helper_fcos(tcg_env, cpu_dest, cpu_src);
5093         break;
5094     case 0x1e: /* fgetexp */
5095         gen_helper_fgetexp(tcg_env, cpu_dest, cpu_src);
5096         break;
5097     case 0x1f: /* fgetman */
5098         gen_helper_fgetman(tcg_env, cpu_dest, cpu_src);
5099         break;
5100     case 0x20: /* fdiv */
5101         gen_helper_fdiv(tcg_env, cpu_dest, cpu_src, cpu_dest);
5102         break;
5103     case 0x60: /* fsdiv */
5104         gen_helper_fsdiv(tcg_env, cpu_dest, cpu_src, cpu_dest);
5105         break;
5106     case 0x64: /* fddiv */
5107         gen_helper_fddiv(tcg_env, cpu_dest, cpu_src, cpu_dest);
5108         break;
5109     case 0x21: /* fmod */
5110         gen_helper_fmod(tcg_env, cpu_dest, cpu_src, cpu_dest);
5111         break;
5112     case 0x22: /* fadd */
5113         gen_helper_fadd(tcg_env, cpu_dest, cpu_src, cpu_dest);
5114         break;
5115     case 0x62: /* fsadd */
5116         gen_helper_fsadd(tcg_env, cpu_dest, cpu_src, cpu_dest);
5117         break;
5118     case 0x66: /* fdadd */
5119         gen_helper_fdadd(tcg_env, cpu_dest, cpu_src, cpu_dest);
5120         break;
5121     case 0x23: /* fmul */
5122         gen_helper_fmul(tcg_env, cpu_dest, cpu_src, cpu_dest);
5123         break;
5124     case 0x63: /* fsmul */
5125         gen_helper_fsmul(tcg_env, cpu_dest, cpu_src, cpu_dest);
5126         break;
5127     case 0x67: /* fdmul */
5128         gen_helper_fdmul(tcg_env, cpu_dest, cpu_src, cpu_dest);
5129         break;
5130     case 0x24: /* fsgldiv */
5131         gen_helper_fsgldiv(tcg_env, cpu_dest, cpu_src, cpu_dest);
5132         break;
5133     case 0x25: /* frem */
5134         gen_helper_frem(tcg_env, cpu_dest, cpu_src, cpu_dest);
5135         break;
5136     case 0x26: /* fscale */
5137         gen_helper_fscale(tcg_env, cpu_dest, cpu_src, cpu_dest);
5138         break;
5139     case 0x27: /* fsglmul */
5140         gen_helper_fsglmul(tcg_env, cpu_dest, cpu_src, cpu_dest);
5141         break;
5142     case 0x28: /* fsub */
5143         gen_helper_fsub(tcg_env, cpu_dest, cpu_src, cpu_dest);
5144         break;
5145     case 0x68: /* fssub */
5146         gen_helper_fssub(tcg_env, cpu_dest, cpu_src, cpu_dest);
5147         break;
5148     case 0x6c: /* fdsub */
5149         gen_helper_fdsub(tcg_env, cpu_dest, cpu_src, cpu_dest);
5150         break;
5151     case 0x30: case 0x31: case 0x32:
5152     case 0x33: case 0x34: case 0x35:
5153     case 0x36: case 0x37: {
5154             TCGv_ptr cpu_dest2 = gen_fp_ptr(REG(ext, 0));
5155             gen_helper_fsincos(tcg_env, cpu_dest, cpu_dest2, cpu_src);
5156         }
5157         break;
5158     case 0x38: /* fcmp */
5159         gen_helper_fcmp(tcg_env, cpu_src, cpu_dest);
5160         return;
5161     case 0x3a: /* ftst */
5162         gen_helper_ftst(tcg_env, cpu_src);
5163         return;
5164     default:
5165         goto undef;
5166     }
5167     gen_helper_ftst(tcg_env, cpu_dest);
5168     return;
5169 undef:
5170     /* FIXME: Is this right for offset addressing modes?  */
5171     s->pc -= 2;
5172     disas_undef_fpu(env, s, insn);
5173 }
5174 
gen_fcc_cond(DisasCompare * c,DisasContext * s,int cond)5175 static void gen_fcc_cond(DisasCompare *c, DisasContext *s, int cond)
5176 {
5177     TCGv fpsr;
5178     int imm = 0;
5179 
5180     /* TODO: Raise BSUN exception.  */
5181     fpsr = tcg_temp_new();
5182     gen_load_fcr(s, fpsr, M68K_FPSR);
5183     c->v1 = fpsr;
5184 
5185     switch (cond) {
5186     case 0:  /* False */
5187     case 16: /* Signaling False */
5188         c->tcond = TCG_COND_NEVER;
5189         break;
5190     case 1:  /* EQual Z */
5191     case 17: /* Signaling EQual Z */
5192         imm = FPSR_CC_Z;
5193         c->tcond = TCG_COND_TSTNE;
5194         break;
5195     case 2:  /* Ordered Greater Than !(A || Z || N) */
5196     case 18: /* Greater Than !(A || Z || N) */
5197         imm = FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N;
5198         c->tcond = TCG_COND_TSTEQ;
5199         break;
5200     case 3:  /* Ordered Greater than or Equal Z || !(A || N) */
5201     case 19: /* Greater than or Equal Z || !(A || N) */
5202         c->v1 = tcg_temp_new();
5203         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5204         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5205         tcg_gen_or_i32(c->v1, c->v1, fpsr);
5206         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5207         imm = FPSR_CC_Z | FPSR_CC_N;
5208         c->tcond = TCG_COND_TSTNE;
5209         break;
5210     case 4:  /* Ordered Less Than !(!N || A || Z); */
5211     case 20: /* Less Than !(!N || A || Z); */
5212         c->v1 = tcg_temp_new();
5213         tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
5214         imm = FPSR_CC_N | FPSR_CC_A | FPSR_CC_Z;
5215         c->tcond = TCG_COND_TSTEQ;
5216         break;
5217     case 5:  /* Ordered Less than or Equal Z || (N && !A) */
5218     case 21: /* Less than or Equal Z || (N && !A) */
5219         c->v1 = tcg_temp_new();
5220         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5221         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5222         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5223         imm = FPSR_CC_Z | FPSR_CC_N;
5224         c->tcond = TCG_COND_TSTNE;
5225         break;
5226     case 6:  /* Ordered Greater or Less than !(A || Z) */
5227     case 22: /* Greater or Less than !(A || Z) */
5228         imm = FPSR_CC_A | FPSR_CC_Z;
5229         c->tcond = TCG_COND_TSTEQ;
5230         break;
5231     case 7:  /* Ordered !A */
5232     case 23: /* Greater, Less or Equal !A */
5233         imm = FPSR_CC_A;
5234         c->tcond = TCG_COND_TSTEQ;
5235         break;
5236     case 8:  /* Unordered A */
5237     case 24: /* Not Greater, Less or Equal A */
5238         imm = FPSR_CC_A;
5239         c->tcond = TCG_COND_TSTNE;
5240         break;
5241     case 9:  /* Unordered or Equal A || Z */
5242     case 25: /* Not Greater or Less then A || Z */
5243         imm = FPSR_CC_A | FPSR_CC_Z;
5244         c->tcond = TCG_COND_TSTNE;
5245         break;
5246     case 10: /* Unordered or Greater Than A || !(N || Z)) */
5247     case 26: /* Not Less or Equal A || !(N || Z)) */
5248         c->v1 = tcg_temp_new();
5249         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5250         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5251         tcg_gen_or_i32(c->v1, c->v1, fpsr);
5252         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5253         imm = FPSR_CC_A | FPSR_CC_N;
5254         c->tcond = TCG_COND_TSTNE;
5255         break;
5256     case 11: /* Unordered or Greater or Equal A || Z || !N */
5257     case 27: /* Not Less Than A || Z || !N */
5258         c->v1 = tcg_temp_new();
5259         tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
5260         imm = FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N;
5261         c->tcond = TCG_COND_TSTNE;
5262         break;
5263     case 12: /* Unordered or Less Than A || (N && !Z) */
5264     case 28: /* Not Greater than or Equal A || (N && !Z) */
5265         c->v1 = tcg_temp_new();
5266         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5267         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5268         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5269         imm = FPSR_CC_A | FPSR_CC_N;
5270         c->tcond = TCG_COND_TSTNE;
5271         break;
5272     case 13: /* Unordered or Less or Equal A || Z || N */
5273     case 29: /* Not Greater Than A || Z || N */
5274         imm = FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N;
5275         c->tcond = TCG_COND_TSTNE;
5276         break;
5277     case 14: /* Not Equal !Z */
5278     case 30: /* Signaling Not Equal !Z */
5279         imm = FPSR_CC_Z;
5280         c->tcond = TCG_COND_TSTEQ;
5281         break;
5282     case 15: /* True */
5283     case 31: /* Signaling True */
5284         c->tcond = TCG_COND_ALWAYS;
5285         break;
5286     }
5287     c->v2 = tcg_constant_i32(imm);
5288 }
5289 
gen_fjmpcc(DisasContext * s,int cond,TCGLabel * l1)5290 static void gen_fjmpcc(DisasContext *s, int cond, TCGLabel *l1)
5291 {
5292     DisasCompare c;
5293 
5294     gen_fcc_cond(&c, s, cond);
5295     update_cc_op(s);
5296     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
5297 }
5298 
DISAS_INSN(fbcc)5299 DISAS_INSN(fbcc)
5300 {
5301     uint32_t offset;
5302     uint32_t base;
5303     TCGLabel *l1;
5304 
5305     base = s->pc;
5306     offset = (int16_t)read_im16(env, s);
5307     if (insn & (1 << 6)) {
5308         offset = (offset << 16) | read_im16(env, s);
5309     }
5310 
5311     l1 = gen_new_label();
5312     update_cc_op(s);
5313     gen_fjmpcc(s, insn & 0x3f, l1);
5314     gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
5315     gen_set_label(l1);
5316     gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
5317 }
5318 
DISAS_INSN(fscc)5319 DISAS_INSN(fscc)
5320 {
5321     DisasCompare c;
5322     int cond;
5323     TCGv tmp;
5324     uint16_t ext;
5325 
5326     ext = read_im16(env, s);
5327     cond = ext & 0x3f;
5328     gen_fcc_cond(&c, s, cond);
5329 
5330     tmp = tcg_temp_new();
5331     tcg_gen_negsetcond_i32(c.tcond, tmp, c.v1, c.v2);
5332 
5333     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
5334 }
5335 
DISAS_INSN(ftrapcc)5336 DISAS_INSN(ftrapcc)
5337 {
5338     DisasCompare c;
5339     uint16_t ext;
5340     int cond;
5341 
5342     ext = read_im16(env, s);
5343     cond = ext & 0x3f;
5344 
5345     /* Consume and discard the immediate operand. */
5346     switch (extract32(insn, 0, 3)) {
5347     case 2: /* ftrapcc.w */
5348         (void)read_im16(env, s);
5349         break;
5350     case 3: /* ftrapcc.l */
5351         (void)read_im32(env, s);
5352         break;
5353     case 4: /* ftrapcc (no operand) */
5354         break;
5355     default:
5356         /* ftrapcc registered with only valid opmodes */
5357         g_assert_not_reached();
5358     }
5359 
5360     gen_fcc_cond(&c, s, cond);
5361     do_trapcc(s, &c);
5362 }
5363 
5364 #if !defined(CONFIG_USER_ONLY)
DISAS_INSN(frestore)5365 DISAS_INSN(frestore)
5366 {
5367     TCGv addr;
5368 
5369     if (IS_USER(s)) {
5370         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5371         return;
5372     }
5373     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5374         SRC_EA(env, addr, OS_LONG, 0, NULL);
5375         /* FIXME: check the state frame */
5376     } else {
5377         disas_undef(env, s, insn);
5378     }
5379 }
5380 
DISAS_INSN(fsave)5381 DISAS_INSN(fsave)
5382 {
5383     if (IS_USER(s)) {
5384         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5385         return;
5386     }
5387 
5388     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5389         /* always write IDLE */
5390         TCGv idle = tcg_constant_i32(0x41000000);
5391         DEST_EA(env, insn, OS_LONG, idle, NULL);
5392     } else {
5393         disas_undef(env, s, insn);
5394     }
5395 }
5396 #endif
5397 
gen_mac_extract_word(DisasContext * s,TCGv val,int upper)5398 static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
5399 {
5400     TCGv tmp = tcg_temp_new();
5401     if (s->env->macsr & MACSR_FI) {
5402         if (upper)
5403             tcg_gen_andi_i32(tmp, val, 0xffff0000);
5404         else
5405             tcg_gen_shli_i32(tmp, val, 16);
5406     } else if (s->env->macsr & MACSR_SU) {
5407         if (upper)
5408             tcg_gen_sari_i32(tmp, val, 16);
5409         else
5410             tcg_gen_ext16s_i32(tmp, val);
5411     } else {
5412         if (upper)
5413             tcg_gen_shri_i32(tmp, val, 16);
5414         else
5415             tcg_gen_ext16u_i32(tmp, val);
5416     }
5417     return tmp;
5418 }
5419 
gen_mac_clear_flags(void)5420 static void gen_mac_clear_flags(void)
5421 {
5422     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
5423                      ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
5424 }
5425 
DISAS_INSN(mac)5426 DISAS_INSN(mac)
5427 {
5428     TCGv rx;
5429     TCGv ry;
5430     uint16_t ext;
5431     int acc;
5432     TCGv tmp;
5433     TCGv addr;
5434     TCGv loadval;
5435     int dual;
5436     TCGv saved_flags;
5437 
5438     if (!s->done_mac) {
5439         s->mactmp = tcg_temp_new_i64();
5440         s->done_mac = 1;
5441     }
5442 
5443     ext = read_im16(env, s);
5444 
5445     acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
5446     dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
5447     if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
5448         disas_undef(env, s, insn);
5449         return;
5450     }
5451     if (insn & 0x30) {
5452         /* MAC with load.  */
5453         tmp = gen_lea(env, s, insn, OS_LONG);
5454         addr = tcg_temp_new();
5455         tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
5456         /*
5457          * Load the value now to ensure correct exception behavior.
5458          * Perform writeback after reading the MAC inputs.
5459          */
5460         loadval = gen_load(s, OS_LONG, addr, 0, IS_USER(s));
5461 
5462         acc ^= 1;
5463         rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
5464         ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
5465     } else {
5466         loadval = addr = NULL_QREG;
5467         rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5468         ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5469     }
5470 
5471     gen_mac_clear_flags();
5472 #if 0
5473     l1 = -1;
5474     /* Disabled because conditional branches clobber temporary vars.  */
5475     if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
5476         /* Skip the multiply if we know we will ignore it.  */
5477         l1 = gen_new_label();
5478         tmp = tcg_temp_new();
5479         tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
5480         gen_op_jmp_nz32(tmp, l1);
5481     }
5482 #endif
5483 
5484     if ((ext & 0x0800) == 0) {
5485         /* Word.  */
5486         rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
5487         ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
5488     }
5489     if (s->env->macsr & MACSR_FI) {
5490         gen_helper_macmulf(s->mactmp, tcg_env, rx, ry);
5491     } else {
5492         if (s->env->macsr & MACSR_SU)
5493             gen_helper_macmuls(s->mactmp, tcg_env, rx, ry);
5494         else
5495             gen_helper_macmulu(s->mactmp, tcg_env, rx, ry);
5496         switch ((ext >> 9) & 3) {
5497         case 1:
5498             tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
5499             break;
5500         case 3:
5501             tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
5502             break;
5503         }
5504     }
5505 
5506     if (dual) {
5507         /* Save the overflow flag from the multiply.  */
5508         saved_flags = tcg_temp_new();
5509         tcg_gen_mov_i32(saved_flags, QREG_MACSR);
5510     } else {
5511         saved_flags = NULL_QREG;
5512     }
5513 
5514 #if 0
5515     /* Disabled because conditional branches clobber temporary vars.  */
5516     if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
5517         /* Skip the accumulate if the value is already saturated.  */
5518         l1 = gen_new_label();
5519         tmp = tcg_temp_new();
5520         gen_op_and32(tmp, QREG_MACSR, tcg_constant_i32(MACSR_PAV0 << acc));
5521         gen_op_jmp_nz32(tmp, l1);
5522     }
5523 #endif
5524 
5525     if (insn & 0x100)
5526         tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5527     else
5528         tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5529 
5530     if (s->env->macsr & MACSR_FI)
5531         gen_helper_macsatf(tcg_env, tcg_constant_i32(acc));
5532     else if (s->env->macsr & MACSR_SU)
5533         gen_helper_macsats(tcg_env, tcg_constant_i32(acc));
5534     else
5535         gen_helper_macsatu(tcg_env, tcg_constant_i32(acc));
5536 
5537 #if 0
5538     /* Disabled because conditional branches clobber temporary vars.  */
5539     if (l1 != -1)
5540         gen_set_label(l1);
5541 #endif
5542 
5543     if (dual) {
5544         /* Dual accumulate variant.  */
5545         acc = (ext >> 2) & 3;
5546         /* Restore the overflow flag from the multiplier.  */
5547         tcg_gen_mov_i32(QREG_MACSR, saved_flags);
5548 #if 0
5549         /* Disabled because conditional branches clobber temporary vars.  */
5550         if ((s->env->macsr & MACSR_OMC) != 0) {
5551             /* Skip the accumulate if the value is already saturated.  */
5552             l1 = gen_new_label();
5553             tmp = tcg_temp_new();
5554             gen_op_and32(tmp, QREG_MACSR, tcg_constant_i32(MACSR_PAV0 << acc));
5555             gen_op_jmp_nz32(tmp, l1);
5556         }
5557 #endif
5558         if (ext & 2)
5559             tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5560         else
5561             tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5562         if (s->env->macsr & MACSR_FI)
5563             gen_helper_macsatf(tcg_env, tcg_constant_i32(acc));
5564         else if (s->env->macsr & MACSR_SU)
5565             gen_helper_macsats(tcg_env, tcg_constant_i32(acc));
5566         else
5567             gen_helper_macsatu(tcg_env, tcg_constant_i32(acc));
5568 #if 0
5569         /* Disabled because conditional branches clobber temporary vars.  */
5570         if (l1 != -1)
5571             gen_set_label(l1);
5572 #endif
5573     }
5574     gen_helper_mac_set_flags(tcg_env, tcg_constant_i32(acc));
5575 
5576     if (insn & 0x30) {
5577         TCGv rw;
5578         rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5579         tcg_gen_mov_i32(rw, loadval);
5580         /*
5581          * FIXME: Should address writeback happen with the masked or
5582          * unmasked value?
5583          */
5584         switch ((insn >> 3) & 7) {
5585         case 3: /* Post-increment.  */
5586             tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
5587             break;
5588         case 4: /* Pre-decrement.  */
5589             tcg_gen_mov_i32(AREG(insn, 0), addr);
5590         }
5591     }
5592 }
5593 
DISAS_INSN(from_mac)5594 DISAS_INSN(from_mac)
5595 {
5596     TCGv rx;
5597     TCGv_i64 acc;
5598     int accnum;
5599 
5600     rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5601     accnum = (insn >> 9) & 3;
5602     acc = MACREG(accnum);
5603     if (s->env->macsr & MACSR_FI) {
5604         gen_helper_get_macf(rx, tcg_env, acc);
5605     } else if ((s->env->macsr & MACSR_OMC) == 0) {
5606         tcg_gen_extrl_i64_i32(rx, acc);
5607     } else if (s->env->macsr & MACSR_SU) {
5608         gen_helper_get_macs(rx, acc);
5609     } else {
5610         gen_helper_get_macu(rx, acc);
5611     }
5612     if (insn & 0x40) {
5613         tcg_gen_movi_i64(acc, 0);
5614         tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5615     }
5616 }
5617 
DISAS_INSN(move_mac)5618 DISAS_INSN(move_mac)
5619 {
5620     /* FIXME: This can be done without a helper.  */
5621     int src;
5622     TCGv dest;
5623     src = insn & 3;
5624     dest = tcg_constant_i32((insn >> 9) & 3);
5625     gen_helper_mac_move(tcg_env, dest, tcg_constant_i32(src));
5626     gen_mac_clear_flags();
5627     gen_helper_mac_set_flags(tcg_env, dest);
5628 }
5629 
DISAS_INSN(from_macsr)5630 DISAS_INSN(from_macsr)
5631 {
5632     TCGv reg;
5633 
5634     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5635     tcg_gen_mov_i32(reg, QREG_MACSR);
5636 }
5637 
DISAS_INSN(from_mask)5638 DISAS_INSN(from_mask)
5639 {
5640     TCGv reg;
5641     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5642     tcg_gen_mov_i32(reg, QREG_MAC_MASK);
5643 }
5644 
DISAS_INSN(from_mext)5645 DISAS_INSN(from_mext)
5646 {
5647     TCGv reg;
5648     TCGv acc;
5649     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5650     acc = tcg_constant_i32((insn & 0x400) ? 2 : 0);
5651     if (s->env->macsr & MACSR_FI)
5652         gen_helper_get_mac_extf(reg, tcg_env, acc);
5653     else
5654         gen_helper_get_mac_exti(reg, tcg_env, acc);
5655 }
5656 
DISAS_INSN(macsr_to_ccr)5657 DISAS_INSN(macsr_to_ccr)
5658 {
5659     TCGv tmp = tcg_temp_new();
5660 
5661     /* Note that X and C are always cleared. */
5662     tcg_gen_andi_i32(tmp, QREG_MACSR, CCF_N | CCF_Z | CCF_V);
5663     gen_helper_set_ccr(tcg_env, tmp);
5664     set_cc_op(s, CC_OP_FLAGS);
5665 }
5666 
DISAS_INSN(to_mac)5667 DISAS_INSN(to_mac)
5668 {
5669     TCGv_i64 acc;
5670     TCGv val;
5671     int accnum;
5672     accnum = (insn >> 9) & 3;
5673     acc = MACREG(accnum);
5674     SRC_EA(env, val, OS_LONG, 0, NULL);
5675     if (s->env->macsr & MACSR_FI) {
5676         tcg_gen_ext_i32_i64(acc, val);
5677         tcg_gen_shli_i64(acc, acc, 8);
5678     } else if (s->env->macsr & MACSR_SU) {
5679         tcg_gen_ext_i32_i64(acc, val);
5680     } else {
5681         tcg_gen_extu_i32_i64(acc, val);
5682     }
5683     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5684     gen_mac_clear_flags();
5685     gen_helper_mac_set_flags(tcg_env, tcg_constant_i32(accnum));
5686 }
5687 
DISAS_INSN(to_macsr)5688 DISAS_INSN(to_macsr)
5689 {
5690     TCGv val;
5691     SRC_EA(env, val, OS_LONG, 0, NULL);
5692     gen_helper_set_macsr(tcg_env, val);
5693     gen_exit_tb(s);
5694 }
5695 
DISAS_INSN(to_mask)5696 DISAS_INSN(to_mask)
5697 {
5698     TCGv val;
5699     SRC_EA(env, val, OS_LONG, 0, NULL);
5700     tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
5701 }
5702 
DISAS_INSN(to_mext)5703 DISAS_INSN(to_mext)
5704 {
5705     TCGv val;
5706     TCGv acc;
5707     SRC_EA(env, val, OS_LONG, 0, NULL);
5708     acc = tcg_constant_i32((insn & 0x400) ? 2 : 0);
5709     if (s->env->macsr & MACSR_FI)
5710         gen_helper_set_mac_extf(tcg_env, val, acc);
5711     else if (s->env->macsr & MACSR_SU)
5712         gen_helper_set_mac_exts(tcg_env, val, acc);
5713     else
5714         gen_helper_set_mac_extu(tcg_env, val, acc);
5715 }
5716 
5717 static disas_proc opcode_table[65536];
5718 
5719 static void
register_opcode(disas_proc proc,uint16_t opcode,uint16_t mask)5720 register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
5721 {
5722   int i;
5723   int from;
5724   int to;
5725 
5726   /* Sanity check.  All set bits must be included in the mask.  */
5727   if (opcode & ~mask) {
5728       fprintf(stderr,
5729               "qemu internal error: bogus opcode definition %04x/%04x\n",
5730               opcode, mask);
5731       abort();
5732   }
5733   /*
5734    * This could probably be cleverer.  For now just optimize the case where
5735    * the top bits are known.
5736    */
5737   /* Find the first zero bit in the mask.  */
5738   i = 0x8000;
5739   while ((i & mask) != 0)
5740       i >>= 1;
5741   /* Iterate over all combinations of this and lower bits.  */
5742   if (i == 0)
5743       i = 1;
5744   else
5745       i <<= 1;
5746   from = opcode & ~(i - 1);
5747   to = from + i;
5748   for (i = from; i < to; i++) {
5749       if ((i & mask) == opcode)
5750           opcode_table[i] = proc;
5751   }
5752 }
5753 
5754 /*
5755  * Register m68k opcode handlers.  Order is important.
5756  * Later insn override earlier ones.
5757  */
register_m68k_insns(CPUM68KState * env)5758 void register_m68k_insns (CPUM68KState *env)
5759 {
5760     /*
5761      * Build the opcode table only once to avoid
5762      * multithreading issues.
5763      */
5764     if (opcode_table[0] != NULL) {
5765         return;
5766     }
5767 
5768     /*
5769      * use BASE() for instruction available
5770      * for CF_ISA_A and M68000.
5771      */
5772 #define BASE(name, opcode, mask) \
5773     register_opcode(disas_##name, 0x##opcode, 0x##mask)
5774 #define INSN(name, opcode, mask, feature) do { \
5775     if (m68k_feature(env, M68K_FEATURE_##feature)) \
5776         BASE(name, opcode, mask); \
5777     } while(0)
5778     BASE(undef,     0000, 0000);
5779     INSN(arith_im,  0080, fff8, CF_ISA_A);
5780     INSN(arith_im,  0000, ff00, M68K);
5781     INSN(chk2,      00c0, f9c0, CHK2);
5782     INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
5783     BASE(bitop_reg, 0100, f1c0);
5784     BASE(bitop_reg, 0140, f1c0);
5785     BASE(bitop_reg, 0180, f1c0);
5786     BASE(bitop_reg, 01c0, f1c0);
5787     INSN(movep,     0108, f138, MOVEP);
5788     INSN(arith_im,  0280, fff8, CF_ISA_A);
5789     INSN(arith_im,  0200, ff00, M68K);
5790     INSN(undef,     02c0, ffc0, M68K);
5791     INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
5792     INSN(arith_im,  0480, fff8, CF_ISA_A);
5793     INSN(arith_im,  0400, ff00, M68K);
5794     INSN(undef,     04c0, ffc0, M68K);
5795     INSN(arith_im,  0600, ff00, M68K);
5796     INSN(undef,     06c0, ffc0, M68K);
5797     INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
5798     INSN(arith_im,  0680, fff8, CF_ISA_A);
5799     INSN(arith_im,  0c00, ff38, CF_ISA_A);
5800     INSN(arith_im,  0c00, ff00, M68K);
5801     BASE(bitop_im,  0800, ffc0);
5802     BASE(bitop_im,  0840, ffc0);
5803     BASE(bitop_im,  0880, ffc0);
5804     BASE(bitop_im,  08c0, ffc0);
5805     INSN(arith_im,  0a80, fff8, CF_ISA_A);
5806     INSN(arith_im,  0a00, ff00, M68K);
5807 #if !defined(CONFIG_USER_ONLY)
5808     INSN(moves,     0e00, ff00, M68K);
5809 #endif
5810     INSN(cas,       0ac0, ffc0, CAS);
5811     INSN(cas,       0cc0, ffc0, CAS);
5812     INSN(cas,       0ec0, ffc0, CAS);
5813     INSN(cas2w,     0cfc, ffff, CAS);
5814     INSN(cas2l,     0efc, ffff, CAS);
5815     BASE(move,      1000, f000);
5816     BASE(move,      2000, f000);
5817     BASE(move,      3000, f000);
5818     INSN(chk,       4000, f040, M68K);
5819     INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
5820     INSN(negx,      4080, fff8, CF_ISA_A);
5821     INSN(negx,      4000, ff00, M68K);
5822     INSN(undef,     40c0, ffc0, M68K);
5823     INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
5824     INSN(move_from_sr, 40c0, ffc0, M68K);
5825     BASE(lea,       41c0, f1c0);
5826     BASE(clr,       4200, ff00);
5827     BASE(undef,     42c0, ffc0);
5828     INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
5829     INSN(move_from_ccr, 42c0, ffc0, M68K);
5830     INSN(neg,       4480, fff8, CF_ISA_A);
5831     INSN(neg,       4400, ff00, M68K);
5832     INSN(undef,     44c0, ffc0, M68K);
5833     BASE(move_to_ccr, 44c0, ffc0);
5834     INSN(not,       4680, fff8, CF_ISA_A);
5835     INSN(not,       4600, ff00, M68K);
5836 #if !defined(CONFIG_USER_ONLY)
5837     BASE(move_to_sr, 46c0, ffc0);
5838 #endif
5839     INSN(nbcd,      4800, ffc0, M68K);
5840     INSN(linkl,     4808, fff8, M68K);
5841     BASE(pea,       4840, ffc0);
5842     BASE(swap,      4840, fff8);
5843     INSN(bkpt,      4848, fff8, BKPT);
5844     INSN(movem,     48d0, fbf8, CF_ISA_A);
5845     INSN(movem,     48e8, fbf8, CF_ISA_A);
5846     INSN(movem,     4880, fb80, M68K);
5847     BASE(ext,       4880, fff8);
5848     BASE(ext,       48c0, fff8);
5849     BASE(ext,       49c0, fff8);
5850     BASE(tst,       4a00, ff00);
5851     INSN(tas,       4ac0, ffc0, CF_ISA_B);
5852     INSN(tas,       4ac0, ffc0, M68K);
5853 #if !defined(CONFIG_USER_ONLY)
5854     INSN(halt,      4ac8, ffff, CF_ISA_A);
5855     INSN(halt,      4ac8, ffff, M68K);
5856 #endif
5857     INSN(pulse,     4acc, ffff, CF_ISA_A);
5858     BASE(illegal,   4afc, ffff);
5859     INSN(mull,      4c00, ffc0, CF_ISA_A);
5860     INSN(mull,      4c00, ffc0, LONG_MULDIV);
5861     INSN(divl,      4c40, ffc0, CF_ISA_A);
5862     INSN(divl,      4c40, ffc0, LONG_MULDIV);
5863     INSN(sats,      4c80, fff8, CF_ISA_B);
5864     BASE(trap,      4e40, fff0);
5865     BASE(link,      4e50, fff8);
5866     BASE(unlk,      4e58, fff8);
5867 #if !defined(CONFIG_USER_ONLY)
5868     INSN(move_to_usp, 4e60, fff8, USP);
5869     INSN(move_from_usp, 4e68, fff8, USP);
5870     INSN(reset,     4e70, ffff, M68K);
5871     BASE(stop,      4e72, ffff);
5872     BASE(rte,       4e73, ffff);
5873     INSN(cf_movec,  4e7b, ffff, CF_ISA_A);
5874     INSN(m68k_movec, 4e7a, fffe, MOVEC);
5875 #endif
5876     BASE(nop,       4e71, ffff);
5877     INSN(rtd,       4e74, ffff, RTD);
5878     BASE(rts,       4e75, ffff);
5879     INSN(trapv,     4e76, ffff, M68K);
5880     INSN(rtr,       4e77, ffff, M68K);
5881     BASE(jump,      4e80, ffc0);
5882     BASE(jump,      4ec0, ffc0);
5883     INSN(addsubq,   5000, f080, M68K);
5884     BASE(addsubq,   5080, f0c0);
5885     INSN(scc,       50c0, f0f8, CF_ISA_A); /* Scc.B Dx   */
5886     INSN(scc,       50c0, f0c0, M68K);     /* Scc.B <EA> */
5887     INSN(dbcc,      50c8, f0f8, M68K);
5888     INSN(trapcc,    50fa, f0fe, TRAPCC);   /* opmode 010, 011 */
5889     INSN(trapcc,    50fc, f0ff, TRAPCC);   /* opmode 100 */
5890     INSN(trapcc,    51fa, fffe, CF_ISA_A); /* TPF (trapf) opmode 010, 011 */
5891     INSN(trapcc,    51fc, ffff, CF_ISA_A); /* TPF (trapf) opmode 100 */
5892 
5893     /* Branch instructions.  */
5894     BASE(branch,    6000, f000);
5895     /* Disable long branch instructions, then add back the ones we want.  */
5896     BASE(undef,     60ff, f0ff); /* All long branches.  */
5897     INSN(branch,    60ff, f0ff, CF_ISA_B);
5898     INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
5899     INSN(branch,    60ff, ffff, BRAL);
5900     INSN(branch,    60ff, f0ff, BCCL);
5901 
5902     BASE(moveq,     7000, f100);
5903     INSN(mvzs,      7100, f100, CF_ISA_B);
5904     BASE(or,        8000, f000);
5905     BASE(divw,      80c0, f0c0);
5906     INSN(sbcd_reg,  8100, f1f8, M68K);
5907     INSN(sbcd_mem,  8108, f1f8, M68K);
5908     BASE(addsub,    9000, f000);
5909     INSN(undef,     90c0, f0c0, CF_ISA_A);
5910     INSN(subx_reg,  9180, f1f8, CF_ISA_A);
5911     INSN(subx_reg,  9100, f138, M68K);
5912     INSN(subx_mem,  9108, f138, M68K);
5913     INSN(suba,      91c0, f1c0, CF_ISA_A);
5914     INSN(suba,      90c0, f0c0, M68K);
5915 
5916     BASE(undef_mac, a000, f000);
5917     INSN(mac,       a000, f100, CF_EMAC);
5918     INSN(from_mac,  a180, f9b0, CF_EMAC);
5919     INSN(move_mac,  a110, f9fc, CF_EMAC);
5920     INSN(from_macsr,a980, f9f0, CF_EMAC);
5921     INSN(from_mask, ad80, fff0, CF_EMAC);
5922     INSN(from_mext, ab80, fbf0, CF_EMAC);
5923     INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
5924     INSN(to_mac,    a100, f9c0, CF_EMAC);
5925     INSN(to_macsr,  a900, ffc0, CF_EMAC);
5926     INSN(to_mext,   ab00, fbc0, CF_EMAC);
5927     INSN(to_mask,   ad00, ffc0, CF_EMAC);
5928 
5929     INSN(mov3q,     a140, f1c0, CF_ISA_B);
5930     INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
5931     INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
5932     INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
5933     INSN(cmp,       b080, f1c0, CF_ISA_A);
5934     INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
5935     INSN(cmp,       b000, f100, M68K);
5936     INSN(eor,       b100, f100, M68K);
5937     INSN(cmpm,      b108, f138, M68K);
5938     INSN(cmpa,      b0c0, f0c0, M68K);
5939     INSN(eor,       b180, f1c0, CF_ISA_A);
5940     BASE(and,       c000, f000);
5941     INSN(exg_dd,    c140, f1f8, M68K);
5942     INSN(exg_aa,    c148, f1f8, M68K);
5943     INSN(exg_da,    c188, f1f8, M68K);
5944     BASE(mulw,      c0c0, f0c0);
5945     INSN(abcd_reg,  c100, f1f8, M68K);
5946     INSN(abcd_mem,  c108, f1f8, M68K);
5947     BASE(addsub,    d000, f000);
5948     INSN(undef,     d0c0, f0c0, CF_ISA_A);
5949     INSN(addx_reg,      d180, f1f8, CF_ISA_A);
5950     INSN(addx_reg,  d100, f138, M68K);
5951     INSN(addx_mem,  d108, f138, M68K);
5952     INSN(adda,      d1c0, f1c0, CF_ISA_A);
5953     INSN(adda,      d0c0, f0c0, M68K);
5954     INSN(shift_im,  e080, f0f0, CF_ISA_A);
5955     INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
5956     INSN(shift8_im, e000, f0f0, M68K);
5957     INSN(shift16_im, e040, f0f0, M68K);
5958     INSN(shift_im,  e080, f0f0, M68K);
5959     INSN(shift8_reg, e020, f0f0, M68K);
5960     INSN(shift16_reg, e060, f0f0, M68K);
5961     INSN(shift_reg, e0a0, f0f0, M68K);
5962     INSN(shift_mem, e0c0, fcc0, M68K);
5963     INSN(rotate_im, e090, f0f0, M68K);
5964     INSN(rotate8_im, e010, f0f0, M68K);
5965     INSN(rotate16_im, e050, f0f0, M68K);
5966     INSN(rotate_reg, e0b0, f0f0, M68K);
5967     INSN(rotate8_reg, e030, f0f0, M68K);
5968     INSN(rotate16_reg, e070, f0f0, M68K);
5969     INSN(rotate_mem, e4c0, fcc0, M68K);
5970     INSN(bfext_mem, e9c0, fdc0, BITFIELD);  /* bfextu & bfexts */
5971     INSN(bfext_reg, e9c0, fdf8, BITFIELD);
5972     INSN(bfins_mem, efc0, ffc0, BITFIELD);
5973     INSN(bfins_reg, efc0, fff8, BITFIELD);
5974     INSN(bfop_mem, eac0, ffc0, BITFIELD);   /* bfchg */
5975     INSN(bfop_reg, eac0, fff8, BITFIELD);   /* bfchg */
5976     INSN(bfop_mem, ecc0, ffc0, BITFIELD);   /* bfclr */
5977     INSN(bfop_reg, ecc0, fff8, BITFIELD);   /* bfclr */
5978     INSN(bfop_mem, edc0, ffc0, BITFIELD);   /* bfffo */
5979     INSN(bfop_reg, edc0, fff8, BITFIELD);   /* bfffo */
5980     INSN(bfop_mem, eec0, ffc0, BITFIELD);   /* bfset */
5981     INSN(bfop_reg, eec0, fff8, BITFIELD);   /* bfset */
5982     INSN(bfop_mem, e8c0, ffc0, BITFIELD);   /* bftst */
5983     INSN(bfop_reg, e8c0, fff8, BITFIELD);   /* bftst */
5984     BASE(undef_fpu, f000, f000);
5985     INSN(fpu,       f200, ffc0, CF_FPU);
5986     INSN(fbcc,      f280, ffc0, CF_FPU);
5987     INSN(fpu,       f200, ffc0, FPU);
5988     INSN(fscc,      f240, ffc0, FPU);
5989     INSN(ftrapcc,   f27a, fffe, FPU);       /* opmode 010, 011 */
5990     INSN(ftrapcc,   f27c, ffff, FPU);       /* opmode 100 */
5991     INSN(fbcc,      f280, ff80, FPU);
5992 #if !defined(CONFIG_USER_ONLY)
5993     INSN(frestore,  f340, ffc0, CF_FPU);
5994     INSN(fsave,     f300, ffc0, CF_FPU);
5995     INSN(frestore,  f340, ffc0, FPU);
5996     INSN(fsave,     f300, ffc0, FPU);
5997     INSN(intouch,   f340, ffc0, CF_ISA_A);
5998     INSN(cpushl,    f428, ff38, CF_ISA_A);
5999     INSN(cpush,     f420, ff20, M68040);
6000     INSN(cinv,      f400, ff20, M68040);
6001     INSN(pflush,    f500, ffe0, M68040);
6002     INSN(ptest,     f548, ffd8, M68040);
6003     INSN(wddata,    fb00, ff00, CF_ISA_A);
6004     INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
6005 #endif
6006     INSN(move16_mem, f600, ffe0, M68040);
6007     INSN(move16_reg, f620, fff8, M68040);
6008 #undef INSN
6009 }
6010 
m68k_tr_init_disas_context(DisasContextBase * dcbase,CPUState * cpu)6011 static void m68k_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
6012 {
6013     DisasContext *dc = container_of(dcbase, DisasContext, base);
6014     CPUM68KState *env = cpu_env(cpu);
6015 
6016     dc->env = env;
6017     dc->pc = dc->base.pc_first;
6018     /* This value will always be filled in properly before m68k_tr_tb_stop. */
6019     dc->pc_prev = 0xdeadbeef;
6020     dc->cc_op = CC_OP_DYNAMIC;
6021     dc->cc_op_synced = 1;
6022     dc->done_mac = 0;
6023     dc->writeback_mask = 0;
6024 
6025     dc->ss_active = (M68K_SR_TRACE(env->sr) == M68K_SR_TRACE_ANY_INS);
6026     /* If architectural single step active, limit to 1 */
6027     if (dc->ss_active) {
6028         dc->base.max_insns = 1;
6029     }
6030 }
6031 
m68k_tr_tb_start(DisasContextBase * dcbase,CPUState * cpu)6032 static void m68k_tr_tb_start(DisasContextBase *dcbase, CPUState *cpu)
6033 {
6034 }
6035 
m68k_tr_insn_start(DisasContextBase * dcbase,CPUState * cpu)6036 static void m68k_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
6037 {
6038     DisasContext *dc = container_of(dcbase, DisasContext, base);
6039     tcg_gen_insn_start(dc->base.pc_next, dc->cc_op);
6040 }
6041 
m68k_tr_translate_insn(DisasContextBase * dcbase,CPUState * cpu)6042 static void m68k_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
6043 {
6044     DisasContext *dc = container_of(dcbase, DisasContext, base);
6045     CPUM68KState *env = cpu_env(cpu);
6046     uint16_t insn = read_im16(env, dc);
6047 
6048     opcode_table[insn](env, dc, insn);
6049     do_writebacks(dc);
6050 
6051     dc->pc_prev = dc->base.pc_next;
6052     dc->base.pc_next = dc->pc;
6053 
6054     if (dc->base.is_jmp == DISAS_NEXT) {
6055         /*
6056          * Stop translation when the next insn might touch a new page.
6057          * This ensures that prefetch aborts at the right place.
6058          *
6059          * We cannot determine the size of the next insn without
6060          * completely decoding it.  However, the maximum insn size
6061          * is 32 bytes, so end if we do not have that much remaining.
6062          * This may produce several small TBs at the end of each page,
6063          * but they will all be linked with goto_tb.
6064          *
6065          * ??? ColdFire maximum is 4 bytes; MC68000's maximum is also
6066          * smaller than MC68020's.
6067          */
6068         target_ulong start_page_offset
6069             = dc->pc - (dc->base.pc_first & TARGET_PAGE_MASK);
6070 
6071         if (start_page_offset >= TARGET_PAGE_SIZE - 32) {
6072             dc->base.is_jmp = DISAS_TOO_MANY;
6073         }
6074     }
6075 }
6076 
m68k_tr_tb_stop(DisasContextBase * dcbase,CPUState * cpu)6077 static void m68k_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
6078 {
6079     DisasContext *dc = container_of(dcbase, DisasContext, base);
6080 
6081     switch (dc->base.is_jmp) {
6082     case DISAS_NORETURN:
6083         break;
6084     case DISAS_TOO_MANY:
6085         update_cc_op(dc);
6086         gen_jmp_tb(dc, 0, dc->pc, dc->pc_prev);
6087         break;
6088     case DISAS_JUMP:
6089         /* We updated CC_OP and PC in gen_jmp/gen_jmp_im.  */
6090         if (dc->ss_active) {
6091             gen_raise_exception_format2(dc, EXCP_TRACE, dc->pc_prev);
6092         } else {
6093             tcg_gen_lookup_and_goto_ptr();
6094         }
6095         break;
6096     case DISAS_EXIT:
6097         /*
6098          * We updated CC_OP and PC in gen_exit_tb, but also modified
6099          * other state that may require returning to the main loop.
6100          */
6101         if (dc->ss_active) {
6102             gen_raise_exception_format2(dc, EXCP_TRACE, dc->pc_prev);
6103         } else {
6104             tcg_gen_exit_tb(NULL, 0);
6105         }
6106         break;
6107     default:
6108         g_assert_not_reached();
6109     }
6110 }
6111 
6112 static const TranslatorOps m68k_tr_ops = {
6113     .init_disas_context = m68k_tr_init_disas_context,
6114     .tb_start           = m68k_tr_tb_start,
6115     .insn_start         = m68k_tr_insn_start,
6116     .translate_insn     = m68k_tr_translate_insn,
6117     .tb_stop            = m68k_tr_tb_stop,
6118 };
6119 
gen_intermediate_code(CPUState * cpu,TranslationBlock * tb,int * max_insns,vaddr pc,void * host_pc)6120 void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int *max_insns,
6121                            vaddr pc, void *host_pc)
6122 {
6123     DisasContext dc;
6124     translator_loop(cpu, tb, max_insns, pc, host_pc, &m68k_tr_ops, &dc.base);
6125 }
6126 
floatx80_to_double(CPUM68KState * env,uint16_t high,uint64_t low)6127 static double floatx80_to_double(CPUM68KState *env, uint16_t high, uint64_t low)
6128 {
6129     floatx80 a = { .high = high, .low = low };
6130     union {
6131         float64 f64;
6132         double d;
6133     } u;
6134 
6135     u.f64 = floatx80_to_float64(a, &env->fp_status);
6136     return u.d;
6137 }
6138 
m68k_cpu_dump_state(CPUState * cs,FILE * f,int flags)6139 void m68k_cpu_dump_state(CPUState *cs, FILE *f, int flags)
6140 {
6141     CPUM68KState *env = cpu_env(cs);
6142     int i;
6143     uint16_t sr;
6144     for (i = 0; i < 8; i++) {
6145         qemu_fprintf(f, "D%d = %08x   A%d = %08x   "
6146                      "F%d = %04x %016"PRIx64"  (%12g)\n",
6147                      i, env->dregs[i], i, env->aregs[i],
6148                      i, env->fregs[i].l.upper, env->fregs[i].l.lower,
6149                      floatx80_to_double(env, env->fregs[i].l.upper,
6150                                         env->fregs[i].l.lower));
6151     }
6152     qemu_fprintf(f, "PC = %08x   ", env->pc);
6153     sr = env->sr | cpu_m68k_get_ccr(env);
6154     qemu_fprintf(f, "SR = %04x T:%x I:%x %c%c %c%c%c%c%c\n",
6155                  sr, (sr & SR_T) >> SR_T_SHIFT, (sr & SR_I) >> SR_I_SHIFT,
6156                  (sr & SR_S) ? 'S' : 'U', (sr & SR_M) ? '%' : 'I',
6157                  (sr & CCF_X) ? 'X' : '-', (sr & CCF_N) ? 'N' : '-',
6158                  (sr & CCF_Z) ? 'Z' : '-', (sr & CCF_V) ? 'V' : '-',
6159                  (sr & CCF_C) ? 'C' : '-');
6160     qemu_fprintf(f, "FPSR = %08x %c%c%c%c ", env->fpsr,
6161                  (env->fpsr & FPSR_CC_A) ? 'A' : '-',
6162                  (env->fpsr & FPSR_CC_I) ? 'I' : '-',
6163                  (env->fpsr & FPSR_CC_Z) ? 'Z' : '-',
6164                  (env->fpsr & FPSR_CC_N) ? 'N' : '-');
6165     qemu_fprintf(f, "\n                                "
6166                  "FPCR =     %04x ", env->fpcr);
6167     switch (env->fpcr & FPCR_PREC_MASK) {
6168     case FPCR_PREC_X:
6169         qemu_fprintf(f, "X ");
6170         break;
6171     case FPCR_PREC_S:
6172         qemu_fprintf(f, "S ");
6173         break;
6174     case FPCR_PREC_D:
6175         qemu_fprintf(f, "D ");
6176         break;
6177     }
6178     switch (env->fpcr & FPCR_RND_MASK) {
6179     case FPCR_RND_N:
6180         qemu_fprintf(f, "RN ");
6181         break;
6182     case FPCR_RND_Z:
6183         qemu_fprintf(f, "RZ ");
6184         break;
6185     case FPCR_RND_M:
6186         qemu_fprintf(f, "RM ");
6187         break;
6188     case FPCR_RND_P:
6189         qemu_fprintf(f, "RP ");
6190         break;
6191     }
6192     qemu_fprintf(f, "\n");
6193 #ifndef CONFIG_USER_ONLY
6194     qemu_fprintf(f, "%sA7(MSP) = %08x %sA7(USP) = %08x %sA7(ISP) = %08x\n",
6195                  env->current_sp == M68K_SSP ? "->" : "  ", env->sp[M68K_SSP],
6196                  env->current_sp == M68K_USP ? "->" : "  ", env->sp[M68K_USP],
6197                  env->current_sp == M68K_ISP ? "->" : "  ", env->sp[M68K_ISP]);
6198     qemu_fprintf(f, "VBR = 0x%08x\n", env->vbr);
6199     qemu_fprintf(f, "SFC = %x DFC %x\n", env->sfc, env->dfc);
6200     qemu_fprintf(f, "SSW %08x TCR %08x URP %08x SRP %08x\n",
6201                  env->mmu.ssw, env->mmu.tcr, env->mmu.urp, env->mmu.srp);
6202     qemu_fprintf(f, "DTTR0/1: %08x/%08x ITTR0/1: %08x/%08x\n",
6203                  env->mmu.ttr[M68K_DTTR0], env->mmu.ttr[M68K_DTTR1],
6204                  env->mmu.ttr[M68K_ITTR0], env->mmu.ttr[M68K_ITTR1]);
6205     qemu_fprintf(f, "MMUSR %08x, fault at %08x\n",
6206                  env->mmu.mmusr, env->mmu.ar);
6207 #endif /* !CONFIG_USER_ONLY */
6208 }
6209