xref: /openbmc/qemu/target/m68k/translate.c (revision 042cea27)
1 /*
2  *  m68k translation
3  *
4  *  Copyright (c) 2005-2007 CodeSourcery
5  *  Written by Paul Brook
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "disas/disas.h"
24 #include "exec/exec-all.h"
25 #include "tcg-op.h"
26 #include "qemu/log.h"
27 #include "exec/cpu_ldst.h"
28 #include "exec/translator.h"
29 
30 #include "exec/helper-proto.h"
31 #include "exec/helper-gen.h"
32 
33 #include "trace-tcg.h"
34 #include "exec/log.h"
35 #include "fpu/softfloat.h"
36 
37 
38 //#define DEBUG_DISPATCH 1
39 
40 #define DEFO32(name, offset) static TCGv QREG_##name;
41 #define DEFO64(name, offset) static TCGv_i64 QREG_##name;
42 #include "qregs.def"
43 #undef DEFO32
44 #undef DEFO64
45 
46 static TCGv_i32 cpu_halted;
47 static TCGv_i32 cpu_exception_index;
48 
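/*
 * Register name storage: "D0".."D7" and "A0".."A7" take 3 bytes each
 * (including the NUL), "ACC0".."ACC3" take 5 bytes each, hence the
 * 2 * 8 * 3 + 5 * 4 sizing of cpu_reg_names below.
 */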
49 static char cpu_reg_names[2 * 8 * 3 + 5 * 4];
50 static TCGv cpu_dregs[8];
51 static TCGv cpu_aregs[8];
52 static TCGv_i64 cpu_macc[4];
53 
54 #define REG(insn, pos)  (((insn) >> (pos)) & 7)
55 #define DREG(insn, pos) cpu_dregs[REG(insn, pos)]
56 #define AREG(insn, pos) get_areg(s, REG(insn, pos))
57 #define MACREG(acc)     cpu_macc[acc]
58 #define QREG_SP         get_areg(s, 7)
59 
60 static TCGv NULL_QREG;
61 #define IS_NULL_QREG(t) (t == NULL_QREG)
62 /* Used to distinguish stores from bad addressing modes.  */
63 static TCGv store_dummy;
64 
65 #include "exec/gen-icount.h"
66 
67 void m68k_tcg_init(void)
68 {
69     char *p;
70     int i;
71 
72 #define DEFO32(name, offset) \
73     QREG_##name = tcg_global_mem_new_i32(cpu_env, \
74         offsetof(CPUM68KState, offset), #name);
75 #define DEFO64(name, offset) \
76     QREG_##name = tcg_global_mem_new_i64(cpu_env, \
77         offsetof(CPUM68KState, offset), #name);
78 #include "qregs.def"
79 #undef DEFO32
80 #undef DEFO64
81 
82     cpu_halted = tcg_global_mem_new_i32(cpu_env,
83                                         -offsetof(M68kCPU, env) +
84                                         offsetof(CPUState, halted), "HALTED");
85     cpu_exception_index = tcg_global_mem_new_i32(cpu_env,
86                                                  -offsetof(M68kCPU, env) +
87                                                  offsetof(CPUState, exception_index),
88                                                  "EXCEPTION");
89 
90     p = cpu_reg_names;
91     for (i = 0; i < 8; i++) {
92         sprintf(p, "D%d", i);
93         cpu_dregs[i] = tcg_global_mem_new(cpu_env,
94                                           offsetof(CPUM68KState, dregs[i]), p);
95         p += 3;
96         sprintf(p, "A%d", i);
97         cpu_aregs[i] = tcg_global_mem_new(cpu_env,
98                                           offsetof(CPUM68KState, aregs[i]), p);
99         p += 3;
100     }
101     for (i = 0; i < 4; i++) {
102         sprintf(p, "ACC%d", i);
103         cpu_macc[i] = tcg_global_mem_new_i64(cpu_env,
104                                          offsetof(CPUM68KState, macc[i]), p);
105         p += 5;
106     }
107 
108     NULL_QREG = tcg_global_mem_new(cpu_env, -4, "NULL");
109     store_dummy = tcg_global_mem_new(cpu_env, -8, "NULL");
110 }
111 
112 /* internal defines */
113 typedef struct DisasContext {
114     CPUM68KState *env;
115     target_ulong insn_pc; /* Start of the current instruction.  */
116     target_ulong pc;
117     int is_jmp;
118     CCOp cc_op; /* Current CC operation */
119     int cc_op_synced;
120     struct TranslationBlock *tb;
121     int singlestep_enabled;
122     TCGv_i64 mactmp;
123     int done_mac;
124     int writeback_mask;
125     TCGv writeback[8];
126 } DisasContext;
127 
128 static TCGv get_areg(DisasContext *s, unsigned regno)
129 {
130     if (s->writeback_mask & (1 << regno)) {
131         return s->writeback[regno];
132     } else {
133         return cpu_aregs[regno];
134     }
135 }
136 
137 static void delay_set_areg(DisasContext *s, unsigned regno,
138                            TCGv val, bool give_temp)
139 {
140     if (s->writeback_mask & (1 << regno)) {
141         if (give_temp) {
142             tcg_temp_free(s->writeback[regno]);
143             s->writeback[regno] = val;
144         } else {
145             tcg_gen_mov_i32(s->writeback[regno], val);
146         }
147     } else {
148         s->writeback_mask |= 1 << regno;
149         if (give_temp) {
150             s->writeback[regno] = val;
151         } else {
152             TCGv tmp = tcg_temp_new();
153             s->writeback[regno] = tmp;
154             tcg_gen_mov_i32(tmp, val);
155         }
156     }
157 }
158 
159 static void do_writebacks(DisasContext *s)
160 {
161     unsigned mask = s->writeback_mask;
162     if (mask) {
163         s->writeback_mask = 0;
164         do {
165             unsigned regno = ctz32(mask);
166             tcg_gen_mov_i32(cpu_aregs[regno], s->writeback[regno]);
167             tcg_temp_free(s->writeback[regno]);
168             mask &= mask - 1;
169         } while (mask);
170     }
171 }
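/*
 * Illustrative sketch of the deferred writeback scheme above: addressing
 * modes that update An (e.g. postincrement) compute the new value into a
 * temporary and hand it to delay_set_areg(); get_areg() then returns the
 * pending value for the rest of the instruction, and all pending updates
 * are committed with do_writebacks() once the whole instruction has been
 * translated.  Roughly:
 *
 *     TCGv tmp = tcg_temp_new();
 *     tcg_gen_addi_i32(tmp, get_areg(s, reg0), opsize_bytes(opsize));
 *     delay_set_areg(s, reg0, tmp, true);     (temp ownership passes over)
 *     ...
 *     do_writebacks(s);                       (end of instruction)
 */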
172 
173 /* is_jmp field values */
174 #define DISAS_JUMP      DISAS_TARGET_0 /* only pc was modified dynamically */
175 #define DISAS_UPDATE    DISAS_TARGET_1 /* cpu state was modified dynamically */
176 #define DISAS_TB_JUMP   DISAS_TARGET_2 /* only pc was modified statically */
177 #define DISAS_JUMP_NEXT DISAS_TARGET_3
178 
179 #if defined(CONFIG_USER_ONLY)
180 #define IS_USER(s) 1
181 #else
182 #define IS_USER(s)   (!(s->tb->flags & TB_FLAGS_MSR_S))
183 #define SFC_INDEX(s) ((s->tb->flags & TB_FLAGS_SFC_S) ? \
184                       MMU_KERNEL_IDX : MMU_USER_IDX)
185 #define DFC_INDEX(s) ((s->tb->flags & TB_FLAGS_DFC_S) ? \
186                       MMU_KERNEL_IDX : MMU_USER_IDX)
187 #endif
188 
189 typedef void (*disas_proc)(CPUM68KState *env, DisasContext *s, uint16_t insn);
190 
191 #ifdef DEBUG_DISPATCH
192 #define DISAS_INSN(name)                                                \
193     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
194                                   uint16_t insn);                       \
195     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
196                              uint16_t insn)                             \
197     {                                                                   \
198         qemu_log("Dispatch " #name "\n");                               \
199         real_disas_##name(env, s, insn);                                \
200     }                                                                   \
201     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
202                                   uint16_t insn)
203 #else
204 #define DISAS_INSN(name)                                                \
205     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
206                              uint16_t insn)
207 #endif
208 
209 static const uint8_t cc_op_live[CC_OP_NB] = {
210     [CC_OP_DYNAMIC] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
211     [CC_OP_FLAGS] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
212     [CC_OP_ADDB ... CC_OP_ADDL] = CCF_X | CCF_N | CCF_V,
213     [CC_OP_SUBB ... CC_OP_SUBL] = CCF_X | CCF_N | CCF_V,
214     [CC_OP_CMPB ... CC_OP_CMPL] = CCF_X | CCF_N | CCF_V,
215     [CC_OP_LOGIC] = CCF_X | CCF_N
216 };
217 
218 static void set_cc_op(DisasContext *s, CCOp op)
219 {
220     CCOp old_op = s->cc_op;
221     int dead;
222 
223     if (old_op == op) {
224         return;
225     }
226     s->cc_op = op;
227     s->cc_op_synced = 0;
228 
229     /* Discard CC computation that will no longer be used.
230        Note that X and N are never dead.  */
231     dead = cc_op_live[old_op] & ~cc_op_live[op];
232     if (dead & CCF_C) {
233         tcg_gen_discard_i32(QREG_CC_C);
234     }
235     if (dead & CCF_Z) {
236         tcg_gen_discard_i32(QREG_CC_Z);
237     }
238     if (dead & CCF_V) {
239         tcg_gen_discard_i32(QREG_CC_V);
240     }
241 }
242 
243 /* Update the CPU env CC_OP state.  */
244 static void update_cc_op(DisasContext *s)
245 {
246     if (!s->cc_op_synced) {
247         s->cc_op_synced = 1;
248         tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
249     }
250 }
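/*
 * Sketch of the lazy condition-code scheme: instructions record which
 * operation produced the flags rather than computing CCR bits, e.g.
 *
 *     gen_ext(QREG_CC_N, result, opsize, 1);
 *     set_cc_op(s, CC_OP_LOGIC);
 *
 * Consumers then either call gen_flush_flags() to materialise the flags,
 * or gen_cc_cond() below, which can often test QREG_CC_N/QREG_CC_V
 * directly.  update_cc_op() merely spills the symbolic CC_OP value to the
 * env so that helpers and exception paths see a consistent state.
 */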
251 
252 /* Generate a jump to an immediate address.  */
253 static void gen_jmp_im(DisasContext *s, uint32_t dest)
254 {
255     update_cc_op(s);
256     tcg_gen_movi_i32(QREG_PC, dest);
257     s->is_jmp = DISAS_JUMP;
258 }
259 
260 /* Generate a jump to the address in qreg DEST.  */
261 static void gen_jmp(DisasContext *s, TCGv dest)
262 {
263     update_cc_op(s);
264     tcg_gen_mov_i32(QREG_PC, dest);
265     s->is_jmp = DISAS_JUMP;
266 }
267 
268 static void gen_raise_exception(int nr)
269 {
270     TCGv_i32 tmp = tcg_const_i32(nr);
271 
272     gen_helper_raise_exception(cpu_env, tmp);
273     tcg_temp_free_i32(tmp);
274 }
275 
276 static void gen_exception(DisasContext *s, uint32_t where, int nr)
277 {
278     gen_jmp_im(s, where);
279     gen_raise_exception(nr);
280 }
281 
282 static inline void gen_addr_fault(DisasContext *s)
283 {
284     gen_exception(s, s->insn_pc, EXCP_ADDRESS);
285 }
286 
287 /* Generate a load from the specified address.  Narrow values are
288    sign or zero extended to full register width, as selected by SIGN.  */
289 static inline TCGv gen_load(DisasContext *s, int opsize, TCGv addr,
290                             int sign, int index)
291 {
292     TCGv tmp;
293     tmp = tcg_temp_new_i32();
294     switch (opsize) {
295     case OS_BYTE:
296         if (sign)
297             tcg_gen_qemu_ld8s(tmp, addr, index);
298         else
299             tcg_gen_qemu_ld8u(tmp, addr, index);
300         break;
301     case OS_WORD:
302         if (sign)
303             tcg_gen_qemu_ld16s(tmp, addr, index);
304         else
305             tcg_gen_qemu_ld16u(tmp, addr, index);
306         break;
307     case OS_LONG:
308         tcg_gen_qemu_ld32u(tmp, addr, index);
309         break;
310     default:
311         g_assert_not_reached();
312     }
313     return tmp;
314 }
315 
316 /* Generate a store.  */
317 static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val,
318                              int index)
319 {
320     switch (opsize) {
321     case OS_BYTE:
322         tcg_gen_qemu_st8(val, addr, index);
323         break;
324     case OS_WORD:
325         tcg_gen_qemu_st16(val, addr, index);
326         break;
327     case OS_LONG:
328         tcg_gen_qemu_st32(val, addr, index);
329         break;
330     default:
331         g_assert_not_reached();
332     }
333 }
334 
335 typedef enum {
336     EA_STORE,
337     EA_LOADU,
338     EA_LOADS
339 } ea_what;
340 
341 /* Generate a signed load if WHAT is EA_LOADS, an unsigned load if WHAT is
342    EA_LOADU, otherwise generate a store.  */
343 static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
344                      ea_what what, int index)
345 {
346     if (what == EA_STORE) {
347         gen_store(s, opsize, addr, val, index);
348         return store_dummy;
349     } else {
350         return gen_load(s, opsize, addr, what == EA_LOADS, index);
351     }
352 }
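/*
 * For illustration, the three WHAT values select the access generated by
 * gen_ldst() (VAL is ignored for loads):
 *
 *     tmp = gen_ldst(s, opsize, addr, NULL_QREG, EA_LOADS, IS_USER(s));
 *     tmp = gen_ldst(s, opsize, addr, NULL_QREG, EA_LOADU, IS_USER(s));
 *     gen_ldst(s, opsize, addr, val, EA_STORE, IS_USER(s));
 *
 * i.e. a sign-extending load, a zero-extending load, and a store that
 * returns store_dummy.
 */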
353 
354 /* Read a 16-bit immediate constant */
355 static inline uint16_t read_im16(CPUM68KState *env, DisasContext *s)
356 {
357     uint16_t im;
358     im = cpu_lduw_code(env, s->pc);
359     s->pc += 2;
360     return im;
361 }
362 
363 /* Read an 8-bit immediate constant */
364 static inline uint8_t read_im8(CPUM68KState *env, DisasContext *s)
365 {
366     return read_im16(env, s);
367 }
368 
369 /* Read a 32-bit immediate constant.  */
370 static inline uint32_t read_im32(CPUM68KState *env, DisasContext *s)
371 {
372     uint32_t im;
373     im = read_im16(env, s) << 16;
374     im |= 0xffff & read_im16(env, s);
375     return im;
376 }
377 
378 /* Read a 64-bit immediate constant.  */
379 static inline uint64_t read_im64(CPUM68KState *env, DisasContext *s)
380 {
381     uint64_t im;
382     im = (uint64_t)read_im32(env, s) << 32;
383     im |= (uint64_t)read_im32(env, s);
384     return im;
385 }
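/*
 * The immediate fetchers read big-endian 16-bit extension words in stream
 * order: for read_im32(), instruction words 0x1234 then 0x5678 yield
 * 0x12345678, and read_im64() concatenates two such 32-bit reads with the
 * first one in the high half.
 */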
386 
387 /* Calculate an address index.  */
388 static TCGv gen_addr_index(DisasContext *s, uint16_t ext, TCGv tmp)
389 {
390     TCGv add;
391     int scale;
392 
393     add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
394     if ((ext & 0x800) == 0) {
395         tcg_gen_ext16s_i32(tmp, add);
396         add = tmp;
397     }
398     scale = (ext >> 9) & 3;
399     if (scale != 0) {
400         tcg_gen_shli_i32(tmp, add, scale);
401         add = tmp;
402     }
403     return add;
404 }
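/*
 * Extension word decoding example: bit 15 selects address vs data
 * registers, bits 14-12 the register number, bit 11 the index width and
 * bits 10-9 the scale.  For ext = 0xD400 the index register is A5, taken
 * as a sign-extended word and scaled by 4, i.e. "A5.W*4".
 */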
405 
406 /* Handle a base + index + displacement effective address.
407    A NULL_QREG base means pc-relative.  */
408 static TCGv gen_lea_indexed(CPUM68KState *env, DisasContext *s, TCGv base)
409 {
410     uint32_t offset;
411     uint16_t ext;
412     TCGv add;
413     TCGv tmp;
414     uint32_t bd, od;
415 
416     offset = s->pc;
417     ext = read_im16(env, s);
418 
419     if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
420         return NULL_QREG;
421 
422     if (m68k_feature(s->env, M68K_FEATURE_M68000) &&
423         !m68k_feature(s->env, M68K_FEATURE_SCALED_INDEX)) {
424         ext &= ~(3 << 9);
425     }
426 
427     if (ext & 0x100) {
428         /* full extension word format */
429         if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
430             return NULL_QREG;
431 
432         if ((ext & 0x30) > 0x10) {
433             /* base displacement */
434             if ((ext & 0x30) == 0x20) {
435                 bd = (int16_t)read_im16(env, s);
436             } else {
437                 bd = read_im32(env, s);
438             }
439         } else {
440             bd = 0;
441         }
442         tmp = tcg_temp_new();
443         if ((ext & 0x44) == 0) {
444             /* pre-index */
445             add = gen_addr_index(s, ext, tmp);
446         } else {
447             add = NULL_QREG;
448         }
449         if ((ext & 0x80) == 0) {
450             /* base not suppressed */
451             if (IS_NULL_QREG(base)) {
452                 base = tcg_const_i32(offset + bd);
453                 bd = 0;
454             }
455             if (!IS_NULL_QREG(add)) {
456                 tcg_gen_add_i32(tmp, add, base);
457                 add = tmp;
458             } else {
459                 add = base;
460             }
461         }
462         if (!IS_NULL_QREG(add)) {
463             if (bd != 0) {
464                 tcg_gen_addi_i32(tmp, add, bd);
465                 add = tmp;
466             }
467         } else {
468             add = tcg_const_i32(bd);
469         }
470         if ((ext & 3) != 0) {
471             /* memory indirect */
472             base = gen_load(s, OS_LONG, add, 0, IS_USER(s));
473             if ((ext & 0x44) == 4) {
474                 add = gen_addr_index(s, ext, tmp);
475                 tcg_gen_add_i32(tmp, add, base);
476                 add = tmp;
477             } else {
478                 add = base;
479             }
480             if ((ext & 3) > 1) {
481                 /* outer displacement */
482                 if ((ext & 3) == 2) {
483                     od = (int16_t)read_im16(env, s);
484                 } else {
485                     od = read_im32(env, s);
486                 }
487             } else {
488                 od = 0;
489             }
490             if (od != 0) {
491                 tcg_gen_addi_i32(tmp, add, od);
492                 add = tmp;
493             }
494         }
495     } else {
496         /* brief extension word format */
497         tmp = tcg_temp_new();
498         add = gen_addr_index(s, ext, tmp);
499         if (!IS_NULL_QREG(base)) {
500             tcg_gen_add_i32(tmp, add, base);
501             if ((int8_t)ext)
502                 tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
503         } else {
504             tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
505         }
506         add = tmp;
507     }
508     return add;
509 }
510 
511 /* Sign or zero extend a value.  */
512 
513 static inline void gen_ext(TCGv res, TCGv val, int opsize, int sign)
514 {
515     switch (opsize) {
516     case OS_BYTE:
517         if (sign) {
518             tcg_gen_ext8s_i32(res, val);
519         } else {
520             tcg_gen_ext8u_i32(res, val);
521         }
522         break;
523     case OS_WORD:
524         if (sign) {
525             tcg_gen_ext16s_i32(res, val);
526         } else {
527             tcg_gen_ext16u_i32(res, val);
528         }
529         break;
530     case OS_LONG:
531         tcg_gen_mov_i32(res, val);
532         break;
533     default:
534         g_assert_not_reached();
535     }
536 }
537 
538 /* Evaluate all the CC flags.  */
539 
540 static void gen_flush_flags(DisasContext *s)
541 {
542     TCGv t0, t1;
543 
544     switch (s->cc_op) {
545     case CC_OP_FLAGS:
546         return;
547 
548     case CC_OP_ADDB:
549     case CC_OP_ADDW:
550     case CC_OP_ADDL:
551         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
552         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
553         /* Compute signed overflow for addition.  */
554         t0 = tcg_temp_new();
555         t1 = tcg_temp_new();
556         tcg_gen_sub_i32(t0, QREG_CC_N, QREG_CC_V);
557         gen_ext(t0, t0, s->cc_op - CC_OP_ADDB, 1);
558         tcg_gen_xor_i32(t1, QREG_CC_N, QREG_CC_V);
559         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
560         tcg_temp_free(t0);
561         tcg_gen_andc_i32(QREG_CC_V, t1, QREG_CC_V);
562         tcg_temp_free(t1);
563         break;
564 
565     case CC_OP_SUBB:
566     case CC_OP_SUBW:
567     case CC_OP_SUBL:
568         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
569         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
570         /* Compute signed overflow for subtraction.  */
571         t0 = tcg_temp_new();
572         t1 = tcg_temp_new();
573         tcg_gen_add_i32(t0, QREG_CC_N, QREG_CC_V);
574         gen_ext(t0, t0, s->cc_op - CC_OP_SUBB, 1);
575         tcg_gen_xor_i32(t1, QREG_CC_N, t0);
576         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
577         tcg_temp_free(t0);
578         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t1);
579         tcg_temp_free(t1);
580         break;
581 
582     case CC_OP_CMPB:
583     case CC_OP_CMPW:
584     case CC_OP_CMPL:
585         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_C, QREG_CC_N, QREG_CC_V);
586         tcg_gen_sub_i32(QREG_CC_Z, QREG_CC_N, QREG_CC_V);
587         gen_ext(QREG_CC_Z, QREG_CC_Z, s->cc_op - CC_OP_CMPB, 1);
588         /* Compute signed overflow for subtraction.  */
589         t0 = tcg_temp_new();
590         tcg_gen_xor_i32(t0, QREG_CC_Z, QREG_CC_N);
591         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, QREG_CC_N);
592         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t0);
593         tcg_temp_free(t0);
594         tcg_gen_mov_i32(QREG_CC_N, QREG_CC_Z);
595         break;
596 
597     case CC_OP_LOGIC:
598         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
599         tcg_gen_movi_i32(QREG_CC_C, 0);
600         tcg_gen_movi_i32(QREG_CC_V, 0);
601         break;
602 
603     case CC_OP_DYNAMIC:
604         gen_helper_flush_flags(cpu_env, QREG_CC_OP);
605         s->cc_op_synced = 1;
606         break;
607 
608     default:
609         t0 = tcg_const_i32(s->cc_op);
610         gen_helper_flush_flags(cpu_env, t0);
611         tcg_temp_free(t0);
612         s->cc_op_synced = 1;
613         break;
614     }
615 
616     /* Note that flush_flags also assigns to env->cc_op.  */
617     s->cc_op = CC_OP_FLAGS;
618 }
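/*
 * Worked example for the CC_OP_ADD* case above: with QREG_CC_N holding
 * the (sign-extended) result and QREG_CC_V the source operand, the code
 * computes V = (res ^ src) & ~(src ^ old_dest), whose sign bit is the
 * overflow.  For a byte add 0x70 + 0x70 = 0xE0, both inputs are positive
 * and the result negative, so QREG_CC_V ends up negative and the VS
 * condition (TCG_COND_LT on QREG_CC_V) is taken.
 */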
619 
620 static inline TCGv gen_extend(TCGv val, int opsize, int sign)
621 {
622     TCGv tmp;
623 
624     if (opsize == OS_LONG) {
625         tmp = val;
626     } else {
627         tmp = tcg_temp_new();
628         gen_ext(tmp, val, opsize, sign);
629     }
630 
631     return tmp;
632 }
633 
634 static void gen_logic_cc(DisasContext *s, TCGv val, int opsize)
635 {
636     gen_ext(QREG_CC_N, val, opsize, 1);
637     set_cc_op(s, CC_OP_LOGIC);
638 }
639 
640 static void gen_update_cc_cmp(DisasContext *s, TCGv dest, TCGv src, int opsize)
641 {
642     tcg_gen_mov_i32(QREG_CC_N, dest);
643     tcg_gen_mov_i32(QREG_CC_V, src);
644     set_cc_op(s, CC_OP_CMPB + opsize);
645 }
646 
647 static void gen_update_cc_add(TCGv dest, TCGv src, int opsize)
648 {
649     gen_ext(QREG_CC_N, dest, opsize, 1);
650     tcg_gen_mov_i32(QREG_CC_V, src);
651 }
652 
653 static inline int opsize_bytes(int opsize)
654 {
655     switch (opsize) {
656     case OS_BYTE: return 1;
657     case OS_WORD: return 2;
658     case OS_LONG: return 4;
659     case OS_SINGLE: return 4;
660     case OS_DOUBLE: return 8;
661     case OS_EXTENDED: return 12;
662     case OS_PACKED: return 12;
663     default:
664         g_assert_not_reached();
665     }
666 }
667 
668 static inline int insn_opsize(int insn)
669 {
670     switch ((insn >> 6) & 3) {
671     case 0: return OS_BYTE;
672     case 1: return OS_WORD;
673     case 2: return OS_LONG;
674     default:
675         g_assert_not_reached();
676     }
677 }
678 
679 static inline int ext_opsize(int ext, int pos)
680 {
681     switch ((ext >> pos) & 7) {
682     case 0: return OS_LONG;
683     case 1: return OS_SINGLE;
684     case 2: return OS_EXTENDED;
685     case 3: return OS_PACKED;
686     case 4: return OS_WORD;
687     case 5: return OS_DOUBLE;
688     case 6: return OS_BYTE;
689     default:
690         g_assert_not_reached();
691     }
692 }
693 
694 /* Assign value to a register.  If the width is less than the register width
695    only the low part of the register is set.  */
696 static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
697 {
698     TCGv tmp;
699     switch (opsize) {
700     case OS_BYTE:
701         tcg_gen_andi_i32(reg, reg, 0xffffff00);
702         tmp = tcg_temp_new();
703         tcg_gen_ext8u_i32(tmp, val);
704         tcg_gen_or_i32(reg, reg, tmp);
705         tcg_temp_free(tmp);
706         break;
707     case OS_WORD:
708         tcg_gen_andi_i32(reg, reg, 0xffff0000);
709         tmp = tcg_temp_new();
710         tcg_gen_ext16u_i32(tmp, val);
711         tcg_gen_or_i32(reg, reg, tmp);
712         tcg_temp_free(tmp);
713         break;
714     case OS_LONG:
715     case OS_SINGLE:
716         tcg_gen_mov_i32(reg, val);
717         break;
718     default:
719         g_assert_not_reached();
720     }
721 }
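/*
 * Example: gen_partset_reg(OS_BYTE, reg, val) with reg = 0xAABBCCDD and
 * val = 0x11 leaves reg = 0xAABBCC11; OS_WORD preserves the upper 16 bits
 * and OS_LONG/OS_SINGLE replace the register entirely.
 */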
722 
723 /* Generate code for an "effective address".  Does not adjust the base
724    register for autoincrement addressing modes.  */
725 static TCGv gen_lea_mode(CPUM68KState *env, DisasContext *s,
726                          int mode, int reg0, int opsize)
727 {
728     TCGv reg;
729     TCGv tmp;
730     uint16_t ext;
731     uint32_t offset;
732 
733     switch (mode) {
734     case 0: /* Data register direct.  */
735     case 1: /* Address register direct.  */
736         return NULL_QREG;
737     case 3: /* Indirect postincrement.  */
738         if (opsize == OS_UNSIZED) {
739             return NULL_QREG;
740         }
741         /* fallthru */
742     case 2: /* Indirect register */
743         return get_areg(s, reg0);
744     case 4: /* Indirect predecrement.  */
745         if (opsize == OS_UNSIZED) {
746             return NULL_QREG;
747         }
748         reg = get_areg(s, reg0);
749         tmp = tcg_temp_new();
750         if (reg0 == 7 && opsize == OS_BYTE &&
751             m68k_feature(s->env, M68K_FEATURE_M68000)) {
752             tcg_gen_subi_i32(tmp, reg, 2);
753         } else {
754             tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
755         }
756         return tmp;
757     case 5: /* Indirect displacement.  */
758         reg = get_areg(s, reg0);
759         tmp = tcg_temp_new();
760         ext = read_im16(env, s);
761         tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
762         return tmp;
763     case 6: /* Indirect index + displacement.  */
764         reg = get_areg(s, reg0);
765         return gen_lea_indexed(env, s, reg);
766     case 7: /* Other */
767         switch (reg0) {
768         case 0: /* Absolute short.  */
769             offset = (int16_t)read_im16(env, s);
770             return tcg_const_i32(offset);
771         case 1: /* Absolute long.  */
772             offset = read_im32(env, s);
773             return tcg_const_i32(offset);
774         case 2: /* pc displacement  */
775             offset = s->pc;
776             offset += (int16_t)read_im16(env, s);
777             return tcg_const_i32(offset);
778         case 3: /* pc index+displacement.  */
779             return gen_lea_indexed(env, s, NULL_QREG);
780         case 4: /* Immediate.  */
781         default:
782             return NULL_QREG;
783         }
784     }
785     /* Should never happen.  */
786     return NULL_QREG;
787 }
788 
789 static TCGv gen_lea(CPUM68KState *env, DisasContext *s, uint16_t insn,
790                     int opsize)
791 {
792     int mode = extract32(insn, 3, 3);
793     int reg0 = REG(insn, 0);
794     return gen_lea_mode(env, s, mode, reg0, opsize);
795 }
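/*
 * Typical use of gen_lea(): compute the effective address and fault on
 * modes that have none (register direct, immediate), along the lines of
 *
 *     TCGv addr = gen_lea(env, s, insn, opsize);
 *     if (IS_NULL_QREG(addr)) {
 *         gen_addr_fault(s);
 *         return;
 *     }
 */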
796 
797 /* Generate code to load/store a value from/into an EA.  If WHAT is EA_STORE
798    this is a write, otherwise it is a read (EA_LOADS == sign extend,
799    EA_LOADU == zero extend).  ADDRP is non-null for read-write operands.  */
800 static TCGv gen_ea_mode(CPUM68KState *env, DisasContext *s, int mode, int reg0,
801                         int opsize, TCGv val, TCGv *addrp, ea_what what,
802                         int index)
803 {
804     TCGv reg, tmp, result;
805     int32_t offset;
806 
807     switch (mode) {
808     case 0: /* Data register direct.  */
809         reg = cpu_dregs[reg0];
810         if (what == EA_STORE) {
811             gen_partset_reg(opsize, reg, val);
812             return store_dummy;
813         } else {
814             return gen_extend(reg, opsize, what == EA_LOADS);
815         }
816     case 1: /* Address register direct.  */
817         reg = get_areg(s, reg0);
818         if (what == EA_STORE) {
819             tcg_gen_mov_i32(reg, val);
820             return store_dummy;
821         } else {
822             return gen_extend(reg, opsize, what == EA_LOADS);
823         }
824     case 2: /* Indirect register */
825         reg = get_areg(s, reg0);
826         return gen_ldst(s, opsize, reg, val, what, index);
827     case 3: /* Indirect postincrement.  */
828         reg = get_areg(s, reg0);
829         result = gen_ldst(s, opsize, reg, val, what, index);
830         if (what == EA_STORE || !addrp) {
831             TCGv tmp = tcg_temp_new();
832             if (reg0 == 7 && opsize == OS_BYTE &&
833                 m68k_feature(s->env, M68K_FEATURE_M68000)) {
834                 tcg_gen_addi_i32(tmp, reg, 2);
835             } else {
836                 tcg_gen_addi_i32(tmp, reg, opsize_bytes(opsize));
837             }
838             delay_set_areg(s, reg0, tmp, true);
839         }
840         return result;
841     case 4: /* Indirect predecrement.  */
842         if (addrp && what == EA_STORE) {
843             tmp = *addrp;
844         } else {
845             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
846             if (IS_NULL_QREG(tmp)) {
847                 return tmp;
848             }
849             if (addrp) {
850                 *addrp = tmp;
851             }
852         }
853         result = gen_ldst(s, opsize, tmp, val, what, index);
854         if (what == EA_STORE || !addrp) {
855             delay_set_areg(s, reg0, tmp, false);
856         }
857         return result;
858     case 5: /* Indirect displacement.  */
859     case 6: /* Indirect index + displacement.  */
860     do_indirect:
861         if (addrp && what == EA_STORE) {
862             tmp = *addrp;
863         } else {
864             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
865             if (IS_NULL_QREG(tmp)) {
866                 return tmp;
867             }
868             if (addrp) {
869                 *addrp = tmp;
870             }
871         }
872         return gen_ldst(s, opsize, tmp, val, what, index);
873     case 7: /* Other */
874         switch (reg0) {
875         case 0: /* Absolute short.  */
876         case 1: /* Absolute long.  */
877         case 2: /* pc displacement  */
878         case 3: /* pc index+displacement.  */
879             goto do_indirect;
880         case 4: /* Immediate.  */
881             /* Sign extend values for consistency.  */
882             switch (opsize) {
883             case OS_BYTE:
884                 if (what == EA_LOADS) {
885                     offset = (int8_t)read_im8(env, s);
886                 } else {
887                     offset = read_im8(env, s);
888                 }
889                 break;
890             case OS_WORD:
891                 if (what == EA_LOADS) {
892                     offset = (int16_t)read_im16(env, s);
893                 } else {
894                     offset = read_im16(env, s);
895                 }
896                 break;
897             case OS_LONG:
898                 offset = read_im32(env, s);
899                 break;
900             default:
901                 g_assert_not_reached();
902             }
903             return tcg_const_i32(offset);
904         default:
905             return NULL_QREG;
906         }
907     }
908     /* Should never happen.  */
909     return NULL_QREG;
910 }
911 
912 static TCGv gen_ea(CPUM68KState *env, DisasContext *s, uint16_t insn,
913                    int opsize, TCGv val, TCGv *addrp, ea_what what, int index)
914 {
915     int mode = extract32(insn, 3, 3);
916     int reg0 = REG(insn, 0);
917     return gen_ea_mode(env, s, mode, reg0, opsize, val, addrp, what, index);
918 }
919 
920 static TCGv_ptr gen_fp_ptr(int freg)
921 {
922     TCGv_ptr fp = tcg_temp_new_ptr();
923     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fregs[freg]));
924     return fp;
925 }
926 
927 static TCGv_ptr gen_fp_result_ptr(void)
928 {
929     TCGv_ptr fp = tcg_temp_new_ptr();
930     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fp_result));
931     return fp;
932 }
933 
934 static void gen_fp_move(TCGv_ptr dest, TCGv_ptr src)
935 {
936     TCGv t32;
937     TCGv_i64 t64;
938 
939     t32 = tcg_temp_new();
940     tcg_gen_ld16u_i32(t32, src, offsetof(FPReg, l.upper));
941     tcg_gen_st16_i32(t32, dest, offsetof(FPReg, l.upper));
942     tcg_temp_free(t32);
943 
944     t64 = tcg_temp_new_i64();
945     tcg_gen_ld_i64(t64, src, offsetof(FPReg, l.lower));
946     tcg_gen_st_i64(t64, dest, offsetof(FPReg, l.lower));
947     tcg_temp_free_i64(t64);
948 }
949 
950 static void gen_load_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
951                         int index)
952 {
953     TCGv tmp;
954     TCGv_i64 t64;
955 
956     t64 = tcg_temp_new_i64();
957     tmp = tcg_temp_new();
958     switch (opsize) {
959     case OS_BYTE:
960         tcg_gen_qemu_ld8s(tmp, addr, index);
961         gen_helper_exts32(cpu_env, fp, tmp);
962         break;
963     case OS_WORD:
964         tcg_gen_qemu_ld16s(tmp, addr, index);
965         gen_helper_exts32(cpu_env, fp, tmp);
966         break;
967     case OS_LONG:
968         tcg_gen_qemu_ld32u(tmp, addr, index);
969         gen_helper_exts32(cpu_env, fp, tmp);
970         break;
971     case OS_SINGLE:
972         tcg_gen_qemu_ld32u(tmp, addr, index);
973         gen_helper_extf32(cpu_env, fp, tmp);
974         break;
975     case OS_DOUBLE:
976         tcg_gen_qemu_ld64(t64, addr, index);
977         gen_helper_extf64(cpu_env, fp, t64);
978         break;
979     case OS_EXTENDED:
980         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
981             gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
982             break;
983         }
984         tcg_gen_qemu_ld32u(tmp, addr, index);
985         tcg_gen_shri_i32(tmp, tmp, 16);
986         tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
987         tcg_gen_addi_i32(tmp, addr, 4);
988         tcg_gen_qemu_ld64(t64, tmp, index);
989         tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
990         break;
991     case OS_PACKED:
992         /* unimplemented data type on 68040/ColdFire
993          * FIXME if needed for another FPU
994          */
995         gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
996         break;
997     default:
998         g_assert_not_reached();
999     }
1000     tcg_temp_free(tmp);
1001     tcg_temp_free_i64(t64);
1002 }
1003 
1004 static void gen_store_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
1005                          int index)
1006 {
1007     TCGv tmp;
1008     TCGv_i64 t64;
1009 
1010     t64 = tcg_temp_new_i64();
1011     tmp = tcg_temp_new();
1012     switch (opsize) {
1013     case OS_BYTE:
1014         gen_helper_reds32(tmp, cpu_env, fp);
1015         tcg_gen_qemu_st8(tmp, addr, index);
1016         break;
1017     case OS_WORD:
1018         gen_helper_reds32(tmp, cpu_env, fp);
1019         tcg_gen_qemu_st16(tmp, addr, index);
1020         break;
1021     case OS_LONG:
1022         gen_helper_reds32(tmp, cpu_env, fp);
1023         tcg_gen_qemu_st32(tmp, addr, index);
1024         break;
1025     case OS_SINGLE:
1026         gen_helper_redf32(tmp, cpu_env, fp);
1027         tcg_gen_qemu_st32(tmp, addr, index);
1028         break;
1029     case OS_DOUBLE:
1030         gen_helper_redf64(t64, cpu_env, fp);
1031         tcg_gen_qemu_st64(t64, addr, index);
1032         break;
1033     case OS_EXTENDED:
1034         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1035             gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1036             break;
1037         }
1038         tcg_gen_ld16u_i32(tmp, fp, offsetof(FPReg, l.upper));
1039         tcg_gen_shli_i32(tmp, tmp, 16);
1040         tcg_gen_qemu_st32(tmp, addr, index);
1041         tcg_gen_addi_i32(tmp, addr, 4);
1042         tcg_gen_ld_i64(t64, fp, offsetof(FPReg, l.lower));
1043         tcg_gen_qemu_st64(t64, tmp, index);
1044         break;
1045     case OS_PACKED:
1046         /* unimplemented data type on 68040/ColdFire
1047          * FIXME if needed for another FPU
1048          */
1049         gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1050         break;
1051     default:
1052         g_assert_not_reached();
1053     }
1054     tcg_temp_free(tmp);
1055     tcg_temp_free_i64(t64);
1056 }
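/*
 * The OS_EXTENDED cases above reflect the 96-bit in-memory layout of the
 * 680x0 extended-precision format: 16 bits of sign/exponent, 16 unused
 * bits, then the 64-bit mantissa at offset 4, matching the accesses to
 * l.upper and l.lower.
 */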
1057 
1058 static void gen_ldst_fp(DisasContext *s, int opsize, TCGv addr,
1059                         TCGv_ptr fp, ea_what what, int index)
1060 {
1061     if (what == EA_STORE) {
1062         gen_store_fp(s, opsize, addr, fp, index);
1063     } else {
1064         gen_load_fp(s, opsize, addr, fp, index);
1065     }
1066 }
1067 
1068 static int gen_ea_mode_fp(CPUM68KState *env, DisasContext *s, int mode,
1069                           int reg0, int opsize, TCGv_ptr fp, ea_what what,
1070                           int index)
1071 {
1072     TCGv reg, addr, tmp;
1073     TCGv_i64 t64;
1074 
1075     switch (mode) {
1076     case 0: /* Data register direct.  */
1077         reg = cpu_dregs[reg0];
1078         if (what == EA_STORE) {
1079             switch (opsize) {
1080             case OS_BYTE:
1081             case OS_WORD:
1082             case OS_LONG:
1083                 gen_helper_reds32(reg, cpu_env, fp);
1084                 break;
1085             case OS_SINGLE:
1086                 gen_helper_redf32(reg, cpu_env, fp);
1087                 break;
1088             default:
1089                 g_assert_not_reached();
1090             }
1091         } else {
1092             tmp = tcg_temp_new();
1093             switch (opsize) {
1094             case OS_BYTE:
1095                 tcg_gen_ext8s_i32(tmp, reg);
1096                 gen_helper_exts32(cpu_env, fp, tmp);
1097                 break;
1098             case OS_WORD:
1099                 tcg_gen_ext16s_i32(tmp, reg);
1100                 gen_helper_exts32(cpu_env, fp, tmp);
1101                 break;
1102             case OS_LONG:
1103                 gen_helper_exts32(cpu_env, fp, reg);
1104                 break;
1105             case OS_SINGLE:
1106                 gen_helper_extf32(cpu_env, fp, reg);
1107                 break;
1108             default:
1109                 g_assert_not_reached();
1110             }
1111             tcg_temp_free(tmp);
1112         }
1113         return 0;
1114     case 1: /* Address register direct.  */
1115         return -1;
1116     case 2: /* Indirect register */
1117         addr = get_areg(s, reg0);
1118         gen_ldst_fp(s, opsize, addr, fp, what, index);
1119         return 0;
1120     case 3: /* Indirect postincrement.  */
1121         addr = cpu_aregs[reg0];
1122         gen_ldst_fp(s, opsize, addr, fp, what, index);
1123         tcg_gen_addi_i32(addr, addr, opsize_bytes(opsize));
1124         return 0;
1125     case 4: /* Indirect predecrement.  */
1126         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1127         if (IS_NULL_QREG(addr)) {
1128             return -1;
1129         }
1130         gen_ldst_fp(s, opsize, addr, fp, what, index);
1131         tcg_gen_mov_i32(cpu_aregs[reg0], addr);
1132         return 0;
1133     case 5: /* Indirect displacement.  */
1134     case 6: /* Indirect index + displacement.  */
1135     do_indirect:
1136         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1137         if (IS_NULL_QREG(addr)) {
1138             return -1;
1139         }
1140         gen_ldst_fp(s, opsize, addr, fp, what, index);
1141         return 0;
1142     case 7: /* Other */
1143         switch (reg0) {
1144         case 0: /* Absolute short.  */
1145         case 1: /* Absolute long.  */
1146         case 2: /* pc displacement  */
1147         case 3: /* pc index+displacement.  */
1148             goto do_indirect;
1149         case 4: /* Immediate.  */
1150             if (what == EA_STORE) {
1151                 return -1;
1152             }
1153             switch (opsize) {
1154             case OS_BYTE:
1155                 tmp = tcg_const_i32((int8_t)read_im8(env, s));
1156                 gen_helper_exts32(cpu_env, fp, tmp);
1157                 tcg_temp_free(tmp);
1158                 break;
1159             case OS_WORD:
1160                 tmp = tcg_const_i32((int16_t)read_im16(env, s));
1161                 gen_helper_exts32(cpu_env, fp, tmp);
1162                 tcg_temp_free(tmp);
1163                 break;
1164             case OS_LONG:
1165                 tmp = tcg_const_i32(read_im32(env, s));
1166                 gen_helper_exts32(cpu_env, fp, tmp);
1167                 tcg_temp_free(tmp);
1168                 break;
1169             case OS_SINGLE:
1170                 tmp = tcg_const_i32(read_im32(env, s));
1171                 gen_helper_extf32(cpu_env, fp, tmp);
1172                 tcg_temp_free(tmp);
1173                 break;
1174             case OS_DOUBLE:
1175                 t64 = tcg_const_i64(read_im64(env, s));
1176                 gen_helper_extf64(cpu_env, fp, t64);
1177                 tcg_temp_free_i64(t64);
1178                 break;
1179             case OS_EXTENDED:
1180                 if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1181                     gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1182                     break;
1183                 }
1184                 tmp = tcg_const_i32(read_im32(env, s) >> 16);
1185                 tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1186                 tcg_temp_free(tmp);
1187                 t64 = tcg_const_i64(read_im64(env, s));
1188                 tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1189                 tcg_temp_free_i64(t64);
1190                 break;
1191             case OS_PACKED:
1192                 /* unimplemented data type on 68040/ColdFire
1193                  * FIXME if needed for another FPU
1194                  */
1195                 gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1196                 break;
1197             default:
1198                 g_assert_not_reached();
1199             }
1200             return 0;
1201         default:
1202             return -1;
1203         }
1204     }
1205     return -1;
1206 }
1207 
1208 static int gen_ea_fp(CPUM68KState *env, DisasContext *s, uint16_t insn,
1209                        int opsize, TCGv_ptr fp, ea_what what, int index)
1210 {
1211     int mode = extract32(insn, 3, 3);
1212     int reg0 = REG(insn, 0);
1213     return gen_ea_mode_fp(env, s, mode, reg0, opsize, fp, what, index);
1214 }
1215 
1216 typedef struct {
1217     TCGCond tcond;
1218     bool g1;
1219     bool g2;
1220     TCGv v1;
1221     TCGv v2;
1222 } DisasCompare;
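/*
 * In a DisasCompare, v1 and v2 are the operands of the tcond comparison;
 * g1/g2 record whether they alias global TCG values (such as QREG_CC_N),
 * in which case free_cond() must not release them.
 */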
1223 
1224 static void gen_cc_cond(DisasCompare *c, DisasContext *s, int cond)
1225 {
1226     TCGv tmp, tmp2;
1227     TCGCond tcond;
1228     CCOp op = s->cc_op;
1229 
1230     /* The CC_OP_CMP form can handle most normal comparisons directly.  */
1231     if (op == CC_OP_CMPB || op == CC_OP_CMPW || op == CC_OP_CMPL) {
1232         c->g1 = c->g2 = 1;
1233         c->v1 = QREG_CC_N;
1234         c->v2 = QREG_CC_V;
1235         switch (cond) {
1236         case 2: /* HI */
1237         case 3: /* LS */
1238             tcond = TCG_COND_LEU;
1239             goto done;
1240         case 4: /* CC */
1241         case 5: /* CS */
1242             tcond = TCG_COND_LTU;
1243             goto done;
1244         case 6: /* NE */
1245         case 7: /* EQ */
1246             tcond = TCG_COND_EQ;
1247             goto done;
1248         case 10: /* PL */
1249         case 11: /* MI */
1250             c->g1 = c->g2 = 0;
1251             c->v2 = tcg_const_i32(0);
1252             c->v1 = tmp = tcg_temp_new();
1253             tcg_gen_sub_i32(tmp, QREG_CC_N, QREG_CC_V);
1254             gen_ext(tmp, tmp, op - CC_OP_CMPB, 1);
1255             /* fallthru */
1256         case 12: /* GE */
1257         case 13: /* LT */
1258             tcond = TCG_COND_LT;
1259             goto done;
1260         case 14: /* GT */
1261         case 15: /* LE */
1262             tcond = TCG_COND_LE;
1263             goto done;
1264         }
1265     }
1266 
1267     c->g1 = 1;
1268     c->g2 = 0;
1269     c->v2 = tcg_const_i32(0);
1270 
1271     switch (cond) {
1272     case 0: /* T */
1273     case 1: /* F */
1274         c->v1 = c->v2;
1275         tcond = TCG_COND_NEVER;
1276         goto done;
1277     case 14: /* GT (!(Z || (N ^ V))) */
1278     case 15: /* LE (Z || (N ^ V)) */
1279         /* Logic operations clear V, which simplifies LE to (Z || N),
1280            and since Z and N are co-located, this becomes a normal
1281            comparison vs N.  */
1282         if (op == CC_OP_LOGIC) {
1283             c->v1 = QREG_CC_N;
1284             tcond = TCG_COND_LE;
1285             goto done;
1286         }
1287         break;
1288     case 12: /* GE (!(N ^ V)) */
1289     case 13: /* LT (N ^ V) */
1290         /* Logic operations clear V, which simplifies this to N.  */
1291         if (op != CC_OP_LOGIC) {
1292             break;
1293         }
1294         /* fallthru */
1295     case 10: /* PL (!N) */
1296     case 11: /* MI (N) */
1297         /* Several cases represent N normally.  */
1298         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1299             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1300             op == CC_OP_LOGIC) {
1301             c->v1 = QREG_CC_N;
1302             tcond = TCG_COND_LT;
1303             goto done;
1304         }
1305         break;
1306     case 6: /* NE (!Z) */
1307     case 7: /* EQ (Z) */
1308         /* Some cases fold Z into N.  */
1309         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1310             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1311             op == CC_OP_LOGIC) {
1312             tcond = TCG_COND_EQ;
1313             c->v1 = QREG_CC_N;
1314             goto done;
1315         }
1316         break;
1317     case 4: /* CC (!C) */
1318     case 5: /* CS (C) */
1319         /* Some cases fold C into X.  */
1320         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1321             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL) {
1322             tcond = TCG_COND_NE;
1323             c->v1 = QREG_CC_X;
1324             goto done;
1325         }
1326         /* fallthru */
1327     case 8: /* VC (!V) */
1328     case 9: /* VS (V) */
1329         /* Logic operations clear V and C.  */
1330         if (op == CC_OP_LOGIC) {
1331             tcond = TCG_COND_NEVER;
1332             c->v1 = c->v2;
1333             goto done;
1334         }
1335         break;
1336     }
1337 
1338     /* Otherwise, flush flag state to CC_OP_FLAGS.  */
1339     gen_flush_flags(s);
1340 
1341     switch (cond) {
1342     case 0: /* T */
1343     case 1: /* F */
1344     default:
1345         /* Invalid, or handled above.  */
1346         abort();
1347     case 2: /* HI (!C && !Z) -> !(C || Z)*/
1348     case 3: /* LS (C || Z) */
1349         c->v1 = tmp = tcg_temp_new();
1350         c->g1 = 0;
1351         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1352         tcg_gen_or_i32(tmp, tmp, QREG_CC_C);
1353         tcond = TCG_COND_NE;
1354         break;
1355     case 4: /* CC (!C) */
1356     case 5: /* CS (C) */
1357         c->v1 = QREG_CC_C;
1358         tcond = TCG_COND_NE;
1359         break;
1360     case 6: /* NE (!Z) */
1361     case 7: /* EQ (Z) */
1362         c->v1 = QREG_CC_Z;
1363         tcond = TCG_COND_EQ;
1364         break;
1365     case 8: /* VC (!V) */
1366     case 9: /* VS (V) */
1367         c->v1 = QREG_CC_V;
1368         tcond = TCG_COND_LT;
1369         break;
1370     case 10: /* PL (!N) */
1371     case 11: /* MI (N) */
1372         c->v1 = QREG_CC_N;
1373         tcond = TCG_COND_LT;
1374         break;
1375     case 12: /* GE (!(N ^ V)) */
1376     case 13: /* LT (N ^ V) */
1377         c->v1 = tmp = tcg_temp_new();
1378         c->g1 = 0;
1379         tcg_gen_xor_i32(tmp, QREG_CC_N, QREG_CC_V);
1380         tcond = TCG_COND_LT;
1381         break;
1382     case 14: /* GT (!(Z || (N ^ V))) */
1383     case 15: /* LE (Z || (N ^ V)) */
1384         c->v1 = tmp = tcg_temp_new();
1385         c->g1 = 0;
1386         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1387         tcg_gen_neg_i32(tmp, tmp);
1388         tmp2 = tcg_temp_new();
1389         tcg_gen_xor_i32(tmp2, QREG_CC_N, QREG_CC_V);
1390         tcg_gen_or_i32(tmp, tmp, tmp2);
1391         tcg_temp_free(tmp2);
1392         tcond = TCG_COND_LT;
1393         break;
1394     }
1395 
1396  done:
1397     if ((cond & 1) == 0) {
1398         tcond = tcg_invert_cond(tcond);
1399     }
1400     c->tcond = tcond;
1401 }
1402 
1403 static void free_cond(DisasCompare *c)
1404 {
1405     if (!c->g1) {
1406         tcg_temp_free(c->v1);
1407     }
1408     if (!c->g2) {
1409         tcg_temp_free(c->v2);
1410     }
1411 }
1412 
1413 static void gen_jmpcc(DisasContext *s, int cond, TCGLabel *l1)
1414 {
1415   DisasCompare c;
1416 
1417   gen_cc_cond(&c, s, cond);
1418   update_cc_op(s);
1419   tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
1420   free_cond(&c);
1421 }
1422 
1423 /* Force a TB lookup after an instruction that changes the CPU state.  */
1424 static void gen_lookup_tb(DisasContext *s)
1425 {
1426     update_cc_op(s);
1427     tcg_gen_movi_i32(QREG_PC, s->pc);
1428     s->is_jmp = DISAS_UPDATE;
1429 }
1430 
1431 #define SRC_EA(env, result, opsize, op_sign, addrp) do {                \
1432         result = gen_ea(env, s, insn, opsize, NULL_QREG, addrp,         \
1433                         op_sign ? EA_LOADS : EA_LOADU, IS_USER(s));     \
1434         if (IS_NULL_QREG(result)) {                                     \
1435             gen_addr_fault(s);                                          \
1436             return;                                                     \
1437         }                                                               \
1438     } while (0)
1439 
1440 #define DEST_EA(env, insn, opsize, val, addrp) do {                     \
1441         TCGv ea_result = gen_ea(env, s, insn, opsize, val, addrp,       \
1442                                 EA_STORE, IS_USER(s));                  \
1443         if (IS_NULL_QREG(ea_result)) {                                  \
1444             gen_addr_fault(s);                                          \
1445             return;                                                     \
1446         }                                                               \
1447     } while (0)
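/*
 * Read-modify-write instructions pair these macros with the same ADDRP so
 * the effective address is only evaluated once; compare the NBCD handler
 * below:
 *
 *     SRC_EA(env, src, OS_BYTE, 0, &addr);
 *     ...compute dest from src...
 *     DEST_EA(env, insn, OS_BYTE, dest, &addr);
 */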
1448 
1449 static inline bool use_goto_tb(DisasContext *s, uint32_t dest)
1450 {
1451 #ifndef CONFIG_USER_ONLY
1452     return (s->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) ||
1453            (s->insn_pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
1454 #else
1455     return true;
1456 #endif
1457 }
1458 
1459 /* Generate a jump to an immediate address.  */
1460 static void gen_jmp_tb(DisasContext *s, int n, uint32_t dest)
1461 {
1462     if (unlikely(s->singlestep_enabled)) {
1463         gen_exception(s, dest, EXCP_DEBUG);
1464     } else if (use_goto_tb(s, dest)) {
1465         tcg_gen_goto_tb(n);
1466         tcg_gen_movi_i32(QREG_PC, dest);
1467         tcg_gen_exit_tb((uintptr_t)s->tb + n);
1468     } else {
1469         gen_jmp_im(s, dest);
1470         tcg_gen_exit_tb(0);
1471     }
1472     s->is_jmp = DISAS_TB_JUMP;
1473 }
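/*
 * A translation block has two chaining slots, so a conditional branch can
 * use both: the DBcc handler below emits gen_jmp_tb(s, 1, target) for the
 * taken branch and gen_jmp_tb(s, 0, s->pc) for the fall-through, while
 * tcg_gen_exit_tb((uintptr_t)s->tb + n) tags the exit with the slot to be
 * patched.
 */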
1474 
1475 DISAS_INSN(scc)
1476 {
1477     DisasCompare c;
1478     int cond;
1479     TCGv tmp;
1480 
1481     cond = (insn >> 8) & 0xf;
1482     gen_cc_cond(&c, s, cond);
1483 
1484     tmp = tcg_temp_new();
1485     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
1486     free_cond(&c);
1487 
1488     tcg_gen_neg_i32(tmp, tmp);
1489     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
1490     tcg_temp_free(tmp);
1491 }
1492 
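/*
 * DBcc Dn,<disp> semantics, as implemented below: if the condition is
 * true the instruction falls through; otherwise the low word of Dn is
 * decremented and the branch is taken unless the counter has reached -1.
 */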
1493 DISAS_INSN(dbcc)
1494 {
1495     TCGLabel *l1;
1496     TCGv reg;
1497     TCGv tmp;
1498     int16_t offset;
1499     uint32_t base;
1500 
1501     reg = DREG(insn, 0);
1502     base = s->pc;
1503     offset = (int16_t)read_im16(env, s);
1504     l1 = gen_new_label();
1505     gen_jmpcc(s, (insn >> 8) & 0xf, l1);
1506 
1507     tmp = tcg_temp_new();
1508     tcg_gen_ext16s_i32(tmp, reg);
1509     tcg_gen_addi_i32(tmp, tmp, -1);
1510     gen_partset_reg(OS_WORD, reg, tmp);
1511     tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, -1, l1);
1512     gen_jmp_tb(s, 1, base + offset);
1513     gen_set_label(l1);
1514     gen_jmp_tb(s, 0, s->pc);
1515 }
1516 
1517 DISAS_INSN(undef_mac)
1518 {
1519     gen_exception(s, s->insn_pc, EXCP_LINEA);
1520 }
1521 
1522 DISAS_INSN(undef_fpu)
1523 {
1524     gen_exception(s, s->insn_pc, EXCP_LINEF);
1525 }
1526 
1527 DISAS_INSN(undef)
1528 {
1529     /* ??? This covers both instructions that are not yet implemented
1530        for the 680x0 series, and those that are implemented but
1531        actually illegal for CPU32 or pre-68020.  */
1532     qemu_log_mask(LOG_UNIMP, "Illegal instruction: %04x @ %08x\n",
1533                   insn, s->insn_pc);
1534     gen_exception(s, s->insn_pc, EXCP_UNSUPPORTED);
1535 }
1536 
1537 DISAS_INSN(mulw)
1538 {
1539     TCGv reg;
1540     TCGv tmp;
1541     TCGv src;
1542     int sign;
1543 
1544     sign = (insn & 0x100) != 0;
1545     reg = DREG(insn, 9);
1546     tmp = tcg_temp_new();
1547     if (sign)
1548         tcg_gen_ext16s_i32(tmp, reg);
1549     else
1550         tcg_gen_ext16u_i32(tmp, reg);
1551     SRC_EA(env, src, OS_WORD, sign, NULL);
1552     tcg_gen_mul_i32(tmp, tmp, src);
1553     tcg_gen_mov_i32(reg, tmp);
1554     gen_logic_cc(s, tmp, OS_LONG);
1555     tcg_temp_free(tmp);
1556 }
1557 
1558 DISAS_INSN(divw)
1559 {
1560     int sign;
1561     TCGv src;
1562     TCGv destr;
1563 
1564     /* divX.w <EA>,Dn    32/16 -> 16r:16q */
1565 
1566     sign = (insn & 0x100) != 0;
1567 
1568     /* dest.l / src.w */
1569 
1570     SRC_EA(env, src, OS_WORD, sign, NULL);
1571     destr = tcg_const_i32(REG(insn, 9));
1572     if (sign) {
1573         gen_helper_divsw(cpu_env, destr, src);
1574     } else {
1575         gen_helper_divuw(cpu_env, destr, src);
1576     }
1577     tcg_temp_free(destr);
1578 
1579     set_cc_op(s, CC_OP_FLAGS);
1580 }
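/*
 * Example of the 16r:16q layout noted above: divu.w with Dn = 100000 and
 * a divisor of 300 leaves Dn = 0x0064014D, i.e. remainder 100 (0x0064) in
 * the high word and quotient 333 (0x014D) in the low word; overflow and
 * division by zero are dealt with inside the helpers.
 */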
1581 
1582 DISAS_INSN(divl)
1583 {
1584     TCGv num, reg, den;
1585     int sign;
1586     uint16_t ext;
1587 
1588     ext = read_im16(env, s);
1589 
1590     sign = (ext & 0x0800) != 0;
1591 
1592     if (ext & 0x400) {
1593         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
1594             gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
1595             return;
1596         }
1597 
1598         /* divX.l <EA>, Dr:Dq    64/32 -> 32r:32q */
1599 
1600         SRC_EA(env, den, OS_LONG, 0, NULL);
1601         num = tcg_const_i32(REG(ext, 12));
1602         reg = tcg_const_i32(REG(ext, 0));
1603         if (sign) {
1604             gen_helper_divsll(cpu_env, num, reg, den);
1605         } else {
1606             gen_helper_divull(cpu_env, num, reg, den);
1607         }
1608         tcg_temp_free(reg);
1609         tcg_temp_free(num);
1610         set_cc_op(s, CC_OP_FLAGS);
1611         return;
1612     }
1613 
1614     /* divX.l <EA>, Dq        32/32 -> 32q     */
1615     /* divXl.l <EA>, Dr:Dq    32/32 -> 32r:32q */
1616 
1617     SRC_EA(env, den, OS_LONG, 0, NULL);
1618     num = tcg_const_i32(REG(ext, 12));
1619     reg = tcg_const_i32(REG(ext, 0));
1620     if (sign) {
1621         gen_helper_divsl(cpu_env, num, reg, den);
1622     } else {
1623         gen_helper_divul(cpu_env, num, reg, den);
1624     }
1625     tcg_temp_free(reg);
1626     tcg_temp_free(num);
1627 
1628     set_cc_op(s, CC_OP_FLAGS);
1629 }
1630 
1631 static void bcd_add(TCGv dest, TCGv src)
1632 {
1633     TCGv t0, t1;
1634 
1635     /*  dest10 = dest10 + src10 + X
1636      *
1637      *        t1 = src
1638      *        t2 = t1 + 0x066
1639      *        t3 = t2 + dest + X
1640      *        t4 = t2 ^ dest
1641      *        t5 = t3 ^ t4
1642      *        t6 = ~t5 & 0x110
1643      *        t7 = (t6 >> 2) | (t6 >> 3)
1644      *        return t3 - t7
1645      */
1646 
1647     /* t1 = (src + 0x066) + dest + X
1648      *    = result with a possible excess 0x6 in some digits
1649      */
1650 
1651     t0 = tcg_const_i32(0x066);
1652     tcg_gen_add_i32(t0, t0, src);
1653 
1654     t1 = tcg_temp_new();
1655     tcg_gen_add_i32(t1, t0, dest);
1656     tcg_gen_add_i32(t1, t1, QREG_CC_X);
1657 
1658     /* we will remove the excess 0x6 where there was no carry */
1659 
1660     /* t0 = (src + 0x0066) ^ dest
1661      *    = t1 without carries
1662      */
1663 
1664     tcg_gen_xor_i32(t0, t0, dest);
1665 
1666     /* extract the carries
1667      * t0 = t0 ^ t1
1668      *    = only the carries
1669      */
1670 
1671     tcg_gen_xor_i32(t0, t0, t1);
1672 
1673     /* generate 0x2 where there was no carry
1674      * and turn each 0x2 into the 0x6 to be subtracted
1675      */
1676 
1677     tcg_gen_shri_i32(t0, t0, 3);
1678     tcg_gen_not_i32(t0, t0);
1679     tcg_gen_andi_i32(t0, t0, 0x22);
1680     tcg_gen_add_i32(dest, t0, t0);
1681     tcg_gen_add_i32(dest, dest, t0);
1682     tcg_temp_free(t0);
1683 
1684     /* remove the excess 0x6
1685      * for digits that did not generate a carry
1686      */
1687 
1688     tcg_gen_sub_i32(dest, t1, dest);
1689     tcg_temp_free(t1);
1690 }
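/*
 * Worked example of the correction above for dest = 0x19, src = 0x27,
 * X = 0 (BCD 19 + 27): t0 = 0x27 + 0x66 = 0x8D, t1 = 0x8D + 0x19 = 0xA6,
 * and the carry bits come out as 0x32.  The low digit generated a carry
 * (bit 4 set) so it keeps its +6; the high digit did not (bit 8 clear) so
 * 0x60 is subtracted, giving 0xA6 - 0x60 = 0x46 as expected.
 */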
1691 
1692 static void bcd_sub(TCGv dest, TCGv src)
1693 {
1694     TCGv t0, t1, t2;
1695 
1696     /*  dest10 = dest10 - src10 - X
1697      *         = bcd_add(dest + 1 - X, 0x199 - src)
1698      */
1699 
1700     /* t0 = 0x066 + (0x199 - src) */
1701 
1702     t0 = tcg_temp_new();
1703     tcg_gen_subfi_i32(t0, 0x1ff, src);
1704 
1705     /* t1 = t0 + dest + 1 - X */
1706 
1707     t1 = tcg_temp_new();
1708     tcg_gen_add_i32(t1, t0, dest);
1709     tcg_gen_addi_i32(t1, t1, 1);
1710     tcg_gen_sub_i32(t1, t1, QREG_CC_X);
1711 
1712     /* t2 = t0 ^ dest */
1713 
1714     t2 = tcg_temp_new();
1715     tcg_gen_xor_i32(t2, t0, dest);
1716 
1717     /* t0 = t1 ^ t2 */
1718 
1719     tcg_gen_xor_i32(t0, t1, t2);
1720 
1721     /* t2 = ~t0 & 0x110
1722      * t0 = (t2 >> 2) | (t2 >> 3)
1723      *
1724      * to fit on 8bit operands, changed in:
1725      * to fit in 8-bit operands, this is rewritten as:
1726      * t2 = ~(t0 >> 3) & 0x22
1727      * t0 = t2 + t2
1728      * t0 = t0 + t2
1729      */
1730 
1731     tcg_gen_shri_i32(t2, t0, 3);
1732     tcg_gen_not_i32(t2, t2);
1733     tcg_gen_andi_i32(t2, t2, 0x22);
1734     tcg_gen_add_i32(t0, t2, t2);
1735     tcg_gen_add_i32(t0, t0, t2);
1736     tcg_temp_free(t2);
1737 
1738     /* return t1 - t0 */
1739 
1740     tcg_gen_sub_i32(dest, t1, t0);
1741     tcg_temp_free(t0);
1742     tcg_temp_free(t1);
1743 }
1744 
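/* Set CC from a BCD result: the low byte is OR-ed into Z, so Z can only be
 * cleared by a nonzero result, never set (the documented ABCD/SBCD/NBCD
 * behavior), and bit 8 of the raw result is the decimal carry/borrow,
 * copied into both C and X.
 */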
1745 static void bcd_flags(TCGv val)
1746 {
1747     tcg_gen_andi_i32(QREG_CC_C, val, 0x0ff);
1748     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_C);
1749 
1750     tcg_gen_extract_i32(QREG_CC_C, val, 8, 1);
1751 
1752     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
1753 }
1754 
1755 DISAS_INSN(abcd_reg)
1756 {
1757     TCGv src;
1758     TCGv dest;
1759 
1760     gen_flush_flags(s); /* !Z is sticky */
1761 
1762     src = gen_extend(DREG(insn, 0), OS_BYTE, 0);
1763     dest = gen_extend(DREG(insn, 9), OS_BYTE, 0);
1764     bcd_add(dest, src);
1765     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1766 
1767     bcd_flags(dest);
1768 }
1769 
1770 DISAS_INSN(abcd_mem)
1771 {
1772     TCGv src, dest, addr;
1773 
1774     gen_flush_flags(s); /* !Z is sticky */
1775 
1776     /* Indirect pre-decrement load (mode 4) */
1777 
1778     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1779                       NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1780     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1781                        NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1782 
1783     bcd_add(dest, src);
1784 
1785     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1786                 EA_STORE, IS_USER(s));
1787 
1788     bcd_flags(dest);
1789 }
1790 
1791 DISAS_INSN(sbcd_reg)
1792 {
1793     TCGv src, dest;
1794 
1795     gen_flush_flags(s); /* !Z is sticky */
1796 
1797     src = gen_extend(DREG(insn, 0), OS_BYTE, 0);
1798     dest = gen_extend(DREG(insn, 9), OS_BYTE, 0);
1799 
1800     bcd_sub(dest, src);
1801 
1802     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1803 
1804     bcd_flags(dest);
1805 }
1806 
1807 DISAS_INSN(sbcd_mem)
1808 {
1809     TCGv src, dest, addr;
1810 
1811     gen_flush_flags(s); /* !Z is sticky */
1812 
1813     /* Indirect pre-decrement load (mode 4) */
1814 
1815     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1816                       NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1817     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1818                        NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1819 
1820     bcd_sub(dest, src);
1821 
1822     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1823                 EA_STORE, IS_USER(s));
1824 
1825     bcd_flags(dest);
1826 }
1827 
1828 DISAS_INSN(nbcd)
1829 {
1830     TCGv src, dest;
1831     TCGv addr;
1832 
1833     gen_flush_flags(s); /* !Z is sticky */
1834 
1835     SRC_EA(env, src, OS_BYTE, 0, &addr);
1836 
1837     dest = tcg_const_i32(0);
1838     bcd_sub(dest, src);
1839 
1840     DEST_EA(env, insn, OS_BYTE, dest, &addr);
1841 
1842     bcd_flags(dest);
1843 
1844     tcg_temp_free(dest);
1845 }
1846 
1847 DISAS_INSN(addsub)
1848 {
1849     TCGv reg;
1850     TCGv dest;
1851     TCGv src;
1852     TCGv tmp;
1853     TCGv addr;
1854     int add;
1855     int opsize;
1856 
1857     add = (insn & 0x4000) != 0;
1858     opsize = insn_opsize(insn);
1859     reg = gen_extend(DREG(insn, 9), opsize, 1);
1860     dest = tcg_temp_new();
1861     if (insn & 0x100) {
1862         SRC_EA(env, tmp, opsize, 1, &addr);
1863         src = reg;
1864     } else {
1865         tmp = reg;
1866         SRC_EA(env, src, opsize, 1, NULL);
1867     }
1868     if (add) {
1869         tcg_gen_add_i32(dest, tmp, src);
1870         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, src);
1871         set_cc_op(s, CC_OP_ADDB + opsize);
1872     } else {
1873         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, tmp, src);
1874         tcg_gen_sub_i32(dest, tmp, src);
1875         set_cc_op(s, CC_OP_SUBB + opsize);
1876     }
1877     gen_update_cc_add(dest, src, opsize);
1878     if (insn & 0x100) {
1879         DEST_EA(env, insn, opsize, dest, &addr);
1880     } else {
1881         gen_partset_reg(opsize, DREG(insn, 9), dest);
1882     }
1883     tcg_temp_free(dest);
1884 }
1885 
1886 /* Reverse the order of the bits in REG.  */
1887 DISAS_INSN(bitrev)
1888 {
1889     TCGv reg;
1890     reg = DREG(insn, 0);
1891     gen_helper_bitrev(reg, reg);
1892 }
1893 
1894 DISAS_INSN(bitop_reg)
1895 {
1896     int opsize;
1897     int op;
1898     TCGv src1;
1899     TCGv src2;
1900     TCGv tmp;
1901     TCGv addr;
1902     TCGv dest;
1903 
1904     if ((insn & 0x38) != 0)
1905         opsize = OS_BYTE;
1906     else
1907         opsize = OS_LONG;
1908     op = (insn >> 6) & 3;
1909     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
1910 
1911     gen_flush_flags(s);
1912     src2 = tcg_temp_new();
1913     if (opsize == OS_BYTE)
1914         tcg_gen_andi_i32(src2, DREG(insn, 9), 7);
1915     else
1916         tcg_gen_andi_i32(src2, DREG(insn, 9), 31);
1917 
1918     tmp = tcg_const_i32(1);
1919     tcg_gen_shl_i32(tmp, tmp, src2);
1920     tcg_temp_free(src2);
1921 
1922     tcg_gen_and_i32(QREG_CC_Z, src1, tmp);
1923 
1924     dest = tcg_temp_new();
1925     switch (op) {
1926     case 1: /* bchg */
1927         tcg_gen_xor_i32(dest, src1, tmp);
1928         break;
1929     case 2: /* bclr */
1930         tcg_gen_andc_i32(dest, src1, tmp);
1931         break;
1932     case 3: /* bset */
1933         tcg_gen_or_i32(dest, src1, tmp);
1934         break;
1935     default: /* btst */
1936         break;
1937     }
1938     tcg_temp_free(tmp);
1939     if (op) {
1940         DEST_EA(env, insn, opsize, dest, &addr);
1941     }
1942     tcg_temp_free(dest);
1943 }
1944 
1945 DISAS_INSN(sats)
1946 {
1947     TCGv reg;
1948     reg = DREG(insn, 0);
1949     gen_flush_flags(s);
1950     gen_helper_sats(reg, reg, QREG_CC_V);
1951     gen_logic_cc(s, reg, OS_LONG);
1952 }
1953 
1954 static void gen_push(DisasContext *s, TCGv val)
1955 {
1956     TCGv tmp;
1957 
1958     tmp = tcg_temp_new();
1959     tcg_gen_subi_i32(tmp, QREG_SP, 4);
1960     gen_store(s, OS_LONG, tmp, val, IS_USER(s));
1961     tcg_gen_mov_i32(QREG_SP, tmp);
1962     tcg_temp_free(tmp);
1963 }
1964 
1965 static TCGv mreg(int reg)
1966 {
1967     if (reg < 8) {
1968         /* Dx */
1969         return cpu_dregs[reg];
1970     }
1971     /* Ax */
1972     return cpu_aregs[reg & 7];
1973 }
1974 
1975 DISAS_INSN(movem)
1976 {
1977     TCGv addr, incr, tmp, r[16];
1978     int is_load = (insn & 0x0400) != 0;
1979     int opsize = (insn & 0x40) != 0 ? OS_LONG : OS_WORD;
1980     uint16_t mask = read_im16(env, s);
1981     int mode = extract32(insn, 3, 3);
1982     int reg0 = REG(insn, 0);
1983     int i;
1984 
1985     tmp = cpu_aregs[reg0];
1986 
1987     switch (mode) {
1988     case 0: /* data register direct */
1989     case 1: /* addr register direct */
1990     do_addr_fault:
1991         gen_addr_fault(s);
1992         return;
1993 
1994     case 2: /* indirect */
1995         break;
1996 
1997     case 3: /* indirect post-increment */
1998         if (!is_load) {
1999             /* post-increment is not allowed */
2000             goto do_addr_fault;
2001         }
2002         break;
2003 
2004     case 4: /* indirect pre-decrement */
2005         if (is_load) {
2006             /* pre-decrement is not allowed */
2007             goto do_addr_fault;
2008         }
2009         /* We want a bare copy of the address reg, without the pre-decrement
2010            adjustment that gen_lea would apply.  */
2011         break;
2012 
2013     default:
2014         tmp = gen_lea_mode(env, s, mode, reg0, opsize);
2015         if (IS_NULL_QREG(tmp)) {
2016             goto do_addr_fault;
2017         }
2018         break;
2019     }
2020 
2021     addr = tcg_temp_new();
2022     tcg_gen_mov_i32(addr, tmp);
2023     incr = tcg_const_i32(opsize_bytes(opsize));
2024 
2025     if (is_load) {
2026         /* memory to register */
2027         for (i = 0; i < 16; i++) {
2028             if (mask & (1 << i)) {
2029                 r[i] = gen_load(s, opsize, addr, 1, IS_USER(s));
2030                 tcg_gen_add_i32(addr, addr, incr);
2031             }
2032         }
2033         for (i = 0; i < 16; i++) {
2034             if (mask & (1 << i)) {
2035                 tcg_gen_mov_i32(mreg(i), r[i]);
2036                 tcg_temp_free(r[i]);
2037             }
2038         }
2039         if (mode == 3) {
2040             /* post-increment: movem (An)+,X */
2041             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2042         }
2043     } else {
2044         /* register to memory */
2045         if (mode == 4) {
2046             /* pre-decrement: movem X,-(An) */
2047             for (i = 15; i >= 0; i--) {
2048                 if ((mask << i) & 0x8000) {
2049                     tcg_gen_sub_i32(addr, addr, incr);
2050                     if (reg0 + 8 == i &&
2051                         m68k_feature(s->env, M68K_FEATURE_EXT_FULL)) {
2052                         /* M68020+: if the addressing register is the
2053                          * register moved to memory, the value written
2054                          * is the initial value decremented by the size of
2055                          * the operation, regardless of how many actual
2056                          * stores have been performed until this point.
2057                          * M68000/M68010: the value is the initial value.
2058                          */
2059                         tmp = tcg_temp_new();
2060                         tcg_gen_sub_i32(tmp, cpu_aregs[reg0], incr);
2061                         gen_store(s, opsize, addr, tmp, IS_USER(s));
2062                         tcg_temp_free(tmp);
2063                     } else {
2064                         gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2065                     }
2066                 }
2067             }
2068             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2069         } else {
2070             for (i = 0; i < 16; i++) {
2071                 if (mask & (1 << i)) {
2072                     gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2073                     tcg_gen_add_i32(addr, addr, incr);
2074                 }
2075             }
2076         }
2077     }
2078 
2079     tcg_temp_free(incr);
2080     tcg_temp_free(addr);
2081 }
2082 
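/* MOVEP transfers two or four bytes between a data register and alternate
 * memory bytes (every other byte, starting at d16(Ay)), most significant
 * byte first; it was designed for driving 8-bit peripherals on a 16-bit bus.
 */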
2083 DISAS_INSN(movep)
2084 {
2085     uint8_t i;
2086     int16_t displ;
2087     TCGv reg;
2088     TCGv addr;
2089     TCGv abuf;
2090     TCGv dbuf;
2091 
2092     displ = read_im16(env, s);
2093 
2094     addr = AREG(insn, 0);
2095     reg = DREG(insn, 9);
2096 
2097     abuf = tcg_temp_new();
2098     tcg_gen_addi_i32(abuf, addr, displ);
2099     dbuf = tcg_temp_new();
2100 
2101     if (insn & 0x40) {
2102         i = 4;
2103     } else {
2104         i = 2;
2105     }
2106 
2107     if (insn & 0x80) {
2108         for ( ; i > 0 ; i--) {
2109             tcg_gen_shri_i32(dbuf, reg, (i - 1) * 8);
2110             tcg_gen_qemu_st8(dbuf, abuf, IS_USER(s));
2111             if (i > 1) {
2112                 tcg_gen_addi_i32(abuf, abuf, 2);
2113             }
2114         }
2115     } else {
2116         for ( ; i > 0 ; i--) {
2117             tcg_gen_qemu_ld8u(dbuf, abuf, IS_USER(s));
2118             tcg_gen_deposit_i32(reg, reg, dbuf, (i - 1) * 8, 8);
2119             if (i > 1) {
2120                 tcg_gen_addi_i32(abuf, abuf, 2);
2121             }
2122         }
2123     }
2124     tcg_temp_free(abuf);
2125     tcg_temp_free(dbuf);
2126 }
2127 
2128 DISAS_INSN(bitop_im)
2129 {
2130     int opsize;
2131     int op;
2132     TCGv src1;
2133     uint32_t mask;
2134     int bitnum;
2135     TCGv tmp;
2136     TCGv addr;
2137 
2138     if ((insn & 0x38) != 0)
2139         opsize = OS_BYTE;
2140     else
2141         opsize = OS_LONG;
2142     op = (insn >> 6) & 3;
2143 
2144     bitnum = read_im16(env, s);
2145     if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
2146         if (bitnum & 0xfe00) {
2147             disas_undef(env, s, insn);
2148             return;
2149         }
2150     } else {
2151         if (bitnum & 0xff00) {
2152             disas_undef(env, s, insn);
2153             return;
2154         }
2155     }
2156 
2157     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
2158 
2159     gen_flush_flags(s);
2160     if (opsize == OS_BYTE)
2161         bitnum &= 7;
2162     else
2163         bitnum &= 31;
2164     mask = 1 << bitnum;
2165 
2166     tcg_gen_andi_i32(QREG_CC_Z, src1, mask);
2167 
2168     if (op) {
2169         tmp = tcg_temp_new();
2170         switch (op) {
2171         case 1: /* bchg */
2172             tcg_gen_xori_i32(tmp, src1, mask);
2173             break;
2174         case 2: /* bclr */
2175             tcg_gen_andi_i32(tmp, src1, ~mask);
2176             break;
2177         case 3: /* bset */
2178             tcg_gen_ori_i32(tmp, src1, mask);
2179             break;
2180         default: /* btst */
2181             break;
2182         }
2183         DEST_EA(env, insn, opsize, tmp, &addr);
2184         tcg_temp_free(tmp);
2185     }
2186 }
2187 
2188 static TCGv gen_get_ccr(DisasContext *s)
2189 {
2190     TCGv dest;
2191 
2192     update_cc_op(s);
2193     dest = tcg_temp_new();
2194     gen_helper_get_ccr(dest, cpu_env);
2195     return dest;
2196 }
2197 
2198 static TCGv gen_get_sr(DisasContext *s)
2199 {
2200     TCGv ccr;
2201     TCGv sr;
2202 
2203     ccr = gen_get_ccr(s);
2204     sr = tcg_temp_new();
2205     tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
2206     tcg_gen_or_i32(sr, sr, ccr);
2207     return sr;
2208 }
2209 
2210 static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
2211 {
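    /* This spells out the translator's internal flag encoding under
     * CC_OP_FLAGS: C and X live in bit 0, N and V carry the flag in
     * their sign bit (written as 0 or -1 here), and Z is considered
     * set when QREG_CC_Z is zero.
     */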
2212     if (ccr_only) {
2213         tcg_gen_movi_i32(QREG_CC_C, val & CCF_C ? 1 : 0);
2214         tcg_gen_movi_i32(QREG_CC_V, val & CCF_V ? -1 : 0);
2215         tcg_gen_movi_i32(QREG_CC_Z, val & CCF_Z ? 0 : 1);
2216         tcg_gen_movi_i32(QREG_CC_N, val & CCF_N ? -1 : 0);
2217         tcg_gen_movi_i32(QREG_CC_X, val & CCF_X ? 1 : 0);
2218     } else {
2219         TCGv sr = tcg_const_i32(val);
2220         gen_helper_set_sr(cpu_env, sr);
2221         tcg_temp_free(sr);
2222     }
2223     set_cc_op(s, CC_OP_FLAGS);
2224 }
2225 
2226 static void gen_set_sr(DisasContext *s, TCGv val, int ccr_only)
2227 {
2228     if (ccr_only) {
2229         gen_helper_set_ccr(cpu_env, val);
2230     } else {
2231         gen_helper_set_sr(cpu_env, val);
2232     }
2233     set_cc_op(s, CC_OP_FLAGS);
2234 }
2235 
2236 static void gen_move_to_sr(CPUM68KState *env, DisasContext *s, uint16_t insn,
2237                            bool ccr_only)
2238 {
2239     if ((insn & 0x3f) == 0x3c) {
2240         uint16_t val;
2241         val = read_im16(env, s);
2242         gen_set_sr_im(s, val, ccr_only);
2243     } else {
2244         TCGv src;
2245         SRC_EA(env, src, OS_WORD, 0, NULL);
2246         gen_set_sr(s, src, ccr_only);
2247     }
2248 }
2249 
2250 DISAS_INSN(arith_im)
2251 {
2252     int op;
2253     TCGv im;
2254     TCGv src1;
2255     TCGv dest;
2256     TCGv addr;
2257     int opsize;
2258     bool with_SR = ((insn & 0x3f) == 0x3c);
2259 
2260     op = (insn >> 9) & 7;
2261     opsize = insn_opsize(insn);
2262     switch (opsize) {
2263     case OS_BYTE:
2264         im = tcg_const_i32((int8_t)read_im8(env, s));
2265         break;
2266     case OS_WORD:
2267         im = tcg_const_i32((int16_t)read_im16(env, s));
2268         break;
2269     case OS_LONG:
2270         im = tcg_const_i32(read_im32(env, s));
2271         break;
2272     default:
2273         abort();
2274     }
2275 
2276     if (with_SR) {
2277         /* SR/CCR can only be used with andi/eori/ori */
2278         if (op == 2 || op == 3 || op == 6) {
2279             disas_undef(env, s, insn);
2280             return;
2281         }
2282         switch (opsize) {
2283         case OS_BYTE:
2284             src1 = gen_get_ccr(s);
2285             break;
2286         case OS_WORD:
2287             if (IS_USER(s)) {
2288                 gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
2289                 return;
2290             }
2291             src1 = gen_get_sr(s);
2292             break;
2293         case OS_LONG:
2294             disas_undef(env, s, insn);
2295             return;
2296         }
2297     } else {
2298         SRC_EA(env, src1, opsize, 1, (op == 6) ? NULL : &addr);
2299     }
2300     dest = tcg_temp_new();
2301     switch (op) {
2302     case 0: /* ori */
2303         tcg_gen_or_i32(dest, src1, im);
2304         if (with_SR) {
2305             gen_set_sr(s, dest, opsize == OS_BYTE);
2306         } else {
2307             DEST_EA(env, insn, opsize, dest, &addr);
2308             gen_logic_cc(s, dest, opsize);
2309         }
2310         break;
2311     case 1: /* andi */
2312         tcg_gen_and_i32(dest, src1, im);
2313         if (with_SR) {
2314             gen_set_sr(s, dest, opsize == OS_BYTE);
2315         } else {
2316             DEST_EA(env, insn, opsize, dest, &addr);
2317             gen_logic_cc(s, dest, opsize);
2318         }
2319         break;
2320     case 2: /* subi */
2321         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, src1, im);
2322         tcg_gen_sub_i32(dest, src1, im);
2323         gen_update_cc_add(dest, im, opsize);
2324         set_cc_op(s, CC_OP_SUBB + opsize);
2325         DEST_EA(env, insn, opsize, dest, &addr);
2326         break;
2327     case 3: /* addi */
2328         tcg_gen_add_i32(dest, src1, im);
2329         gen_update_cc_add(dest, im, opsize);
2330         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, im);
2331         set_cc_op(s, CC_OP_ADDB + opsize);
2332         DEST_EA(env, insn, opsize, dest, &addr);
2333         break;
2334     case 5: /* eori */
2335         tcg_gen_xor_i32(dest, src1, im);
2336         if (with_SR) {
2337             gen_set_sr(s, dest, opsize == OS_BYTE);
2338         } else {
2339             DEST_EA(env, insn, opsize, dest, &addr);
2340             gen_logic_cc(s, dest, opsize);
2341         }
2342         break;
2343     case 6: /* cmpi */
2344         gen_update_cc_cmp(s, src1, im, opsize);
2345         break;
2346     default:
2347         abort();
2348     }
2349     tcg_temp_free(im);
2350     tcg_temp_free(dest);
2351 }
2352 
2353 DISAS_INSN(cas)
2354 {
2355     int opsize;
2356     TCGv addr;
2357     uint16_t ext;
2358     TCGv load;
2359     TCGv cmp;
2360     TCGMemOp opc;
2361 
2362     switch ((insn >> 9) & 3) {
2363     case 1:
2364         opsize = OS_BYTE;
2365         opc = MO_SB;
2366         break;
2367     case 2:
2368         opsize = OS_WORD;
2369         opc = MO_TESW;
2370         break;
2371     case 3:
2372         opsize = OS_LONG;
2373         opc = MO_TESL;
2374         break;
2375     default:
2376         g_assert_not_reached();
2377     }
2378 
2379     ext = read_im16(env, s);
2380 
2381     /* cas Dc,Du,<EA> */
2382 
2383     addr = gen_lea(env, s, insn, opsize);
2384     if (IS_NULL_QREG(addr)) {
2385         gen_addr_fault(s);
2386         return;
2387     }
2388 
2389     cmp = gen_extend(DREG(ext, 0), opsize, 1);
2390 
2391     /* if  <EA> == Dc then
2392      *     <EA> = Du
2393      *     Dc = <EA> (because <EA> == Dc)
2394      * else
2395      *     Dc = <EA>
2396      */
2397 
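    /* tcg_gen_atomic_cmpxchg_i32 returns the value that was in memory,
     * so Dc below always receives the old <EA> contents whether or not
     * the store took place, which matches the pseudo-code above.
     */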
2398     load = tcg_temp_new();
2399     tcg_gen_atomic_cmpxchg_i32(load, addr, cmp, DREG(ext, 6),
2400                                IS_USER(s), opc);
2401     /* update flags before overwriting Dc with the loaded value */
2402     gen_update_cc_cmp(s, load, cmp, opsize);
2403     gen_partset_reg(opsize, DREG(ext, 0), load);
2404 
2405     tcg_temp_free(load);
2406 
2407     switch (extract32(insn, 3, 3)) {
2408     case 3: /* Indirect postincrement.  */
2409         tcg_gen_addi_i32(AREG(insn, 0), addr, opsize_bytes(opsize));
2410         break;
2411     case 4: /* Indirect predecrement.  */
2412         tcg_gen_mov_i32(AREG(insn, 0), addr);
2413         break;
2414     }
2415 }
2416 
2417 DISAS_INSN(cas2w)
2418 {
2419     uint16_t ext1, ext2;
2420     TCGv addr1, addr2;
2421     TCGv regs;
2422 
2423     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2424 
2425     ext1 = read_im16(env, s);
2426 
2427     if (ext1 & 0x8000) {
2428         /* Address Register */
2429         addr1 = AREG(ext1, 12);
2430     } else {
2431         /* Data Register */
2432         addr1 = DREG(ext1, 12);
2433     }
2434 
2435     ext2 = read_im16(env, s);
2436     if (ext2 & 0x8000) {
2437         /* Address Register */
2438         addr2 = AREG(ext2, 12);
2439     } else {
2440         /* Data Register */
2441         addr2 = DREG(ext2, 12);
2442     }
2443 
2444     /* if (R1) == Dc1 && (R2) == Dc2 then
2445      *     (R1) = Du1
2446      *     (R2) = Du2
2447      * else
2448      *     Dc1 = (R1)
2449      *     Dc2 = (R2)
2450      */
2451 
2452     regs = tcg_const_i32(REG(ext2, 6) |
2453                          (REG(ext1, 6) << 3) |
2454                          (REG(ext2, 0) << 6) |
2455                          (REG(ext1, 0) << 9));
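    /* There is no parallel-safe helper for the word variant; under
     * CF_PARALLEL we raise EXCP_ATOMIC so that the instruction is
     * re-executed in an exclusive context (compare cas2l below).
     */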
2456     if (tb_cflags(s->tb) & CF_PARALLEL) {
2457         gen_helper_exit_atomic(cpu_env);
2458     } else {
2459         gen_helper_cas2w(cpu_env, regs, addr1, addr2);
2460     }
2461     tcg_temp_free(regs);
2462 
2463     /* Note that the cas2w helper also assigns to env->cc_op.  */
2464     s->cc_op = CC_OP_CMPW;
2465     s->cc_op_synced = 1;
2466 }
2467 
2468 DISAS_INSN(cas2l)
2469 {
2470     uint16_t ext1, ext2;
2471     TCGv addr1, addr2, regs;
2472 
2473     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2474 
2475     ext1 = read_im16(env, s);
2476 
2477     if (ext1 & 0x8000) {
2478         /* Address Register */
2479         addr1 = AREG(ext1, 12);
2480     } else {
2481         /* Data Register */
2482         addr1 = DREG(ext1, 12);
2483     }
2484 
2485     ext2 = read_im16(env, s);
2486     if (ext2 & 0x8000) {
2487         /* Address Register */
2488         addr2 = AREG(ext2, 12);
2489     } else {
2490         /* Data Register */
2491         addr2 = DREG(ext2, 12);
2492     }
2493 
2494     /* if (R1) == Dc1 && (R2) == Dc2 then
2495      *     (R1) = Du1
2496      *     (R2) = Du2
2497      * else
2498      *     Dc1 = (R1)
2499      *     Dc2 = (R2)
2500      */
2501 
2502     regs = tcg_const_i32(REG(ext2, 6) |
2503                          (REG(ext1, 6) << 3) |
2504                          (REG(ext2, 0) << 6) |
2505                          (REG(ext1, 0) << 9));
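    /* Unlike cas2w, the long variant provides a helper that is safe to
     * call while other vCPUs are running in parallel.
     */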
2506     if (tb_cflags(s->tb) & CF_PARALLEL) {
2507         gen_helper_cas2l_parallel(cpu_env, regs, addr1, addr2);
2508     } else {
2509         gen_helper_cas2l(cpu_env, regs, addr1, addr2);
2510     }
2511     tcg_temp_free(regs);
2512 
2513     /* Note that the cas2l helper also assigns to env->cc_op.  */
2514     s->cc_op = CC_OP_CMPL;
2515     s->cc_op_synced = 1;
2516 }
2517 
2518 DISAS_INSN(byterev)
2519 {
2520     TCGv reg;
2521 
2522     reg = DREG(insn, 0);
2523     tcg_gen_bswap32_i32(reg, reg);
2524 }
2525 
2526 DISAS_INSN(move)
2527 {
2528     TCGv src;
2529     TCGv dest;
2530     int op;
2531     int opsize;
2532 
2533     switch (insn >> 12) {
2534     case 1: /* move.b */
2535         opsize = OS_BYTE;
2536         break;
2537     case 2: /* move.l */
2538         opsize = OS_LONG;
2539         break;
2540     case 3: /* move.w */
2541         opsize = OS_WORD;
2542         break;
2543     default:
2544         abort();
2545     }
2546     SRC_EA(env, src, opsize, 1, NULL);
2547     op = (insn >> 6) & 7;
2548     if (op == 1) {
2549         /* movea */
2550         /* The value will already have been sign extended.  */
2551         dest = AREG(insn, 9);
2552         tcg_gen_mov_i32(dest, src);
2553     } else {
2554         /* normal move */
2555         uint16_t dest_ea;
2556         dest_ea = ((insn >> 9) & 7) | (op << 3);
2557         DEST_EA(env, dest_ea, opsize, src, NULL);
2558         /* This will be correct because loads sign extend.  */
2559         gen_logic_cc(s, src, opsize);
2560     }
2561 }
2562 
2563 DISAS_INSN(negx)
2564 {
2565     TCGv z;
2566     TCGv src;
2567     TCGv addr;
2568     int opsize;
2569 
2570     opsize = insn_opsize(insn);
2571     SRC_EA(env, src, opsize, 1, &addr);
2572 
2573     gen_flush_flags(s); /* compute old Z */
2574 
2575     /* Perform subtraction with borrow.
2576      * (X, N) = -(src + X);
2577      */
2578 
2579     z = tcg_const_i32(0);
2580     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, z, QREG_CC_X, z);
2581     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, z, z, QREG_CC_N, QREG_CC_X);
2582     tcg_temp_free(z);
2583     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
2584 
2585     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
2586 
2587     /* Compute signed-overflow for negation.  The normal formula for
2588      * subtraction is (res ^ dest) & (dest ^ src), but with dest==0
2589      * this simplifies to res & src.
2590      */
2591 
2592     tcg_gen_and_i32(QREG_CC_V, QREG_CC_N, src);
2593 
2594     /* Copy the rest of the results into place.  */
2595     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
2596     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
2597 
2598     set_cc_op(s, CC_OP_FLAGS);
2599 
2600     /* result is in QREG_CC_N */
2601 
2602     DEST_EA(env, insn, opsize, QREG_CC_N, &addr);
2603 }
2604 
2605 DISAS_INSN(lea)
2606 {
2607     TCGv reg;
2608     TCGv tmp;
2609 
2610     reg = AREG(insn, 9);
2611     tmp = gen_lea(env, s, insn, OS_LONG);
2612     if (IS_NULL_QREG(tmp)) {
2613         gen_addr_fault(s);
2614         return;
2615     }
2616     tcg_gen_mov_i32(reg, tmp);
2617 }
2618 
2619 DISAS_INSN(clr)
2620 {
2621     int opsize;
2622     TCGv zero;
2623 
2624     zero = tcg_const_i32(0);
2625 
2626     opsize = insn_opsize(insn);
2627     DEST_EA(env, insn, opsize, zero, NULL);
2628     gen_logic_cc(s, zero, opsize);
2629     tcg_temp_free(zero);
2630 }
2631 
2632 DISAS_INSN(move_from_ccr)
2633 {
2634     TCGv ccr;
2635 
2636     ccr = gen_get_ccr(s);
2637     DEST_EA(env, insn, OS_WORD, ccr, NULL);
2638 }
2639 
2640 DISAS_INSN(neg)
2641 {
2642     TCGv src1;
2643     TCGv dest;
2644     TCGv addr;
2645     int opsize;
2646 
2647     opsize = insn_opsize(insn);
2648     SRC_EA(env, src1, opsize, 1, &addr);
2649     dest = tcg_temp_new();
2650     tcg_gen_neg_i32(dest, src1);
2651     set_cc_op(s, CC_OP_SUBB + opsize);
2652     gen_update_cc_add(dest, src1, opsize);
2653     tcg_gen_setcondi_i32(TCG_COND_NE, QREG_CC_X, dest, 0);
2654     DEST_EA(env, insn, opsize, dest, &addr);
2655     tcg_temp_free(dest);
2656 }
2657 
2658 DISAS_INSN(move_to_ccr)
2659 {
2660     gen_move_to_sr(env, s, insn, true);
2661 }
2662 
2663 DISAS_INSN(not)
2664 {
2665     TCGv src1;
2666     TCGv dest;
2667     TCGv addr;
2668     int opsize;
2669 
2670     opsize = insn_opsize(insn);
2671     SRC_EA(env, src1, opsize, 1, &addr);
2672     dest = tcg_temp_new();
2673     tcg_gen_not_i32(dest, src1);
2674     DEST_EA(env, insn, opsize, dest, &addr);
2675     gen_logic_cc(s, dest, opsize);
2676 }
2677 
2678 DISAS_INSN(swap)
2679 {
2680     TCGv src1;
2681     TCGv src2;
2682     TCGv reg;
2683 
2684     src1 = tcg_temp_new();
2685     src2 = tcg_temp_new();
2686     reg = DREG(insn, 0);
2687     tcg_gen_shli_i32(src1, reg, 16);
2688     tcg_gen_shri_i32(src2, reg, 16);
2689     tcg_gen_or_i32(reg, src1, src2);
2690     tcg_temp_free(src2);
2691     tcg_temp_free(src1);
2692     gen_logic_cc(s, reg, OS_LONG);
2693 }
2694 
2695 DISAS_INSN(bkpt)
2696 {
2697     gen_exception(s, s->insn_pc, EXCP_DEBUG);
2698 }
2699 
2700 DISAS_INSN(pea)
2701 {
2702     TCGv tmp;
2703 
2704     tmp = gen_lea(env, s, insn, OS_LONG);
2705     if (IS_NULL_QREG(tmp)) {
2706         gen_addr_fault(s);
2707         return;
2708     }
2709     gen_push(s, tmp);
2710 }
2711 
2712 DISAS_INSN(ext)
2713 {
2714     int op;
2715     TCGv reg;
2716     TCGv tmp;
2717 
2718     reg = DREG(insn, 0);
2719     op = (insn >> 6) & 7;
2720     tmp = tcg_temp_new();
2721     if (op == 3)
2722         tcg_gen_ext16s_i32(tmp, reg);
2723     else
2724         tcg_gen_ext8s_i32(tmp, reg);
2725     if (op == 2)
2726         gen_partset_reg(OS_WORD, reg, tmp);
2727     else
2728         tcg_gen_mov_i32(reg, tmp);
2729     gen_logic_cc(s, tmp, OS_LONG);
2730     tcg_temp_free(tmp);
2731 }
2732 
2733 DISAS_INSN(tst)
2734 {
2735     int opsize;
2736     TCGv tmp;
2737 
2738     opsize = insn_opsize(insn);
2739     SRC_EA(env, tmp, opsize, 1, NULL);
2740     gen_logic_cc(s, tmp, opsize);
2741 }
2742 
2743 DISAS_INSN(pulse)
2744 {
2745     /* Implemented as a NOP.  */
2746 }
2747 
2748 DISAS_INSN(illegal)
2749 {
2750     gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
2751 }
2752 
2753 /* ??? This should be atomic.  */
2754 DISAS_INSN(tas)
2755 {
2756     TCGv dest;
2757     TCGv src1;
2758     TCGv addr;
2759 
2760     dest = tcg_temp_new();
2761     SRC_EA(env, src1, OS_BYTE, 1, &addr);
2762     gen_logic_cc(s, src1, OS_BYTE);
2763     tcg_gen_ori_i32(dest, src1, 0x80);
2764     DEST_EA(env, insn, OS_BYTE, dest, &addr);
2765     tcg_temp_free(dest);
2766 }
2767 
2768 DISAS_INSN(mull)
2769 {
2770     uint16_t ext;
2771     TCGv src1;
2772     int sign;
2773 
2774     ext = read_im16(env, s);
2775 
2776     sign = ext & 0x800;
2777 
2778     if (ext & 0x400) {
2779         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
2780             gen_exception(s, s->insn_pc, EXCP_UNSUPPORTED);
2781             return;
2782         }
2783 
2784         SRC_EA(env, src1, OS_LONG, 0, NULL);
2785 
2786         if (sign) {
2787             tcg_gen_muls2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2788         } else {
2789             tcg_gen_mulu2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2790         }
2791         /* if Dl == Dh, 68040 returns low word */
2792         tcg_gen_mov_i32(DREG(ext, 0), QREG_CC_N);
2793         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_Z);
2794         tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N);
2795 
2796         tcg_gen_movi_i32(QREG_CC_V, 0);
2797         tcg_gen_movi_i32(QREG_CC_C, 0);
2798 
2799         set_cc_op(s, CC_OP_FLAGS);
2800         return;
2801     }
2802     SRC_EA(env, src1, OS_LONG, 0, NULL);
2803     if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
2804         tcg_gen_movi_i32(QREG_CC_C, 0);
2805         if (sign) {
2806             tcg_gen_muls2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2807             /* QREG_CC_V is -(QREG_CC_V != (QREG_CC_N >> 31)) */
2808             tcg_gen_sari_i32(QREG_CC_Z, QREG_CC_N, 31);
2809             tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_Z);
2810         } else {
2811             tcg_gen_mulu2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2812             /* QREG_CC_V is -(QREG_CC_V != 0), use QREG_CC_C as 0 */
2813             tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_C);
2814         }
2815         tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
2816         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_N);
2817 
2818         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
2819 
2820         set_cc_op(s, CC_OP_FLAGS);
2821     } else {
2822         /* The upper 32 bits of the product are discarded, so
2823            muls.l and mulu.l are functionally equivalent.  */
2824         tcg_gen_mul_i32(DREG(ext, 12), src1, DREG(ext, 12));
2825         gen_logic_cc(s, DREG(ext, 12), OS_LONG);
2826     }
2827 }
2828 
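/* LINK An,#d: push An, copy the updated SP into An to establish the frame
 * pointer, then add the (typically negative) displacement to SP to reserve
 * the local stack frame.
 */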
2829 static void gen_link(DisasContext *s, uint16_t insn, int32_t offset)
2830 {
2831     TCGv reg;
2832     TCGv tmp;
2833 
2834     reg = AREG(insn, 0);
2835     tmp = tcg_temp_new();
2836     tcg_gen_subi_i32(tmp, QREG_SP, 4);
2837     gen_store(s, OS_LONG, tmp, reg, IS_USER(s));
2838     if ((insn & 7) != 7) {
2839         tcg_gen_mov_i32(reg, tmp);
2840     }
2841     tcg_gen_addi_i32(QREG_SP, tmp, offset);
2842     tcg_temp_free(tmp);
2843 }
2844 
2845 DISAS_INSN(link)
2846 {
2847     int16_t offset;
2848 
2849     offset = read_im16(env, s);
2850     gen_link(s, insn, offset);
2851 }
2852 
2853 DISAS_INSN(linkl)
2854 {
2855     int32_t offset;
2856 
2857     offset = read_im32(env, s);
2858     gen_link(s, insn, offset);
2859 }
2860 
2861 DISAS_INSN(unlk)
2862 {
2863     TCGv src;
2864     TCGv reg;
2865     TCGv tmp;
2866 
2867     src = tcg_temp_new();
2868     reg = AREG(insn, 0);
2869     tcg_gen_mov_i32(src, reg);
2870     tmp = gen_load(s, OS_LONG, src, 0, IS_USER(s));
2871     tcg_gen_mov_i32(reg, tmp);
2872     tcg_gen_addi_i32(QREG_SP, src, 4);
2873     tcg_temp_free(src);
2874 }
2875 
2876 #if defined(CONFIG_SOFTMMU)
2877 DISAS_INSN(reset)
2878 {
2879     if (IS_USER(s)) {
2880         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
2881         return;
2882     }
2883 
2884     gen_helper_reset(cpu_env);
2885 }
2886 #endif
2887 
2888 DISAS_INSN(nop)
2889 {
2890 }
2891 
2892 DISAS_INSN(rtd)
2893 {
2894     TCGv tmp;
2895     int16_t offset = read_im16(env, s);
2896 
2897     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2898     tcg_gen_addi_i32(QREG_SP, QREG_SP, offset + 4);
2899     gen_jmp(s, tmp);
2900 }
2901 
2902 DISAS_INSN(rts)
2903 {
2904     TCGv tmp;
2905 
2906     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2907     tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
2908     gen_jmp(s, tmp);
2909 }
2910 
2911 DISAS_INSN(jump)
2912 {
2913     TCGv tmp;
2914 
2915     /* Load the target address first to ensure correct exception
2916        behavior.  */
2917     tmp = gen_lea(env, s, insn, OS_LONG);
2918     if (IS_NULL_QREG(tmp)) {
2919         gen_addr_fault(s);
2920         return;
2921     }
2922     if ((insn & 0x40) == 0) {
2923         /* jsr */
2924         gen_push(s, tcg_const_i32(s->pc));
2925     }
2926     gen_jmp(s, tmp);
2927 }
2928 
2929 DISAS_INSN(addsubq)
2930 {
2931     TCGv src;
2932     TCGv dest;
2933     TCGv val;
2934     int imm;
2935     TCGv addr;
2936     int opsize;
2937 
2938     if ((insn & 070) == 010) {
2939         /* Operation on address register is always long.  */
2940         opsize = OS_LONG;
2941     } else {
2942         opsize = insn_opsize(insn);
2943     }
2944     SRC_EA(env, src, opsize, 1, &addr);
2945     imm = (insn >> 9) & 7;
2946     if (imm == 0) {
2947         imm = 8;
2948     }
2949     val = tcg_const_i32(imm);
2950     dest = tcg_temp_new();
2951     tcg_gen_mov_i32(dest, src);
2952     if ((insn & 0x38) == 0x08) {
2953         /* Don't update condition codes if the destination is an
2954            address register.  */
2955         if (insn & 0x0100) {
2956             tcg_gen_sub_i32(dest, dest, val);
2957         } else {
2958             tcg_gen_add_i32(dest, dest, val);
2959         }
2960     } else {
2961         if (insn & 0x0100) {
2962             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
2963             tcg_gen_sub_i32(dest, dest, val);
2964             set_cc_op(s, CC_OP_SUBB + opsize);
2965         } else {
2966             tcg_gen_add_i32(dest, dest, val);
2967             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
2968             set_cc_op(s, CC_OP_ADDB + opsize);
2969         }
2970         gen_update_cc_add(dest, val, opsize);
2971     }
2972     tcg_temp_free(val);
2973     DEST_EA(env, insn, opsize, dest, &addr);
2974     tcg_temp_free(dest);
2975 }
2976 
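/* TPF (ColdFire trapf): never traps, just consumes any extension words. */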
2977 DISAS_INSN(tpf)
2978 {
2979     switch (insn & 7) {
2980     case 2: /* One extension word.  */
2981         s->pc += 2;
2982         break;
2983     case 3: /* Two extension words.  */
2984         s->pc += 4;
2985         break;
2986     case 4: /* No extension words.  */
2987         break;
2988     default:
2989         disas_undef(env, s, insn);
2990     }
2991 }
2992 
2993 DISAS_INSN(branch)
2994 {
2995     int32_t offset;
2996     uint32_t base;
2997     int op;
2998     TCGLabel *l1;
2999 
3000     base = s->pc;
3001     op = (insn >> 8) & 0xf;
3002     offset = (int8_t)insn;
3003     if (offset == 0) {
3004         offset = (int16_t)read_im16(env, s);
3005     } else if (offset == -1) {
3006         offset = read_im32(env, s);
3007     }
3008     if (op == 1) {
3009         /* bsr */
3010         gen_push(s, tcg_const_i32(s->pc));
3011     }
3012     if (op > 1) {
3013         /* Bcc */
3014         l1 = gen_new_label();
3015         gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
3016         gen_jmp_tb(s, 1, base + offset);
3017         gen_set_label(l1);
3018         gen_jmp_tb(s, 0, s->pc);
3019     } else {
3020         /* Unconditional branch.  */
3021         update_cc_op(s);
3022         gen_jmp_tb(s, 0, base + offset);
3023     }
3024 }
3025 
3026 DISAS_INSN(moveq)
3027 {
3028     tcg_gen_movi_i32(DREG(insn, 9), (int8_t)insn);
3029     gen_logic_cc(s, DREG(insn, 9), OS_LONG);
3030 }
3031 
3032 DISAS_INSN(mvzs)
3033 {
3034     int opsize;
3035     TCGv src;
3036     TCGv reg;
3037 
3038     if (insn & 0x40)
3039         opsize = OS_WORD;
3040     else
3041         opsize = OS_BYTE;
3042     SRC_EA(env, src, opsize, (insn & 0x80) == 0, NULL);
3043     reg = DREG(insn, 9);
3044     tcg_gen_mov_i32(reg, src);
3045     gen_logic_cc(s, src, opsize);
3046 }
3047 
3048 DISAS_INSN(or)
3049 {
3050     TCGv reg;
3051     TCGv dest;
3052     TCGv src;
3053     TCGv addr;
3054     int opsize;
3055 
3056     opsize = insn_opsize(insn);
3057     reg = gen_extend(DREG(insn, 9), opsize, 0);
3058     dest = tcg_temp_new();
3059     if (insn & 0x100) {
3060         SRC_EA(env, src, opsize, 0, &addr);
3061         tcg_gen_or_i32(dest, src, reg);
3062         DEST_EA(env, insn, opsize, dest, &addr);
3063     } else {
3064         SRC_EA(env, src, opsize, 0, NULL);
3065         tcg_gen_or_i32(dest, src, reg);
3066         gen_partset_reg(opsize, DREG(insn, 9), dest);
3067     }
3068     gen_logic_cc(s, dest, opsize);
3069     tcg_temp_free(dest);
3070 }
3071 
3072 DISAS_INSN(suba)
3073 {
3074     TCGv src;
3075     TCGv reg;
3076 
3077     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3078     reg = AREG(insn, 9);
3079     tcg_gen_sub_i32(reg, reg, src);
3080 }
3081 
3082 static inline void gen_subx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3083 {
3084     TCGv tmp;
3085 
3086     gen_flush_flags(s); /* compute old Z */
3087 
3088     /* Perform subtraction with borrow.
3089      * (X, N) = dest - (src + X);
3090      */
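    /* The double-word add/sub keeps the carry in the high half: first
     * src + X is formed with its carry in CC_X, then it is subtracted
     * from dest, leaving the borrow in bit 0 of CC_X.  (src + X can only
     * carry when it wraps to zero, in which case the subtraction cannot
     * also borrow, so masking with 1 below is sufficient.)
     */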
3091 
3092     tmp = tcg_const_i32(0);
3093     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, tmp, QREG_CC_X, tmp);
3094     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, dest, tmp, QREG_CC_N, QREG_CC_X);
3095     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3096     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
3097 
3098     /* Compute signed-overflow for subtraction.  */
3099 
3100     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, dest);
3101     tcg_gen_xor_i32(tmp, dest, src);
3102     tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, tmp);
3103     tcg_temp_free(tmp);
3104 
3105     /* Copy the rest of the results into place.  */
3106     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3107     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3108 
3109     set_cc_op(s, CC_OP_FLAGS);
3110 
3111     /* result is in QREG_CC_N */
3112 }
3113 
3114 DISAS_INSN(subx_reg)
3115 {
3116     TCGv dest;
3117     TCGv src;
3118     int opsize;
3119 
3120     opsize = insn_opsize(insn);
3121 
3122     src = gen_extend(DREG(insn, 0), opsize, 1);
3123     dest = gen_extend(DREG(insn, 9), opsize, 1);
3124 
3125     gen_subx(s, src, dest, opsize);
3126 
3127     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3128 }
3129 
3130 DISAS_INSN(subx_mem)
3131 {
3132     TCGv src;
3133     TCGv addr_src;
3134     TCGv dest;
3135     TCGv addr_dest;
3136     int opsize;
3137 
3138     opsize = insn_opsize(insn);
3139 
3140     addr_src = AREG(insn, 0);
3141     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3142     src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3143 
3144     addr_dest = AREG(insn, 9);
3145     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3146     dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3147 
3148     gen_subx(s, src, dest, opsize);
3149 
3150     gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3151 }
3152 
3153 DISAS_INSN(mov3q)
3154 {
3155     TCGv src;
3156     int val;
3157 
3158     val = (insn >> 9) & 7;
3159     if (val == 0)
3160         val = -1;
3161     src = tcg_const_i32(val);
3162     gen_logic_cc(s, src, OS_LONG);
3163     DEST_EA(env, insn, OS_LONG, src, NULL);
3164     tcg_temp_free(src);
3165 }
3166 
3167 DISAS_INSN(cmp)
3168 {
3169     TCGv src;
3170     TCGv reg;
3171     int opsize;
3172 
3173     opsize = insn_opsize(insn);
3174     SRC_EA(env, src, opsize, 1, NULL);
3175     reg = gen_extend(DREG(insn, 9), opsize, 1);
3176     gen_update_cc_cmp(s, reg, src, opsize);
3177 }
3178 
3179 DISAS_INSN(cmpa)
3180 {
3181     int opsize;
3182     TCGv src;
3183     TCGv reg;
3184 
3185     if (insn & 0x100) {
3186         opsize = OS_LONG;
3187     } else {
3188         opsize = OS_WORD;
3189     }
3190     SRC_EA(env, src, opsize, 1, NULL);
3191     reg = AREG(insn, 9);
3192     gen_update_cc_cmp(s, reg, src, OS_LONG);
3193 }
3194 
3195 DISAS_INSN(cmpm)
3196 {
3197     int opsize = insn_opsize(insn);
3198     TCGv src, dst;
3199 
3200     /* Post-increment load (mode 3) from Ay.  */
3201     src = gen_ea_mode(env, s, 3, REG(insn, 0), opsize,
3202                       NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3203     /* Post-increment load (mode 3) from Ax.  */
3204     dst = gen_ea_mode(env, s, 3, REG(insn, 9), opsize,
3205                       NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3206 
3207     gen_update_cc_cmp(s, dst, src, opsize);
3208 }
3209 
3210 DISAS_INSN(eor)
3211 {
3212     TCGv src;
3213     TCGv dest;
3214     TCGv addr;
3215     int opsize;
3216 
3217     opsize = insn_opsize(insn);
3218 
3219     SRC_EA(env, src, opsize, 0, &addr);
3220     dest = tcg_temp_new();
3221     tcg_gen_xor_i32(dest, src, DREG(insn, 9));
3222     gen_logic_cc(s, dest, opsize);
3223     DEST_EA(env, insn, opsize, dest, &addr);
3224     tcg_temp_free(dest);
3225 }
3226 
3227 static void do_exg(TCGv reg1, TCGv reg2)
3228 {
3229     TCGv temp = tcg_temp_new();
3230     tcg_gen_mov_i32(temp, reg1);
3231     tcg_gen_mov_i32(reg1, reg2);
3232     tcg_gen_mov_i32(reg2, temp);
3233     tcg_temp_free(temp);
3234 }
3235 
3236 DISAS_INSN(exg_dd)
3237 {
3238     /* exchange Dx and Dy */
3239     do_exg(DREG(insn, 9), DREG(insn, 0));
3240 }
3241 
3242 DISAS_INSN(exg_aa)
3243 {
3244     /* exchange Ax and Ay */
3245     do_exg(AREG(insn, 9), AREG(insn, 0));
3246 }
3247 
3248 DISAS_INSN(exg_da)
3249 {
3250     /* exchange Dx and Ay */
3251     do_exg(DREG(insn, 9), AREG(insn, 0));
3252 }
3253 
3254 DISAS_INSN(and)
3255 {
3256     TCGv src;
3257     TCGv reg;
3258     TCGv dest;
3259     TCGv addr;
3260     int opsize;
3261 
3262     dest = tcg_temp_new();
3263 
3264     opsize = insn_opsize(insn);
3265     reg = DREG(insn, 9);
3266     if (insn & 0x100) {
3267         SRC_EA(env, src, opsize, 0, &addr);
3268         tcg_gen_and_i32(dest, src, reg);
3269         DEST_EA(env, insn, opsize, dest, &addr);
3270     } else {
3271         SRC_EA(env, src, opsize, 0, NULL);
3272         tcg_gen_and_i32(dest, src, reg);
3273         gen_partset_reg(opsize, reg, dest);
3274     }
3275     gen_logic_cc(s, dest, opsize);
3276     tcg_temp_free(dest);
3277 }
3278 
3279 DISAS_INSN(adda)
3280 {
3281     TCGv src;
3282     TCGv reg;
3283 
3284     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3285     reg = AREG(insn, 9);
3286     tcg_gen_add_i32(reg, reg, src);
3287 }
3288 
3289 static inline void gen_addx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3290 {
3291     TCGv tmp;
3292 
3293     gen_flush_flags(s); /* compute old Z */
3294 
3295     /* Perform addition with carry.
3296      * (X, N) = src + dest + X;
3297      */
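    /* As in gen_subx, the carry accumulates in the high half of a
     * double-word sum: X + dest first, then + src, so CC_X ends up
     * holding the carry out of the full addition.
     */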
3298 
3299     tmp = tcg_const_i32(0);
3300     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_X, tmp, dest, tmp);
3301     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_N, QREG_CC_X, src, tmp);
3302     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3303 
3304     /* Compute signed-overflow for addition.  */
3305 
3306     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3307     tcg_gen_xor_i32(tmp, dest, src);
3308     tcg_gen_andc_i32(QREG_CC_V, QREG_CC_V, tmp);
3309     tcg_temp_free(tmp);
3310 
3311     /* Copy the rest of the results into place.  */
3312     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3313     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3314 
3315     set_cc_op(s, CC_OP_FLAGS);
3316 
3317     /* result is in QREG_CC_N */
3318 }
3319 
3320 DISAS_INSN(addx_reg)
3321 {
3322     TCGv dest;
3323     TCGv src;
3324     int opsize;
3325 
3326     opsize = insn_opsize(insn);
3327 
3328     dest = gen_extend(DREG(insn, 9), opsize, 1);
3329     src = gen_extend(DREG(insn, 0), opsize, 1);
3330 
3331     gen_addx(s, src, dest, opsize);
3332 
3333     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3334 }
3335 
3336 DISAS_INSN(addx_mem)
3337 {
3338     TCGv src;
3339     TCGv addr_src;
3340     TCGv dest;
3341     TCGv addr_dest;
3342     int opsize;
3343 
3344     opsize = insn_opsize(insn);
3345 
3346     addr_src = AREG(insn, 0);
3347     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3348     src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3349 
3350     addr_dest = AREG(insn, 9);
3351     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3352     dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3353 
3354     gen_addx(s, src, dest, opsize);
3355 
3356     gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3357 }
3358 
3359 static inline void shift_im(DisasContext *s, uint16_t insn, int opsize)
3360 {
3361     int count = (insn >> 9) & 7;
3362     int logical = insn & 8;
3363     int left = insn & 0x100;
3364     int bits = opsize_bytes(opsize) * 8;
3365     TCGv reg = gen_extend(DREG(insn, 0), opsize, !logical);
3366 
3367     if (count == 0) {
3368         count = 8;
3369     }
3370 
3371     tcg_gen_movi_i32(QREG_CC_V, 0);
3372     if (left) {
3373         tcg_gen_shri_i32(QREG_CC_C, reg, bits - count);
3374         tcg_gen_shli_i32(QREG_CC_N, reg, count);
3375 
3376         /* Note that ColdFire always clears V (done above),
3377            while M68000 sets V if the most significant bit changes at
3378            any time during the shift operation.  */
3379         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3380             /* if shift count >= bits, V is (reg != 0) */
3381             if (count >= bits) {
3382                 tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, reg, QREG_CC_V);
3383             } else {
3384                 TCGv t0 = tcg_temp_new();
3385                 tcg_gen_sari_i32(QREG_CC_V, reg, bits - 1);
3386                 tcg_gen_sari_i32(t0, reg, bits - count - 1);
3387                 tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, t0);
3388                 tcg_temp_free(t0);
3389             }
3390             tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3391         }
3392     } else {
3393         tcg_gen_shri_i32(QREG_CC_C, reg, count - 1);
3394         if (logical) {
3395             tcg_gen_shri_i32(QREG_CC_N, reg, count);
3396         } else {
3397             tcg_gen_sari_i32(QREG_CC_N, reg, count);
3398         }
3399     }
3400 
3401     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3402     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3403     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3404     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3405 
3406     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3407     set_cc_op(s, CC_OP_FLAGS);
3408 }
3409 
3410 static inline void shift_reg(DisasContext *s, uint16_t insn, int opsize)
3411 {
3412     int logical = insn & 8;
3413     int left = insn & 0x100;
3414     int bits = opsize_bytes(opsize) * 8;
3415     TCGv reg = gen_extend(DREG(insn, 0), opsize, !logical);
3416     TCGv s32;
3417     TCGv_i64 t64, s64;
3418 
3419     t64 = tcg_temp_new_i64();
3420     s64 = tcg_temp_new_i64();
3421     s32 = tcg_temp_new();
3422 
3423     /* Note that m68k truncates the shift count modulo 64, not 32.
3424        In addition, a 64-bit shift makes it easy to find "the last
3425        bit shifted out", for the carry flag.  */
3426     tcg_gen_andi_i32(s32, DREG(insn, 9), 63);
3427     tcg_gen_extu_i32_i64(s64, s32);
3428     tcg_gen_extu_i32_i64(t64, reg);
3429 
3430     /* Optimistically set V=0.  Also used as a zero source below.  */
3431     tcg_gen_movi_i32(QREG_CC_V, 0);
3432     if (left) {
3433         tcg_gen_shl_i64(t64, t64, s64);
3434 
3435         if (opsize == OS_LONG) {
3436             tcg_gen_extr_i64_i32(QREG_CC_N, QREG_CC_C, t64);
3437             /* Note that C=0 if shift count is 0, and we get that for free.  */
3438         } else {
3439             TCGv zero = tcg_const_i32(0);
3440             tcg_gen_extrl_i64_i32(QREG_CC_N, t64);
3441             tcg_gen_shri_i32(QREG_CC_C, QREG_CC_N, bits);
3442             tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3443                                 s32, zero, zero, QREG_CC_C);
3444             tcg_temp_free(zero);
3445         }
3446         tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3447 
3448         /* X = C, but only if the shift count was non-zero.  */
3449         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3450                             QREG_CC_C, QREG_CC_X);
3451 
3452         /* M68000 sets V if the most significant bit is changed at
3453          * any time during the shift operation.  Do this via creating
3454          * an extension of the sign bit, comparing, and discarding
3455          * the bits below the sign bit.  I.e.
3456          *     int64_t s = (intN_t)reg;
3457          *     int64_t t = (int64_t)(intN_t)reg << count;
3458          *     V = ((s ^ t) & (-1 << (bits - 1))) != 0
3459          */
3460         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3461             TCGv_i64 tt = tcg_const_i64(32);
3462             /* if shift is greater than 32, use 32 */
3463             tcg_gen_movcond_i64(TCG_COND_GT, s64, s64, tt, tt, s64);
3464             tcg_temp_free_i64(tt);
3465             /* Sign extend the input to 64 bits; re-do the shift.  */
3466             tcg_gen_ext_i32_i64(t64, reg);
3467             tcg_gen_shl_i64(s64, t64, s64);
3468             /* Clear all bits that are unchanged.  */
3469             tcg_gen_xor_i64(t64, t64, s64);
3470             /* Ignore the bits below the sign bit.  */
3471             tcg_gen_andi_i64(t64, t64, -1ULL << (bits - 1));
3472             /* If any bits remain set, we have overflow.  */
3473             tcg_gen_setcondi_i64(TCG_COND_NE, t64, t64, 0);
3474             tcg_gen_extrl_i64_i32(QREG_CC_V, t64);
3475             tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3476         }
3477     } else {
3478         tcg_gen_shli_i64(t64, t64, 32);
3479         if (logical) {
3480             tcg_gen_shr_i64(t64, t64, s64);
3481         } else {
3482             tcg_gen_sar_i64(t64, t64, s64);
3483         }
3484         tcg_gen_extr_i64_i32(QREG_CC_C, QREG_CC_N, t64);
3485 
3486         /* Note that C=0 if shift count is 0, and we get that for free.  */
3487         tcg_gen_shri_i32(QREG_CC_C, QREG_CC_C, 31);
3488 
3489         /* X = C, but only if the shift count was non-zero.  */
3490         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3491                             QREG_CC_C, QREG_CC_X);
3492     }
3493     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3494     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3495 
3496     tcg_temp_free(s32);
3497     tcg_temp_free_i64(s64);
3498     tcg_temp_free_i64(t64);
3499 
3500     /* Write back the result.  */
3501     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3502     set_cc_op(s, CC_OP_FLAGS);
3503 }
3504 
3505 DISAS_INSN(shift8_im)
3506 {
3507     shift_im(s, insn, OS_BYTE);
3508 }
3509 
3510 DISAS_INSN(shift16_im)
3511 {
3512     shift_im(s, insn, OS_WORD);
3513 }
3514 
3515 DISAS_INSN(shift_im)
3516 {
3517     shift_im(s, insn, OS_LONG);
3518 }
3519 
3520 DISAS_INSN(shift8_reg)
3521 {
3522     shift_reg(s, insn, OS_BYTE);
3523 }
3524 
3525 DISAS_INSN(shift16_reg)
3526 {
3527     shift_reg(s, insn, OS_WORD);
3528 }
3529 
3530 DISAS_INSN(shift_reg)
3531 {
3532     shift_reg(s, insn, OS_LONG);
3533 }
3534 
3535 DISAS_INSN(shift_mem)
3536 {
3537     int logical = insn & 8;
3538     int left = insn & 0x100;
3539     TCGv src;
3540     TCGv addr;
3541 
3542     SRC_EA(env, src, OS_WORD, !logical, &addr);
3543     tcg_gen_movi_i32(QREG_CC_V, 0);
3544     if (left) {
3545         tcg_gen_shri_i32(QREG_CC_C, src, 15);
3546         tcg_gen_shli_i32(QREG_CC_N, src, 1);
3547 
3548         /* Note that ColdFire always clears V,
3549            while M68000 sets V if the most significant bit changes at
3550            any time during the shift operation.  */
3551         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3552             src = gen_extend(src, OS_WORD, 1);
3553             tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3554         }
3555     } else {
3556         tcg_gen_mov_i32(QREG_CC_C, src);
3557         if (logical) {
3558             tcg_gen_shri_i32(QREG_CC_N, src, 1);
3559         } else {
3560             tcg_gen_sari_i32(QREG_CC_N, src, 1);
3561         }
3562     }
3563 
3564     gen_ext(QREG_CC_N, QREG_CC_N, OS_WORD, 1);
3565     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3566     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3567     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3568 
3569     DEST_EA(env, insn, OS_WORD, QREG_CC_N, &addr);
3570     set_cc_op(s, CC_OP_FLAGS);
3571 }
3572 
3573 static void rotate(TCGv reg, TCGv shift, int left, int size)
3574 {
3575     switch (size) {
3576     case 8:
3577         /* Replicate the 8-bit input so that a 32-bit rotate works.  */
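        /* e.g. an 8-bit rotate of 0xab by 3 becomes a 32-bit rotate of
         * 0xabababab; each byte of the result then equals rol8(0xab, 3)
         * = 0x5d, so both the low byte and the flag bits come out right.
         */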
3578         tcg_gen_ext8u_i32(reg, reg);
3579         tcg_gen_muli_i32(reg, reg, 0x01010101);
3580         goto do_long;
3581     case 16:
3582         /* Replicate the 16-bit input so that a 32-bit rotate works.  */
3583         tcg_gen_deposit_i32(reg, reg, reg, 16, 16);
3584         goto do_long;
3585     do_long:
3586     default:
3587         if (left) {
3588             tcg_gen_rotl_i32(reg, reg, shift);
3589         } else {
3590             tcg_gen_rotr_i32(reg, reg, shift);
3591         }
3592     }
3593 
3594     /* compute flags */
3595 
3596     switch (size) {
3597     case 8:
3598         tcg_gen_ext8s_i32(reg, reg);
3599         break;
3600     case 16:
3601         tcg_gen_ext16s_i32(reg, reg);
3602         break;
3603     default:
3604         break;
3605     }
3606 
3607     /* QREG_CC_X is not affected */
3608 
3609     tcg_gen_mov_i32(QREG_CC_N, reg);
3610     tcg_gen_mov_i32(QREG_CC_Z, reg);
3611 
3612     if (left) {
3613         tcg_gen_andi_i32(QREG_CC_C, reg, 1);
3614     } else {
3615         tcg_gen_shri_i32(QREG_CC_C, reg, 31);
3616     }
3617 
3618     tcg_gen_movi_i32(QREG_CC_V, 0); /* always cleared */
3619 }
3620 
3621 static void rotate_x_flags(TCGv reg, TCGv X, int size)
3622 {
3623     switch (size) {
3624     case 8:
3625         tcg_gen_ext8s_i32(reg, reg);
3626         break;
3627     case 16:
3628         tcg_gen_ext16s_i32(reg, reg);
3629         break;
3630     default:
3631         break;
3632     }
3633     tcg_gen_mov_i32(QREG_CC_N, reg);
3634     tcg_gen_mov_i32(QREG_CC_Z, reg);
3635     tcg_gen_mov_i32(QREG_CC_X, X);
3636     tcg_gen_mov_i32(QREG_CC_C, X);
3637     tcg_gen_movi_i32(QREG_CC_V, 0);
3638 }
3639 
3640 /* Result of rotate_x() is valid if 0 <= shift <= size */
3641 static TCGv rotate_x(TCGv reg, TCGv shift, int left, int size)
3642 {
3643     TCGv X, shl, shr, shx, sz, zero;
3644 
3645     sz = tcg_const_i32(size);
3646 
3647     shr = tcg_temp_new();
3648     shl = tcg_temp_new();
3649     shx = tcg_temp_new();
3650     if (left) {
3651         tcg_gen_mov_i32(shl, shift);      /* shl = shift */
3652         tcg_gen_movi_i32(shr, size + 1);
3653         tcg_gen_sub_i32(shr, shr, shift); /* shr = size + 1 - shift */
3654         tcg_gen_subi_i32(shx, shift, 1);  /* shx = shift - 1 */
3655         /* shx = shx < 0 ? size : shx; */
3656         zero = tcg_const_i32(0);
3657         tcg_gen_movcond_i32(TCG_COND_LT, shx, shx, zero, sz, shx);
3658         tcg_temp_free(zero);
3659     } else {
3660         tcg_gen_mov_i32(shr, shift);      /* shr = shift */
3661         tcg_gen_movi_i32(shl, size + 1);
3662         tcg_gen_sub_i32(shl, shl, shift); /* shl = size + 1 - shift */
3663         tcg_gen_sub_i32(shx, sz, shift); /* shx = size - shift */
3664     }
3665 
3666     /* reg = (reg << shl) | (reg >> shr) | (x << shx); */
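    /* A worked example, size 8, rotate left, reg = 0xab, X = 1, shift = 3:
     * shl = 3, shr = 6, shx = 2, so reg becomes 0x558 | 0x2 | 0x4 = 0x55e;
     * the caller keeps the low byte 0x5e, and the new X extracted below is
     * bit 8 = 1, matching a 9-bit rotate of [X:reg] by 3.
     */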
3667 
3668     tcg_gen_shl_i32(shl, reg, shl);
3669     tcg_gen_shr_i32(shr, reg, shr);
3670     tcg_gen_or_i32(reg, shl, shr);
3671     tcg_temp_free(shl);
3672     tcg_temp_free(shr);
3673     tcg_gen_shl_i32(shx, QREG_CC_X, shx);
3674     tcg_gen_or_i32(reg, reg, shx);
3675     tcg_temp_free(shx);
3676 
3677     /* X = (reg >> size) & 1 */
3678 
3679     X = tcg_temp_new();
3680     tcg_gen_shr_i32(X, reg, sz);
3681     tcg_gen_andi_i32(X, X, 1);
3682     tcg_temp_free(sz);
3683 
3684     return X;
3685 }
3686 
3687 /* Result of rotate32_x() is valid if 0 <= shift < 33 */
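/*
 * Roughly: the value and the X bit are packed into a 64-bit temporary
 * ([reg:X:..] for a left rotate, [..:X:reg] for a right rotate), rotated
 * as a 64-bit value, then split back into the 32-bit result and the new
 * X.  A shift count of zero leaves both reg and X untouched via the
 * final movcond pair.
 */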
3688 static TCGv rotate32_x(TCGv reg, TCGv shift, int left)
3689 {
3690     TCGv_i64 t0, shift64;
3691     TCGv X, lo, hi, zero;
3692 
3693     shift64 = tcg_temp_new_i64();
3694     tcg_gen_extu_i32_i64(shift64, shift);
3695 
3696     t0 = tcg_temp_new_i64();
3697 
3698     X = tcg_temp_new();
3699     lo = tcg_temp_new();
3700     hi = tcg_temp_new();
3701 
3702     if (left) {
3703         /* create [reg:X:..] */
3704 
3705         tcg_gen_shli_i32(lo, QREG_CC_X, 31);
3706         tcg_gen_concat_i32_i64(t0, lo, reg);
3707 
3708         /* rotate */
3709 
3710         tcg_gen_rotl_i64(t0, t0, shift64);
3711         tcg_temp_free_i64(shift64);
3712 
3713         /* result is [reg:..:reg:X] */
3714 
3715         tcg_gen_extr_i64_i32(lo, hi, t0);
3716         tcg_gen_andi_i32(X, lo, 1);
3717 
3718         tcg_gen_shri_i32(lo, lo, 1);
3719     } else {
3720         /* create [..:X:reg] */
3721 
3722         tcg_gen_concat_i32_i64(t0, reg, QREG_CC_X);
3723 
3724         tcg_gen_rotr_i64(t0, t0, shift64);
3725         tcg_temp_free_i64(shift64);
3726 
3727         /* result is [X:reg:..:reg] */
3728 
3729         tcg_gen_extr_i64_i32(lo, hi, t0);
3730 
3731         /* extract X */
3732 
3733         tcg_gen_shri_i32(X, hi, 31);
3734 
3735         /* extract result */
3736 
3737         tcg_gen_shli_i32(hi, hi, 1);
3738     }
3739     tcg_temp_free_i64(t0);
3740     tcg_gen_or_i32(lo, lo, hi);
3741     tcg_temp_free(hi);
3742 
3743     /* if shift == 0, register and X are not affected */
3744 
3745     zero = tcg_const_i32(0);
3746     tcg_gen_movcond_i32(TCG_COND_EQ, X, shift, zero, QREG_CC_X, X);
3747     tcg_gen_movcond_i32(TCG_COND_EQ, reg, shift, zero, reg, lo);
3748     tcg_temp_free(zero);
3749     tcg_temp_free(lo);
3750 
3751     return X;
3752 }
3753 
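/*
 * Decoding note for the immediate rotate forms handled below (assuming
 * the standard 68k shift/rotate register encoding): bits 11-9 hold the
 * count (0 encodes 8), bit 8 selects left (1) or right (0), and bit 3
 * distinguishes plain ROL/ROR (set) from ROXL/ROXR through X (clear).
 */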
3754 DISAS_INSN(rotate_im)
3755 {
3756     TCGv shift;
3757     int tmp;
3758     int left = (insn & 0x100);
3759 
3760     tmp = (insn >> 9) & 7;
3761     if (tmp == 0) {
3762         tmp = 8;
3763     }
3764 
3765     shift = tcg_const_i32(tmp);
3766     if (insn & 8) {
3767         rotate(DREG(insn, 0), shift, left, 32);
3768     } else {
3769         TCGv X = rotate32_x(DREG(insn, 0), shift, left);
3770         rotate_x_flags(DREG(insn, 0), X, 32);
3771         tcg_temp_free(X);
3772     }
3773     tcg_temp_free(shift);
3774 
3775     set_cc_op(s, CC_OP_FLAGS);
3776 }
3777 
3778 DISAS_INSN(rotate8_im)
3779 {
3780     int left = (insn & 0x100);
3781     TCGv reg;
3782     TCGv shift;
3783     int tmp;
3784 
3785     reg = gen_extend(DREG(insn, 0), OS_BYTE, 0);
3786 
3787     tmp = (insn >> 9) & 7;
3788     if (tmp == 0) {
3789         tmp = 8;
3790     }
3791 
3792     shift = tcg_const_i32(tmp);
3793     if (insn & 8) {
3794         rotate(reg, shift, left, 8);
3795     } else {
3796         TCGv X = rotate_x(reg, shift, left, 8);
3797         rotate_x_flags(reg, X, 8);
3798         tcg_temp_free(X);
3799     }
3800     tcg_temp_free(shift);
3801     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3802     set_cc_op(s, CC_OP_FLAGS);
3803 }
3804 
3805 DISAS_INSN(rotate16_im)
3806 {
3807     int left = (insn & 0x100);
3808     TCGv reg;
3809     TCGv shift;
3810     int tmp;
3811 
3812     reg = gen_extend(DREG(insn, 0), OS_WORD, 0);
3813     tmp = (insn >> 9) & 7;
3814     if (tmp == 0) {
3815         tmp = 8;
3816     }
3817 
3818     shift = tcg_const_i32(tmp);
3819     if (insn & 8) {
3820         rotate(reg, shift, left, 16);
3821     } else {
3822         TCGv X = rotate_x(reg, shift, left, 16);
3823         rotate_x_flags(reg, X, 16);
3824         tcg_temp_free(X);
3825     }
3826     tcg_temp_free(shift);
3827     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3828     set_cc_op(s, CC_OP_FLAGS);
3829 }
3830 
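/*
 * For the register-count forms the low 6 bits of Dx are used.  ROL/ROR
 * rotate by that count modulo 32, but C must still be cleared when the
 * full 6-bit count is zero (hence the movcond on t0 below).  ROXL/ROXR
 * rotate the 33-bit [X:reg] quantity, so the count is reduced modulo 33
 * to satisfy rotate32_x()'s precondition.
 */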
3831 DISAS_INSN(rotate_reg)
3832 {
3833     TCGv reg;
3834     TCGv src;
3835     TCGv t0, t1;
3836     int left = (insn & 0x100);
3837 
3838     reg = DREG(insn, 0);
3839     src = DREG(insn, 9);
3840     /* shift in [0..63] */
3841     t0 = tcg_temp_new();
3842     tcg_gen_andi_i32(t0, src, 63);
3843     t1 = tcg_temp_new_i32();
3844     if (insn & 8) {
3845         tcg_gen_andi_i32(t1, src, 31);
3846         rotate(reg, t1, left, 32);
3847         /* if shift == 0, clear C */
3848         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3849                             t0, QREG_CC_V /* 0 */,
3850                             QREG_CC_V /* 0 */, QREG_CC_C);
3851     } else {
3852         TCGv X;
3853         /* modulo 33 */
3854         tcg_gen_movi_i32(t1, 33);
3855         tcg_gen_remu_i32(t1, t0, t1);
3856         X = rotate32_x(DREG(insn, 0), t1, left);
3857         rotate_x_flags(DREG(insn, 0), X, 32);
3858         tcg_temp_free(X);
3859     }
3860     tcg_temp_free(t1);
3861     tcg_temp_free(t0);
3862     set_cc_op(s, CC_OP_FLAGS);
3863 }
3864 
3865 DISAS_INSN(rotate8_reg)
3866 {
3867     TCGv reg;
3868     TCGv src;
3869     TCGv t0, t1;
3870     int left = (insn & 0x100);
3871 
3872     reg = gen_extend(DREG(insn, 0), OS_BYTE, 0);
3873     src = DREG(insn, 9);
3874     /* shift in [0..63] */
3875     t0 = tcg_temp_new_i32();
3876     tcg_gen_andi_i32(t0, src, 63);
3877     t1 = tcg_temp_new_i32();
3878     if (insn & 8) {
3879         tcg_gen_andi_i32(t1, src, 7);
3880         rotate(reg, t1, left, 8);
3881         /* if shift == 0, clear C */
3882         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3883                             t0, QREG_CC_V /* 0 */,
3884                             QREG_CC_V /* 0 */, QREG_CC_C);
3885     } else {
3886         TCGv X;
3887         /* modulo 9 */
3888         tcg_gen_movi_i32(t1, 9);
3889         tcg_gen_remu_i32(t1, t0, t1);
3890         X = rotate_x(reg, t1, left, 8);
3891         rotate_x_flags(reg, X, 8);
3892         tcg_temp_free(X);
3893     }
3894     tcg_temp_free(t1);
3895     tcg_temp_free(t0);
3896     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3897     set_cc_op(s, CC_OP_FLAGS);
3898 }
3899 
3900 DISAS_INSN(rotate16_reg)
3901 {
3902     TCGv reg;
3903     TCGv src;
3904     TCGv t0, t1;
3905     int left = (insn & 0x100);
3906 
3907     reg = gen_extend(DREG(insn, 0), OS_WORD, 0);
3908     src = DREG(insn, 9);
3909     /* shift in [0..63] */
3910     t0 = tcg_temp_new_i32();
3911     tcg_gen_andi_i32(t0, src, 63);
3912     t1 = tcg_temp_new_i32();
3913     if (insn & 8) {
3914         tcg_gen_andi_i32(t1, src, 15);
3915         rotate(reg, t1, left, 16);
3916         /* if shift == 0, clear C */
3917         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3918                             t0, QREG_CC_V /* 0 */,
3919                             QREG_CC_V /* 0 */, QREG_CC_C);
3920     } else {
3921         TCGv X;
3922         /* modulo 17 */
3923         tcg_gen_movi_i32(t1, 17);
3924         tcg_gen_remu_i32(t1, t0, t1);
3925         X = rotate_x(reg, t1, left, 16);
3926         rotate_x_flags(reg, X, 16);
3927         tcg_temp_free(X);
3928     }
3929     tcg_temp_free(t1);
3930     tcg_temp_free(t0);
3931     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3932     set_cc_op(s, CC_OP_FLAGS);
3933 }
3934 
3935 DISAS_INSN(rotate_mem)
3936 {
3937     TCGv src;
3938     TCGv addr;
3939     TCGv shift;
3940     int left = (insn & 0x100);
3941 
3942     SRC_EA(env, src, OS_WORD, 0, &addr);
3943 
3944     shift = tcg_const_i32(1);
3945     if (insn & 0x0200) {
3946         rotate(src, shift, left, 16);
3947     } else {
3948         TCGv X = rotate_x(src, shift, left, 16);
3949         rotate_x_flags(src, X, 16);
3950         tcg_temp_free(X);
3951     }
3952     tcg_temp_free(shift);
3953     DEST_EA(env, insn, OS_WORD, src, &addr);
3954     set_cc_op(s, CC_OP_FLAGS);
3955 }
3956 
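/*
 * Bitfield extension word, as decoded below: bits 14-12 hold the data
 * register operand, bit 11 selects a register offset (in bits 8-6)
 * versus an immediate offset (bits 10-6), and bit 5 selects a register
 * width (bits 2-0) versus an immediate width (bits 4-0, where 0 encodes
 * 32).  Offsets count from the most significant bit ("big bit-endian").
 */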
3957 DISAS_INSN(bfext_reg)
3958 {
3959     int ext = read_im16(env, s);
3960     int is_sign = insn & 0x200;
3961     TCGv src = DREG(insn, 0);
3962     TCGv dst = DREG(ext, 12);
3963     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
3964     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
3965     int pos = 32 - ofs - len;        /* little bit-endian */
3966     TCGv tmp = tcg_temp_new();
3967     TCGv shift;
3968 
3969     /* In general, we're going to rotate the field so that it's at the
3970        top of the word and then right-shift by the complement of the
3971        width to extend the field.  */
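    /*
     * Worked example (illustration only): an immediate field with
     * ofs = 20 and len = 8 occupies bits [11:4] of the source, so
     * pos = 32 - 20 - 8 = 4 and (s)extract handles it directly.  Had
     * the field wrapped (pos < 0), rotating left by ofs would first
     * bring it to bits [31:32-len].
     */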
3972     if (ext & 0x20) {
3973         /* Variable width.  */
3974         if (ext & 0x800) {
3975             /* Variable offset.  */
3976             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3977             tcg_gen_rotl_i32(tmp, src, tmp);
3978         } else {
3979             tcg_gen_rotli_i32(tmp, src, ofs);
3980         }
3981 
3982         shift = tcg_temp_new();
3983         tcg_gen_neg_i32(shift, DREG(ext, 0));
3984         tcg_gen_andi_i32(shift, shift, 31);
3985         tcg_gen_sar_i32(QREG_CC_N, tmp, shift);
3986         if (is_sign) {
3987             tcg_gen_mov_i32(dst, QREG_CC_N);
3988         } else {
3989             tcg_gen_shr_i32(dst, tmp, shift);
3990         }
3991         tcg_temp_free(shift);
3992     } else {
3993         /* Immediate width.  */
3994         if (ext & 0x800) {
3995             /* Variable offset */
3996             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3997             tcg_gen_rotl_i32(tmp, src, tmp);
3998             src = tmp;
3999             pos = 32 - len;
4000         } else {
4001             /* Immediate offset.  If the field doesn't wrap around the
4002                end of the word, rely on (s)extract completely.  */
4003             if (pos < 0) {
4004                 tcg_gen_rotli_i32(tmp, src, ofs);
4005                 src = tmp;
4006                 pos = 32 - len;
4007             }
4008         }
4009 
4010         tcg_gen_sextract_i32(QREG_CC_N, src, pos, len);
4011         if (is_sign) {
4012             tcg_gen_mov_i32(dst, QREG_CC_N);
4013         } else {
4014             tcg_gen_extract_i32(dst, src, pos, len);
4015         }
4016     }
4017 
4018     tcg_temp_free(tmp);
4019     set_cc_op(s, CC_OP_LOGIC);
4020 }
4021 
4022 DISAS_INSN(bfext_mem)
4023 {
4024     int ext = read_im16(env, s);
4025     int is_sign = insn & 0x200;
4026     TCGv dest = DREG(ext, 12);
4027     TCGv addr, len, ofs;
4028 
4029     addr = gen_lea(env, s, insn, OS_UNSIZED);
4030     if (IS_NULL_QREG(addr)) {
4031         gen_addr_fault(s);
4032         return;
4033     }
4034 
4035     if (ext & 0x20) {
4036         len = DREG(ext, 0);
4037     } else {
4038         len = tcg_const_i32(extract32(ext, 0, 5));
4039     }
4040     if (ext & 0x800) {
4041         ofs = DREG(ext, 6);
4042     } else {
4043         ofs = tcg_const_i32(extract32(ext, 6, 5));
4044     }
4045 
4046     if (is_sign) {
4047         gen_helper_bfexts_mem(dest, cpu_env, addr, ofs, len);
4048         tcg_gen_mov_i32(QREG_CC_N, dest);
4049     } else {
4050         TCGv_i64 tmp = tcg_temp_new_i64();
4051         gen_helper_bfextu_mem(tmp, cpu_env, addr, ofs, len);
4052         tcg_gen_extr_i64_i32(dest, QREG_CC_N, tmp);
4053         tcg_temp_free_i64(tmp);
4054     }
4055     set_cc_op(s, CC_OP_LOGIC);
4056 
4057     if (!(ext & 0x20)) {
4058         tcg_temp_free(len);
4059     }
4060     if (!(ext & 0x800)) {
4061         tcg_temp_free(ofs);
4062     }
4063 }
4064 
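/*
 * Sketch of the mask scheme used by bfop_reg (illustration only):
 * 'mask' ends up with zeros exactly over the selected field, so bfclr
 * is "src & mask", bfset is "src | ~mask" and bfchg is "src ^ ~mask"
 * (expressed with and/orc/eqv below), while QREG_CC_N receives the
 * field shifted or rotated up to the most significant bits for the
 * flags.  E.g. with ofs = 20, len = 8 (field = bits [11:4]):
 *
 *     maski = 0x7fffffff >> 7  = 0x00ffffff
 *     mask  = ror32(maski, 20) = 0xfffff00f
 *     CC_N  = (src << 20) & 0xff000000
 */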
4065 DISAS_INSN(bfop_reg)
4066 {
4067     int ext = read_im16(env, s);
4068     TCGv src = DREG(insn, 0);
4069     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4070     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4071     TCGv mask, tofs, tlen;
4072 
4073     tofs = NULL;
4074     tlen = NULL;
4075     if ((insn & 0x0f00) == 0x0d00) { /* bfffo */
4076         tofs = tcg_temp_new();
4077         tlen = tcg_temp_new();
4078     }
4079 
4080     if ((ext & 0x820) == 0) {
4081         /* Immediate width and offset.  */
4082         uint32_t maski = 0x7fffffffu >> (len - 1);
4083         if (ofs + len <= 32) {
4084             tcg_gen_shli_i32(QREG_CC_N, src, ofs);
4085         } else {
4086             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4087         }
4088         tcg_gen_andi_i32(QREG_CC_N, QREG_CC_N, ~maski);
4089         mask = tcg_const_i32(ror32(maski, ofs));
4090         if (tofs) {
4091             tcg_gen_movi_i32(tofs, ofs);
4092             tcg_gen_movi_i32(tlen, len);
4093         }
4094     } else {
4095         TCGv tmp = tcg_temp_new();
4096         if (ext & 0x20) {
4097             /* Variable width */
4098             tcg_gen_subi_i32(tmp, DREG(ext, 0), 1);
4099             tcg_gen_andi_i32(tmp, tmp, 31);
4100             mask = tcg_const_i32(0x7fffffffu);
4101             tcg_gen_shr_i32(mask, mask, tmp);
4102             if (tlen) {
4103                 tcg_gen_addi_i32(tlen, tmp, 1);
4104             }
4105         } else {
4106             /* Immediate width */
4107             mask = tcg_const_i32(0x7fffffffu >> (len - 1));
4108             if (tlen) {
4109                 tcg_gen_movi_i32(tlen, len);
4110             }
4111         }
4112         if (ext & 0x800) {
4113             /* Variable offset */
4114             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4115             tcg_gen_rotl_i32(QREG_CC_N, src, tmp);
4116             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4117             tcg_gen_rotr_i32(mask, mask, tmp);
4118             if (tofs) {
4119                 tcg_gen_mov_i32(tofs, tmp);
4120             }
4121         } else {
4122             /* Immediate offset (and variable width) */
4123             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4124             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4125             tcg_gen_rotri_i32(mask, mask, ofs);
4126             if (tofs) {
4127                 tcg_gen_movi_i32(tofs, ofs);
4128             }
4129         }
4130         tcg_temp_free(tmp);
4131     }
4132     set_cc_op(s, CC_OP_LOGIC);
4133 
4134     switch (insn & 0x0f00) {
4135     case 0x0a00: /* bfchg */
4136         tcg_gen_eqv_i32(src, src, mask);
4137         break;
4138     case 0x0c00: /* bfclr */
4139         tcg_gen_and_i32(src, src, mask);
4140         break;
4141     case 0x0d00: /* bfffo */
4142         gen_helper_bfffo_reg(DREG(ext, 12), QREG_CC_N, tofs, tlen);
4143         tcg_temp_free(tlen);
4144         tcg_temp_free(tofs);
4145         break;
4146     case 0x0e00: /* bfset */
4147         tcg_gen_orc_i32(src, src, mask);
4148         break;
4149     case 0x0800: /* bftst */
4150         /* flags already set; no other work to do.  */
4151         break;
4152     default:
4153         g_assert_not_reached();
4154     }
4155     tcg_temp_free(mask);
4156 }
4157 
4158 DISAS_INSN(bfop_mem)
4159 {
4160     int ext = read_im16(env, s);
4161     TCGv addr, len, ofs;
4162     TCGv_i64 t64;
4163 
4164     addr = gen_lea(env, s, insn, OS_UNSIZED);
4165     if (IS_NULL_QREG(addr)) {
4166         gen_addr_fault(s);
4167         return;
4168     }
4169 
4170     if (ext & 0x20) {
4171         len = DREG(ext, 0);
4172     } else {
4173         len = tcg_const_i32(extract32(ext, 0, 5));
4174     }
4175     if (ext & 0x800) {
4176         ofs = DREG(ext, 6);
4177     } else {
4178         ofs = tcg_const_i32(extract32(ext, 6, 5));
4179     }
4180 
4181     switch (insn & 0x0f00) {
4182     case 0x0a00: /* bfchg */
4183         gen_helper_bfchg_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4184         break;
4185     case 0x0c00: /* bfclr */
4186         gen_helper_bfclr_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4187         break;
4188     case 0x0d00: /* bfffo */
4189         t64 = tcg_temp_new_i64();
4190         gen_helper_bfffo_mem(t64, cpu_env, addr, ofs, len);
4191         tcg_gen_extr_i64_i32(DREG(ext, 12), QREG_CC_N, t64);
4192         tcg_temp_free_i64(t64);
4193         break;
4194     case 0x0e00: /* bfset */
4195         gen_helper_bfset_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4196         break;
4197     case 0x0800: /* bftst */
4198         gen_helper_bfexts_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4199         break;
4200     default:
4201         g_assert_not_reached();
4202     }
4203     set_cc_op(s, CC_OP_LOGIC);
4204 
4205     if (!(ext & 0x20)) {
4206         tcg_temp_free(len);
4207     }
4208     if (!(ext & 0x800)) {
4209         tcg_temp_free(ofs);
4210     }
4211 }
4212 
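/*
 * For bfins the flags are taken from the inserted value: it is
 * left-justified in QREG_CC_N, so N is the field's sign bit and Z is
 * set when the inserted field is zero.  The insertion itself uses a
 * plain deposit when the field does not wrap around the end of the
 * word; otherwise a rotated mask is built, inline for the immediate
 * case or via temporaries when width or offset come from registers,
 * much as in bfop_reg above.
 */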
4213 DISAS_INSN(bfins_reg)
4214 {
4215     int ext = read_im16(env, s);
4216     TCGv dst = DREG(insn, 0);
4217     TCGv src = DREG(ext, 12);
4218     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4219     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4220     int pos = 32 - ofs - len;        /* little bit-endian */
4221     TCGv tmp;
4222 
4223     tmp = tcg_temp_new();
4224 
4225     if (ext & 0x20) {
4226         /* Variable width */
4227         tcg_gen_neg_i32(tmp, DREG(ext, 0));
4228         tcg_gen_andi_i32(tmp, tmp, 31);
4229         tcg_gen_shl_i32(QREG_CC_N, src, tmp);
4230     } else {
4231         /* Immediate width */
4232         tcg_gen_shli_i32(QREG_CC_N, src, 32 - len);
4233     }
4234     set_cc_op(s, CC_OP_LOGIC);
4235 
4236     /* Immediate width and offset */
4237     if ((ext & 0x820) == 0) {
4238         /* Check for suitability for deposit.  */
4239         if (pos >= 0) {
4240             tcg_gen_deposit_i32(dst, dst, src, pos, len);
4241         } else {
4242             uint32_t maski = -2U << (len - 1);
4243             uint32_t roti = (ofs + len) & 31;
4244             tcg_gen_andi_i32(tmp, src, ~maski);
4245             tcg_gen_rotri_i32(tmp, tmp, roti);
4246             tcg_gen_andi_i32(dst, dst, ror32(maski, roti));
4247             tcg_gen_or_i32(dst, dst, tmp);
4248         }
4249     } else {
4250         TCGv mask = tcg_temp_new();
4251         TCGv rot = tcg_temp_new();
4252 
4253         if (ext & 0x20) {
4254             /* Variable width */
4255             tcg_gen_subi_i32(rot, DREG(ext, 0), 1);
4256             tcg_gen_andi_i32(rot, rot, 31);
4257             tcg_gen_movi_i32(mask, -2);
4258             tcg_gen_shl_i32(mask, mask, rot);
4259             tcg_gen_mov_i32(rot, DREG(ext, 0));
4260             tcg_gen_andc_i32(tmp, src, mask);
4261         } else {
4262             /* Immediate width (variable offset) */
4263             uint32_t maski = -2U << (len - 1);
4264             tcg_gen_andi_i32(tmp, src, ~maski);
4265             tcg_gen_movi_i32(mask, maski);
4266             tcg_gen_movi_i32(rot, len & 31);
4267         }
4268         if (ext & 0x800) {
4269             /* Variable offset */
4270             tcg_gen_add_i32(rot, rot, DREG(ext, 6));
4271         } else {
4272             /* Immediate offset (variable width) */
4273             tcg_gen_addi_i32(rot, rot, ofs);
4274         }
4275         tcg_gen_andi_i32(rot, rot, 31);
4276         tcg_gen_rotr_i32(mask, mask, rot);
4277         tcg_gen_rotr_i32(tmp, tmp, rot);
4278         tcg_gen_and_i32(dst, dst, mask);
4279         tcg_gen_or_i32(dst, dst, tmp);
4280 
4281         tcg_temp_free(rot);
4282         tcg_temp_free(mask);
4283     }
4284     tcg_temp_free(tmp);
4285 }
4286 
4287 DISAS_INSN(bfins_mem)
4288 {
4289     int ext = read_im16(env, s);
4290     TCGv src = DREG(ext, 12);
4291     TCGv addr, len, ofs;
4292 
4293     addr = gen_lea(env, s, insn, OS_UNSIZED);
4294     if (IS_NULL_QREG(addr)) {
4295         gen_addr_fault(s);
4296         return;
4297     }
4298 
4299     if (ext & 0x20) {
4300         len = DREG(ext, 0);
4301     } else {
4302         len = tcg_const_i32(extract32(ext, 0, 5));
4303     }
4304     if (ext & 0x800) {
4305         ofs = DREG(ext, 6);
4306     } else {
4307         ofs = tcg_const_i32(extract32(ext, 6, 5));
4308     }
4309 
4310     gen_helper_bfins_mem(QREG_CC_N, cpu_env, addr, src, ofs, len);
4311     set_cc_op(s, CC_OP_LOGIC);
4312 
4313     if (!(ext & 0x20)) {
4314         tcg_temp_free(len);
4315     }
4316     if (!(ext & 0x800)) {
4317         tcg_temp_free(ofs);
4318     }
4319 }
4320 
4321 DISAS_INSN(ff1)
4322 {
4323     TCGv reg;
4324     reg = DREG(insn, 0);
4325     gen_logic_cc(s, reg, OS_LONG);
4326     gen_helper_ff1(reg, reg);
4327 }
4328 
4329 DISAS_INSN(chk)
4330 {
4331     TCGv src, reg;
4332     int opsize;
4333 
4334     switch ((insn >> 7) & 3) {
4335     case 3:
4336         opsize = OS_WORD;
4337         break;
4338     case 2:
4339         if (m68k_feature(env, M68K_FEATURE_CHK2)) {
4340             opsize = OS_LONG;
4341             break;
4342         }
4343         /* fallthru */
4344     default:
4345         gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4346         return;
4347     }
4348     SRC_EA(env, src, opsize, 1, NULL);
4349     reg = gen_extend(DREG(insn, 9), opsize, 1);
4350 
4351     gen_flush_flags(s);
4352     gen_helper_chk(cpu_env, reg, src);
4353 }
4354 
4355 DISAS_INSN(chk2)
4356 {
4357     uint16_t ext;
4358     TCGv addr1, addr2, bound1, bound2, reg;
4359     int opsize;
4360 
4361     switch ((insn >> 9) & 3) {
4362     case 0:
4363         opsize = OS_BYTE;
4364         break;
4365     case 1:
4366         opsize = OS_WORD;
4367         break;
4368     case 2:
4369         opsize = OS_LONG;
4370         break;
4371     default:
4372         gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4373         return;
4374     }
4375 
4376     ext = read_im16(env, s);
4377     if ((ext & 0x0800) == 0) {
4378         gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4379         return;
4380     }
4381 
4382     addr1 = gen_lea(env, s, insn, OS_UNSIZED);
4383     addr2 = tcg_temp_new();
4384     tcg_gen_addi_i32(addr2, addr1, opsize_bytes(opsize));
4385 
4386     bound1 = gen_load(s, opsize, addr1, 1, IS_USER(s));
4387     tcg_temp_free(addr1);
4388     bound2 = gen_load(s, opsize, addr2, 1, IS_USER(s));
4389     tcg_temp_free(addr2);
4390 
4391     reg = tcg_temp_new();
4392     if (ext & 0x8000) {
4393         tcg_gen_mov_i32(reg, AREG(ext, 12));
4394     } else {
4395         gen_ext(reg, DREG(ext, 12), opsize, 1);
4396     }
4397 
4398     gen_flush_flags(s);
4399     gen_helper_chk2(cpu_env, reg, bound1, bound2);
4400     tcg_temp_free(reg);
4401 }
4402 
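/*
 * MOVE16 copies one 16-byte line.  Both addresses are forced to
 * 16-byte alignment (the low four bits are ignored) and the line is
 * moved as two 64-bit loads followed by two 64-bit stores.
 */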
4403 static void m68k_copy_line(TCGv dst, TCGv src, int index)
4404 {
4405     TCGv addr;
4406     TCGv_i64 t0, t1;
4407 
4408     addr = tcg_temp_new();
4409 
4410     t0 = tcg_temp_new_i64();
4411     t1 = tcg_temp_new_i64();
4412 
4413     tcg_gen_andi_i32(addr, src, ~15);
4414     tcg_gen_qemu_ld64(t0, addr, index);
4415     tcg_gen_addi_i32(addr, addr, 8);
4416     tcg_gen_qemu_ld64(t1, addr, index);
4417 
4418     tcg_gen_andi_i32(addr, dst, ~15);
4419     tcg_gen_qemu_st64(t0, addr, index);
4420     tcg_gen_addi_i32(addr, addr, 8);
4421     tcg_gen_qemu_st64(t1, addr, index);
4422 
4423     tcg_temp_free_i64(t0);
4424     tcg_temp_free_i64(t1);
4425     tcg_temp_free(addr);
4426 }
4427 
4428 DISAS_INSN(move16_reg)
4429 {
4430     int index = IS_USER(s);
4431     TCGv tmp;
4432     uint16_t ext;
4433 
4434     ext = read_im16(env, s);
4435     if ((ext & (1 << 15)) == 0) {
4436         gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4437     }
4438 
4439     m68k_copy_line(AREG(ext, 12), AREG(insn, 0), index);
4440 
4441     /* Ax can be Ay, so save Ay before incrementing Ax */
4442     tmp = tcg_temp_new();
4443     tcg_gen_mov_i32(tmp, AREG(ext, 12));
4444     tcg_gen_addi_i32(AREG(insn, 0), AREG(insn, 0), 16);
4445     tcg_gen_addi_i32(AREG(ext, 12), tmp, 16);
4446     tcg_temp_free(tmp);
4447 }
4448 
4449 DISAS_INSN(move16_mem)
4450 {
4451     int index = IS_USER(s);
4452     TCGv reg, addr;
4453 
4454     reg = AREG(insn, 0);
4455     addr = tcg_const_i32(read_im32(env, s));
4456 
4457     if ((insn >> 3) & 1) {
4458         /* MOVE16 (xxx).L, (Ay) */
4459         m68k_copy_line(reg, addr, index);
4460     } else {
4461         /* MOVE16 (Ay), (xxx).L */
4462         m68k_copy_line(addr, reg, index);
4463     }
4464 
4465     tcg_temp_free(addr);
4466 
4467     if (((insn >> 3) & 2) == 0) {
4468         /* (Ay)+ */
4469         tcg_gen_addi_i32(reg, reg, 16);
4470     }
4471 }
4472 
4473 DISAS_INSN(strldsr)
4474 {
4475     uint16_t ext;
4476     uint32_t addr;
4477 
4478     addr = s->pc - 2;
4479     ext = read_im16(env, s);
4480     if (ext != 0x46FC) {
4481         gen_exception(s, addr, EXCP_UNSUPPORTED);
4482         return;
4483     }
4484     ext = read_im16(env, s);
4485     if (IS_USER(s) || (ext & SR_S) == 0) {
4486         gen_exception(s, addr, EXCP_PRIVILEGE);
4487         return;
4488     }
4489     gen_push(s, gen_get_sr(s));
4490     gen_set_sr_im(s, ext, 0);
4491 }
4492 
4493 DISAS_INSN(move_from_sr)
4494 {
4495     TCGv sr;
4496 
4497     if (IS_USER(s) && !m68k_feature(env, M68K_FEATURE_M68000)) {
4498         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4499         return;
4500     }
4501     sr = gen_get_sr(s);
4502     DEST_EA(env, insn, OS_WORD, sr, NULL);
4503 }
4504 
4505 #if defined(CONFIG_SOFTMMU)
4506 DISAS_INSN(moves)
4507 {
4508     int opsize;
4509     uint16_t ext;
4510     TCGv reg;
4511     TCGv addr;
4512     int extend;
4513 
4514     if (IS_USER(s)) {
4515         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4516         return;
4517     }
4518 
4519     ext = read_im16(env, s);
4520 
4521     opsize = insn_opsize(insn);
4522 
4523     if (ext & 0x8000) {
4524         /* address register */
4525         reg = AREG(ext, 12);
4526         extend = 1;
4527     } else {
4528         /* data register */
4529         reg = DREG(ext, 12);
4530         extend = 0;
4531     }
4532 
4533     addr = gen_lea(env, s, insn, opsize);
4534     if (IS_NULL_QREG(addr)) {
4535         gen_addr_fault(s);
4536         return;
4537     }
4538 
4539     if (ext & 0x0800) {
4540         /* from reg to ea */
4541         gen_store(s, opsize, addr, reg, DFC_INDEX(s));
4542     } else {
4543         /* from ea to reg */
4544         TCGv tmp = gen_load(s, opsize, addr, 0, SFC_INDEX(s));
4545         if (extend) {
4546             gen_ext(reg, tmp, opsize, 1);
4547         } else {
4548             gen_partset_reg(opsize, reg, tmp);
4549         }
4550     }
4551     switch (extract32(insn, 3, 3)) {
4552     case 3: /* Indirect postincrement.  */
4553         tcg_gen_addi_i32(AREG(insn, 0), addr,
4554                          REG(insn, 0) == 7 && opsize == OS_BYTE
4555                          ? 2
4556                          : opsize_bytes(opsize));
4557         break;
4558     case 4: /* Indirect predecrement.  */
4559         tcg_gen_mov_i32(AREG(insn, 0), addr);
4560         break;
4561     }
4562 }
4563 
4564 DISAS_INSN(move_to_sr)
4565 {
4566     if (IS_USER(s)) {
4567         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4568         return;
4569     }
4570     gen_move_to_sr(env, s, insn, false);
4571     gen_lookup_tb(s);
4572 }
4573 
4574 DISAS_INSN(move_from_usp)
4575 {
4576     if (IS_USER(s)) {
4577         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4578         return;
4579     }
4580     tcg_gen_ld_i32(AREG(insn, 0), cpu_env,
4581                    offsetof(CPUM68KState, sp[M68K_USP]));
4582 }
4583 
4584 DISAS_INSN(move_to_usp)
4585 {
4586     if (IS_USER(s)) {
4587         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4588         return;
4589     }
4590     tcg_gen_st_i32(AREG(insn, 0), cpu_env,
4591                    offsetof(CPUM68KState, sp[M68K_USP]));
4592 }
4593 
4594 DISAS_INSN(halt)
4595 {
4596     if (IS_USER(s)) {
4597         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4598         return;
4599     }
4600 
4601     gen_exception(s, s->pc, EXCP_HALT_INSN);
4602 }
4603 
4604 DISAS_INSN(stop)
4605 {
4606     uint16_t ext;
4607 
4608     if (IS_USER(s)) {
4609         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4610         return;
4611     }
4612 
4613     ext = read_im16(env, s);
4614 
4615     gen_set_sr_im(s, ext, 0);
4616     tcg_gen_movi_i32(cpu_halted, 1);
4617     gen_exception(s, s->pc, EXCP_HLT);
4618 }
4619 
4620 DISAS_INSN(rte)
4621 {
4622     if (IS_USER(s)) {
4623         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4624         return;
4625     }
4626     gen_exception(s, s->insn_pc, EXCP_RTE);
4627 }
4628 
4629 DISAS_INSN(cf_movec)
4630 {
4631     uint16_t ext;
4632     TCGv reg;
4633 
4634     if (IS_USER(s)) {
4635         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4636         return;
4637     }
4638 
4639     ext = read_im16(env, s);
4640 
4641     if (ext & 0x8000) {
4642         reg = AREG(ext, 12);
4643     } else {
4644         reg = DREG(ext, 12);
4645     }
4646     gen_helper_cf_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4647     gen_lookup_tb(s);
4648 }
4649 
4650 DISAS_INSN(m68k_movec)
4651 {
4652     uint16_t ext;
4653     TCGv reg;
4654 
4655     if (IS_USER(s)) {
4656         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4657         return;
4658     }
4659 
4660     ext = read_im16(env, s);
4661 
4662     if (ext & 0x8000) {
4663         reg = AREG(ext, 12);
4664     } else {
4665         reg = DREG(ext, 12);
4666     }
4667     if (insn & 1) {
4668         gen_helper_m68k_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4669     } else {
4670         gen_helper_m68k_movec_from(reg, cpu_env, tcg_const_i32(ext & 0xfff));
4671     }
4672     gen_lookup_tb(s);
4673 }
4674 
4675 DISAS_INSN(intouch)
4676 {
4677     if (IS_USER(s)) {
4678         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4679         return;
4680     }
4681     /* ICache fetch.  Implement as no-op.  */
4682 }
4683 
4684 DISAS_INSN(cpushl)
4685 {
4686     if (IS_USER(s)) {
4687         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4688         return;
4689     }
4690     /* Cache push/invalidate.  Implement as no-op.  */
4691 }
4692 
4693 DISAS_INSN(cpush)
4694 {
4695     if (IS_USER(s)) {
4696         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4697         return;
4698     }
4699     /* Cache push/invalidate.  Implement as no-op.  */
4700 }
4701 
4702 DISAS_INSN(cinv)
4703 {
4704     if (IS_USER(s)) {
4705         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4706         return;
4707     }
4708     /* Invalidate cache line.  Implement as no-op.  */
4709 }
4710 
4711 #if defined(CONFIG_SOFTMMU)
4712 DISAS_INSN(pflush)
4713 {
4714     TCGv opmode;
4715 
4716     if (IS_USER(s)) {
4717         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4718         return;
4719     }
4720 
4721     opmode = tcg_const_i32((insn >> 3) & 3);
4722     gen_helper_pflush(cpu_env, AREG(insn, 0), opmode);
4723     tcg_temp_free(opmode);
4724 }
4725 
4726 DISAS_INSN(ptest)
4727 {
4728     TCGv is_read;
4729 
4730     if (IS_USER(s)) {
4731         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4732         return;
4733     }
4734     is_read = tcg_const_i32((insn >> 5) & 1);
4735     gen_helper_ptest(cpu_env, AREG(insn, 0), is_read);
4736     tcg_temp_free(is_read);
4737 }
4738 #endif
4739 
4740 DISAS_INSN(wddata)
4741 {
4742     gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4743 }
4744 
4745 DISAS_INSN(wdebug)
4746 {
4747     M68kCPU *cpu = m68k_env_get_cpu(env);
4748 
4749     if (IS_USER(s)) {
4750         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4751         return;
4752     }
4753     /* TODO: Implement wdebug.  */
4754     cpu_abort(CPU(cpu), "WDEBUG not implemented");
4755 }
4756 #endif
4757 
4758 DISAS_INSN(trap)
4759 {
4760     gen_exception(s, s->insn_pc, EXCP_TRAP0 + (insn & 0xf));
4761 }
4762 
4763 static void gen_load_fcr(DisasContext *s, TCGv res, int reg)
4764 {
4765     switch (reg) {
4766     case M68K_FPIAR:
4767         tcg_gen_movi_i32(res, 0);
4768         break;
4769     case M68K_FPSR:
4770         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpsr));
4771         break;
4772     case M68K_FPCR:
4773         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpcr));
4774         break;
4775     }
4776 }
4777 
4778 static void gen_store_fcr(DisasContext *s, TCGv val, int reg)
4779 {
4780     switch (reg) {
4781     case M68K_FPIAR:
4782         break;
4783     case M68K_FPSR:
4784         tcg_gen_st_i32(val, cpu_env, offsetof(CPUM68KState, fpsr));
4785         break;
4786     case M68K_FPCR:
4787         gen_helper_set_fpcr(cpu_env, val);
4788         break;
4789     }
4790 }
4791 
4792 static void gen_qemu_store_fcr(DisasContext *s, TCGv addr, int reg)
4793 {
4794     int index = IS_USER(s);
4795     TCGv tmp;
4796 
4797     tmp = tcg_temp_new();
4798     gen_load_fcr(s, tmp, reg);
4799     tcg_gen_qemu_st32(tmp, addr, index);
4800     tcg_temp_free(tmp);
4801 }
4802 
4803 static void gen_qemu_load_fcr(DisasContext *s, TCGv addr, int reg)
4804 {
4805     int index = IS_USER(s);
4806     TCGv tmp;
4807 
4808     tmp = tcg_temp_new();
4809     tcg_gen_qemu_ld32u(tmp, addr, index);
4810     gen_store_fcr(s, tmp, reg);
4811     tcg_temp_free(tmp);
4812 }
4813 
4814 
4815 static void gen_op_fmove_fcr(CPUM68KState *env, DisasContext *s,
4816                              uint32_t insn, uint32_t ext)
4817 {
4818     int mask = (ext >> 10) & 7;
4819     int is_write = (ext >> 13) & 1;
4820     int mode = extract32(insn, 3, 3);
4821     int i;
4822     TCGv addr, tmp;
4823 
4824     switch (mode) {
4825     case 0: /* Dn */
4826         if (mask != M68K_FPIAR && mask != M68K_FPSR && mask != M68K_FPCR) {
4827             gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4828             return;
4829         }
4830         if (is_write) {
4831             gen_load_fcr(s, DREG(insn, 0), mask);
4832         } else {
4833             gen_store_fcr(s, DREG(insn, 0), mask);
4834         }
4835         return;
4836     case 1: /* An, only with FPIAR */
4837         if (mask != M68K_FPIAR) {
4838             gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4839             return;
4840         }
4841         if (is_write) {
4842             gen_load_fcr(s, AREG(insn, 0), mask);
4843         } else {
4844             gen_store_fcr(s, AREG(insn, 0), mask);
4845         }
4846         return;
4847     default:
4848         break;
4849     }
4850 
4851     tmp = gen_lea(env, s, insn, OS_LONG);
4852     if (IS_NULL_QREG(tmp)) {
4853         gen_addr_fault(s);
4854         return;
4855     }
4856 
4857     addr = tcg_temp_new();
4858     tcg_gen_mov_i32(addr, tmp);
4859 
4860     /* mask:
4861      *
4862      * 0b100 Floating-Point Control Register
4863      * 0b010 Floating-Point Status Register
4864      * 0b001 Floating-Point Instruction Address Register
4865      *
4866      */
4867 
4868     if (is_write && mode == 4) {
4869         for (i = 2; i >= 0; i--, mask >>= 1) {
4870             if (mask & 1) {
4871                 gen_qemu_store_fcr(s, addr, 1 << i);
4872                 if (mask != 1) {
4873                     tcg_gen_subi_i32(addr, addr, opsize_bytes(OS_LONG));
4874                 }
4875             }
4876         }
4877         tcg_gen_mov_i32(AREG(insn, 0), addr);
4878     } else {
4879         for (i = 0; i < 3; i++, mask >>= 1) {
4880             if (mask & 1) {
4881                 if (is_write) {
4882                     gen_qemu_store_fcr(s, addr, 1 << i);
4883                 } else {
4884                     gen_qemu_load_fcr(s, addr, 1 << i);
4885                 }
4886                 if (mask != 1 || mode == 3) {
4887                     tcg_gen_addi_i32(addr, addr, opsize_bytes(OS_LONG));
4888                 }
4889             }
4890         }
4891         if (mode == 3) {
4892             tcg_gen_mov_i32(AREG(insn, 0), addr);
4893         }
4894     }
4895     tcg_temp_free_i32(addr);
4896 }
4897 
4898 static void gen_op_fmovem(CPUM68KState *env, DisasContext *s,
4899                           uint32_t insn, uint32_t ext)
4900 {
4901     int opsize;
4902     TCGv addr, tmp;
4903     int mode = (ext >> 11) & 0x3;
4904     int is_load = ((ext & 0x2000) == 0);
4905 
4906     if (m68k_feature(s->env, M68K_FEATURE_FPU)) {
4907         opsize = OS_EXTENDED;
4908     } else {
4909         opsize = OS_DOUBLE;  /* FIXME */
4910     }
4911 
4912     addr = gen_lea(env, s, insn, opsize);
4913     if (IS_NULL_QREG(addr)) {
4914         gen_addr_fault(s);
4915         return;
4916     }
4917 
4918     tmp = tcg_temp_new();
4919     if (mode & 0x1) {
4920         /* Dynamic register list */
4921         tcg_gen_ext8u_i32(tmp, DREG(ext, 4));
4922     } else {
4923         /* Static register list */
4924         tcg_gen_movi_i32(tmp, ext & 0xff);
4925     }
4926 
4927     if (!is_load && (mode & 2) == 0) {
4928         /* Predecrement addressing mode: only available when storing
4929          * registers to memory.
4930          */
4931         if (opsize == OS_EXTENDED) {
4932             gen_helper_fmovemx_st_predec(tmp, cpu_env, addr, tmp);
4933         } else {
4934             gen_helper_fmovemd_st_predec(tmp, cpu_env, addr, tmp);
4935         }
4936     } else {
4937         /* postincrement addressing mode */
4938         if (opsize == OS_EXTENDED) {
4939             if (is_load) {
4940                 gen_helper_fmovemx_ld_postinc(tmp, cpu_env, addr, tmp);
4941             } else {
4942                 gen_helper_fmovemx_st_postinc(tmp, cpu_env, addr, tmp);
4943             }
4944         } else {
4945             if (is_load) {
4946                 gen_helper_fmovemd_ld_postinc(tmp, cpu_env, addr, tmp);
4947             } else {
4948                 gen_helper_fmovemd_st_postinc(tmp, cpu_env, addr, tmp);
4949             }
4950         }
4951     }
4952     if ((insn & 070) == 030 || (insn & 070) == 040) {
4953         tcg_gen_mov_i32(AREG(insn, 0), tmp);
4954     }
4955     tcg_temp_free(tmp);
4956 }
4957 
4958 /* ??? FP exceptions are not implemented.  Most exceptions are deferred until
4959    immediately before the next FP instruction is executed.  */
4960 DISAS_INSN(fpu)
4961 {
4962     uint16_t ext;
4963     int opmode;
4964     int opsize;
4965     TCGv_ptr cpu_src, cpu_dest;
4966 
4967     ext = read_im16(env, s);
4968     opmode = ext & 0x7f;
4969     switch ((ext >> 13) & 7) {
4970     case 0:
4971         break;
4972     case 1:
4973         goto undef;
4974     case 2:
4975         if (insn == 0xf200 && (ext & 0xfc00) == 0x5c00) {
4976             /* fmovecr */
4977             TCGv rom_offset = tcg_const_i32(opmode);
4978             cpu_dest = gen_fp_ptr(REG(ext, 7));
4979             gen_helper_fconst(cpu_env, cpu_dest, rom_offset);
4980             tcg_temp_free_ptr(cpu_dest);
4981             tcg_temp_free(rom_offset);
4982             return;
4983         }
4984         break;
4985     case 3: /* fmove out */
4986         cpu_src = gen_fp_ptr(REG(ext, 7));
4987         opsize = ext_opsize(ext, 10);
4988         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
4989                       EA_STORE, IS_USER(s)) == -1) {
4990             gen_addr_fault(s);
4991         }
4992         gen_helper_ftst(cpu_env, cpu_src);
4993         tcg_temp_free_ptr(cpu_src);
4994         return;
4995     case 4: /* fmove to control register.  */
4996     case 5: /* fmove from control register.  */
4997         gen_op_fmove_fcr(env, s, insn, ext);
4998         return;
4999     case 6: /* fmovem */
5000     case 7:
5001         if ((ext & 0x1000) == 0 && !m68k_feature(s->env, M68K_FEATURE_FPU)) {
5002             goto undef;
5003         }
5004         gen_op_fmovem(env, s, insn, ext);
5005         return;
5006     }
5007     if (ext & (1 << 14)) {
5008         /* Source effective address.  */
5009         opsize = ext_opsize(ext, 10);
5010         cpu_src = gen_fp_result_ptr();
5011         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
5012                       EA_LOADS, IS_USER(s)) == -1) {
5013             gen_addr_fault(s);
5014             return;
5015         }
5016     } else {
5017         /* Source register.  */
5018         opsize = OS_EXTENDED;
5019         cpu_src = gen_fp_ptr(REG(ext, 10));
5020     }
5021     cpu_dest = gen_fp_ptr(REG(ext, 7));
5022     switch (opmode) {
5023     case 0: /* fmove */
5024         gen_fp_move(cpu_dest, cpu_src);
5025         break;
5026     case 0x40: /* fsmove */
5027         gen_helper_fsround(cpu_env, cpu_dest, cpu_src);
5028         break;
5029     case 0x44: /* fdmove */
5030         gen_helper_fdround(cpu_env, cpu_dest, cpu_src);
5031         break;
5032     case 1: /* fint */
5033         gen_helper_firound(cpu_env, cpu_dest, cpu_src);
5034         break;
5035     case 3: /* fintrz */
5036         gen_helper_fitrunc(cpu_env, cpu_dest, cpu_src);
5037         break;
5038     case 4: /* fsqrt */
5039         gen_helper_fsqrt(cpu_env, cpu_dest, cpu_src);
5040         break;
5041     case 0x41: /* fssqrt */
5042         gen_helper_fssqrt(cpu_env, cpu_dest, cpu_src);
5043         break;
5044     case 0x45: /* fdsqrt */
5045         gen_helper_fdsqrt(cpu_env, cpu_dest, cpu_src);
5046         break;
5047     case 0x18: /* fabs */
5048         gen_helper_fabs(cpu_env, cpu_dest, cpu_src);
5049         break;
5050     case 0x58: /* fsabs */
5051         gen_helper_fsabs(cpu_env, cpu_dest, cpu_src);
5052         break;
5053     case 0x5c: /* fdabs */
5054         gen_helper_fdabs(cpu_env, cpu_dest, cpu_src);
5055         break;
5056     case 0x1a: /* fneg */
5057         gen_helper_fneg(cpu_env, cpu_dest, cpu_src);
5058         break;
5059     case 0x5a: /* fsneg */
5060         gen_helper_fsneg(cpu_env, cpu_dest, cpu_src);
5061         break;
5062     case 0x5e: /* fdneg */
5063         gen_helper_fdneg(cpu_env, cpu_dest, cpu_src);
5064         break;
5065     case 0x20: /* fdiv */
5066         gen_helper_fdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5067         break;
5068     case 0x60: /* fsdiv */
5069         gen_helper_fsdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5070         break;
5071     case 0x64: /* fddiv */
5072         gen_helper_fddiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5073         break;
5074     case 0x22: /* fadd */
5075         gen_helper_fadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5076         break;
5077     case 0x62: /* fsadd */
5078         gen_helper_fsadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5079         break;
5080     case 0x66: /* fdadd */
5081         gen_helper_fdadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5082         break;
5083     case 0x23: /* fmul */
5084         gen_helper_fmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5085         break;
5086     case 0x63: /* fsmul */
5087         gen_helper_fsmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5088         break;
5089     case 0x67: /* fdmul */
5090         gen_helper_fdmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5091         break;
5092     case 0x24: /* fsgldiv */
5093         gen_helper_fsgldiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5094         break;
5095     case 0x27: /* fsglmul */
5096         gen_helper_fsglmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5097         break;
5098     case 0x28: /* fsub */
5099         gen_helper_fsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5100         break;
5101     case 0x68: /* fssub */
5102         gen_helper_fssub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5103         break;
5104     case 0x6c: /* fdsub */
5105         gen_helper_fdsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5106         break;
5107     case 0x38: /* fcmp */
5108         gen_helper_fcmp(cpu_env, cpu_src, cpu_dest);
5109         return;
5110     case 0x3a: /* ftst */
5111         gen_helper_ftst(cpu_env, cpu_src);
5112         return;
5113     default:
5114         goto undef;
5115     }
5116     tcg_temp_free_ptr(cpu_src);
5117     gen_helper_ftst(cpu_env, cpu_dest);
5118     tcg_temp_free_ptr(cpu_dest);
5119     return;
5120 undef:
5121     /* FIXME: Is this right for offset addressing modes?  */
5122     s->pc -= 2;
5123     disas_undef_fpu(env, s, insn);
5124 }
5125 
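/*
 * FPU conditional tests, evaluated from the FPSR condition-code bits
 * (N, Z and A, where A denotes the NAN condition code in this file's
 * naming).  The comment on each case gives the predicate in terms of
 * those bits.  Conditions 16-31 are the trapping ("signaling") variants
 * of 0-15; the BSUN exception they should raise when NAN is set is not
 * implemented (see the TODO below).
 */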
5126 static void gen_fcc_cond(DisasCompare *c, DisasContext *s, int cond)
5127 {
5128     TCGv fpsr;
5129 
5130     c->g1 = 1;
5131     c->v2 = tcg_const_i32(0);
5132     c->g2 = 0;
5133     /* TODO: Raise BSUN exception.  */
5134     fpsr = tcg_temp_new();
5135     gen_load_fcr(s, fpsr, M68K_FPSR);
5136     switch (cond) {
5137     case 0:  /* False */
5138     case 16: /* Signaling False */
5139         c->v1 = c->v2;
5140         c->tcond = TCG_COND_NEVER;
5141         break;
5142     case 1:  /* EQual Z */
5143     case 17: /* Signaling EQual Z */
5144         c->v1 = tcg_temp_new();
5145         c->g1 = 0;
5146         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5147         c->tcond = TCG_COND_NE;
5148         break;
5149     case 2:  /* Ordered Greater Than !(A || Z || N) */
5150     case 18: /* Greater Than !(A || Z || N) */
5151         c->v1 = tcg_temp_new();
5152         c->g1 = 0;
5153         tcg_gen_andi_i32(c->v1, fpsr,
5154                          FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5155         c->tcond = TCG_COND_EQ;
5156         break;
5157     case 3:  /* Ordered Greater than or Equal Z || !(A || N) */
5158     case 19: /* Greater than or Equal Z || !(A || N) */
5159         c->v1 = tcg_temp_new();
5160         c->g1 = 0;
5161         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5162         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5163         tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_Z | FPSR_CC_N);
5164         tcg_gen_or_i32(c->v1, c->v1, fpsr);
5165         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5166         c->tcond = TCG_COND_NE;
5167         break;
5168     case 4:  /* Ordered Less Than !(!N || A || Z) */
5169     case 20: /* Less Than !(!N || A || Z) */
5170         c->v1 = tcg_temp_new();
5171         c->g1 = 0;
5172         tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
5173         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_N | FPSR_CC_A | FPSR_CC_Z);
5174         c->tcond = TCG_COND_EQ;
5175         break;
5176     case 5:  /* Ordered Less than or Equal Z || (N && !A) */
5177     case 21: /* Less than or Equal Z || (N && !A) */
5178         c->v1 = tcg_temp_new();
5179         c->g1 = 0;
5180         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5181         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5182         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5183         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_Z | FPSR_CC_N);
5184         c->tcond = TCG_COND_NE;
5185         break;
5186     case 6:  /* Ordered Greater or Less than !(A || Z) */
5187     case 22: /* Greater or Less than !(A || Z) */
5188         c->v1 = tcg_temp_new();
5189         c->g1 = 0;
5190         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5191         c->tcond = TCG_COND_EQ;
5192         break;
5193     case 7:  /* Ordered !A */
5194     case 23: /* Greater, Less or Equal !A */
5195         c->v1 = tcg_temp_new();
5196         c->g1 = 0;
5197         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5198         c->tcond = TCG_COND_EQ;
5199         break;
5200     case 8:  /* Unordered A */
5201     case 24: /* Not Greater, Less or Equal A */
5202         c->v1 = tcg_temp_new();
5203         c->g1 = 0;
5204         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5205         c->tcond = TCG_COND_NE;
5206         break;
5207     case 9:  /* Unordered or Equal A || Z */
5208     case 25: /* Not Greater or Less than A || Z */
5209         c->v1 = tcg_temp_new();
5210         c->g1 = 0;
5211         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5212         c->tcond = TCG_COND_NE;
5213         break;
5214     case 10: /* Unordered or Greater Than A || !(N || Z) */
5215     case 26: /* Not Less or Equal A || !(N || Z) */
5216         c->v1 = tcg_temp_new();
5217         c->g1 = 0;
5218         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5219         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5220         tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_A | FPSR_CC_N);
5221         tcg_gen_or_i32(c->v1, c->v1, fpsr);
5222         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5223         c->tcond = TCG_COND_NE;
5224         break;
5225     case 11: /* Unordered or Greater or Equal A || Z || !N */
5226     case 27: /* Not Less Than A || Z || !N */
5227         c->v1 = tcg_temp_new();
5228         c->g1 = 0;
5229         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5230         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5231         c->tcond = TCG_COND_NE;
5232         break;
5233     case 12: /* Unordered or Less Than A || (N && !Z) */
5234     case 28: /* Not Greater than or Equal A || (N && !Z) */
5235         c->v1 = tcg_temp_new();
5236         c->g1 = 0;
5237         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5238         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5239         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5240         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_A | FPSR_CC_N);
5241         c->tcond = TCG_COND_NE;
5242         break;
5243     case 13: /* Unordered or Less or Equal A || Z || N */
5244     case 29: /* Not Greater Than A || Z || N */
5245         c->v1 = tcg_temp_new();
5246         c->g1 = 0;
5247         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5248         c->tcond = TCG_COND_NE;
5249         break;
5250     case 14: /* Not Equal !Z */
5251     case 30: /* Signaling Not Equal !Z */
5252         c->v1 = tcg_temp_new();
5253         c->g1 = 0;
5254         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5255         c->tcond = TCG_COND_EQ;
5256         break;
5257     case 15: /* True */
5258     case 31: /* Signaling True */
5259         c->v1 = c->v2;
5260         c->tcond = TCG_COND_ALWAYS;
5261         break;
5262     }
5263     tcg_temp_free(fpsr);
5264 }
5265 
5266 static void gen_fjmpcc(DisasContext *s, int cond, TCGLabel *l1)
5267 {
5268     DisasCompare c;
5269 
5270     gen_fcc_cond(&c, s, cond);
5271     update_cc_op(s);
5272     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
5273     free_cond(&c);
5274 }
5275 
5276 DISAS_INSN(fbcc)
5277 {
5278     uint32_t offset;
5279     uint32_t base;
5280     TCGLabel *l1;
5281 
5282     base = s->pc;
5283     offset = (int16_t)read_im16(env, s);
5284     if (insn & (1 << 6)) {
5285         offset = (offset << 16) | read_im16(env, s);
5286     }
5287 
5288     l1 = gen_new_label();
5289     update_cc_op(s);
5290     gen_fjmpcc(s, insn & 0x3f, l1);
5291     gen_jmp_tb(s, 0, s->pc);
5292     gen_set_label(l1);
5293     gen_jmp_tb(s, 1, base + offset);
5294 }
5295 
5296 DISAS_INSN(fscc)
5297 {
5298     DisasCompare c;
5299     int cond;
5300     TCGv tmp;
5301     uint16_t ext;
5302 
5303     ext = read_im16(env, s);
5304     cond = ext & 0x3f;
5305     gen_fcc_cond(&c, s, cond);
5306 
5307     tmp = tcg_temp_new();
5308     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
5309     free_cond(&c);
5310 
5311     tcg_gen_neg_i32(tmp, tmp);
5312     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
5313     tcg_temp_free(tmp);
5314 }
5315 
5316 #if defined(CONFIG_SOFTMMU)
5317 DISAS_INSN(frestore)
5318 {
5319     TCGv addr;
5320 
5321     if (IS_USER(s)) {
5322         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
5323         return;
5324     }
5325     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5326         SRC_EA(env, addr, OS_LONG, 0, NULL);
5327         /* FIXME: check the state frame */
5328     } else {
5329         disas_undef(env, s, insn);
5330     }
5331 }
5332 
5333 DISAS_INSN(fsave)
5334 {
5335     if (IS_USER(s)) {
5336         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
5337         return;
5338     }
5339 
5340     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5341         /* always write IDLE */
5342         TCGv idle = tcg_const_i32(0x41000000);
5343         DEST_EA(env, insn, OS_LONG, idle, NULL);
5344         tcg_temp_free(idle);
5345     } else {
5346         disas_undef(env, s, insn);
5347     }
5348 }
5349 #endif
5350 
5351 static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
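/*
 * MAC operands are 16-bit halves of a 32-bit register.  In fractional
 * mode (MACSR_FI) the selected half is left-justified in the upper 16
 * bits; in signed integer mode (MACSR_SU) it is sign-extended, and in
 * unsigned mode it is zero-extended.
 */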
5352 {
5353     TCGv tmp = tcg_temp_new();
5354     if (s->env->macsr & MACSR_FI) {
5355         if (upper)
5356             tcg_gen_andi_i32(tmp, val, 0xffff0000);
5357         else
5358             tcg_gen_shli_i32(tmp, val, 16);
5359     } else if (s->env->macsr & MACSR_SU) {
5360         if (upper)
5361             tcg_gen_sari_i32(tmp, val, 16);
5362         else
5363             tcg_gen_ext16s_i32(tmp, val);
5364     } else {
5365         if (upper)
5366             tcg_gen_shri_i32(tmp, val, 16);
5367         else
5368             tcg_gen_ext16u_i32(tmp, val);
5369     }
5370     return tmp;
5371 }
5372 
5373 static void gen_mac_clear_flags(void)
5374 {
5375     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
5376                      ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
5377 }
5378 
5379 DISAS_INSN(mac)
5380 {
5381     TCGv rx;
5382     TCGv ry;
5383     uint16_t ext;
5384     int acc;
5385     TCGv tmp;
5386     TCGv addr;
5387     TCGv loadval;
5388     int dual;
5389     TCGv saved_flags;
5390 
5391     if (!s->done_mac) {
5392         s->mactmp = tcg_temp_new_i64();
5393         s->done_mac = 1;
5394     }
5395 
5396     ext = read_im16(env, s);
5397 
5398     acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
5399     dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
5400     if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
5401         disas_undef(env, s, insn);
5402         return;
5403     }
5404     if (insn & 0x30) {
5405         /* MAC with load.  */
5406         tmp = gen_lea(env, s, insn, OS_LONG);
5407         addr = tcg_temp_new();
5408         tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
5409         /* Load the value now to ensure correct exception behavior.
5410            Perform writeback after reading the MAC inputs.  */
5411         loadval = gen_load(s, OS_LONG, addr, 0, IS_USER(s));
5412 
5413         acc ^= 1;
5414         rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
5415         ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
5416     } else {
5417         loadval = addr = NULL_QREG;
5418         rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5419         ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5420     }
5421 
5422     gen_mac_clear_flags();
5423 #if 0
5424     l1 = -1;
5425     /* Disabled because conditional branches clobber temporary vars.  */
5426     if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
5427         /* Skip the multiply if we know we will ignore it.  */
5428         l1 = gen_new_label();
5429         tmp = tcg_temp_new();
5430         tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
5431         gen_op_jmp_nz32(tmp, l1);
5432     }
5433 #endif
5434 
5435     if ((ext & 0x0800) == 0) {
5436         /* Word.  */
5437         rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
5438         ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
5439     }
5440     if (s->env->macsr & MACSR_FI) {
5441         gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
5442     } else {
5443         if (s->env->macsr & MACSR_SU)
5444             gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
5445         else
5446             gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
5447         switch ((ext >> 9) & 3) {
5448         case 1:
5449             tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
5450             break;
5451         case 3:
5452             tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
5453             break;
5454         }
5455     }
5456 
5457     if (dual) {
5458         /* Save the overflow flag from the multiply.  */
5459         saved_flags = tcg_temp_new();
5460         tcg_gen_mov_i32(saved_flags, QREG_MACSR);
5461     } else {
5462         saved_flags = NULL_QREG;
5463     }
5464 
5465 #if 0
5466     /* Disabled because conditional branches clobber temporary vars.  */
5467     if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
5468         /* Skip the accumulate if the value is already saturated.  */
5469         l1 = gen_new_label();
5470         tmp = tcg_temp_new();
5471         gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5472         gen_op_jmp_nz32(tmp, l1);
5473     }
5474 #endif
5475 
5476     if (insn & 0x100)
5477         tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5478     else
5479         tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5480 
5481     if (s->env->macsr & MACSR_FI)
5482         gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5483     else if (s->env->macsr & MACSR_SU)
5484         gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5485     else
5486         gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5487 
5488 #if 0
5489     /* Disabled because conditional branches clobber temporary vars.  */
5490     if (l1 != -1)
5491         gen_set_label(l1);
5492 #endif
5493 
5494     if (dual) {
5495         /* Dual accumulate variant.  */
5496         acc = (ext >> 2) & 3;
5497         /* Restore the overflow flag from the multiplier.  */
5498         tcg_gen_mov_i32(QREG_MACSR, saved_flags);
5499 #if 0
5500         /* Disabled because conditional branches clobber temporary vars.  */
5501         if ((s->env->macsr & MACSR_OMC) != 0) {
5502             /* Skip the accumulate if the value is already saturated.  */
5503             l1 = gen_new_label();
5504             tmp = tcg_temp_new();
5505             gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5506             gen_op_jmp_nz32(tmp, l1);
5507         }
5508 #endif
5509         if (ext & 2)
5510             tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5511         else
5512             tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5513         if (s->env->macsr & MACSR_FI)
5514             gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5515         else if (s->env->macsr & MACSR_SU)
5516             gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5517         else
5518             gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5519 #if 0
5520         /* Disabled because conditional branches clobber temporary vars.  */
5521         if (l1 != -1)
5522             gen_set_label(l1);
5523 #endif
5524     }
5525     gen_helper_mac_set_flags(cpu_env, tcg_const_i32(acc));
5526 
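         /* For the variant with a parallel register load, copy the value
            loaded above into Rw and update the address register for the
            (An)+ and -(An) addressing modes.  */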
5527     if (insn & 0x30) {
5528         TCGv rw;
5529         rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5530         tcg_gen_mov_i32(rw, loadval);
5531         /* FIXME: Should address writeback happen with the masked or
5532            unmasked value?  */
5533         switch ((insn >> 3) & 7) {
5534         case 3: /* Post-increment.  */
5535             tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
5536             break;
5537         case 4: /* Pre-decrement.  */
5538             tcg_gen_mov_i32(AREG(insn, 0), addr);
5539         }
5540     }
5541 }
5542 
5543 DISAS_INSN(from_mac)
5544 {
5545     TCGv rx;
5546     TCGv_i64 acc;
5547     int accnum;
5548 
5549     rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5550     accnum = (insn >> 9) & 3;
5551     acc = MACREG(accnum);
5552     if (s->env->macsr & MACSR_FI) {
5553         gen_helper_get_macf(rx, cpu_env, acc);
5554     } else if ((s->env->macsr & MACSR_OMC) == 0) {
5555         tcg_gen_extrl_i64_i32(rx, acc);
5556     } else if (s->env->macsr & MACSR_SU) {
5557         gen_helper_get_macs(rx, acc);
5558     } else {
5559         gen_helper_get_macu(rx, acc);
5560     }
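         /* Insn bit 6 requests a clear after the read: zero the accumulator
            and clear its PAV (overflow) flag in MACSR.  */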
5561     if (insn & 0x40) {
5562         tcg_gen_movi_i64(acc, 0);
5563         tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5564     }
5565 }
5566 
5567 DISAS_INSN(move_mac)
5568 {
5569     /* FIXME: This can be done without a helper.  */
5570     int src;
5571     TCGv dest;
5572     src = insn & 3;
5573     dest = tcg_const_i32((insn >> 9) & 3);
5574     gen_helper_mac_move(cpu_env, dest, tcg_const_i32(src));
5575     gen_mac_clear_flags();
5576     gen_helper_mac_set_flags(cpu_env, dest);
5577 }
5578 
5579 DISAS_INSN(from_macsr)
5580 {
5581     TCGv reg;
5582 
5583     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5584     tcg_gen_mov_i32(reg, QREG_MACSR);
5585 }
5586 
5587 DISAS_INSN(from_mask)
5588 {
5589     TCGv reg;
5590     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5591     tcg_gen_mov_i32(reg, QREG_MAC_MASK);
5592 }
5593 
5594 DISAS_INSN(from_mext)
5595 {
5596     TCGv reg;
5597     TCGv acc;
5598     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
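         /* Insn bit 10 selects which accumulator extension register the
            helper reads (index 2 vs index 0).  */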
5599     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5600     if (s->env->macsr & MACSR_FI)
5601         gen_helper_get_mac_extf(reg, cpu_env, acc);
5602     else
5603         gen_helper_get_mac_exti(reg, cpu_env, acc);
5604 }
5605 
5606 DISAS_INSN(macsr_to_ccr)
5607 {
5608     TCGv tmp = tcg_temp_new();
5609     tcg_gen_andi_i32(tmp, QREG_MACSR, 0xf);
5610     gen_helper_set_sr(cpu_env, tmp);
5611     tcg_temp_free(tmp);
5612     set_cc_op(s, CC_OP_FLAGS);
5613 }
5614 
5615 DISAS_INSN(to_mac)
5616 {
5617     TCGv_i64 acc;
5618     TCGv val;
5619     int accnum;
5620     accnum = (insn >> 9) & 3;
5621     acc = MACREG(accnum);
5622     SRC_EA(env, val, OS_LONG, 0, NULL);
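         /* In fractional mode the operand is shifted left 8 bits into the
            accumulator; otherwise it is sign-extended (signed mode) or
            zero-extended (unsigned mode) to 64 bits.  */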
5623     if (s->env->macsr & MACSR_FI) {
5624         tcg_gen_ext_i32_i64(acc, val);
5625         tcg_gen_shli_i64(acc, acc, 8);
5626     } else if (s->env->macsr & MACSR_SU) {
5627         tcg_gen_ext_i32_i64(acc, val);
5628     } else {
5629         tcg_gen_extu_i32_i64(acc, val);
5630     }
5631     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5632     gen_mac_clear_flags();
5633     gen_helper_mac_set_flags(cpu_env, tcg_const_i32(accnum));
5634 }
5635 
5636 DISAS_INSN(to_macsr)
5637 {
5638     TCGv val;
5639     SRC_EA(env, val, OS_LONG, 0, NULL);
5640     gen_helper_set_macsr(cpu_env, val);
5641     gen_lookup_tb(s);
5642 }
5643 
5644 DISAS_INSN(to_mask)
5645 {
5646     TCGv val;
5647     SRC_EA(env, val, OS_LONG, 0, NULL);
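         /* Only the low 16 bits are writable; the upper half of MAC_MASK
            always reads back as ones.  */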
5648     tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
5649 }
5650 
5651 DISAS_INSN(to_mext)
5652 {
5653     TCGv val;
5654     TCGv acc;
5655     SRC_EA(env, val, OS_LONG, 0, NULL);
5656     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5657     if (s->env->macsr & MACSR_FI)
5658         gen_helper_set_mac_extf(cpu_env, val, acc);
5659     else if (s->env->macsr & MACSR_SU)
5660         gen_helper_set_mac_exts(cpu_env, val, acc);
5661     else
5662         gen_helper_set_mac_extu(cpu_env, val, acc);
5663 }
5664 
5665 static disas_proc opcode_table[65536];
5666 
5667 static void
5668 register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
5669 {
5670   int i;
5671   int from;
5672   int to;
5673 
5674   /* Sanity check.  All set bits must be included in the mask.  */
5675   if (opcode & ~mask) {
5676       fprintf(stderr,
5677               "qemu internal error: bogus opcode definition %04x/%04x\n",
5678               opcode, mask);
5679       abort();
5680   }
5681   /* This could probably be cleverer.  For now just optimize the case where
5682      the top bits are known.  */
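       /* For example, opcode=0x4e40 with mask=0xfff0 (the TRAP entry below)
          fills opcode_table[0x4e40] through opcode_table[0x4e4f] with the
          same handler.  */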
5683   /* Find the first zero bit in the mask.  */
5684   i = 0x8000;
5685   while ((i & mask) != 0)
5686       i >>= 1;
5687   /* Iterate over all combinations of this and lower bits.  */
5688   if (i == 0)
5689       i = 1;
5690   else
5691       i <<= 1;
5692   from = opcode & ~(i - 1);
5693   to = from + i;
5694   for (i = from; i < to; i++) {
5695       if ((i & mask) == opcode)
5696           opcode_table[i] = proc;
5697   }
5698 }
5699 
5700 /* Register m68k opcode handlers.  Order is important.
5701    Later insns override earlier ones.  */
5702 void register_m68k_insns (CPUM68KState *env)
5703 {
5704     /* Build the opcode table only once to avoid
5705        multithreading issues. */
5706     if (opcode_table[0] != NULL) {
5707         return;
5708     }
5709 
5710     /* Use BASE() for instructions available
5711      * for both CF_ISA_A and M68000.
5712      */
5713 #define BASE(name, opcode, mask) \
5714     register_opcode(disas_##name, 0x##opcode, 0x##mask)
5715 #define INSN(name, opcode, mask, feature) do { \
5716     if (m68k_feature(env, M68K_FEATURE_##feature)) \
5717         BASE(name, opcode, mask); \
5718     } while (0)
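         /* Each entry is a (pattern, mask) pair: an insn matches when
            (insn & mask) == pattern, so bits clear in the mask are
            don't-cares (typically register and EA fields).  */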
5719     BASE(undef,     0000, 0000);
5720     INSN(arith_im,  0080, fff8, CF_ISA_A);
5721     INSN(arith_im,  0000, ff00, M68000);
5722     INSN(chk2,      00c0, f9c0, CHK2);
5723     INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
5724     BASE(bitop_reg, 0100, f1c0);
5725     BASE(bitop_reg, 0140, f1c0);
5726     BASE(bitop_reg, 0180, f1c0);
5727     BASE(bitop_reg, 01c0, f1c0);
5728     INSN(movep,     0108, f138, MOVEP);
5729     INSN(arith_im,  0280, fff8, CF_ISA_A);
5730     INSN(arith_im,  0200, ff00, M68000);
5731     INSN(undef,     02c0, ffc0, M68000);
5732     INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
5733     INSN(arith_im,  0480, fff8, CF_ISA_A);
5734     INSN(arith_im,  0400, ff00, M68000);
5735     INSN(undef,     04c0, ffc0, M68000);
5736     INSN(arith_im,  0600, ff00, M68000);
5737     INSN(undef,     06c0, ffc0, M68000);
5738     INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
5739     INSN(arith_im,  0680, fff8, CF_ISA_A);
5740     INSN(arith_im,  0c00, ff38, CF_ISA_A);
5741     INSN(arith_im,  0c00, ff00, M68000);
5742     BASE(bitop_im,  0800, ffc0);
5743     BASE(bitop_im,  0840, ffc0);
5744     BASE(bitop_im,  0880, ffc0);
5745     BASE(bitop_im,  08c0, ffc0);
5746     INSN(arith_im,  0a80, fff8, CF_ISA_A);
5747     INSN(arith_im,  0a00, ff00, M68000);
5748 #if defined(CONFIG_SOFTMMU)
5749     INSN(moves,     0e00, ff00, M68000);
5750 #endif
5751     INSN(cas,       0ac0, ffc0, CAS);
5752     INSN(cas,       0cc0, ffc0, CAS);
5753     INSN(cas,       0ec0, ffc0, CAS);
5754     INSN(cas2w,     0cfc, ffff, CAS);
5755     INSN(cas2l,     0efc, ffff, CAS);
5756     BASE(move,      1000, f000);
5757     BASE(move,      2000, f000);
5758     BASE(move,      3000, f000);
5759     INSN(chk,       4000, f040, M68000);
5760     INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
5761     INSN(negx,      4080, fff8, CF_ISA_A);
5762     INSN(negx,      4000, ff00, M68000);
5763     INSN(undef,     40c0, ffc0, M68000);
5764     INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
5765     INSN(move_from_sr, 40c0, ffc0, M68000);
5766     BASE(lea,       41c0, f1c0);
5767     BASE(clr,       4200, ff00);
5768     BASE(undef,     42c0, ffc0);
5769     INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
5770     INSN(move_from_ccr, 42c0, ffc0, M68000);
5771     INSN(neg,       4480, fff8, CF_ISA_A);
5772     INSN(neg,       4400, ff00, M68000);
5773     INSN(undef,     44c0, ffc0, M68000);
5774     BASE(move_to_ccr, 44c0, ffc0);
5775     INSN(not,       4680, fff8, CF_ISA_A);
5776     INSN(not,       4600, ff00, M68000);
5777 #if defined(CONFIG_SOFTMMU)
5778     BASE(move_to_sr, 46c0, ffc0);
5779 #endif
5780     INSN(nbcd,      4800, ffc0, M68000);
5781     INSN(linkl,     4808, fff8, M68000);
5782     BASE(pea,       4840, ffc0);
5783     BASE(swap,      4840, fff8);
5784     INSN(bkpt,      4848, fff8, BKPT);
5785     INSN(movem,     48d0, fbf8, CF_ISA_A);
5786     INSN(movem,     48e8, fbf8, CF_ISA_A);
5787     INSN(movem,     4880, fb80, M68000);
5788     BASE(ext,       4880, fff8);
5789     BASE(ext,       48c0, fff8);
5790     BASE(ext,       49c0, fff8);
5791     BASE(tst,       4a00, ff00);
5792     INSN(tas,       4ac0, ffc0, CF_ISA_B);
5793     INSN(tas,       4ac0, ffc0, M68000);
5794 #if defined(CONFIG_SOFTMMU)
5795     INSN(halt,      4ac8, ffff, CF_ISA_A);
5796 #endif
5797     INSN(pulse,     4acc, ffff, CF_ISA_A);
5798     BASE(illegal,   4afc, ffff);
5799     INSN(mull,      4c00, ffc0, CF_ISA_A);
5800     INSN(mull,      4c00, ffc0, LONG_MULDIV);
5801     INSN(divl,      4c40, ffc0, CF_ISA_A);
5802     INSN(divl,      4c40, ffc0, LONG_MULDIV);
5803     INSN(sats,      4c80, fff8, CF_ISA_B);
5804     BASE(trap,      4e40, fff0);
5805     BASE(link,      4e50, fff8);
5806     BASE(unlk,      4e58, fff8);
5807 #if defined(CONFIG_SOFTMMU)
5808     INSN(move_to_usp, 4e60, fff8, USP);
5809     INSN(move_from_usp, 4e68, fff8, USP);
5810     INSN(reset,     4e70, ffff, M68000);
5811     BASE(stop,      4e72, ffff);
5812     BASE(rte,       4e73, ffff);
5813     INSN(cf_movec,  4e7b, ffff, CF_ISA_A);
5814     INSN(m68k_movec, 4e7a, fffe, M68000);
5815 #endif
5816     BASE(nop,       4e71, ffff);
5817     INSN(rtd,       4e74, ffff, RTD);
5818     BASE(rts,       4e75, ffff);
5819     BASE(jump,      4e80, ffc0);
5820     BASE(jump,      4ec0, ffc0);
5821     INSN(addsubq,   5000, f080, M68000);
5822     BASE(addsubq,   5080, f0c0);
5823     INSN(scc,       50c0, f0f8, CF_ISA_A); /* Scc.B Dx   */
5824     INSN(scc,       50c0, f0c0, M68000);   /* Scc.B <EA> */
5825     INSN(dbcc,      50c8, f0f8, M68000);
5826     INSN(tpf,       51f8, fff8, CF_ISA_A);
5827 
5828     /* Branch instructions.  */
5829     BASE(branch,    6000, f000);
5830     /* Disable long branch instructions, then add back the ones we want.  */
5831     BASE(undef,     60ff, f0ff); /* All long branches.  */
5832     INSN(branch,    60ff, f0ff, CF_ISA_B);
5833     INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
5834     INSN(branch,    60ff, ffff, BRAL);
5835     INSN(branch,    60ff, f0ff, BCCL);
5836 
5837     BASE(moveq,     7000, f100);
5838     INSN(mvzs,      7100, f100, CF_ISA_B);
5839     BASE(or,        8000, f000);
5840     BASE(divw,      80c0, f0c0);
5841     INSN(sbcd_reg,  8100, f1f8, M68000);
5842     INSN(sbcd_mem,  8108, f1f8, M68000);
5843     BASE(addsub,    9000, f000);
5844     INSN(undef,     90c0, f0c0, CF_ISA_A);
5845     INSN(subx_reg,  9180, f1f8, CF_ISA_A);
5846     INSN(subx_reg,  9100, f138, M68000);
5847     INSN(subx_mem,  9108, f138, M68000);
5848     INSN(suba,      91c0, f1c0, CF_ISA_A);
5849     INSN(suba,      90c0, f0c0, M68000);
5850 
5851     BASE(undef_mac, a000, f000);
5852     INSN(mac,       a000, f100, CF_EMAC);
5853     INSN(from_mac,  a180, f9b0, CF_EMAC);
5854     INSN(move_mac,  a110, f9fc, CF_EMAC);
5855     INSN(from_macsr, a980, f9f0, CF_EMAC);
5856     INSN(from_mask, ad80, fff0, CF_EMAC);
5857     INSN(from_mext, ab80, fbf0, CF_EMAC);
5858     INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
5859     INSN(to_mac,    a100, f9c0, CF_EMAC);
5860     INSN(to_macsr,  a900, ffc0, CF_EMAC);
5861     INSN(to_mext,   ab00, fbc0, CF_EMAC);
5862     INSN(to_mask,   ad00, ffc0, CF_EMAC);
5863 
5864     INSN(mov3q,     a140, f1c0, CF_ISA_B);
5865     INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
5866     INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
5867     INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
5868     INSN(cmp,       b080, f1c0, CF_ISA_A);
5869     INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
5870     INSN(cmp,       b000, f100, M68000);
5871     INSN(eor,       b100, f100, M68000);
5872     INSN(cmpm,      b108, f138, M68000);
5873     INSN(cmpa,      b0c0, f0c0, M68000);
5874     INSN(eor,       b180, f1c0, CF_ISA_A);
5875     BASE(and,       c000, f000);
5876     INSN(exg_dd,    c140, f1f8, M68000);
5877     INSN(exg_aa,    c148, f1f8, M68000);
5878     INSN(exg_da,    c188, f1f8, M68000);
5879     BASE(mulw,      c0c0, f0c0);
5880     INSN(abcd_reg,  c100, f1f8, M68000);
5881     INSN(abcd_mem,  c108, f1f8, M68000);
5882     BASE(addsub,    d000, f000);
5883     INSN(undef,     d0c0, f0c0, CF_ISA_A);
5884     INSN(addx_reg,  d180, f1f8, CF_ISA_A);
5885     INSN(addx_reg,  d100, f138, M68000);
5886     INSN(addx_mem,  d108, f138, M68000);
5887     INSN(adda,      d1c0, f1c0, CF_ISA_A);
5888     INSN(adda,      d0c0, f0c0, M68000);
5889     INSN(shift_im,  e080, f0f0, CF_ISA_A);
5890     INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
5891     INSN(shift8_im, e000, f0f0, M68000);
5892     INSN(shift16_im, e040, f0f0, M68000);
5893     INSN(shift_im,  e080, f0f0, M68000);
5894     INSN(shift8_reg, e020, f0f0, M68000);
5895     INSN(shift16_reg, e060, f0f0, M68000);
5896     INSN(shift_reg, e0a0, f0f0, M68000);
5897     INSN(shift_mem, e0c0, fcc0, M68000);
5898     INSN(rotate_im, e090, f0f0, M68000);
5899     INSN(rotate8_im, e010, f0f0, M68000);
5900     INSN(rotate16_im, e050, f0f0, M68000);
5901     INSN(rotate_reg, e0b0, f0f0, M68000);
5902     INSN(rotate8_reg, e030, f0f0, M68000);
5903     INSN(rotate16_reg, e070, f0f0, M68000);
5904     INSN(rotate_mem, e4c0, fcc0, M68000);
5905     INSN(bfext_mem, e9c0, fdc0, BITFIELD);  /* bfextu & bfexts */
5906     INSN(bfext_reg, e9c0, fdf8, BITFIELD);
5907     INSN(bfins_mem, efc0, ffc0, BITFIELD);
5908     INSN(bfins_reg, efc0, fff8, BITFIELD);
5909     INSN(bfop_mem, eac0, ffc0, BITFIELD);   /* bfchg */
5910     INSN(bfop_reg, eac0, fff8, BITFIELD);   /* bfchg */
5911     INSN(bfop_mem, ecc0, ffc0, BITFIELD);   /* bfclr */
5912     INSN(bfop_reg, ecc0, fff8, BITFIELD);   /* bfclr */
5913     INSN(bfop_mem, edc0, ffc0, BITFIELD);   /* bfffo */
5914     INSN(bfop_reg, edc0, fff8, BITFIELD);   /* bfffo */
5915     INSN(bfop_mem, eec0, ffc0, BITFIELD);   /* bfset */
5916     INSN(bfop_reg, eec0, fff8, BITFIELD);   /* bfset */
5917     INSN(bfop_mem, e8c0, ffc0, BITFIELD);   /* bftst */
5918     INSN(bfop_reg, e8c0, fff8, BITFIELD);   /* bftst */
5919     BASE(undef_fpu, f000, f000);
5920     INSN(fpu,       f200, ffc0, CF_FPU);
5921     INSN(fbcc,      f280, ffc0, CF_FPU);
5922     INSN(fpu,       f200, ffc0, FPU);
5923     INSN(fscc,      f240, ffc0, FPU);
5924     INSN(fbcc,      f280, ff80, FPU);
5925 #if defined(CONFIG_SOFTMMU)
5926     INSN(frestore,  f340, ffc0, CF_FPU);
5927     INSN(fsave,     f300, ffc0, CF_FPU);
5928     INSN(frestore,  f340, ffc0, FPU);
5929     INSN(fsave,     f300, ffc0, FPU);
5930     INSN(intouch,   f340, ffc0, CF_ISA_A);
5931     INSN(cpushl,    f428, ff38, CF_ISA_A);
5932     INSN(cpush,     f420, ff20, M68040);
5933     INSN(cinv,      f400, ff20, M68040);
5934     INSN(pflush,    f500, ffe0, M68040);
5935     INSN(ptest,     f548, ffd8, M68040);
5936     INSN(wddata,    fb00, ff00, CF_ISA_A);
5937     INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
5938 #endif
5939     INSN(move16_mem, f600, ffe0, M68040);
5940     INSN(move16_reg, f620, fff8, M68040);
5941 #undef INSN
5942 }
5943 
5944 /* ??? Some of this implementation is not exception safe.  We should always
5945    write back the result to memory before setting the condition codes.  */
5946 static void disas_m68k_insn(CPUM68KState * env, DisasContext *s)
5947 {
5948     uint16_t insn = read_im16(env, s);
5949     opcode_table[insn](env, s, insn);
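         /* Apply any register writebacks the instruction deferred.  */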
5950     do_writebacks(s);
5951 }
5952 
5953 /* generate intermediate code for basic block 'tb'.  */
5954 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb)
5955 {
5956     CPUM68KState *env = cs->env_ptr;
5957     DisasContext dc1, *dc = &dc1;
5958     target_ulong pc_start;
5959     int pc_offset;
5960     int num_insns;
5961     int max_insns;
5962 
5963     /* generate intermediate code */
5964     pc_start = tb->pc;
5965 
5966     dc->tb = tb;
5967 
5968     dc->env = env;
5969     dc->is_jmp = DISAS_NEXT;
5970     dc->pc = pc_start;
5971     dc->cc_op = CC_OP_DYNAMIC;
5972     dc->cc_op_synced = 1;
5973     dc->singlestep_enabled = cs->singlestep_enabled;
5974     dc->done_mac = 0;
5975     dc->writeback_mask = 0;
5976     num_insns = 0;
5977     max_insns = tb_cflags(tb) & CF_COUNT_MASK;
5978     if (max_insns == 0) {
5979         max_insns = CF_COUNT_MASK;
5980     }
5981     if (max_insns > TCG_MAX_INSNS) {
5982         max_insns = TCG_MAX_INSNS;
5983     }
5984 
5985     gen_tb_start(tb);
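         /* Translate until something ends the block: a jump, a full TCG op
            buffer, single-stepping, the block growing to nearly a page
            (TARGET_PAGE_SIZE - 32 bytes), or reaching max_insns.  */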
5986     do {
5987         pc_offset = dc->pc - pc_start;
5988         tcg_gen_insn_start(dc->pc, dc->cc_op);
5989         num_insns++;
5990 
5991         if (unlikely(cpu_breakpoint_test(cs, dc->pc, BP_ANY))) {
5992             gen_exception(dc, dc->pc, EXCP_DEBUG);
5993             dc->is_jmp = DISAS_JUMP;
5994             /* The address covered by the breakpoint must be included in
5995                [tb->pc, tb->pc + tb->size) in order for it to be
5996                properly cleared -- thus we increment the PC here so that
5997                the logic setting tb->size below does the right thing.  */
5998             dc->pc += 2;
5999             break;
6000         }
6001 
6002         if (num_insns == max_insns && (tb_cflags(tb) & CF_LAST_IO)) {
6003             gen_io_start();
6004         }
6005 
6006         dc->insn_pc = dc->pc;
6007         disas_m68k_insn(env, dc);
6008     } while (!dc->is_jmp && !tcg_op_buf_full() &&
6009              !cs->singlestep_enabled &&
6010              !singlestep &&
6011              (pc_offset) < (TARGET_PAGE_SIZE - 32) &&
6012              num_insns < max_insns);
6013 
6014     if (tb_cflags(tb) & CF_LAST_IO)
6015         gen_io_end();
6016     if (unlikely(cs->singlestep_enabled)) {
6017         /* Make sure the pc is updated, and raise a debug exception.  */
6018         if (!dc->is_jmp) {
6019             update_cc_op(dc);
6020             tcg_gen_movi_i32(QREG_PC, dc->pc);
6021         }
6022         gen_helper_raise_exception(cpu_env, tcg_const_i32(EXCP_DEBUG));
6023     } else {
6024         switch(dc->is_jmp) {
6025         case DISAS_NEXT:
6026             update_cc_op(dc);
6027             gen_jmp_tb(dc, 0, dc->pc);
6028             break;
6029         default:
6030         case DISAS_JUMP:
6031         case DISAS_UPDATE:
6032             update_cc_op(dc);
6033             /* indicate that the hash table must be used to find the next TB */
6034             tcg_gen_exit_tb(0);
6035             break;
6036         case DISAS_TB_JUMP:
6037             /* nothing more to generate */
6038             break;
6039         }
6040     }
6041     gen_tb_end(tb, num_insns);
6042 
6043 #ifdef DEBUG_DISAS
6044     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
6045         && qemu_log_in_addr_range(pc_start)) {
6046         qemu_log_lock();
6047         qemu_log("----------------\n");
6048         qemu_log("IN: %s\n", lookup_symbol(pc_start));
6049         log_target_disas(cs, pc_start, dc->pc - pc_start);
6050         qemu_log("\n");
6051         qemu_log_unlock();
6052     }
6053 #endif
6054     tb->size = dc->pc - pc_start;
6055     tb->icount = num_insns;
6056 }
6057 
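     /* Convert an 80-bit FP register image to a host double for the state
        dump below; the conversion can lose precision.  */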
6058 static double floatx80_to_double(CPUM68KState *env, uint16_t high, uint64_t low)
6059 {
6060     floatx80 a = { .high = high, .low = low };
6061     union {
6062         float64 f64;
6063         double d;
6064     } u;
6065 
6066     u.f64 = floatx80_to_float64(a, &env->fp_status);
6067     return u.d;
6068 }
6069 
6070 void m68k_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
6071                          int flags)
6072 {
6073     M68kCPU *cpu = M68K_CPU(cs);
6074     CPUM68KState *env = &cpu->env;
6075     int i;
6076     uint16_t sr;
6077     for (i = 0; i < 8; i++) {
6078         cpu_fprintf(f, "D%d = %08x   A%d = %08x   "
6079                     "F%d = %04x %016"PRIx64"  (%12g)\n",
6080                     i, env->dregs[i], i, env->aregs[i],
6081                     i, env->fregs[i].l.upper, env->fregs[i].l.lower,
6082                     floatx80_to_double(env, env->fregs[i].l.upper,
6083                                        env->fregs[i].l.lower));
6084     }
6085     cpu_fprintf(f, "PC = %08x   ", env->pc);
6086     sr = env->sr | cpu_m68k_get_ccr(env);
6087     cpu_fprintf(f, "SR = %04x T:%x I:%x %c%c %c%c%c%c%c\n",
6088                 sr, (sr & SR_T) >> SR_T_SHIFT, (sr & SR_I) >> SR_I_SHIFT,
6089                 (sr & SR_S) ? 'S' : 'U', (sr & SR_M) ? '%' : 'I',
6090                 (sr & CCF_X) ? 'X' : '-', (sr & CCF_N) ? 'N' : '-',
6091                 (sr & CCF_Z) ? 'Z' : '-', (sr & CCF_V) ? 'V' : '-',
6092                 (sr & CCF_C) ? 'C' : '-');
6093     cpu_fprintf(f, "FPSR = %08x %c%c%c%c ", env->fpsr,
6094                 (env->fpsr & FPSR_CC_A) ? 'A' : '-',
6095                 (env->fpsr & FPSR_CC_I) ? 'I' : '-',
6096                 (env->fpsr & FPSR_CC_Z) ? 'Z' : '-',
6097                 (env->fpsr & FPSR_CC_N) ? 'N' : '-');
6098     cpu_fprintf(f, "\n                                "
6099                    "FPCR =     %04x ", env->fpcr);
6100     switch (env->fpcr & FPCR_PREC_MASK) {
6101     case FPCR_PREC_X:
6102         cpu_fprintf(f, "X ");
6103         break;
6104     case FPCR_PREC_S:
6105         cpu_fprintf(f, "S ");
6106         break;
6107     case FPCR_PREC_D:
6108         cpu_fprintf(f, "D ");
6109         break;
6110     }
6111     switch (env->fpcr & FPCR_RND_MASK) {
6112     case FPCR_RND_N:
6113         cpu_fprintf(f, "RN ");
6114         break;
6115     case FPCR_RND_Z:
6116         cpu_fprintf(f, "RZ ");
6117         break;
6118     case FPCR_RND_M:
6119         cpu_fprintf(f, "RM ");
6120         break;
6121     case FPCR_RND_P:
6122         cpu_fprintf(f, "RP ");
6123         break;
6124     }
6125     cpu_fprintf(f, "\n");
6126 #ifdef CONFIG_SOFTMMU
6127     cpu_fprintf(f, "%sA7(MSP) = %08x %sA7(USP) = %08x %sA7(ISP) = %08x\n",
6128                env->current_sp == M68K_SSP ? "->" : "  ", env->sp[M68K_SSP],
6129                env->current_sp == M68K_USP ? "->" : "  ", env->sp[M68K_USP],
6130                env->current_sp == M68K_ISP ? "->" : "  ", env->sp[M68K_ISP]);
6131     cpu_fprintf(f, "VBR = 0x%08x\n", env->vbr);
6132     cpu_fprintf(f, "SFC = %x DFC %x\n", env->sfc, env->dfc);
6133     cpu_fprintf(f, "SSW %08x TCR %08x URP %08x SRP %08x\n",
6134                 env->mmu.ssw, env->mmu.tcr, env->mmu.urp, env->mmu.srp);
6135     cpu_fprintf(f, "DTTR0/1: %08x/%08x ITTR0/1: %08x/%08x\n",
6136                 env->mmu.ttr[M68K_DTTR0], env->mmu.ttr[M68K_DTTR1],
6137                 env->mmu.ttr[M68K_ITTR0], env->mmu.ttr[M68K_ITTR1]);
6138     cpu_fprintf(f, "MMUSR %08x, fault at %08x\n",
6139                 env->mmu.mmusr, env->mmu.ar);
6140 #endif
6141 }
6142 
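     /* Recover the state recorded by tcg_gen_insn_start(): data[0] holds the
        PC and data[1] the cc_op in effect for that instruction.  */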
6143 void restore_state_to_opc(CPUM68KState *env, TranslationBlock *tb,
6144                           target_ulong *data)
6145 {
6146     int cc_op = data[1];
6147     env->pc = data[0];
6148     if (cc_op != CC_OP_DYNAMIC) {
6149         env->cc_op = cc_op;
6150     }
6151 }
6152