xref: /openbmc/qemu/target/m68k/translate.c (revision dc5bd18f)
1 /*
2  *  m68k translation
3  *
4  *  Copyright (c) 2005-2007 CodeSourcery
5  *  Written by Paul Brook
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "disas/disas.h"
24 #include "exec/exec-all.h"
25 #include "tcg-op.h"
26 #include "qemu/log.h"
27 #include "exec/cpu_ldst.h"
28 #include "exec/translator.h"
29 
30 #include "exec/helper-proto.h"
31 #include "exec/helper-gen.h"
32 
33 #include "trace-tcg.h"
34 #include "exec/log.h"
35 #include "fpu/softfloat.h"
36 
37 
38 //#define DEBUG_DISPATCH 1
39 
40 #define DEFO32(name, offset) static TCGv QREG_##name;
41 #define DEFO64(name, offset) static TCGv_i64 QREG_##name;
42 #include "qregs.def"
43 #undef DEFO32
44 #undef DEFO64
45 
46 static TCGv_i32 cpu_halted;
47 static TCGv_i32 cpu_exception_index;
48 
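/* Register name storage filled in by m68k_tcg_init(): "D0".."D7" and
   "A0".."A7" take 3 bytes each (including the NUL), "ACC0".."ACC3"
   take 5 bytes each, hence 2 * 8 * 3 + 5 * 4.  */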
49 static char cpu_reg_names[2 * 8 * 3 + 5 * 4];
50 static TCGv cpu_dregs[8];
51 static TCGv cpu_aregs[8];
52 static TCGv_i64 cpu_macc[4];
53 
54 #define REG(insn, pos)  (((insn) >> (pos)) & 7)
55 #define DREG(insn, pos) cpu_dregs[REG(insn, pos)]
56 #define AREG(insn, pos) get_areg(s, REG(insn, pos))
57 #define MACREG(acc)     cpu_macc[acc]
58 #define QREG_SP         get_areg(s, 7)
59 
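/* Dummy value returned by gen_lea_mode()/gen_ea_mode() when an addressing
   mode does not yield a valid effective address; tested with IS_NULL_QREG.  */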
60 static TCGv NULL_QREG;
61 #define IS_NULL_QREG(t) (t == NULL_QREG)
62 /* Used to distinguish stores from bad addressing modes.  */
63 static TCGv store_dummy;
64 
65 #include "exec/gen-icount.h"
66 
67 void m68k_tcg_init(void)
68 {
69     char *p;
70     int i;
71 
72 #define DEFO32(name, offset) \
73     QREG_##name = tcg_global_mem_new_i32(cpu_env, \
74         offsetof(CPUM68KState, offset), #name);
75 #define DEFO64(name, offset) \
76     QREG_##name = tcg_global_mem_new_i64(cpu_env, \
77         offsetof(CPUM68KState, offset), #name);
78 #include "qregs.def"
79 #undef DEFO32
80 #undef DEFO64
81 
82     cpu_halted = tcg_global_mem_new_i32(cpu_env,
83                                         -offsetof(M68kCPU, env) +
84                                         offsetof(CPUState, halted), "HALTED");
85     cpu_exception_index = tcg_global_mem_new_i32(cpu_env,
86                                                  -offsetof(M68kCPU, env) +
87                                                  offsetof(CPUState, exception_index),
88                                                  "EXCEPTION");
89 
90     p = cpu_reg_names;
91     for (i = 0; i < 8; i++) {
92         sprintf(p, "D%d", i);
93         cpu_dregs[i] = tcg_global_mem_new(cpu_env,
94                                           offsetof(CPUM68KState, dregs[i]), p);
95         p += 3;
96         sprintf(p, "A%d", i);
97         cpu_aregs[i] = tcg_global_mem_new(cpu_env,
98                                           offsetof(CPUM68KState, aregs[i]), p);
99         p += 3;
100     }
101     for (i = 0; i < 4; i++) {
102         sprintf(p, "ACC%d", i);
103         cpu_macc[i] = tcg_global_mem_new_i64(cpu_env,
104                                          offsetof(CPUM68KState, macc[i]), p);
105         p += 5;
106     }
107 
108     NULL_QREG = tcg_global_mem_new(cpu_env, -4, "NULL");
109     store_dummy = tcg_global_mem_new(cpu_env, -8, "NULL");
110 }
111 
112 /* internal defines */
113 typedef struct DisasContext {
114     CPUM68KState *env;
115     target_ulong insn_pc; /* Start of the current instruction.  */
116     target_ulong pc;
117     int is_jmp;
118     CCOp cc_op; /* Current CC operation */
119     int cc_op_synced;
120     struct TranslationBlock *tb;
121     int singlestep_enabled;
122     TCGv_i64 mactmp;
123     int done_mac;
124     int writeback_mask;
125     TCGv writeback[8];
126 } DisasContext;
127 
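/* Address register writeback: while an instruction is translated, updates
   to An coming from autoincrement/autodecrement modes are accumulated in
   s->writeback[] (flagged in writeback_mask) and only committed to the
   architectural registers by do_writebacks() at the end of the instruction,
   so a faulting access leaves the address registers unchanged.  get_areg()
   returns the pending value if one exists.  */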
128 static TCGv get_areg(DisasContext *s, unsigned regno)
129 {
130     if (s->writeback_mask & (1 << regno)) {
131         return s->writeback[regno];
132     } else {
133         return cpu_aregs[regno];
134     }
135 }
136 
137 static void delay_set_areg(DisasContext *s, unsigned regno,
138                            TCGv val, bool give_temp)
139 {
140     if (s->writeback_mask & (1 << regno)) {
141         if (give_temp) {
142             tcg_temp_free(s->writeback[regno]);
143             s->writeback[regno] = val;
144         } else {
145             tcg_gen_mov_i32(s->writeback[regno], val);
146         }
147     } else {
148         s->writeback_mask |= 1 << regno;
149         if (give_temp) {
150             s->writeback[regno] = val;
151         } else {
152             TCGv tmp = tcg_temp_new();
153             s->writeback[regno] = tmp;
154             tcg_gen_mov_i32(tmp, val);
155         }
156     }
157 }
158 
159 static void do_writebacks(DisasContext *s)
160 {
161     unsigned mask = s->writeback_mask;
162     if (mask) {
163         s->writeback_mask = 0;
164         do {
165             unsigned regno = ctz32(mask);
166             tcg_gen_mov_i32(cpu_aregs[regno], s->writeback[regno]);
167             tcg_temp_free(s->writeback[regno]);
168             mask &= mask - 1;
169         } while (mask);
170     }
171 }
172 
173 /* is_jmp field values */
174 #define DISAS_JUMP      DISAS_TARGET_0 /* only pc was modified dynamically */
175 #define DISAS_UPDATE    DISAS_TARGET_1 /* cpu state was modified dynamically */
176 #define DISAS_TB_JUMP   DISAS_TARGET_2 /* only pc was modified statically */
177 #define DISAS_JUMP_NEXT DISAS_TARGET_3
178 
179 #if defined(CONFIG_USER_ONLY)
180 #define IS_USER(s) 1
181 #else
182 #define IS_USER(s)   (!(s->tb->flags & TB_FLAGS_MSR_S))
183 #define SFC_INDEX(s) ((s->tb->flags & TB_FLAGS_SFC_S) ? \
184                       MMU_KERNEL_IDX : MMU_USER_IDX)
185 #define DFC_INDEX(s) ((s->tb->flags & TB_FLAGS_DFC_S) ? \
186                       MMU_KERNEL_IDX : MMU_USER_IDX)
187 #endif
188 
189 typedef void (*disas_proc)(CPUM68KState *env, DisasContext *s, uint16_t insn);
190 
191 #ifdef DEBUG_DISPATCH
192 #define DISAS_INSN(name)                                                \
193     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
194                                   uint16_t insn);                       \
195     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
196                              uint16_t insn)                             \
197     {                                                                   \
198         qemu_log("Dispatch " #name "\n");                               \
199         real_disas_##name(env, s, insn);                                \
200     }                                                                   \
201     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
202                                   uint16_t insn)
203 #else
204 #define DISAS_INSN(name)                                                \
205     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
206                              uint16_t insn)
207 #endif
208 
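/* Lazy condition code evaluation: for each CC_OP_* this table lists which
   flag bits still hold live data.  set_cc_op() uses it to discard TCG
   values that the new op will recompute, and gen_flush_flags() converts
   any op back to CC_OP_FLAGS with all flags materialised.  */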
209 static const uint8_t cc_op_live[CC_OP_NB] = {
210     [CC_OP_DYNAMIC] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
211     [CC_OP_FLAGS] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
212     [CC_OP_ADDB ... CC_OP_ADDL] = CCF_X | CCF_N | CCF_V,
213     [CC_OP_SUBB ... CC_OP_SUBL] = CCF_X | CCF_N | CCF_V,
214     [CC_OP_CMPB ... CC_OP_CMPL] = CCF_X | CCF_N | CCF_V,
215     [CC_OP_LOGIC] = CCF_X | CCF_N
216 };
217 
218 static void set_cc_op(DisasContext *s, CCOp op)
219 {
220     CCOp old_op = s->cc_op;
221     int dead;
222 
223     if (old_op == op) {
224         return;
225     }
226     s->cc_op = op;
227     s->cc_op_synced = 0;
228 
229     /* Discard CC computation that will no longer be used.
230        Note that X and N are never dead.  */
231     dead = cc_op_live[old_op] & ~cc_op_live[op];
232     if (dead & CCF_C) {
233         tcg_gen_discard_i32(QREG_CC_C);
234     }
235     if (dead & CCF_Z) {
236         tcg_gen_discard_i32(QREG_CC_Z);
237     }
238     if (dead & CCF_V) {
239         tcg_gen_discard_i32(QREG_CC_V);
240     }
241 }
242 
243 /* Update the CPU env CC_OP state.  */
244 static void update_cc_op(DisasContext *s)
245 {
246     if (!s->cc_op_synced) {
247         s->cc_op_synced = 1;
248         tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
249     }
250 }
251 
252 /* Generate a jump to an immediate address.  */
253 static void gen_jmp_im(DisasContext *s, uint32_t dest)
254 {
255     update_cc_op(s);
256     tcg_gen_movi_i32(QREG_PC, dest);
257     s->is_jmp = DISAS_JUMP;
258 }
259 
260 /* Generate a jump to the address in qreg DEST.  */
261 static void gen_jmp(DisasContext *s, TCGv dest)
262 {
263     update_cc_op(s);
264     tcg_gen_mov_i32(QREG_PC, dest);
265     s->is_jmp = DISAS_JUMP;
266 }
267 
268 static void gen_raise_exception(int nr)
269 {
270     TCGv_i32 tmp = tcg_const_i32(nr);
271 
272     gen_helper_raise_exception(cpu_env, tmp);
273     tcg_temp_free_i32(tmp);
274 }
275 
276 static void gen_exception(DisasContext *s, uint32_t where, int nr)
277 {
278     gen_jmp_im(s, where);
279     gen_raise_exception(nr);
280 }
281 
282 static inline void gen_addr_fault(DisasContext *s)
283 {
284     gen_exception(s, s->insn_pc, EXCP_ADDRESS);
285 }
286 
287 /* Generate a load from the specified address.  Narrow values are
288    sign- or zero-extended to full register width according to SIGN.  */
289 static inline TCGv gen_load(DisasContext *s, int opsize, TCGv addr,
290                             int sign, int index)
291 {
292     TCGv tmp;
293     tmp = tcg_temp_new_i32();
294     switch(opsize) {
295     case OS_BYTE:
296         if (sign)
297             tcg_gen_qemu_ld8s(tmp, addr, index);
298         else
299             tcg_gen_qemu_ld8u(tmp, addr, index);
300         break;
301     case OS_WORD:
302         if (sign)
303             tcg_gen_qemu_ld16s(tmp, addr, index);
304         else
305             tcg_gen_qemu_ld16u(tmp, addr, index);
306         break;
307     case OS_LONG:
308         tcg_gen_qemu_ld32u(tmp, addr, index);
309         break;
310     default:
311         g_assert_not_reached();
312     }
313     return tmp;
314 }
315 
316 /* Generate a store.  */
317 static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val,
318                              int index)
319 {
320     switch(opsize) {
321     case OS_BYTE:
322         tcg_gen_qemu_st8(val, addr, index);
323         break;
324     case OS_WORD:
325         tcg_gen_qemu_st16(val, addr, index);
326         break;
327     case OS_LONG:
328         tcg_gen_qemu_st32(val, addr, index);
329         break;
330     default:
331         g_assert_not_reached();
332     }
333 }
334 
335 typedef enum {
336     EA_STORE,
337     EA_LOADU,
338     EA_LOADS
339 } ea_what;
340 
341 /* Generate a load (sign-extended for EA_LOADS, zero-extended for EA_LOADU)
342    or, for EA_STORE, a store of VAL.  */
343 static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
344                      ea_what what, int index)
345 {
346     if (what == EA_STORE) {
347         gen_store(s, opsize, addr, val, index);
348         return store_dummy;
349     } else {
350         return gen_load(s, opsize, addr, what == EA_LOADS, index);
351     }
352 }
353 
354 /* Read a 16-bit immediate constant */
355 static inline uint16_t read_im16(CPUM68KState *env, DisasContext *s)
356 {
357     uint16_t im;
358     im = cpu_lduw_code(env, s->pc);
359     s->pc += 2;
360     return im;
361 }
362 
363 /* Read an 8-bit immediate constant (the low byte of a 16-bit extension word) */
364 static inline uint8_t read_im8(CPUM68KState *env, DisasContext *s)
365 {
366     return read_im16(env, s);
367 }
368 
369 /* Read a 32-bit immediate constant.  */
370 static inline uint32_t read_im32(CPUM68KState *env, DisasContext *s)
371 {
372     uint32_t im;
373     im = read_im16(env, s) << 16;
374     im |= 0xffff & read_im16(env, s);
375     return im;
376 }
377 
378 /* Read a 64-bit immediate constant.  */
379 static inline uint64_t read_im64(CPUM68KState *env, DisasContext *s)
380 {
381     uint64_t im;
382     im = (uint64_t)read_im32(env, s) << 32;
383     im |= (uint64_t)read_im32(env, s);
384     return im;
385 }
386 
387 /* Calculate an address index.  */
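/* The index part of a (brief or full) extension word is decoded here:
   bit 15 selects An vs Dn, bits 14-12 give the register number, bit 11
   the index size (word, sign-extended, when clear; long when set) and
   bits 10-9 the scale.  */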
388 static TCGv gen_addr_index(DisasContext *s, uint16_t ext, TCGv tmp)
389 {
390     TCGv add;
391     int scale;
392 
393     add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
394     if ((ext & 0x800) == 0) {
395         tcg_gen_ext16s_i32(tmp, add);
396         add = tmp;
397     }
398     scale = (ext >> 9) & 3;
399     if (scale != 0) {
400         tcg_gen_shli_i32(tmp, add, scale);
401         add = tmp;
402     }
403     return add;
404 }
405 
406 /* Handle a base + index + displacement effective address.
407    A NULL_QREG base means pc-relative.  */
408 static TCGv gen_lea_indexed(CPUM68KState *env, DisasContext *s, TCGv base)
409 {
410     uint32_t offset;
411     uint16_t ext;
412     TCGv add;
413     TCGv tmp;
414     uint32_t bd, od;
415 
416     offset = s->pc;
417     ext = read_im16(env, s);
418 
419     if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
420         return NULL_QREG;
421 
422     if (m68k_feature(s->env, M68K_FEATURE_M68000) &&
423         !m68k_feature(s->env, M68K_FEATURE_SCALED_INDEX)) {
424         ext &= ~(3 << 9);
425     }
426 
427     if (ext & 0x100) {
428         /* full extension word format */
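        /* Full (68020+) format: bit 7 suppresses the base register,
           bit 6 suppresses the index, bits 5-4 give the base displacement
           size and bits 2-0 select memory indirection and the outer
           displacement size, as decoded below.  */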
429         if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
430             return NULL_QREG;
431 
432         if ((ext & 0x30) > 0x10) {
433             /* base displacement */
434             if ((ext & 0x30) == 0x20) {
435                 bd = (int16_t)read_im16(env, s);
436             } else {
437                 bd = read_im32(env, s);
438             }
439         } else {
440             bd = 0;
441         }
442         tmp = tcg_temp_new();
443         if ((ext & 0x44) == 0) {
444             /* pre-index */
445             add = gen_addr_index(s, ext, tmp);
446         } else {
447             add = NULL_QREG;
448         }
449         if ((ext & 0x80) == 0) {
450             /* base not suppressed */
451             if (IS_NULL_QREG(base)) {
452                 base = tcg_const_i32(offset + bd);
453                 bd = 0;
454             }
455             if (!IS_NULL_QREG(add)) {
456                 tcg_gen_add_i32(tmp, add, base);
457                 add = tmp;
458             } else {
459                 add = base;
460             }
461         }
462         if (!IS_NULL_QREG(add)) {
463             if (bd != 0) {
464                 tcg_gen_addi_i32(tmp, add, bd);
465                 add = tmp;
466             }
467         } else {
468             add = tcg_const_i32(bd);
469         }
470         if ((ext & 3) != 0) {
471             /* memory indirect */
472             base = gen_load(s, OS_LONG, add, 0, IS_USER(s));
473             if ((ext & 0x44) == 4) {
474                 add = gen_addr_index(s, ext, tmp);
475                 tcg_gen_add_i32(tmp, add, base);
476                 add = tmp;
477             } else {
478                 add = base;
479             }
480             if ((ext & 3) > 1) {
481                 /* outer displacement */
482                 if ((ext & 3) == 2) {
483                     od = (int16_t)read_im16(env, s);
484                 } else {
485                     od = read_im32(env, s);
486                 }
487             } else {
488                 od = 0;
489             }
490             if (od != 0) {
491                 tcg_gen_addi_i32(tmp, add, od);
492                 add = tmp;
493             }
494         }
495     } else {
496         /* brief extension word format */
497         tmp = tcg_temp_new();
498         add = gen_addr_index(s, ext, tmp);
499         if (!IS_NULL_QREG(base)) {
500             tcg_gen_add_i32(tmp, add, base);
501             if ((int8_t)ext)
502                 tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
503         } else {
504             tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
505         }
506         add = tmp;
507     }
508     return add;
509 }
510 
511 /* Sign or zero extend a value.  */
512 
513 static inline void gen_ext(TCGv res, TCGv val, int opsize, int sign)
514 {
515     switch (opsize) {
516     case OS_BYTE:
517         if (sign) {
518             tcg_gen_ext8s_i32(res, val);
519         } else {
520             tcg_gen_ext8u_i32(res, val);
521         }
522         break;
523     case OS_WORD:
524         if (sign) {
525             tcg_gen_ext16s_i32(res, val);
526         } else {
527             tcg_gen_ext16u_i32(res, val);
528         }
529         break;
530     case OS_LONG:
531         tcg_gen_mov_i32(res, val);
532         break;
533     default:
534         g_assert_not_reached();
535     }
536 }
537 
538 /* Evaluate all the CC flags.  */
539 
540 static void gen_flush_flags(DisasContext *s)
541 {
542     TCGv t0, t1;
543 
544     switch (s->cc_op) {
545     case CC_OP_FLAGS:
546         return;
547 
548     case CC_OP_ADDB:
549     case CC_OP_ADDW:
550     case CC_OP_ADDL:
551         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
552         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
553         /* Compute signed overflow for addition.  */
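        /* Here CC_N holds the (truncated) result and CC_V still holds the
           source operand, so t0 = N - V recovers the destination operand.
           Overflow is then (result ^ src) & ~(src ^ dest): the operands had
           the same sign but the result's sign differs.  */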
554         t0 = tcg_temp_new();
555         t1 = tcg_temp_new();
556         tcg_gen_sub_i32(t0, QREG_CC_N, QREG_CC_V);
557         gen_ext(t0, t0, s->cc_op - CC_OP_ADDB, 1);
558         tcg_gen_xor_i32(t1, QREG_CC_N, QREG_CC_V);
559         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
560         tcg_temp_free(t0);
561         tcg_gen_andc_i32(QREG_CC_V, t1, QREG_CC_V);
562         tcg_temp_free(t1);
563         break;
564 
565     case CC_OP_SUBB:
566     case CC_OP_SUBW:
567     case CC_OP_SUBL:
568         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
569         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
570         /* Compute signed overflow for subtraction.  */
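        /* Here CC_N holds the result of dest - src and CC_V holds src, so
           t0 = N + V recovers the destination operand.  Overflow is
           (dest ^ src) & (dest ^ result): the operands differed in sign and
           the result took the sign of src.  */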
571         t0 = tcg_temp_new();
572         t1 = tcg_temp_new();
573         tcg_gen_add_i32(t0, QREG_CC_N, QREG_CC_V);
574         gen_ext(t0, t0, s->cc_op - CC_OP_SUBB, 1);
575         tcg_gen_xor_i32(t1, QREG_CC_N, t0);
576         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
577         tcg_temp_free(t0);
578         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t1);
579         tcg_temp_free(t1);
580         break;
581 
582     case CC_OP_CMPB:
583     case CC_OP_CMPW:
584     case CC_OP_CMPL:
585         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_C, QREG_CC_N, QREG_CC_V);
586         tcg_gen_sub_i32(QREG_CC_Z, QREG_CC_N, QREG_CC_V);
587         gen_ext(QREG_CC_Z, QREG_CC_Z, s->cc_op - CC_OP_CMPB, 1);
588         /* Compute signed overflow for subtraction.  */
589         t0 = tcg_temp_new();
590         tcg_gen_xor_i32(t0, QREG_CC_Z, QREG_CC_N);
591         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, QREG_CC_N);
592         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t0);
593         tcg_temp_free(t0);
594         tcg_gen_mov_i32(QREG_CC_N, QREG_CC_Z);
595         break;
596 
597     case CC_OP_LOGIC:
598         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
599         tcg_gen_movi_i32(QREG_CC_C, 0);
600         tcg_gen_movi_i32(QREG_CC_V, 0);
601         break;
602 
603     case CC_OP_DYNAMIC:
604         gen_helper_flush_flags(cpu_env, QREG_CC_OP);
605         s->cc_op_synced = 1;
606         break;
607 
608     default:
609         t0 = tcg_const_i32(s->cc_op);
610         gen_helper_flush_flags(cpu_env, t0);
611         tcg_temp_free(t0);
612         s->cc_op_synced = 1;
613         break;
614     }
615 
616     /* Note that flush_flags also assigns to env->cc_op.  */
617     s->cc_op = CC_OP_FLAGS;
618 }
619 
620 static inline TCGv gen_extend(TCGv val, int opsize, int sign)
621 {
622     TCGv tmp;
623 
624     if (opsize == OS_LONG) {
625         tmp = val;
626     } else {
627         tmp = tcg_temp_new();
628         gen_ext(tmp, val, opsize, sign);
629     }
630 
631     return tmp;
632 }
633 
634 static void gen_logic_cc(DisasContext *s, TCGv val, int opsize)
635 {
636     gen_ext(QREG_CC_N, val, opsize, 1);
637     set_cc_op(s, CC_OP_LOGIC);
638 }
639 
640 static void gen_update_cc_cmp(DisasContext *s, TCGv dest, TCGv src, int opsize)
641 {
642     tcg_gen_mov_i32(QREG_CC_N, dest);
643     tcg_gen_mov_i32(QREG_CC_V, src);
644     set_cc_op(s, CC_OP_CMPB + opsize);
645 }
646 
647 static void gen_update_cc_add(TCGv dest, TCGv src, int opsize)
648 {
649     gen_ext(QREG_CC_N, dest, opsize, 1);
650     tcg_gen_mov_i32(QREG_CC_V, src);
651 }
652 
653 static inline int opsize_bytes(int opsize)
654 {
655     switch (opsize) {
656     case OS_BYTE: return 1;
657     case OS_WORD: return 2;
658     case OS_LONG: return 4;
659     case OS_SINGLE: return 4;
660     case OS_DOUBLE: return 8;
661     case OS_EXTENDED: return 12;
662     case OS_PACKED: return 12;
663     default:
664         g_assert_not_reached();
665     }
666 }
667 
668 static inline int insn_opsize(int insn)
669 {
670     switch ((insn >> 6) & 3) {
671     case 0: return OS_BYTE;
672     case 1: return OS_WORD;
673     case 2: return OS_LONG;
674     default:
675         g_assert_not_reached();
676     }
677 }
678 
679 static inline int ext_opsize(int ext, int pos)
680 {
681     switch ((ext >> pos) & 7) {
682     case 0: return OS_LONG;
683     case 1: return OS_SINGLE;
684     case 2: return OS_EXTENDED;
685     case 3: return OS_PACKED;
686     case 4: return OS_WORD;
687     case 5: return OS_DOUBLE;
688     case 6: return OS_BYTE;
689     default:
690         g_assert_not_reached();
691     }
692 }
693 
694 /* Assign value to a register.  If the width is less than the register width
695    only the low part of the register is set.  */
696 static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
697 {
698     TCGv tmp;
699     switch (opsize) {
700     case OS_BYTE:
701         tcg_gen_andi_i32(reg, reg, 0xffffff00);
702         tmp = tcg_temp_new();
703         tcg_gen_ext8u_i32(tmp, val);
704         tcg_gen_or_i32(reg, reg, tmp);
705         tcg_temp_free(tmp);
706         break;
707     case OS_WORD:
708         tcg_gen_andi_i32(reg, reg, 0xffff0000);
709         tmp = tcg_temp_new();
710         tcg_gen_ext16u_i32(tmp, val);
711         tcg_gen_or_i32(reg, reg, tmp);
712         tcg_temp_free(tmp);
713         break;
714     case OS_LONG:
715     case OS_SINGLE:
716         tcg_gen_mov_i32(reg, val);
717         break;
718     default:
719         g_assert_not_reached();
720     }
721 }
722 
723 /* Generate code for an "effective address".  Does not adjust the base
724    register for autoincrement addressing modes.  */
725 static TCGv gen_lea_mode(CPUM68KState *env, DisasContext *s,
726                          int mode, int reg0, int opsize)
727 {
728     TCGv reg;
729     TCGv tmp;
730     uint16_t ext;
731     uint32_t offset;
732 
733     switch (mode) {
734     case 0: /* Data register direct.  */
735     case 1: /* Address register direct.  */
736         return NULL_QREG;
737     case 3: /* Indirect postincrement.  */
738         if (opsize == OS_UNSIZED) {
739             return NULL_QREG;
740         }
741         /* fallthru */
742     case 2: /* Indirect register */
743         return get_areg(s, reg0);
744     case 4: /* Indirect predecrement.  */
745         if (opsize == OS_UNSIZED) {
746             return NULL_QREG;
747         }
748         reg = get_areg(s, reg0);
749         tmp = tcg_temp_new();
750         if (reg0 == 7 && opsize == OS_BYTE &&
751             m68k_feature(s->env, M68K_FEATURE_M68000)) {
752             tcg_gen_subi_i32(tmp, reg, 2);
753         } else {
754             tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
755         }
756         return tmp;
757     case 5: /* Indirect displacement.  */
758         reg = get_areg(s, reg0);
759         tmp = tcg_temp_new();
760         ext = read_im16(env, s);
761         tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
762         return tmp;
763     case 6: /* Indirect index + displacement.  */
764         reg = get_areg(s, reg0);
765         return gen_lea_indexed(env, s, reg);
766     case 7: /* Other */
767         switch (reg0) {
768         case 0: /* Absolute short.  */
769             offset = (int16_t)read_im16(env, s);
770             return tcg_const_i32(offset);
771         case 1: /* Absolute long.  */
772             offset = read_im32(env, s);
773             return tcg_const_i32(offset);
774         case 2: /* pc displacement  */
775             offset = s->pc;
776             offset += (int16_t)read_im16(env, s);
777             return tcg_const_i32(offset);
778         case 3: /* pc index+displacement.  */
779             return gen_lea_indexed(env, s, NULL_QREG);
780         case 4: /* Immediate.  */
781         default:
782             return NULL_QREG;
783         }
784     }
785     /* Should never happen.  */
786     return NULL_QREG;
787 }
788 
789 static TCGv gen_lea(CPUM68KState *env, DisasContext *s, uint16_t insn,
790                     int opsize)
791 {
792     int mode = extract32(insn, 3, 3);
793     int reg0 = REG(insn, 0);
794     return gen_lea_mode(env, s, mode, reg0, opsize);
795 }
796 
797 /* Generate code to load/store a value from/into an EA.  WHAT selects
798    EA_STORE (write VAL), EA_LOADS (read, sign extend) or EA_LOADU (read,
799    zero extend).  ADDRP is non-null for readwrite operands.  */
800 static TCGv gen_ea_mode(CPUM68KState *env, DisasContext *s, int mode, int reg0,
801                         int opsize, TCGv val, TCGv *addrp, ea_what what,
802                         int index)
803 {
804     TCGv reg, tmp, result;
805     int32_t offset;
806 
807     switch (mode) {
808     case 0: /* Data register direct.  */
809         reg = cpu_dregs[reg0];
810         if (what == EA_STORE) {
811             gen_partset_reg(opsize, reg, val);
812             return store_dummy;
813         } else {
814             return gen_extend(reg, opsize, what == EA_LOADS);
815         }
816     case 1: /* Address register direct.  */
817         reg = get_areg(s, reg0);
818         if (what == EA_STORE) {
819             tcg_gen_mov_i32(reg, val);
820             return store_dummy;
821         } else {
822             return gen_extend(reg, opsize, what == EA_LOADS);
823         }
824     case 2: /* Indirect register */
825         reg = get_areg(s, reg0);
826         return gen_ldst(s, opsize, reg, val, what, index);
827     case 3: /* Indirect postincrement.  */
828         reg = get_areg(s, reg0);
829         result = gen_ldst(s, opsize, reg, val, what, index);
830         if (what == EA_STORE || !addrp) {
831             TCGv tmp = tcg_temp_new();
832             if (reg0 == 7 && opsize == OS_BYTE &&
833                 m68k_feature(s->env, M68K_FEATURE_M68000)) {
834                 tcg_gen_addi_i32(tmp, reg, 2);
835             } else {
836                 tcg_gen_addi_i32(tmp, reg, opsize_bytes(opsize));
837             }
838             delay_set_areg(s, reg0, tmp, true);
839         }
840         return result;
841     case 4: /* Indirect predecrement.  */
842         if (addrp && what == EA_STORE) {
843             tmp = *addrp;
844         } else {
845             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
846             if (IS_NULL_QREG(tmp)) {
847                 return tmp;
848             }
849             if (addrp) {
850                 *addrp = tmp;
851             }
852         }
853         result = gen_ldst(s, opsize, tmp, val, what, index);
854         if (what == EA_STORE || !addrp) {
855             delay_set_areg(s, reg0, tmp, false);
856         }
857         return result;
858     case 5: /* Indirect displacement.  */
859     case 6: /* Indirect index + displacement.  */
860     do_indirect:
861         if (addrp && what == EA_STORE) {
862             tmp = *addrp;
863         } else {
864             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
865             if (IS_NULL_QREG(tmp)) {
866                 return tmp;
867             }
868             if (addrp) {
869                 *addrp = tmp;
870             }
871         }
872         return gen_ldst(s, opsize, tmp, val, what, index);
873     case 7: /* Other */
874         switch (reg0) {
875         case 0: /* Absolute short.  */
876         case 1: /* Absolute long.  */
877         case 2: /* pc displacement  */
878         case 3: /* pc index+displacement.  */
879             goto do_indirect;
880         case 4: /* Immediate.  */
881             /* Sign extend values for consistency.  */
882             switch (opsize) {
883             case OS_BYTE:
884                 if (what == EA_LOADS) {
885                     offset = (int8_t)read_im8(env, s);
886                 } else {
887                     offset = read_im8(env, s);
888                 }
889                 break;
890             case OS_WORD:
891                 if (what == EA_LOADS) {
892                     offset = (int16_t)read_im16(env, s);
893                 } else {
894                     offset = read_im16(env, s);
895                 }
896                 break;
897             case OS_LONG:
898                 offset = read_im32(env, s);
899                 break;
900             default:
901                 g_assert_not_reached();
902             }
903             return tcg_const_i32(offset);
904         default:
905             return NULL_QREG;
906         }
907     }
908     /* Should never happen.  */
909     return NULL_QREG;
910 }
911 
912 static TCGv gen_ea(CPUM68KState *env, DisasContext *s, uint16_t insn,
913                    int opsize, TCGv val, TCGv *addrp, ea_what what, int index)
914 {
915     int mode = extract32(insn, 3, 3);
916     int reg0 = REG(insn, 0);
917     return gen_ea_mode(env, s, mode, reg0, opsize, val, addrp, what, index);
918 }
919 
920 static TCGv_ptr gen_fp_ptr(int freg)
921 {
922     TCGv_ptr fp = tcg_temp_new_ptr();
923     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fregs[freg]));
924     return fp;
925 }
926 
927 static TCGv_ptr gen_fp_result_ptr(void)
928 {
929     TCGv_ptr fp = tcg_temp_new_ptr();
930     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fp_result));
931     return fp;
932 }
933 
934 static void gen_fp_move(TCGv_ptr dest, TCGv_ptr src)
935 {
936     TCGv t32;
937     TCGv_i64 t64;
938 
939     t32 = tcg_temp_new();
940     tcg_gen_ld16u_i32(t32, src, offsetof(FPReg, l.upper));
941     tcg_gen_st16_i32(t32, dest, offsetof(FPReg, l.upper));
942     tcg_temp_free(t32);
943 
944     t64 = tcg_temp_new_i64();
945     tcg_gen_ld_i64(t64, src, offsetof(FPReg, l.lower));
946     tcg_gen_st_i64(t64, dest, offsetof(FPReg, l.lower));
947     tcg_temp_free_i64(t64);
948 }
949 
950 static void gen_load_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
951                         int index)
952 {
953     TCGv tmp;
954     TCGv_i64 t64;
955 
956     t64 = tcg_temp_new_i64();
957     tmp = tcg_temp_new();
958     switch (opsize) {
959     case OS_BYTE:
960         tcg_gen_qemu_ld8s(tmp, addr, index);
961         gen_helper_exts32(cpu_env, fp, tmp);
962         break;
963     case OS_WORD:
964         tcg_gen_qemu_ld16s(tmp, addr, index);
965         gen_helper_exts32(cpu_env, fp, tmp);
966         break;
967     case OS_LONG:
968         tcg_gen_qemu_ld32u(tmp, addr, index);
969         gen_helper_exts32(cpu_env, fp, tmp);
970         break;
971     case OS_SINGLE:
972         tcg_gen_qemu_ld32u(tmp, addr, index);
973         gen_helper_extf32(cpu_env, fp, tmp);
974         break;
975     case OS_DOUBLE:
976         tcg_gen_qemu_ld64(t64, addr, index);
977         gen_helper_extf64(cpu_env, fp, t64);
978         break;
979     case OS_EXTENDED:
980         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
981             gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
982             break;
983         }
984         tcg_gen_qemu_ld32u(tmp, addr, index);
985         tcg_gen_shri_i32(tmp, tmp, 16);
986         tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
987         tcg_gen_addi_i32(tmp, addr, 4);
988         tcg_gen_qemu_ld64(t64, tmp, index);
989         tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
990         break;
991     case OS_PACKED:
992         /* unimplemented data type on 68040/ColdFire
993          * FIXME if needed for another FPU
994          */
995         gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
996         break;
997     default:
998         g_assert_not_reached();
999     }
1000     tcg_temp_free(tmp);
1001     tcg_temp_free_i64(t64);
1002 }
1003 
1004 static void gen_store_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
1005                          int index)
1006 {
1007     TCGv tmp;
1008     TCGv_i64 t64;
1009 
1010     t64 = tcg_temp_new_i64();
1011     tmp = tcg_temp_new();
1012     switch (opsize) {
1013     case OS_BYTE:
1014         gen_helper_reds32(tmp, cpu_env, fp);
1015         tcg_gen_qemu_st8(tmp, addr, index);
1016         break;
1017     case OS_WORD:
1018         gen_helper_reds32(tmp, cpu_env, fp);
1019         tcg_gen_qemu_st16(tmp, addr, index);
1020         break;
1021     case OS_LONG:
1022         gen_helper_reds32(tmp, cpu_env, fp);
1023         tcg_gen_qemu_st32(tmp, addr, index);
1024         break;
1025     case OS_SINGLE:
1026         gen_helper_redf32(tmp, cpu_env, fp);
1027         tcg_gen_qemu_st32(tmp, addr, index);
1028         break;
1029     case OS_DOUBLE:
1030         gen_helper_redf64(t64, cpu_env, fp);
1031         tcg_gen_qemu_st64(t64, addr, index);
1032         break;
1033     case OS_EXTENDED:
1034         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1035             gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1036             break;
1037         }
1038         tcg_gen_ld16u_i32(tmp, fp, offsetof(FPReg, l.upper));
1039         tcg_gen_shli_i32(tmp, tmp, 16);
1040         tcg_gen_qemu_st32(tmp, addr, index);
1041         tcg_gen_addi_i32(tmp, addr, 4);
1042         tcg_gen_ld_i64(t64, fp, offsetof(FPReg, l.lower));
1043         tcg_gen_qemu_st64(t64, tmp, index);
1044         break;
1045     case OS_PACKED:
1046         /* unimplemented data type on 68040/ColdFire
1047          * FIXME if needed for another FPU
1048          */
1049         gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1050         break;
1051     default:
1052         g_assert_not_reached();
1053     }
1054     tcg_temp_free(tmp);
1055     tcg_temp_free_i64(t64);
1056 }
1057 
1058 static void gen_ldst_fp(DisasContext *s, int opsize, TCGv addr,
1059                         TCGv_ptr fp, ea_what what, int index)
1060 {
1061     if (what == EA_STORE) {
1062         gen_store_fp(s, opsize, addr, fp, index);
1063     } else {
1064         gen_load_fp(s, opsize, addr, fp, index);
1065     }
1066 }
1067 
1068 static int gen_ea_mode_fp(CPUM68KState *env, DisasContext *s, int mode,
1069                           int reg0, int opsize, TCGv_ptr fp, ea_what what,
1070                           int index)
1071 {
1072     TCGv reg, addr, tmp;
1073     TCGv_i64 t64;
1074 
1075     switch (mode) {
1076     case 0: /* Data register direct.  */
1077         reg = cpu_dregs[reg0];
1078         if (what == EA_STORE) {
1079             switch (opsize) {
1080             case OS_BYTE:
1081             case OS_WORD:
1082             case OS_LONG:
1083                 gen_helper_reds32(reg, cpu_env, fp);
1084                 break;
1085             case OS_SINGLE:
1086                 gen_helper_redf32(reg, cpu_env, fp);
1087                 break;
1088             default:
1089                 g_assert_not_reached();
1090             }
1091         } else {
1092             tmp = tcg_temp_new();
1093             switch (opsize) {
1094             case OS_BYTE:
1095                 tcg_gen_ext8s_i32(tmp, reg);
1096                 gen_helper_exts32(cpu_env, fp, tmp);
1097                 break;
1098             case OS_WORD:
1099                 tcg_gen_ext16s_i32(tmp, reg);
1100                 gen_helper_exts32(cpu_env, fp, tmp);
1101                 break;
1102             case OS_LONG:
1103                 gen_helper_exts32(cpu_env, fp, reg);
1104                 break;
1105             case OS_SINGLE:
1106                 gen_helper_extf32(cpu_env, fp, reg);
1107                 break;
1108             default:
1109                 g_assert_not_reached();
1110             }
1111             tcg_temp_free(tmp);
1112         }
1113         return 0;
1114     case 1: /* Address register direct.  */
1115         return -1;
1116     case 2: /* Indirect register */
1117         addr = get_areg(s, reg0);
1118         gen_ldst_fp(s, opsize, addr, fp, what, index);
1119         return 0;
1120     case 3: /* Indirect postincrement.  */
1121         addr = cpu_aregs[reg0];
1122         gen_ldst_fp(s, opsize, addr, fp, what, index);
1123         tcg_gen_addi_i32(addr, addr, opsize_bytes(opsize));
1124         return 0;
1125     case 4: /* Indirect predecrement.  */
1126         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1127         if (IS_NULL_QREG(addr)) {
1128             return -1;
1129         }
1130         gen_ldst_fp(s, opsize, addr, fp, what, index);
1131         tcg_gen_mov_i32(cpu_aregs[reg0], addr);
1132         return 0;
1133     case 5: /* Indirect displacement.  */
1134     case 6: /* Indirect index + displacement.  */
1135     do_indirect:
1136         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1137         if (IS_NULL_QREG(addr)) {
1138             return -1;
1139         }
1140         gen_ldst_fp(s, opsize, addr, fp, what, index);
1141         return 0;
1142     case 7: /* Other */
1143         switch (reg0) {
1144         case 0: /* Absolute short.  */
1145         case 1: /* Absolute long.  */
1146         case 2: /* pc displacement  */
1147         case 3: /* pc index+displacement.  */
1148             goto do_indirect;
1149         case 4: /* Immediate.  */
1150             if (what == EA_STORE) {
1151                 return -1;
1152             }
1153             switch (opsize) {
1154             case OS_BYTE:
1155                 tmp = tcg_const_i32((int8_t)read_im8(env, s));
1156                 gen_helper_exts32(cpu_env, fp, tmp);
1157                 tcg_temp_free(tmp);
1158                 break;
1159             case OS_WORD:
1160                 tmp = tcg_const_i32((int16_t)read_im16(env, s));
1161                 gen_helper_exts32(cpu_env, fp, tmp);
1162                 tcg_temp_free(tmp);
1163                 break;
1164             case OS_LONG:
1165                 tmp = tcg_const_i32(read_im32(env, s));
1166                 gen_helper_exts32(cpu_env, fp, tmp);
1167                 tcg_temp_free(tmp);
1168                 break;
1169             case OS_SINGLE:
1170                 tmp = tcg_const_i32(read_im32(env, s));
1171                 gen_helper_extf32(cpu_env, fp, tmp);
1172                 tcg_temp_free(tmp);
1173                 break;
1174             case OS_DOUBLE:
1175                 t64 = tcg_const_i64(read_im64(env, s));
1176                 gen_helper_extf64(cpu_env, fp, t64);
1177                 tcg_temp_free_i64(t64);
1178                 break;
1179             case OS_EXTENDED:
1180                 if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1181                     gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1182                     break;
1183                 }
1184                 tmp = tcg_const_i32(read_im32(env, s) >> 16);
1185                 tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1186                 tcg_temp_free(tmp);
1187                 t64 = tcg_const_i64(read_im64(env, s));
1188                 tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1189                 tcg_temp_free_i64(t64);
1190                 break;
1191             case OS_PACKED:
1192                 /* unimplemented data type on 68040/ColdFire
1193                  * FIXME if needed for another FPU
1194                  */
1195                 gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1196                 break;
1197             default:
1198                 g_assert_not_reached();
1199             }
1200             return 0;
1201         default:
1202             return -1;
1203         }
1204     }
1205     return -1;
1206 }
1207 
1208 static int gen_ea_fp(CPUM68KState *env, DisasContext *s, uint16_t insn,
1209                        int opsize, TCGv_ptr fp, ea_what what, int index)
1210 {
1211     int mode = extract32(insn, 3, 3);
1212     int reg0 = REG(insn, 0);
1213     return gen_ea_mode_fp(env, s, mode, reg0, opsize, fp, what, index);
1214 }
1215 
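/* A condition decomposed into a TCG comparison of v1 against v2.  g1/g2
   record whether v1/v2 are global TCG values (and thus must not be freed
   by free_cond()) or temporaries allocated by gen_cc_cond().  */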
1216 typedef struct {
1217     TCGCond tcond;
1218     bool g1;
1219     bool g2;
1220     TCGv v1;
1221     TCGv v2;
1222 } DisasCompare;
1223 
1224 static void gen_cc_cond(DisasCompare *c, DisasContext *s, int cond)
1225 {
1226     TCGv tmp, tmp2;
1227     TCGCond tcond;
1228     CCOp op = s->cc_op;
1229 
1230     /* The CC_OP_CMP form can handle most normal comparisons directly.  */
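    /* gen_update_cc_cmp() left the destination operand in CC_N and the
       source operand in CC_V, so the comparison dest - src can be expressed
       directly as a TCG comparison of N against V.  */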
1231     if (op == CC_OP_CMPB || op == CC_OP_CMPW || op == CC_OP_CMPL) {
1232         c->g1 = c->g2 = 1;
1233         c->v1 = QREG_CC_N;
1234         c->v2 = QREG_CC_V;
1235         switch (cond) {
1236         case 2: /* HI */
1237         case 3: /* LS */
1238             tcond = TCG_COND_LEU;
1239             goto done;
1240         case 4: /* CC */
1241         case 5: /* CS */
1242             tcond = TCG_COND_LTU;
1243             goto done;
1244         case 6: /* NE */
1245         case 7: /* EQ */
1246             tcond = TCG_COND_EQ;
1247             goto done;
1248         case 10: /* PL */
1249         case 11: /* MI */
1250             c->g1 = c->g2 = 0;
1251             c->v2 = tcg_const_i32(0);
1252             c->v1 = tmp = tcg_temp_new();
1253             tcg_gen_sub_i32(tmp, QREG_CC_N, QREG_CC_V);
1254             gen_ext(tmp, tmp, op - CC_OP_CMPB, 1);
1255             /* fallthru */
1256         case 12: /* GE */
1257         case 13: /* LT */
1258             tcond = TCG_COND_LT;
1259             goto done;
1260         case 14: /* GT */
1261         case 15: /* LE */
1262             tcond = TCG_COND_LE;
1263             goto done;
1264         }
1265     }
1266 
1267     c->g1 = 1;
1268     c->g2 = 0;
1269     c->v2 = tcg_const_i32(0);
1270 
1271     switch (cond) {
1272     case 0: /* T */
1273     case 1: /* F */
1274         c->v1 = c->v2;
1275         tcond = TCG_COND_NEVER;
1276         goto done;
1277     case 14: /* GT (!(Z || (N ^ V))) */
1278     case 15: /* LE (Z || (N ^ V)) */
1279         /* Logic operations clear V, which simplifies LE to (Z || N),
1280            and since Z and N are co-located, this becomes a normal
1281            comparison vs N.  */
1282         if (op == CC_OP_LOGIC) {
1283             c->v1 = QREG_CC_N;
1284             tcond = TCG_COND_LE;
1285             goto done;
1286         }
1287         break;
1288     case 12: /* GE (!(N ^ V)) */
1289     case 13: /* LT (N ^ V) */
1290         /* Logic operations clear V, which simplifies this to N.  */
1291         if (op != CC_OP_LOGIC) {
1292             break;
1293         }
1294         /* fallthru */
1295     case 10: /* PL (!N) */
1296     case 11: /* MI (N) */
1297         /* Several cases represent N normally.  */
1298         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1299             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1300             op == CC_OP_LOGIC) {
1301             c->v1 = QREG_CC_N;
1302             tcond = TCG_COND_LT;
1303             goto done;
1304         }
1305         break;
1306     case 6: /* NE (!Z) */
1307     case 7: /* EQ (Z) */
1308         /* Some cases fold Z into N.  */
1309         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1310             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1311             op == CC_OP_LOGIC) {
1312             tcond = TCG_COND_EQ;
1313             c->v1 = QREG_CC_N;
1314             goto done;
1315         }
1316         break;
1317     case 4: /* CC (!C) */
1318     case 5: /* CS (C) */
1319         /* Some cases fold C into X.  */
1320         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1321             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL) {
1322             tcond = TCG_COND_NE;
1323             c->v1 = QREG_CC_X;
1324             goto done;
1325         }
1326         /* fallthru */
1327     case 8: /* VC (!V) */
1328     case 9: /* VS (V) */
1329         /* Logic operations clear V and C.  */
1330         if (op == CC_OP_LOGIC) {
1331             tcond = TCG_COND_NEVER;
1332             c->v1 = c->v2;
1333             goto done;
1334         }
1335         break;
1336     }
1337 
1338     /* Otherwise, flush flag state to CC_OP_FLAGS.  */
1339     gen_flush_flags(s);
1340 
1341     switch (cond) {
1342     case 0: /* T */
1343     case 1: /* F */
1344     default:
1345         /* Invalid, or handled above.  */
1346         abort();
1347     case 2: /* HI (!C && !Z) -> !(C || Z) */
1348     case 3: /* LS (C || Z) */
1349         c->v1 = tmp = tcg_temp_new();
1350         c->g1 = 0;
1351         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1352         tcg_gen_or_i32(tmp, tmp, QREG_CC_C);
1353         tcond = TCG_COND_NE;
1354         break;
1355     case 4: /* CC (!C) */
1356     case 5: /* CS (C) */
1357         c->v1 = QREG_CC_C;
1358         tcond = TCG_COND_NE;
1359         break;
1360     case 6: /* NE (!Z) */
1361     case 7: /* EQ (Z) */
1362         c->v1 = QREG_CC_Z;
1363         tcond = TCG_COND_EQ;
1364         break;
1365     case 8: /* VC (!V) */
1366     case 9: /* VS (V) */
1367         c->v1 = QREG_CC_V;
1368         tcond = TCG_COND_LT;
1369         break;
1370     case 10: /* PL (!N) */
1371     case 11: /* MI (N) */
1372         c->v1 = QREG_CC_N;
1373         tcond = TCG_COND_LT;
1374         break;
1375     case 12: /* GE (!(N ^ V)) */
1376     case 13: /* LT (N ^ V) */
1377         c->v1 = tmp = tcg_temp_new();
1378         c->g1 = 0;
1379         tcg_gen_xor_i32(tmp, QREG_CC_N, QREG_CC_V);
1380         tcond = TCG_COND_LT;
1381         break;
1382     case 14: /* GT (!(Z || (N ^ V))) */
1383     case 15: /* LE (Z || (N ^ V)) */
1384         c->v1 = tmp = tcg_temp_new();
1385         c->g1 = 0;
1386         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1387         tcg_gen_neg_i32(tmp, tmp);
1388         tmp2 = tcg_temp_new();
1389         tcg_gen_xor_i32(tmp2, QREG_CC_N, QREG_CC_V);
1390         tcg_gen_or_i32(tmp, tmp, tmp2);
1391         tcg_temp_free(tmp2);
1392         tcond = TCG_COND_LT;
1393         break;
1394     }
1395 
1396  done:
1397     if ((cond & 1) == 0) {
1398         tcond = tcg_invert_cond(tcond);
1399     }
1400     c->tcond = tcond;
1401 }
1402 
1403 static void free_cond(DisasCompare *c)
1404 {
1405     if (!c->g1) {
1406         tcg_temp_free(c->v1);
1407     }
1408     if (!c->g2) {
1409         tcg_temp_free(c->v2);
1410     }
1411 }
1412 
1413 static void gen_jmpcc(DisasContext *s, int cond, TCGLabel *l1)
1414 {
1415   DisasCompare c;
1416 
1417   gen_cc_cond(&c, s, cond);
1418   update_cc_op(s);
1419   tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
1420   free_cond(&c);
1421 }
1422 
1423 /* Force a TB lookup after an instruction that changes the CPU state.  */
1424 static void gen_lookup_tb(DisasContext *s)
1425 {
1426     update_cc_op(s);
1427     tcg_gen_movi_i32(QREG_PC, s->pc);
1428     s->is_jmp = DISAS_UPDATE;
1429 }
1430 
1431 #define SRC_EA(env, result, opsize, op_sign, addrp) do {                \
1432         result = gen_ea(env, s, insn, opsize, NULL_QREG, addrp,         \
1433                         op_sign ? EA_LOADS : EA_LOADU, IS_USER(s));     \
1434         if (IS_NULL_QREG(result)) {                                     \
1435             gen_addr_fault(s);                                          \
1436             return;                                                     \
1437         }                                                               \
1438     } while (0)
1439 
1440 #define DEST_EA(env, insn, opsize, val, addrp) do {                     \
1441         TCGv ea_result = gen_ea(env, s, insn, opsize, val, addrp,       \
1442                                 EA_STORE, IS_USER(s));                  \
1443         if (IS_NULL_QREG(ea_result)) {                                  \
1444             gen_addr_fault(s);                                          \
1445             return;                                                     \
1446         }                                                               \
1447     } while (0)
1448 
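/* Direct TB chaining via goto_tb is only safe when the destination lies on
   the same guest page as this TB (or as the current instruction), so that
   page-level invalidation also invalidates the chained jump.  */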
1449 static inline bool use_goto_tb(DisasContext *s, uint32_t dest)
1450 {
1451 #ifndef CONFIG_USER_ONLY
1452     return (s->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) ||
1453            (s->insn_pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
1454 #else
1455     return true;
1456 #endif
1457 }
1458 
1459 /* Generate a jump to an immediate address.  */
1460 static void gen_jmp_tb(DisasContext *s, int n, uint32_t dest)
1461 {
1462     if (unlikely(s->singlestep_enabled)) {
1463         gen_exception(s, dest, EXCP_DEBUG);
1464     } else if (use_goto_tb(s, dest)) {
1465         tcg_gen_goto_tb(n);
1466         tcg_gen_movi_i32(QREG_PC, dest);
1467         tcg_gen_exit_tb((uintptr_t)s->tb + n);
1468     } else {
1469         gen_jmp_im(s, dest);
1470         tcg_gen_exit_tb(0);
1471     }
1472     s->is_jmp = DISAS_TB_JUMP;
1473 }
1474 
1475 DISAS_INSN(scc)
1476 {
1477     DisasCompare c;
1478     int cond;
1479     TCGv tmp;
1480 
1481     cond = (insn >> 8) & 0xf;
1482     gen_cc_cond(&c, s, cond);
1483 
1484     tmp = tcg_temp_new();
1485     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
1486     free_cond(&c);
1487 
1488     tcg_gen_neg_i32(tmp, tmp);
1489     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
1490     tcg_temp_free(tmp);
1491 }
1492 
1493 DISAS_INSN(dbcc)
1494 {
1495     TCGLabel *l1;
1496     TCGv reg;
1497     TCGv tmp;
1498     int16_t offset;
1499     uint32_t base;
1500 
1501     reg = DREG(insn, 0);
1502     base = s->pc;
1503     offset = (int16_t)read_im16(env, s);
1504     l1 = gen_new_label();
1505     gen_jmpcc(s, (insn >> 8) & 0xf, l1);
1506 
1507     tmp = tcg_temp_new();
1508     tcg_gen_ext16s_i32(tmp, reg);
1509     tcg_gen_addi_i32(tmp, tmp, -1);
1510     gen_partset_reg(OS_WORD, reg, tmp);
1511     tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, -1, l1);
1512     gen_jmp_tb(s, 1, base + offset);
1513     gen_set_label(l1);
1514     gen_jmp_tb(s, 0, s->pc);
1515 }
1516 
1517 DISAS_INSN(undef_mac)
1518 {
1519     gen_exception(s, s->insn_pc, EXCP_LINEA);
1520 }
1521 
1522 DISAS_INSN(undef_fpu)
1523 {
1524     gen_exception(s, s->insn_pc, EXCP_LINEF);
1525 }
1526 
1527 DISAS_INSN(undef)
1528 {
1529     /* ??? This covers both instructions that are as yet unimplemented
1530        for the 680x0 series, as well as those that are implemented
1531        but actually illegal for CPU32 or pre-68020.  */
1532     qemu_log_mask(LOG_UNIMP, "Illegal instruction: %04x @ %08x\n",
1533                   insn, s->insn_pc);
1534     gen_exception(s, s->insn_pc, EXCP_UNSUPPORTED);
1535 }
1536 
1537 DISAS_INSN(mulw)
1538 {
1539     TCGv reg;
1540     TCGv tmp;
1541     TCGv src;
1542     int sign;
1543 
1544     sign = (insn & 0x100) != 0;
1545     reg = DREG(insn, 9);
1546     tmp = tcg_temp_new();
1547     if (sign)
1548         tcg_gen_ext16s_i32(tmp, reg);
1549     else
1550         tcg_gen_ext16u_i32(tmp, reg);
1551     SRC_EA(env, src, OS_WORD, sign, NULL);
1552     tcg_gen_mul_i32(tmp, tmp, src);
1553     tcg_gen_mov_i32(reg, tmp);
1554     gen_logic_cc(s, tmp, OS_LONG);
1555     tcg_temp_free(tmp);
1556 }
1557 
1558 DISAS_INSN(divw)
1559 {
1560     int sign;
1561     TCGv src;
1562     TCGv destr;
1563 
1564     /* divX.w <EA>,Dn    32/16 -> 16r:16q */
1565 
1566     sign = (insn & 0x100) != 0;
1567 
1568     /* dest.l / src.w */
1569 
1570     SRC_EA(env, src, OS_WORD, sign, NULL);
1571     destr = tcg_const_i32(REG(insn, 9));
1572     if (sign) {
1573         gen_helper_divsw(cpu_env, destr, src);
1574     } else {
1575         gen_helper_divuw(cpu_env, destr, src);
1576     }
1577     tcg_temp_free(destr);
1578 
1579     set_cc_op(s, CC_OP_FLAGS);
1580 }
1581 
1582 DISAS_INSN(divl)
1583 {
1584     TCGv num, reg, den;
1585     int sign;
1586     uint16_t ext;
1587 
1588     ext = read_im16(env, s);
1589 
1590     sign = (ext & 0x0800) != 0;
1591 
1592     if (ext & 0x400) {
1593         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
1594             gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
1595             return;
1596         }
1597 
1598         /* divX.l <EA>, Dr:Dq    64/32 -> 32r:32q */
1599 
1600         SRC_EA(env, den, OS_LONG, 0, NULL);
1601         num = tcg_const_i32(REG(ext, 12));
1602         reg = tcg_const_i32(REG(ext, 0));
1603         if (sign) {
1604             gen_helper_divsll(cpu_env, num, reg, den);
1605         } else {
1606             gen_helper_divull(cpu_env, num, reg, den);
1607         }
1608         tcg_temp_free(reg);
1609         tcg_temp_free(num);
1610         set_cc_op(s, CC_OP_FLAGS);
1611         return;
1612     }
1613 
1614     /* divX.l <EA>, Dq        32/32 -> 32q     */
1615     /* divXl.l <EA>, Dr:Dq    32/32 -> 32r:32q */
1616 
1617     SRC_EA(env, den, OS_LONG, 0, NULL);
1618     num = tcg_const_i32(REG(ext, 12));
1619     reg = tcg_const_i32(REG(ext, 0));
1620     if (sign) {
1621         gen_helper_divsl(cpu_env, num, reg, den);
1622     } else {
1623         gen_helper_divul(cpu_env, num, reg, den);
1624     }
1625     tcg_temp_free(reg);
1626     tcg_temp_free(num);
1627 
1628     set_cc_op(s, CC_OP_FLAGS);
1629 }
1630 
1631 static void bcd_add(TCGv dest, TCGv src)
1632 {
1633     TCGv t0, t1;
1634 
1635     /*  dest10 = dest10 + src10 + X
1636      *
1637      *        t1 = src
1638      *        t2 = t1 + 0x066
1639      *        t3 = t2 + dest + X
1640      *        t4 = t2 ^ dest
1641      *        t5 = t3 ^ t4
1642      *        t6 = ~t5 & 0x110
1643      *        t7 = (t6 >> 2) | (t6 >> 3)
1644      *        return t3 - t7
1645      */
1646 
1647     /* t1 = (src + 0x066) + dest + X
1648      *    = result, possibly with an excess 0x6 in some digits
1649      */
1650 
1651     t0 = tcg_const_i32(0x066);
1652     tcg_gen_add_i32(t0, t0, src);
1653 
1654     t1 = tcg_temp_new();
1655     tcg_gen_add_i32(t1, t0, dest);
1656     tcg_gen_add_i32(t1, t1, QREG_CC_X);
1657 
1658     /* we will remove the excess 0x6 where there is no carry */
1659 
1660     /* t0 = (src + 0x0066) ^ dest
1661      *    = t1 without carries
1662      */
1663 
1664     tcg_gen_xor_i32(t0, t0, dest);
1665 
1666     /* extract the carries
1667      * t0 = t0 ^ t1
1668      *    = only the carries
1669      */
1670 
1671     tcg_gen_xor_i32(t0, t0, t1);
1672 
1673     /* generate 0x1 where there is no carry
1674      * and for each 0x10, generate a 0x6
1675      */
1676 
1677     tcg_gen_shri_i32(t0, t0, 3);
1678     tcg_gen_not_i32(t0, t0);
1679     tcg_gen_andi_i32(t0, t0, 0x22);
1680     tcg_gen_add_i32(dest, t0, t0);
1681     tcg_gen_add_i32(dest, dest, t0);
1682     tcg_temp_free(t0);
1683 
1684     /* remove the excess 0x6
1685      * for digits that have not generated a carry
1686      */
1687 
1688     tcg_gen_sub_i32(dest, t1, dest);
1689     tcg_temp_free(t1);
1690 }
1691 
1692 static void bcd_sub(TCGv dest, TCGv src)
1693 {
1694     TCGv t0, t1, t2;
1695 
1696     /*  dest10 = dest10 - src10 - X
1697      *         = bcd_add(dest + 1 - X, 0x199 - src)
1698      */
1699 
1700     /* t0 = 0x066 + (0x199 - src) */
1701 
1702     t0 = tcg_temp_new();
1703     tcg_gen_subfi_i32(t0, 0x1ff, src);
1704 
1705     /* t1 = t0 + dest + 1 - X */
1706 
1707     t1 = tcg_temp_new();
1708     tcg_gen_add_i32(t1, t0, dest);
1709     tcg_gen_addi_i32(t1, t1, 1);
1710     tcg_gen_sub_i32(t1, t1, QREG_CC_X);
1711 
1712     /* t2 = t0 ^ dest */
1713 
1714     t2 = tcg_temp_new();
1715     tcg_gen_xor_i32(t2, t0, dest);
1716 
1717     /* t0 = t1 ^ t2 */
1718 
1719     tcg_gen_xor_i32(t0, t1, t2);
1720 
1721     /* t2 = ~t0 & 0x110
1722      * t0 = (t2 >> 2) | (t2 >> 3)
1723      *
1724      * to fit on 8bit operands, changed in:
1725      *
1726      * t2 = ~(t0 >> 3) & 0x22
1727      * t0 = t2 + t2
1728      * t0 = t0 + t2
1729      */
1730 
1731     tcg_gen_shri_i32(t2, t0, 3);
1732     tcg_gen_not_i32(t2, t2);
1733     tcg_gen_andi_i32(t2, t2, 0x22);
1734     tcg_gen_add_i32(t0, t2, t2);
1735     tcg_gen_add_i32(t0, t0, t2);
1736     tcg_temp_free(t2);
1737 
1738     /* return t1 - t0 */
1739 
1740     tcg_gen_sub_i32(dest, t1, t0);
1741     tcg_temp_free(t0);
1742     tcg_temp_free(t1);
1743 }
1744 
1745 static void bcd_flags(TCGv val)
1746 {
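         /* Informally: the low byte of VAL is the packed BCD result and
          * bit 8 is the decimal carry/borrow.  CC_C is first used as a
          * scratch: the result's non-zero-ness is OR-ed into CC_Z (!Z is
          * sticky, so Z can only be cleared here), then CC_C is overwritten
          * with bit 8 and the same value is copied into X.
          */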
1747     tcg_gen_andi_i32(QREG_CC_C, val, 0x0ff);
1748     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_C);
1749 
1750     tcg_gen_extract_i32(QREG_CC_C, val, 8, 1);
1751 
1752     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
1753 }
1754 
1755 DISAS_INSN(abcd_reg)
1756 {
1757     TCGv src;
1758     TCGv dest;
1759 
1760     gen_flush_flags(s); /* !Z is sticky */
1761 
1762     src = gen_extend(DREG(insn, 0), OS_BYTE, 0);
1763     dest = gen_extend(DREG(insn, 9), OS_BYTE, 0);
1764     bcd_add(dest, src);
1765     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1766 
1767     bcd_flags(dest);
1768 }
1769 
1770 DISAS_INSN(abcd_mem)
1771 {
1772     TCGv src, dest, addr;
1773 
1774     gen_flush_flags(s); /* !Z is sticky */
1775 
1776     /* Indirect pre-decrement load (mode 4) */
1777 
1778     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1779                       NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1780     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1781                        NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1782 
1783     bcd_add(dest, src);
1784 
1785     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1786                 EA_STORE, IS_USER(s));
1787 
1788     bcd_flags(dest);
1789 }
1790 
1791 DISAS_INSN(sbcd_reg)
1792 {
1793     TCGv src, dest;
1794 
1795     gen_flush_flags(s); /* !Z is sticky */
1796 
1797     src = gen_extend(DREG(insn, 0), OS_BYTE, 0);
1798     dest = gen_extend(DREG(insn, 9), OS_BYTE, 0);
1799 
1800     bcd_sub(dest, src);
1801 
1802     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1803 
1804     bcd_flags(dest);
1805 }
1806 
1807 DISAS_INSN(sbcd_mem)
1808 {
1809     TCGv src, dest, addr;
1810 
1811     gen_flush_flags(s); /* !Z is sticky */
1812 
1813     /* Indirect pre-decrement load (mode 4) */
1814 
1815     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1816                       NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1817     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1818                        NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1819 
1820     bcd_sub(dest, src);
1821 
1822     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1823                 EA_STORE, IS_USER(s));
1824 
1825     bcd_flags(dest);
1826 }
1827 
1828 DISAS_INSN(nbcd)
1829 {
1830     TCGv src, dest;
1831     TCGv addr;
1832 
1833     gen_flush_flags(s); /* !Z is sticky */
1834 
1835     SRC_EA(env, src, OS_BYTE, 0, &addr);
1836 
1837     dest = tcg_const_i32(0);
1838     bcd_sub(dest, src);
1839 
1840     DEST_EA(env, insn, OS_BYTE, dest, &addr);
1841 
1842     bcd_flags(dest);
1843 
1844     tcg_temp_free(dest);
1845 }
1846 
1847 DISAS_INSN(addsub)
1848 {
1849     TCGv reg;
1850     TCGv dest;
1851     TCGv src;
1852     TCGv tmp;
1853     TCGv addr;
1854     int add;
1855     int opsize;
1856 
1857     add = (insn & 0x4000) != 0;
1858     opsize = insn_opsize(insn);
1859     reg = gen_extend(DREG(insn, 9), opsize, 1);
1860     dest = tcg_temp_new();
1861     if (insn & 0x100) {
1862         SRC_EA(env, tmp, opsize, 1, &addr);
1863         src = reg;
1864     } else {
1865         tmp = reg;
1866         SRC_EA(env, src, opsize, 1, NULL);
1867     }
1868     if (add) {
1869         tcg_gen_add_i32(dest, tmp, src);
1870         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, src);
1871         set_cc_op(s, CC_OP_ADDB + opsize);
1872     } else {
1873         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, tmp, src);
1874         tcg_gen_sub_i32(dest, tmp, src);
1875         set_cc_op(s, CC_OP_SUBB + opsize);
1876     }
1877     gen_update_cc_add(dest, src, opsize);
1878     if (insn & 0x100) {
1879         DEST_EA(env, insn, opsize, dest, &addr);
1880     } else {
1881         gen_partset_reg(opsize, DREG(insn, 9), dest);
1882     }
1883     tcg_temp_free(dest);
1884 }
1885 
1886 /* Reverse the order of the bits in REG.  */
1887 DISAS_INSN(bitrev)
1888 {
1889     TCGv reg;
1890     reg = DREG(insn, 0);
1891     gen_helper_bitrev(reg, reg);
1892 }
1893 
1894 DISAS_INSN(bitop_reg)
1895 {
1896     int opsize;
1897     int op;
1898     TCGv src1;
1899     TCGv src2;
1900     TCGv tmp;
1901     TCGv addr;
1902     TCGv dest;
1903 
1904     if ((insn & 0x38) != 0)
1905         opsize = OS_BYTE;
1906     else
1907         opsize = OS_LONG;
1908     op = (insn >> 6) & 3;
1909     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
1910 
1911     gen_flush_flags(s);
1912     src2 = tcg_temp_new();
1913     if (opsize == OS_BYTE)
1914         tcg_gen_andi_i32(src2, DREG(insn, 9), 7);
1915     else
1916         tcg_gen_andi_i32(src2, DREG(insn, 9), 31);
1917 
1918     tmp = tcg_const_i32(1);
1919     tcg_gen_shl_i32(tmp, tmp, src2);
1920     tcg_temp_free(src2);
1921 
1922     tcg_gen_and_i32(QREG_CC_Z, src1, tmp);
1923 
1924     dest = tcg_temp_new();
1925     switch (op) {
1926     case 1: /* bchg */
1927         tcg_gen_xor_i32(dest, src1, tmp);
1928         break;
1929     case 2: /* bclr */
1930         tcg_gen_andc_i32(dest, src1, tmp);
1931         break;
1932     case 3: /* bset */
1933         tcg_gen_or_i32(dest, src1, tmp);
1934         break;
1935     default: /* btst */
1936         break;
1937     }
1938     tcg_temp_free(tmp);
1939     if (op) {
1940         DEST_EA(env, insn, opsize, dest, &addr);
1941     }
1942     tcg_temp_free(dest);
1943 }
1944 
1945 DISAS_INSN(sats)
1946 {
1947     TCGv reg;
1948     reg = DREG(insn, 0);
1949     gen_flush_flags(s);
1950     gen_helper_sats(reg, reg, QREG_CC_V);
1951     gen_logic_cc(s, reg, OS_LONG);
1952 }
1953 
1954 static void gen_push(DisasContext *s, TCGv val)
1955 {
1956     TCGv tmp;
1957 
1958     tmp = tcg_temp_new();
1959     tcg_gen_subi_i32(tmp, QREG_SP, 4);
1960     gen_store(s, OS_LONG, tmp, val, IS_USER(s));
1961     tcg_gen_mov_i32(QREG_SP, tmp);
1962     tcg_temp_free(tmp);
1963 }
1964 
1965 static TCGv mreg(int reg)
1966 {
1967     if (reg < 8) {
1968         /* Dx */
1969         return cpu_dregs[reg];
1970     }
1971     /* Ax */
1972     return cpu_aregs[reg & 7];
1973 }
1974 
1975 DISAS_INSN(movem)
1976 {
1977     TCGv addr, incr, tmp, r[16];
1978     int is_load = (insn & 0x0400) != 0;
1979     int opsize = (insn & 0x40) != 0 ? OS_LONG : OS_WORD;
1980     uint16_t mask = read_im16(env, s);
1981     int mode = extract32(insn, 3, 3);
1982     int reg0 = REG(insn, 0);
1983     int i;
1984 
1985     tmp = cpu_aregs[reg0];
1986 
1987     switch (mode) {
1988     case 0: /* data register direct */
1989     case 1: /* addr register direct */
1990     do_addr_fault:
1991         gen_addr_fault(s);
1992         return;
1993 
1994     case 2: /* indirect */
1995         break;
1996 
1997     case 3: /* indirect post-increment */
1998         if (!is_load) {
1999             /* post-increment is not allowed */
2000             goto do_addr_fault;
2001         }
2002         break;
2003 
2004     case 4: /* indirect pre-decrement */
2005         if (is_load) {
2006             /* pre-decrement is not allowed */
2007             goto do_addr_fault;
2008         }
2009         /* We want a bare copy of the address reg, without the pre-decrement
2010            adjustment that gen_lea would apply.  */
2011         break;
2012 
2013     default:
2014         tmp = gen_lea_mode(env, s, mode, reg0, opsize);
2015         if (IS_NULL_QREG(tmp)) {
2016             goto do_addr_fault;
2017         }
2018         break;
2019     }
2020 
2021     addr = tcg_temp_new();
2022     tcg_gen_mov_i32(addr, tmp);
2023     incr = tcg_const_i32(opsize_bytes(opsize));
2024 
2025     if (is_load) {
2026         /* memory to register */
2027         for (i = 0; i < 16; i++) {
2028             if (mask & (1 << i)) {
2029                 r[i] = gen_load(s, opsize, addr, 1, IS_USER(s));
2030                 tcg_gen_add_i32(addr, addr, incr);
2031             }
2032         }
2033         for (i = 0; i < 16; i++) {
2034             if (mask & (1 << i)) {
2035                 tcg_gen_mov_i32(mreg(i), r[i]);
2036                 tcg_temp_free(r[i]);
2037             }
2038         }
2039         if (mode == 3) {
2040             /* post-increment: movem (An)+,X */
2041             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2042         }
2043     } else {
2044         /* register to memory */
2045         if (mode == 4) {
2046             /* pre-decrement: movem X,-(An) */
2047             for (i = 15; i >= 0; i--) {
2048                 if ((mask << i) & 0x8000) {
2049                     tcg_gen_sub_i32(addr, addr, incr);
2050                     if (reg0 + 8 == i &&
2051                         m68k_feature(s->env, M68K_FEATURE_EXT_FULL)) {
2052                         /* M68020+: if the addressing register is the
2053                          * register moved to memory, the value written
2054                          * is the initial value decremented by the size of
2055                          * the operation, regardless of how many actual
2056                          * stores have been performed until this point.
2057                          * M68000/M68010: the value is the initial value.
2058                          */
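                             /* Illustrative case: with this code, for
                              * "movem.w %a0,-(%a0)" incr is 2, so a 68020+
                              * stores the original A0 minus 2, while a
                              * 68000/68010 stores the original A0 unchanged.
                              */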
2059                         tmp = tcg_temp_new();
2060                         tcg_gen_sub_i32(tmp, cpu_aregs[reg0], incr);
2061                         gen_store(s, opsize, addr, tmp, IS_USER(s));
2062                         tcg_temp_free(tmp);
2063                     } else {
2064                         gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2065                     }
2066                 }
2067             }
2068             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2069         } else {
2070             for (i = 0; i < 16; i++) {
2071                 if (mask & (1 << i)) {
2072                     gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2073                     tcg_gen_add_i32(addr, addr, incr);
2074                 }
2075             }
2076         }
2077     }
2078 
2079     tcg_temp_free(incr);
2080     tcg_temp_free(addr);
2081 }
2082 
2083 DISAS_INSN(movep)
2084 {
2085     uint8_t i;
2086     int16_t displ;
2087     TCGv reg;
2088     TCGv addr;
2089     TCGv abuf;
2090     TCGv dbuf;
2091 
2092     displ = read_im16(env, s);
2093 
2094     addr = AREG(insn, 0);
2095     reg = DREG(insn, 9);
2096 
2097     abuf = tcg_temp_new();
2098     tcg_gen_addi_i32(abuf, addr, displ);
2099     dbuf = tcg_temp_new();
2100 
2101     if (insn & 0x40) {
2102         i = 4;
2103     } else {
2104         i = 2;
2105     }
2106 
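         /* MOVEP moves the bytes of Dx to/from every other memory byte
          * starting at (d16,Ay): four bytes for the long form, two for the
          * word form, most significant byte first, advancing the address
          * by 2 between accesses, as the loops below do.
          */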
2107     if (insn & 0x80) {
2108         for ( ; i > 0 ; i--) {
2109             tcg_gen_shri_i32(dbuf, reg, (i - 1) * 8);
2110             tcg_gen_qemu_st8(dbuf, abuf, IS_USER(s));
2111             if (i > 1) {
2112                 tcg_gen_addi_i32(abuf, abuf, 2);
2113             }
2114         }
2115     } else {
2116         for ( ; i > 0 ; i--) {
2117             tcg_gen_qemu_ld8u(dbuf, abuf, IS_USER(s));
2118             tcg_gen_deposit_i32(reg, reg, dbuf, (i - 1) * 8, 8);
2119             if (i > 1) {
2120                 tcg_gen_addi_i32(abuf, abuf, 2);
2121             }
2122         }
2123     }
2124     tcg_temp_free(abuf);
2125     tcg_temp_free(dbuf);
2126 }
2127 
2128 DISAS_INSN(bitop_im)
2129 {
2130     int opsize;
2131     int op;
2132     TCGv src1;
2133     uint32_t mask;
2134     int bitnum;
2135     TCGv tmp;
2136     TCGv addr;
2137 
2138     if ((insn & 0x38) != 0)
2139         opsize = OS_BYTE;
2140     else
2141         opsize = OS_LONG;
2142     op = (insn >> 6) & 3;
2143 
2144     bitnum = read_im16(env, s);
2145     if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
2146         if (bitnum & 0xfe00) {
2147             disas_undef(env, s, insn);
2148             return;
2149         }
2150     } else {
2151         if (bitnum & 0xff00) {
2152             disas_undef(env, s, insn);
2153             return;
2154         }
2155     }
2156 
2157     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
2158 
2159     gen_flush_flags(s);
2160     if (opsize == OS_BYTE)
2161         bitnum &= 7;
2162     else
2163         bitnum &= 31;
2164     mask = 1 << bitnum;
2165 
2166     tcg_gen_andi_i32(QREG_CC_Z, src1, mask);
2167 
2168     if (op) {
2169         tmp = tcg_temp_new();
2170         switch (op) {
2171         case 1: /* bchg */
2172             tcg_gen_xori_i32(tmp, src1, mask);
2173             break;
2174         case 2: /* bclr */
2175             tcg_gen_andi_i32(tmp, src1, ~mask);
2176             break;
2177         case 3: /* bset */
2178             tcg_gen_ori_i32(tmp, src1, mask);
2179             break;
2180         default: /* btst */
2181             break;
2182         }
2183         DEST_EA(env, insn, opsize, tmp, &addr);
2184         tcg_temp_free(tmp);
2185     }
2186 }
2187 
2188 static TCGv gen_get_ccr(DisasContext *s)
2189 {
2190     TCGv dest;
2191 
2192     update_cc_op(s);
2193     dest = tcg_temp_new();
2194     gen_helper_get_ccr(dest, cpu_env);
2195     return dest;
2196 }
2197 
2198 static TCGv gen_get_sr(DisasContext *s)
2199 {
2200     TCGv ccr;
2201     TCGv sr;
2202 
2203     ccr = gen_get_ccr(s);
2204     sr = tcg_temp_new();
2205     tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
2206     tcg_gen_or_i32(sr, sr, ccr);
2207     return sr;
2208 }
2209 
2210 static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
2211 {
2212     if (ccr_only) {
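             /* Flag encoding used here: C and X are stored as 0/1, N and V
              * are represented by their sign bit (hence 0 or -1), and Z is
              * stored as a value that is zero exactly when the Z flag is
              * set, which is why the 0/1 sense for Z below is inverted.
              */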
2213         tcg_gen_movi_i32(QREG_CC_C, val & CCF_C ? 1 : 0);
2214         tcg_gen_movi_i32(QREG_CC_V, val & CCF_V ? -1 : 0);
2215         tcg_gen_movi_i32(QREG_CC_Z, val & CCF_Z ? 0 : 1);
2216         tcg_gen_movi_i32(QREG_CC_N, val & CCF_N ? -1 : 0);
2217         tcg_gen_movi_i32(QREG_CC_X, val & CCF_X ? 1 : 0);
2218     } else {
2219         TCGv sr = tcg_const_i32(val);
2220         gen_helper_set_sr(cpu_env, sr);
2221         tcg_temp_free(sr);
2222     }
2223     set_cc_op(s, CC_OP_FLAGS);
2224 }
2225 
2226 static void gen_set_sr(DisasContext *s, TCGv val, int ccr_only)
2227 {
2228     if (ccr_only) {
2229         gen_helper_set_ccr(cpu_env, val);
2230     } else {
2231         gen_helper_set_sr(cpu_env, val);
2232     }
2233     set_cc_op(s, CC_OP_FLAGS);
2234 }
2235 
2236 static void gen_move_to_sr(CPUM68KState *env, DisasContext *s, uint16_t insn,
2237                            bool ccr_only)
2238 {
2239     if ((insn & 0x3f) == 0x3c) {
2240         uint16_t val;
2241         val = read_im16(env, s);
2242         gen_set_sr_im(s, val, ccr_only);
2243     } else {
2244         TCGv src;
2245         SRC_EA(env, src, OS_WORD, 0, NULL);
2246         gen_set_sr(s, src, ccr_only);
2247     }
2248 }
2249 
2250 DISAS_INSN(arith_im)
2251 {
2252     int op;
2253     TCGv im;
2254     TCGv src1;
2255     TCGv dest;
2256     TCGv addr;
2257     int opsize;
2258     bool with_SR = ((insn & 0x3f) == 0x3c);
2259 
2260     op = (insn >> 9) & 7;
2261     opsize = insn_opsize(insn);
2262     switch (opsize) {
2263     case OS_BYTE:
2264         im = tcg_const_i32((int8_t)read_im8(env, s));
2265         break;
2266     case OS_WORD:
2267         im = tcg_const_i32((int16_t)read_im16(env, s));
2268         break;
2269     case OS_LONG:
2270         im = tcg_const_i32(read_im32(env, s));
2271         break;
2272     default:
2273        abort();
2274     }
2275 
2276     if (with_SR) {
2277         /* SR/CCR can only be used with andi/eori/ori */
2278         if (op == 2 || op == 3 || op == 6) {
2279             disas_undef(env, s, insn);
2280             return;
2281         }
2282         switch (opsize) {
2283         case OS_BYTE:
2284             src1 = gen_get_ccr(s);
2285             break;
2286         case OS_WORD:
2287             if (IS_USER(s)) {
2288                 gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
2289                 return;
2290             }
2291             src1 = gen_get_sr(s);
2292             break;
2293         case OS_LONG:
2294             disas_undef(env, s, insn);
2295             return;
2296         }
2297     } else {
2298         SRC_EA(env, src1, opsize, 1, (op == 6) ? NULL : &addr);
2299     }
2300     dest = tcg_temp_new();
2301     switch (op) {
2302     case 0: /* ori */
2303         tcg_gen_or_i32(dest, src1, im);
2304         if (with_SR) {
2305             gen_set_sr(s, dest, opsize == OS_BYTE);
2306         } else {
2307             DEST_EA(env, insn, opsize, dest, &addr);
2308             gen_logic_cc(s, dest, opsize);
2309         }
2310         break;
2311     case 1: /* andi */
2312         tcg_gen_and_i32(dest, src1, im);
2313         if (with_SR) {
2314             gen_set_sr(s, dest, opsize == OS_BYTE);
2315         } else {
2316             DEST_EA(env, insn, opsize, dest, &addr);
2317             gen_logic_cc(s, dest, opsize);
2318         }
2319         break;
2320     case 2: /* subi */
2321         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, src1, im);
2322         tcg_gen_sub_i32(dest, src1, im);
2323         gen_update_cc_add(dest, im, opsize);
2324         set_cc_op(s, CC_OP_SUBB + opsize);
2325         DEST_EA(env, insn, opsize, dest, &addr);
2326         break;
2327     case 3: /* addi */
2328         tcg_gen_add_i32(dest, src1, im);
2329         gen_update_cc_add(dest, im, opsize);
2330         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, im);
2331         set_cc_op(s, CC_OP_ADDB + opsize);
2332         DEST_EA(env, insn, opsize, dest, &addr);
2333         break;
2334     case 5: /* eori */
2335         tcg_gen_xor_i32(dest, src1, im);
2336         if (with_SR) {
2337             gen_set_sr(s, dest, opsize == OS_BYTE);
2338         } else {
2339             DEST_EA(env, insn, opsize, dest, &addr);
2340             gen_logic_cc(s, dest, opsize);
2341         }
2342         break;
2343     case 6: /* cmpi */
2344         gen_update_cc_cmp(s, src1, im, opsize);
2345         break;
2346     default:
2347         abort();
2348     }
2349     tcg_temp_free(im);
2350     tcg_temp_free(dest);
2351 }
2352 
2353 DISAS_INSN(cas)
2354 {
2355     int opsize;
2356     TCGv addr;
2357     uint16_t ext;
2358     TCGv load;
2359     TCGv cmp;
2360     TCGMemOp opc;
2361 
2362     switch ((insn >> 9) & 3) {
2363     case 1:
2364         opsize = OS_BYTE;
2365         opc = MO_SB;
2366         break;
2367     case 2:
2368         opsize = OS_WORD;
2369         opc = MO_TESW;
2370         break;
2371     case 3:
2372         opsize = OS_LONG;
2373         opc = MO_TESL;
2374         break;
2375     default:
2376         g_assert_not_reached();
2377     }
2378 
2379     ext = read_im16(env, s);
2380 
2381     /* cas Dc,Du,<EA> */
2382 
2383     addr = gen_lea(env, s, insn, opsize);
2384     if (IS_NULL_QREG(addr)) {
2385         gen_addr_fault(s);
2386         return;
2387     }
2388 
2389     cmp = gen_extend(DREG(ext, 0), opsize, 1);
2390 
2391     /* if  <EA> == Dc then
2392      *     <EA> = Du
2393      *     Dc = <EA> (because <EA> == Dc)
2394      * else
2395      *     Dc = <EA>
2396      */
2397 
2398     load = tcg_temp_new();
2399     tcg_gen_atomic_cmpxchg_i32(load, addr, cmp, DREG(ext, 6),
2400                                IS_USER(s), opc);
2401     /* update flags before setting cmp to load */
2402     gen_update_cc_cmp(s, load, cmp, opsize);
2403     gen_partset_reg(opsize, DREG(ext, 0), load);
2404 
2405     tcg_temp_free(load);
2406 
2407     switch (extract32(insn, 3, 3)) {
2408     case 3: /* Indirect postincrement.  */
2409         tcg_gen_addi_i32(AREG(insn, 0), addr, opsize_bytes(opsize));
2410         break;
2411     case 4: /* Indirect predecrement.  */
2412         tcg_gen_mov_i32(AREG(insn, 0), addr);
2413         break;
2414     }
2415 }
2416 
2417 DISAS_INSN(cas2w)
2418 {
2419     uint16_t ext1, ext2;
2420     TCGv addr1, addr2;
2421     TCGv regs;
2422 
2423     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2424 
2425     ext1 = read_im16(env, s);
2426 
2427     if (ext1 & 0x8000) {
2428         /* Address Register */
2429         addr1 = AREG(ext1, 12);
2430     } else {
2431         /* Data Register */
2432         addr1 = DREG(ext1, 12);
2433     }
2434 
2435     ext2 = read_im16(env, s);
2436     if (ext2 & 0x8000) {
2437         /* Address Register */
2438         addr2 = AREG(ext2, 12);
2439     } else {
2440         /* Data Register */
2441         addr2 = DREG(ext2, 12);
2442     }
2443 
2444     /* if (R1) == Dc1 && (R2) == Dc2 then
2445      *     (R1) = Du1
2446      *     (R2) = Du2
2447      * else
2448      *     Dc1 = (R1)
2449      *     Dc2 = (R2)
2450      */
2451 
2452     regs = tcg_const_i32(REG(ext2, 6) |
2453                          (REG(ext1, 6) << 3) |
2454                          (REG(ext2, 0) << 6) |
2455                          (REG(ext1, 0) << 9));
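         /* Field layout of REGS as consumed by the helper (inferred from the
          * packing above and from the cas encoding, where REG(ext, 0) is Dc
          * and REG(ext, 6) is Du): bits 2:0 = Du2, 5:3 = Du1, 8:6 = Dc2 and
          * 11:9 = Dc1.
          */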
2456     if (tb_cflags(s->tb) & CF_PARALLEL) {
2457         gen_helper_exit_atomic(cpu_env);
2458     } else {
2459         gen_helper_cas2w(cpu_env, regs, addr1, addr2);
2460     }
2461     tcg_temp_free(regs);
2462 
2463     /* Note that the cas2w helper has also assigned to env->cc_op.  */
2464     s->cc_op = CC_OP_CMPW;
2465     s->cc_op_synced = 1;
2466 }
2467 
2468 DISAS_INSN(cas2l)
2469 {
2470     uint16_t ext1, ext2;
2471     TCGv addr1, addr2, regs;
2472 
2473     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2474 
2475     ext1 = read_im16(env, s);
2476 
2477     if (ext1 & 0x8000) {
2478         /* Address Register */
2479         addr1 = AREG(ext1, 12);
2480     } else {
2481         /* Data Register */
2482         addr1 = DREG(ext1, 12);
2483     }
2484 
2485     ext2 = read_im16(env, s);
2486     if (ext2 & 0x8000) {
2487         /* Address Register */
2488         addr2 = AREG(ext2, 12);
2489     } else {
2490         /* Data Register */
2491         addr2 = DREG(ext2, 12);
2492     }
2493 
2494     /* if (R1) == Dc1 && (R2) == Dc2 then
2495      *     (R1) = Du1
2496      *     (R2) = Du2
2497      * else
2498      *     Dc1 = (R1)
2499      *     Dc2 = (R2)
2500      */
2501 
2502     regs = tcg_const_i32(REG(ext2, 6) |
2503                          (REG(ext1, 6) << 3) |
2504                          (REG(ext2, 0) << 6) |
2505                          (REG(ext1, 0) << 9));
2506     if (tb_cflags(s->tb) & CF_PARALLEL) {
2507         gen_helper_cas2l_parallel(cpu_env, regs, addr1, addr2);
2508     } else {
2509         gen_helper_cas2l(cpu_env, regs, addr1, addr2);
2510     }
2511     tcg_temp_free(regs);
2512 
2513     /* Note that the cas2l helper has also assigned to env->cc_op.  */
2514     s->cc_op = CC_OP_CMPL;
2515     s->cc_op_synced = 1;
2516 }
2517 
2518 DISAS_INSN(byterev)
2519 {
2520     TCGv reg;
2521 
2522     reg = DREG(insn, 0);
2523     tcg_gen_bswap32_i32(reg, reg);
2524 }
2525 
2526 DISAS_INSN(move)
2527 {
2528     TCGv src;
2529     TCGv dest;
2530     int op;
2531     int opsize;
2532 
2533     switch (insn >> 12) {
2534     case 1: /* move.b */
2535         opsize = OS_BYTE;
2536         break;
2537     case 2: /* move.l */
2538         opsize = OS_LONG;
2539         break;
2540     case 3: /* move.w */
2541         opsize = OS_WORD;
2542         break;
2543     default:
2544         abort();
2545     }
2546     SRC_EA(env, src, opsize, 1, NULL);
2547     op = (insn >> 6) & 7;
2548     if (op == 1) {
2549         /* movea */
2550         /* The value will already have been sign extended.  */
2551         dest = AREG(insn, 9);
2552         tcg_gen_mov_i32(dest, src);
2553     } else {
2554         /* normal move */
2555         uint16_t dest_ea;
2556         dest_ea = ((insn >> 9) & 7) | (op << 3);
2557         DEST_EA(env, dest_ea, opsize, src, NULL);
2558         /* This will be correct because loads sign extend.  */
2559         gen_logic_cc(s, src, opsize);
2560     }
2561 }
2562 
2563 DISAS_INSN(negx)
2564 {
2565     TCGv z;
2566     TCGv src;
2567     TCGv addr;
2568     int opsize;
2569 
2570     opsize = insn_opsize(insn);
2571     SRC_EA(env, src, opsize, 1, &addr);
2572 
2573     gen_flush_flags(s); /* compute old Z */
2574 
2575     /* Perform subtract with borrow.
2576      * (X, N) = -(src + X);
2577      */
2578 
2579     z = tcg_const_i32(0);
2580     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, z, QREG_CC_X, z);
2581     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, z, z, QREG_CC_N, QREG_CC_X);
2582     tcg_temp_free(z);
2583     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
2584 
2585     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
2586 
2587     /* Compute signed-overflow for negation.  The normal formula for
2588      * subtraction is (res ^ src) & (src ^ dest), but with dest==0
2589      * this simplifies to res & src.
2590      */
2591 
2592     tcg_gen_and_i32(QREG_CC_V, QREG_CC_N, src);
2593 
2594     /* Copy the rest of the results into place.  */
2595     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
2596     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
2597 
2598     set_cc_op(s, CC_OP_FLAGS);
2599 
2600     /* result is in QREG_CC_N */
2601 
2602     DEST_EA(env, insn, opsize, QREG_CC_N, &addr);
2603 }
2604 
2605 DISAS_INSN(lea)
2606 {
2607     TCGv reg;
2608     TCGv tmp;
2609 
2610     reg = AREG(insn, 9);
2611     tmp = gen_lea(env, s, insn, OS_LONG);
2612     if (IS_NULL_QREG(tmp)) {
2613         gen_addr_fault(s);
2614         return;
2615     }
2616     tcg_gen_mov_i32(reg, tmp);
2617 }
2618 
2619 DISAS_INSN(clr)
2620 {
2621     int opsize;
2622     TCGv zero;
2623 
2624     zero = tcg_const_i32(0);
2625 
2626     opsize = insn_opsize(insn);
2627     DEST_EA(env, insn, opsize, zero, NULL);
2628     gen_logic_cc(s, zero, opsize);
2629     tcg_temp_free(zero);
2630 }
2631 
2632 DISAS_INSN(move_from_ccr)
2633 {
2634     TCGv ccr;
2635 
2636     ccr = gen_get_ccr(s);
2637     DEST_EA(env, insn, OS_WORD, ccr, NULL);
2638 }
2639 
2640 DISAS_INSN(neg)
2641 {
2642     TCGv src1;
2643     TCGv dest;
2644     TCGv addr;
2645     int opsize;
2646 
2647     opsize = insn_opsize(insn);
2648     SRC_EA(env, src1, opsize, 1, &addr);
2649     dest = tcg_temp_new();
2650     tcg_gen_neg_i32(dest, src1);
2651     set_cc_op(s, CC_OP_SUBB + opsize);
2652     gen_update_cc_add(dest, src1, opsize);
2653     tcg_gen_setcondi_i32(TCG_COND_NE, QREG_CC_X, dest, 0);
2654     DEST_EA(env, insn, opsize, dest, &addr);
2655     tcg_temp_free(dest);
2656 }
2657 
2658 DISAS_INSN(move_to_ccr)
2659 {
2660     gen_move_to_sr(env, s, insn, true);
2661 }
2662 
2663 DISAS_INSN(not)
2664 {
2665     TCGv src1;
2666     TCGv dest;
2667     TCGv addr;
2668     int opsize;
2669 
2670     opsize = insn_opsize(insn);
2671     SRC_EA(env, src1, opsize, 1, &addr);
2672     dest = tcg_temp_new();
2673     tcg_gen_not_i32(dest, src1);
2674     DEST_EA(env, insn, opsize, dest, &addr);
2675     gen_logic_cc(s, dest, opsize);
2676 }
2677 
2678 DISAS_INSN(swap)
2679 {
2680     TCGv src1;
2681     TCGv src2;
2682     TCGv reg;
2683 
2684     src1 = tcg_temp_new();
2685     src2 = tcg_temp_new();
2686     reg = DREG(insn, 0);
2687     tcg_gen_shli_i32(src1, reg, 16);
2688     tcg_gen_shri_i32(src2, reg, 16);
2689     tcg_gen_or_i32(reg, src1, src2);
2690     tcg_temp_free(src2);
2691     tcg_temp_free(src1);
2692     gen_logic_cc(s, reg, OS_LONG);
2693 }
2694 
2695 DISAS_INSN(bkpt)
2696 {
2697     gen_exception(s, s->insn_pc, EXCP_DEBUG);
2698 }
2699 
2700 DISAS_INSN(pea)
2701 {
2702     TCGv tmp;
2703 
2704     tmp = gen_lea(env, s, insn, OS_LONG);
2705     if (IS_NULL_QREG(tmp)) {
2706         gen_addr_fault(s);
2707         return;
2708     }
2709     gen_push(s, tmp);
2710 }
2711 
2712 DISAS_INSN(ext)
2713 {
2714     int op;
2715     TCGv reg;
2716     TCGv tmp;
2717 
2718     reg = DREG(insn, 0);
2719     op = (insn >> 6) & 7;
2720     tmp = tcg_temp_new();
2721     if (op == 3)
2722         tcg_gen_ext16s_i32(tmp, reg);
2723     else
2724         tcg_gen_ext8s_i32(tmp, reg);
2725     if (op == 2)
2726         gen_partset_reg(OS_WORD, reg, tmp);
2727     else
2728         tcg_gen_mov_i32(reg, tmp);
2729     gen_logic_cc(s, tmp, OS_LONG);
2730     tcg_temp_free(tmp);
2731 }
2732 
2733 DISAS_INSN(tst)
2734 {
2735     int opsize;
2736     TCGv tmp;
2737 
2738     opsize = insn_opsize(insn);
2739     SRC_EA(env, tmp, opsize, 1, NULL);
2740     gen_logic_cc(s, tmp, opsize);
2741 }
2742 
2743 DISAS_INSN(pulse)
2744 {
2745     /* Implemented as a NOP.  */
2746 }
2747 
2748 DISAS_INSN(illegal)
2749 {
2750     gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
2751 }
2752 
2753 /* ??? This should be atomic.  */
2754 DISAS_INSN(tas)
2755 {
2756     TCGv dest;
2757     TCGv src1;
2758     TCGv addr;
2759 
2760     dest = tcg_temp_new();
2761     SRC_EA(env, src1, OS_BYTE, 1, &addr);
2762     gen_logic_cc(s, src1, OS_BYTE);
2763     tcg_gen_ori_i32(dest, src1, 0x80);
2764     DEST_EA(env, insn, OS_BYTE, dest, &addr);
2765     tcg_temp_free(dest);
2766 }
2767 
2768 DISAS_INSN(mull)
2769 {
2770     uint16_t ext;
2771     TCGv src1;
2772     int sign;
2773 
2774     ext = read_im16(env, s);
2775 
2776     sign = ext & 0x800;
2777 
2778     if (ext & 0x400) {
2779         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
2780             gen_exception(s, s->insn_pc, EXCP_UNSUPPORTED);
2781             return;
2782         }
2783 
2784         SRC_EA(env, src1, OS_LONG, 0, NULL);
2785 
2786         if (sign) {
2787             tcg_gen_muls2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2788         } else {
2789             tcg_gen_mulu2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2790         }
2791         /* if Dl == Dh, 68040 returns low word */
2792         tcg_gen_mov_i32(DREG(ext, 0), QREG_CC_N);
2793         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_Z);
2794         tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N);
2795 
2796         tcg_gen_movi_i32(QREG_CC_V, 0);
2797         tcg_gen_movi_i32(QREG_CC_C, 0);
2798 
2799         set_cc_op(s, CC_OP_FLAGS);
2800         return;
2801     }
2802     SRC_EA(env, src1, OS_LONG, 0, NULL);
2803     if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
2804         tcg_gen_movi_i32(QREG_CC_C, 0);
2805         if (sign) {
2806             tcg_gen_muls2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2807             /* QREG_CC_V is -(QREG_CC_V != (QREG_CC_N >> 31)) */
2808             tcg_gen_sari_i32(QREG_CC_Z, QREG_CC_N, 31);
2809             tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_Z);
2810         } else {
2811             tcg_gen_mulu2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2812             /* QREG_CC_V is -(QREG_CC_V != 0), use QREG_CC_C as 0 */
2813             tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_C);
2814         }
2815         tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
2816         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_N);
2817 
2818         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
2819 
2820         set_cc_op(s, CC_OP_FLAGS);
2821     } else {
2822         /* The upper 32 bits of the product are discarded, so
2823            muls.l and mulu.l are functionally equivalent.  */
2824         tcg_gen_mul_i32(DREG(ext, 12), src1, DREG(ext, 12));
2825         gen_logic_cc(s, DREG(ext, 12), OS_LONG);
2826     }
2827 }
2828 
2829 static void gen_link(DisasContext *s, uint16_t insn, int32_t offset)
2830 {
2831     TCGv reg;
2832     TCGv tmp;
2833 
2834     reg = AREG(insn, 0);
2835     tmp = tcg_temp_new();
2836     tcg_gen_subi_i32(tmp, QREG_SP, 4);
2837     gen_store(s, OS_LONG, tmp, reg, IS_USER(s));
2838     if ((insn & 7) != 7) {
2839         tcg_gen_mov_i32(reg, tmp);
2840     }
2841     tcg_gen_addi_i32(QREG_SP, tmp, offset);
2842     tcg_temp_free(tmp);
2843 }
2844 
2845 DISAS_INSN(link)
2846 {
2847     int16_t offset;
2848 
2849     offset = read_im16(env, s);
2850     gen_link(s, insn, offset);
2851 }
2852 
2853 DISAS_INSN(linkl)
2854 {
2855     int32_t offset;
2856 
2857     offset = read_im32(env, s);
2858     gen_link(s, insn, offset);
2859 }
2860 
2861 DISAS_INSN(unlk)
2862 {
2863     TCGv src;
2864     TCGv reg;
2865     TCGv tmp;
2866 
2867     src = tcg_temp_new();
2868     reg = AREG(insn, 0);
2869     tcg_gen_mov_i32(src, reg);
2870     tmp = gen_load(s, OS_LONG, src, 0, IS_USER(s));
2871     tcg_gen_mov_i32(reg, tmp);
2872     tcg_gen_addi_i32(QREG_SP, src, 4);
2873     tcg_temp_free(src);
2874     tcg_temp_free(tmp);
2875 }
2876 
2877 #if defined(CONFIG_SOFTMMU)
2878 DISAS_INSN(reset)
2879 {
2880     if (IS_USER(s)) {
2881         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
2882         return;
2883     }
2884 
2885     gen_helper_reset(cpu_env);
2886 }
2887 #endif
2888 
2889 DISAS_INSN(nop)
2890 {
2891 }
2892 
2893 DISAS_INSN(rtd)
2894 {
2895     TCGv tmp;
2896     int16_t offset = read_im16(env, s);
2897 
2898     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2899     tcg_gen_addi_i32(QREG_SP, QREG_SP, offset + 4);
2900     gen_jmp(s, tmp);
2901 }
2902 
2903 DISAS_INSN(rts)
2904 {
2905     TCGv tmp;
2906 
2907     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2908     tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
2909     gen_jmp(s, tmp);
2910 }
2911 
2912 DISAS_INSN(jump)
2913 {
2914     TCGv tmp;
2915 
2916     /* Load the target address first to ensure correct exception
2917        behavior.  */
2918     tmp = gen_lea(env, s, insn, OS_LONG);
2919     if (IS_NULL_QREG(tmp)) {
2920         gen_addr_fault(s);
2921         return;
2922     }
2923     if ((insn & 0x40) == 0) {
2924         /* jsr */
2925         gen_push(s, tcg_const_i32(s->pc));
2926     }
2927     gen_jmp(s, tmp);
2928 }
2929 
2930 DISAS_INSN(addsubq)
2931 {
2932     TCGv src;
2933     TCGv dest;
2934     TCGv val;
2935     int imm;
2936     TCGv addr;
2937     int opsize;
2938 
2939     if ((insn & 070) == 010) {
2940         /* Operation on address register is always long.  */
2941         opsize = OS_LONG;
2942     } else {
2943         opsize = insn_opsize(insn);
2944     }
2945     SRC_EA(env, src, opsize, 1, &addr);
2946     imm = (insn >> 9) & 7;
2947     if (imm == 0) {
2948         imm = 8;
2949     }
2950     val = tcg_const_i32(imm);
2951     dest = tcg_temp_new();
2952     tcg_gen_mov_i32(dest, src);
2953     if ((insn & 0x38) == 0x08) {
2954         /* Don't update condition codes if the destination is an
2955            address register.  */
2956         if (insn & 0x0100) {
2957             tcg_gen_sub_i32(dest, dest, val);
2958         } else {
2959             tcg_gen_add_i32(dest, dest, val);
2960         }
2961     } else {
2962         if (insn & 0x0100) {
2963             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
2964             tcg_gen_sub_i32(dest, dest, val);
2965             set_cc_op(s, CC_OP_SUBB + opsize);
2966         } else {
2967             tcg_gen_add_i32(dest, dest, val);
2968             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
2969             set_cc_op(s, CC_OP_ADDB + opsize);
2970         }
2971         gen_update_cc_add(dest, val, opsize);
2972     }
2973     tcg_temp_free(val);
2974     DEST_EA(env, insn, opsize, dest, &addr);
2975     tcg_temp_free(dest);
2976 }
2977 
2978 DISAS_INSN(tpf)
2979 {
2980     switch (insn & 7) {
2981     case 2: /* One extension word.  */
2982         s->pc += 2;
2983         break;
2984     case 3: /* Two extension words.  */
2985         s->pc += 4;
2986         break;
2987     case 4: /* No extension words.  */
2988         break;
2989     default:
2990         disas_undef(env, s, insn);
2991     }
2992 }
2993 
2994 DISAS_INSN(branch)
2995 {
2996     int32_t offset;
2997     uint32_t base;
2998     int op;
2999     TCGLabel *l1;
3000 
3001     base = s->pc;
3002     op = (insn >> 8) & 0xf;
3003     offset = (int8_t)insn;
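         /* The displacement is normally the low byte of the opcode; the
          * special values 0x00 and 0xff select a 16-bit or a 32-bit
          * extension word instead, which the two reads below handle.
          */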
3004     if (offset == 0) {
3005         offset = (int16_t)read_im16(env, s);
3006     } else if (offset == -1) {
3007         offset = read_im32(env, s);
3008     }
3009     if (op == 1) {
3010         /* bsr */
3011         gen_push(s, tcg_const_i32(s->pc));
3012     }
3013     if (op > 1) {
3014         /* Bcc */
3015         l1 = gen_new_label();
3016         gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
3017         gen_jmp_tb(s, 1, base + offset);
3018         gen_set_label(l1);
3019         gen_jmp_tb(s, 0, s->pc);
3020     } else {
3021         /* Unconditional branch.  */
3022         update_cc_op(s);
3023         gen_jmp_tb(s, 0, base + offset);
3024     }
3025 }
3026 
3027 DISAS_INSN(moveq)
3028 {
3029     tcg_gen_movi_i32(DREG(insn, 9), (int8_t)insn);
3030     gen_logic_cc(s, DREG(insn, 9), OS_LONG);
3031 }
3032 
3033 DISAS_INSN(mvzs)
3034 {
3035     int opsize;
3036     TCGv src;
3037     TCGv reg;
3038 
3039     if (insn & 0x40)
3040         opsize = OS_WORD;
3041     else
3042         opsize = OS_BYTE;
3043     SRC_EA(env, src, opsize, (insn & 0x80) == 0, NULL);
3044     reg = DREG(insn, 9);
3045     tcg_gen_mov_i32(reg, src);
3046     gen_logic_cc(s, src, opsize);
3047 }
3048 
3049 DISAS_INSN(or)
3050 {
3051     TCGv reg;
3052     TCGv dest;
3053     TCGv src;
3054     TCGv addr;
3055     int opsize;
3056 
3057     opsize = insn_opsize(insn);
3058     reg = gen_extend(DREG(insn, 9), opsize, 0);
3059     dest = tcg_temp_new();
3060     if (insn & 0x100) {
3061         SRC_EA(env, src, opsize, 0, &addr);
3062         tcg_gen_or_i32(dest, src, reg);
3063         DEST_EA(env, insn, opsize, dest, &addr);
3064     } else {
3065         SRC_EA(env, src, opsize, 0, NULL);
3066         tcg_gen_or_i32(dest, src, reg);
3067         gen_partset_reg(opsize, DREG(insn, 9), dest);
3068     }
3069     gen_logic_cc(s, dest, opsize);
3070     tcg_temp_free(dest);
3071 }
3072 
3073 DISAS_INSN(suba)
3074 {
3075     TCGv src;
3076     TCGv reg;
3077 
3078     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3079     reg = AREG(insn, 9);
3080     tcg_gen_sub_i32(reg, reg, src);
3081 }
3082 
3083 static inline void gen_subx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3084 {
3085     TCGv tmp;
3086 
3087     gen_flush_flags(s); /* compute old Z */
3088 
3089     /* Perform subtract with borrow.
3090      * (X, N) = dest - (src + X);
3091      */
3092 
3093     tmp = tcg_const_i32(0);
3094     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, tmp, QREG_CC_X, tmp);
3095     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, dest, tmp, QREG_CC_N, QREG_CC_X);
3096     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3097     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
3098 
3099     /* Compute signed-overflow for subtract.  */
3100 
3101     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, dest);
3102     tcg_gen_xor_i32(tmp, dest, src);
3103     tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, tmp);
3104     tcg_temp_free(tmp);
3105 
3106     /* Copy the rest of the results into place.  */
3107     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3108     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3109 
3110     set_cc_op(s, CC_OP_FLAGS);
3111 
3112     /* result is in QREG_CC_N */
3113 }
3114 
3115 DISAS_INSN(subx_reg)
3116 {
3117     TCGv dest;
3118     TCGv src;
3119     int opsize;
3120 
3121     opsize = insn_opsize(insn);
3122 
3123     src = gen_extend(DREG(insn, 0), opsize, 1);
3124     dest = gen_extend(DREG(insn, 9), opsize, 1);
3125 
3126     gen_subx(s, src, dest, opsize);
3127 
3128     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3129 }
3130 
3131 DISAS_INSN(subx_mem)
3132 {
3133     TCGv src;
3134     TCGv addr_src;
3135     TCGv dest;
3136     TCGv addr_dest;
3137     int opsize;
3138 
3139     opsize = insn_opsize(insn);
3140 
3141     addr_src = AREG(insn, 0);
3142     tcg_gen_subi_i32(addr_src, addr_src, opsize);
3143     src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3144 
3145     addr_dest = AREG(insn, 9);
3146     tcg_gen_subi_i32(addr_dest, addr_dest, opsize);
3147     dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3148 
3149     gen_subx(s, src, dest, opsize);
3150 
3151     gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3152 
3153     tcg_temp_free(dest);
3154     tcg_temp_free(src);
3155 }
3156 
3157 DISAS_INSN(mov3q)
3158 {
3159     TCGv src;
3160     int val;
3161 
3162     val = (insn >> 9) & 7;
3163     if (val == 0)
3164         val = -1;
3165     src = tcg_const_i32(val);
3166     gen_logic_cc(s, src, OS_LONG);
3167     DEST_EA(env, insn, OS_LONG, src, NULL);
3168     tcg_temp_free(src);
3169 }
3170 
3171 DISAS_INSN(cmp)
3172 {
3173     TCGv src;
3174     TCGv reg;
3175     int opsize;
3176 
3177     opsize = insn_opsize(insn);
3178     SRC_EA(env, src, opsize, 1, NULL);
3179     reg = gen_extend(DREG(insn, 9), opsize, 1);
3180     gen_update_cc_cmp(s, reg, src, opsize);
3181 }
3182 
3183 DISAS_INSN(cmpa)
3184 {
3185     int opsize;
3186     TCGv src;
3187     TCGv reg;
3188 
3189     if (insn & 0x100) {
3190         opsize = OS_LONG;
3191     } else {
3192         opsize = OS_WORD;
3193     }
3194     SRC_EA(env, src, opsize, 1, NULL);
3195     reg = AREG(insn, 9);
3196     gen_update_cc_cmp(s, reg, src, OS_LONG);
3197 }
3198 
3199 DISAS_INSN(cmpm)
3200 {
3201     int opsize = insn_opsize(insn);
3202     TCGv src, dst;
3203 
3204     /* Post-increment load (mode 3) from Ay.  */
3205     src = gen_ea_mode(env, s, 3, REG(insn, 0), opsize,
3206                       NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3207     /* Post-increment load (mode 3) from Ax.  */
3208     dst = gen_ea_mode(env, s, 3, REG(insn, 9), opsize,
3209                       NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3210 
3211     gen_update_cc_cmp(s, dst, src, opsize);
3212 }
3213 
3214 DISAS_INSN(eor)
3215 {
3216     TCGv src;
3217     TCGv dest;
3218     TCGv addr;
3219     int opsize;
3220 
3221     opsize = insn_opsize(insn);
3222 
3223     SRC_EA(env, src, opsize, 0, &addr);
3224     dest = tcg_temp_new();
3225     tcg_gen_xor_i32(dest, src, DREG(insn, 9));
3226     gen_logic_cc(s, dest, opsize);
3227     DEST_EA(env, insn, opsize, dest, &addr);
3228     tcg_temp_free(dest);
3229 }
3230 
3231 static void do_exg(TCGv reg1, TCGv reg2)
3232 {
3233     TCGv temp = tcg_temp_new();
3234     tcg_gen_mov_i32(temp, reg1);
3235     tcg_gen_mov_i32(reg1, reg2);
3236     tcg_gen_mov_i32(reg2, temp);
3237     tcg_temp_free(temp);
3238 }
3239 
3240 DISAS_INSN(exg_dd)
3241 {
3242     /* exchange Dx and Dy */
3243     do_exg(DREG(insn, 9), DREG(insn, 0));
3244 }
3245 
3246 DISAS_INSN(exg_aa)
3247 {
3248     /* exchange Ax and Ay */
3249     do_exg(AREG(insn, 9), AREG(insn, 0));
3250 }
3251 
3252 DISAS_INSN(exg_da)
3253 {
3254     /* exchange Dx and Ay */
3255     do_exg(DREG(insn, 9), AREG(insn, 0));
3256 }
3257 
3258 DISAS_INSN(and)
3259 {
3260     TCGv src;
3261     TCGv reg;
3262     TCGv dest;
3263     TCGv addr;
3264     int opsize;
3265 
3266     dest = tcg_temp_new();
3267 
3268     opsize = insn_opsize(insn);
3269     reg = DREG(insn, 9);
3270     if (insn & 0x100) {
3271         SRC_EA(env, src, opsize, 0, &addr);
3272         tcg_gen_and_i32(dest, src, reg);
3273         DEST_EA(env, insn, opsize, dest, &addr);
3274     } else {
3275         SRC_EA(env, src, opsize, 0, NULL);
3276         tcg_gen_and_i32(dest, src, reg);
3277         gen_partset_reg(opsize, reg, dest);
3278     }
3279     gen_logic_cc(s, dest, opsize);
3280     tcg_temp_free(dest);
3281 }
3282 
3283 DISAS_INSN(adda)
3284 {
3285     TCGv src;
3286     TCGv reg;
3287 
3288     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3289     reg = AREG(insn, 9);
3290     tcg_gen_add_i32(reg, reg, src);
3291 }
3292 
3293 static inline void gen_addx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3294 {
3295     TCGv tmp;
3296 
3297     gen_flush_flags(s); /* compute old Z */
3298 
3299     /* Perform addition with carry.
3300      * (X, N) = src + dest + X;
3301      */
3302 
3303     tmp = tcg_const_i32(0);
3304     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_X, tmp, dest, tmp);
3305     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_N, QREG_CC_X, src, tmp);
3306     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3307 
3308     /* Compute signed-overflow for addition.  */
3309 
3310     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3311     tcg_gen_xor_i32(tmp, dest, src);
3312     tcg_gen_andc_i32(QREG_CC_V, QREG_CC_V, tmp);
3313     tcg_temp_free(tmp);
3314 
3315     /* Copy the rest of the results into place.  */
3316     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3317     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3318 
3319     set_cc_op(s, CC_OP_FLAGS);
3320 
3321     /* result is in QREG_CC_N */
3322 }
3323 
3324 DISAS_INSN(addx_reg)
3325 {
3326     TCGv dest;
3327     TCGv src;
3328     int opsize;
3329 
3330     opsize = insn_opsize(insn);
3331 
3332     dest = gen_extend(DREG(insn, 9), opsize, 1);
3333     src = gen_extend(DREG(insn, 0), opsize, 1);
3334 
3335     gen_addx(s, src, dest, opsize);
3336 
3337     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3338 }
3339 
3340 DISAS_INSN(addx_mem)
3341 {
3342     TCGv src;
3343     TCGv addr_src;
3344     TCGv dest;
3345     TCGv addr_dest;
3346     int opsize;
3347 
3348     opsize = insn_opsize(insn);
3349 
3350     addr_src = AREG(insn, 0);
3351     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3352     src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3353 
3354     addr_dest = AREG(insn, 9);
3355     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3356     dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3357 
3358     gen_addx(s, src, dest, opsize);
3359 
3360     gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3361 
3362     tcg_temp_free(dest);
3363     tcg_temp_free(src);
3364 }
3365 
3366 static inline void shift_im(DisasContext *s, uint16_t insn, int opsize)
3367 {
3368     int count = (insn >> 9) & 7;
3369     int logical = insn & 8;
3370     int left = insn & 0x100;
3371     int bits = opsize_bytes(opsize) * 8;
3372     TCGv reg = gen_extend(DREG(insn, 0), opsize, !logical);
3373 
3374     if (count == 0) {
3375         count = 8;
3376     }
3377 
3378     tcg_gen_movi_i32(QREG_CC_V, 0);
3379     if (left) {
3380         tcg_gen_shri_i32(QREG_CC_C, reg, bits - count);
3381         tcg_gen_shli_i32(QREG_CC_N, reg, count);
3382 
3383         /* Note that ColdFire always clears V (done above),
3384            while M68000 sets it if the most significant bit changes at
3385            any time during the shift operation.  */
3386         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3387             /* if shift count >= bits, V is (reg != 0) */
3388             if (count >= bits) {
3389                 tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, reg, QREG_CC_V);
3390             } else {
3391                 TCGv t0 = tcg_temp_new();
3392                 tcg_gen_sari_i32(QREG_CC_V, reg, bits - 1);
3393                 tcg_gen_sari_i32(t0, reg, bits - count - 1);
3394                 tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, t0);
3395                 tcg_temp_free(t0);
3396             }
3397             tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3398         }
3399     } else {
3400         tcg_gen_shri_i32(QREG_CC_C, reg, count - 1);
3401         if (logical) {
3402             tcg_gen_shri_i32(QREG_CC_N, reg, count);
3403         } else {
3404             tcg_gen_sari_i32(QREG_CC_N, reg, count);
3405         }
3406     }
3407 
3408     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3409     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3410     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3411     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3412 
3413     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3414     set_cc_op(s, CC_OP_FLAGS);
3415 }
3416 
3417 static inline void shift_reg(DisasContext *s, uint16_t insn, int opsize)
3418 {
3419     int logical = insn & 8;
3420     int left = insn & 0x100;
3421     int bits = opsize_bytes(opsize) * 8;
3422     TCGv reg = gen_extend(DREG(insn, 0), opsize, !logical);
3423     TCGv s32;
3424     TCGv_i64 t64, s64;
3425 
3426     t64 = tcg_temp_new_i64();
3427     s64 = tcg_temp_new_i64();
3428     s32 = tcg_temp_new();
3429 
3430     /* Note that m68k truncates the shift count modulo 64, not 32.
3431        In addition, a 64-bit shift makes it easy to find "the last
3432        bit shifted out", for the carry flag.  */
3433     tcg_gen_andi_i32(s32, DREG(insn, 9), 63);
3434     tcg_gen_extu_i32_i64(s64, s32);
3435     tcg_gen_extu_i32_i64(t64, reg);
3436 
3437     /* Optimistically set V=0.  Also used as a zero source below.  */
3438     tcg_gen_movi_i32(QREG_CC_V, 0);
3439     if (left) {
3440         tcg_gen_shl_i64(t64, t64, s64);
3441 
3442         if (opsize == OS_LONG) {
3443             tcg_gen_extr_i64_i32(QREG_CC_N, QREG_CC_C, t64);
3444             /* Note that C=0 if shift count is 0, and we get that for free.  */
3445         } else {
3446             TCGv zero = tcg_const_i32(0);
3447             tcg_gen_extrl_i64_i32(QREG_CC_N, t64);
3448             tcg_gen_shri_i32(QREG_CC_C, QREG_CC_N, bits);
3449             tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3450                                 s32, zero, zero, QREG_CC_C);
3451             tcg_temp_free(zero);
3452         }
3453         tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3454 
3455         /* X = C, but only if the shift count was non-zero.  */
3456         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3457                             QREG_CC_C, QREG_CC_X);
3458 
3459         /* M68000 sets V if the most significant bit is changed at
3460          * any time during the shift operation.  Do this via creating
3461          * an extension of the sign bit, comparing, and discarding
3462          * the bits below the sign bit.  I.e.
3463          *     int64_t s = (intN_t)reg;
3464          *     int64_t t = (int64_t)(intN_t)reg << count;
3465          *     V = ((s ^ t) & (-1 << (bits - 1))) != 0
3466          */
3467         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3468             TCGv_i64 tt = tcg_const_i64(32);
3469             /* if shift is greater than 32, use 32 */
3470             tcg_gen_movcond_i64(TCG_COND_GT, s64, s64, tt, tt, s64);
3471             tcg_temp_free_i64(tt);
3472             /* Sign extend the input to 64 bits; re-do the shift.  */
3473             tcg_gen_ext_i32_i64(t64, reg);
3474             tcg_gen_shl_i64(s64, t64, s64);
3475             /* Clear all bits that are unchanged.  */
3476             tcg_gen_xor_i64(t64, t64, s64);
3477             /* Ignore the bits below the sign bit.  */
3478             tcg_gen_andi_i64(t64, t64, -1ULL << (bits - 1));
3479             /* If any bits remain set, we have overflow.  */
3480             tcg_gen_setcondi_i64(TCG_COND_NE, t64, t64, 0);
3481             tcg_gen_extrl_i64_i32(QREG_CC_V, t64);
3482             tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3483         }
3484     } else {
3485         tcg_gen_shli_i64(t64, t64, 32);
3486         if (logical) {
3487             tcg_gen_shr_i64(t64, t64, s64);
3488         } else {
3489             tcg_gen_sar_i64(t64, t64, s64);
3490         }
3491         tcg_gen_extr_i64_i32(QREG_CC_C, QREG_CC_N, t64);
3492 
3493         /* Note that C=0 if shift count is 0, and we get that for free.  */
3494         tcg_gen_shri_i32(QREG_CC_C, QREG_CC_C, 31);
3495 
3496         /* X = C, but only if the shift count was non-zero.  */
3497         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3498                             QREG_CC_C, QREG_CC_X);
3499     }
3500     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3501     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3502 
3503     tcg_temp_free(s32);
3504     tcg_temp_free_i64(s64);
3505     tcg_temp_free_i64(t64);
3506 
3507     /* Write back the result.  */
3508     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3509     set_cc_op(s, CC_OP_FLAGS);
3510 }
3511 
3512 DISAS_INSN(shift8_im)
3513 {
3514     shift_im(s, insn, OS_BYTE);
3515 }
3516 
3517 DISAS_INSN(shift16_im)
3518 {
3519     shift_im(s, insn, OS_WORD);
3520 }
3521 
3522 DISAS_INSN(shift_im)
3523 {
3524     shift_im(s, insn, OS_LONG);
3525 }
3526 
3527 DISAS_INSN(shift8_reg)
3528 {
3529     shift_reg(s, insn, OS_BYTE);
3530 }
3531 
3532 DISAS_INSN(shift16_reg)
3533 {
3534     shift_reg(s, insn, OS_WORD);
3535 }
3536 
3537 DISAS_INSN(shift_reg)
3538 {
3539     shift_reg(s, insn, OS_LONG);
3540 }
3541 
3542 DISAS_INSN(shift_mem)
3543 {
3544     int logical = insn & 8;
3545     int left = insn & 0x100;
3546     TCGv src;
3547     TCGv addr;
3548 
3549     SRC_EA(env, src, OS_WORD, !logical, &addr);
3550     tcg_gen_movi_i32(QREG_CC_V, 0);
3551     if (left) {
3552         tcg_gen_shri_i32(QREG_CC_C, src, 15);
3553         tcg_gen_shli_i32(QREG_CC_N, src, 1);
3554 
3555         /* Note that ColdFire always clears V,
3556            while M68000 sets it if the most significant bit changes at
3557            any time during the shift operation.  */
3558         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3559             src = gen_extend(src, OS_WORD, 1);
3560             tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3561         }
3562     } else {
3563         tcg_gen_mov_i32(QREG_CC_C, src);
3564         if (logical) {
3565             tcg_gen_shri_i32(QREG_CC_N, src, 1);
3566         } else {
3567             tcg_gen_sari_i32(QREG_CC_N, src, 1);
3568         }
3569     }
3570 
3571     gen_ext(QREG_CC_N, QREG_CC_N, OS_WORD, 1);
3572     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3573     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3574     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3575 
3576     DEST_EA(env, insn, OS_WORD, QREG_CC_N, &addr);
3577     set_cc_op(s, CC_OP_FLAGS);
3578 }
3579 
3580 static void rotate(TCGv reg, TCGv shift, int left, int size)
3581 {
3582     switch (size) {
3583     case 8:
3584         /* Replicate the 8-bit input so that a 32-bit rotate works.  */
3585         tcg_gen_ext8u_i32(reg, reg);
3586         tcg_gen_muli_i32(reg, reg, 0x01010101);
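             /* e.g. 0xab becomes 0xabababab; because the pattern repeats
              * every 8 bits, any 32-bit rotate leaves the correctly rotated
              * byte in the low 8 bits. */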
3587         goto do_long;
3588     case 16:
3589         /* Replicate the 16-bit input so that a 32-bit rotate works.  */
3590         tcg_gen_deposit_i32(reg, reg, reg, 16, 16);
3591         goto do_long;
3592     do_long:
3593     default:
3594         if (left) {
3595             tcg_gen_rotl_i32(reg, reg, shift);
3596         } else {
3597             tcg_gen_rotr_i32(reg, reg, shift);
3598         }
3599     }
3600 
3601     /* compute flags */
3602 
3603     switch (size) {
3604     case 8:
3605         tcg_gen_ext8s_i32(reg, reg);
3606         break;
3607     case 16:
3608         tcg_gen_ext16s_i32(reg, reg);
3609         break;
3610     default:
3611         break;
3612     }
3613 
3614     /* QREG_CC_X is not affected */
3615 
3616     tcg_gen_mov_i32(QREG_CC_N, reg);
3617     tcg_gen_mov_i32(QREG_CC_Z, reg);
3618 
3619     if (left) {
3620         tcg_gen_andi_i32(QREG_CC_C, reg, 1);
3621     } else {
3622         tcg_gen_shri_i32(QREG_CC_C, reg, 31);
3623     }
3624 
3625     tcg_gen_movi_i32(QREG_CC_V, 0); /* always cleared */
3626 }
3627 
3628 static void rotate_x_flags(TCGv reg, TCGv X, int size)
3629 {
3630     switch (size) {
3631     case 8:
3632         tcg_gen_ext8s_i32(reg, reg);
3633         break;
3634     case 16:
3635         tcg_gen_ext16s_i32(reg, reg);
3636         break;
3637     default:
3638         break;
3639     }
3640     tcg_gen_mov_i32(QREG_CC_N, reg);
3641     tcg_gen_mov_i32(QREG_CC_Z, reg);
3642     tcg_gen_mov_i32(QREG_CC_X, X);
3643     tcg_gen_mov_i32(QREG_CC_C, X);
3644     tcg_gen_movi_i32(QREG_CC_V, 0);
3645 }
3646 
3647 /* Result of rotate_x() is valid if 0 <= shift <= size */
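     /* Conceptually this rotates a (size + 1)-bit quantity: the operand plus
      * the X flag sitting just above its most significant bit, which is why
      * the complementary shift amounts below add up to size + 1.
      */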
3648 static TCGv rotate_x(TCGv reg, TCGv shift, int left, int size)
3649 {
3650     TCGv X, shl, shr, shx, sz, zero;
3651 
3652     sz = tcg_const_i32(size);
3653 
3654     shr = tcg_temp_new();
3655     shl = tcg_temp_new();
3656     shx = tcg_temp_new();
3657     if (left) {
3658         tcg_gen_mov_i32(shl, shift);      /* shl = shift */
3659         tcg_gen_movi_i32(shr, size + 1);
3660         tcg_gen_sub_i32(shr, shr, shift); /* shr = size + 1 - shift */
3661         tcg_gen_subi_i32(shx, shift, 1);  /* shx = shift - 1 */
3662         /* shx = shx < 0 ? size : shx; */
3663         zero = tcg_const_i32(0);
3664         tcg_gen_movcond_i32(TCG_COND_LT, shx, shx, zero, sz, shx);
3665         tcg_temp_free(zero);
3666     } else {
3667         tcg_gen_mov_i32(shr, shift);      /* shr = shift */
3668         tcg_gen_movi_i32(shl, size + 1);
3669         tcg_gen_sub_i32(shl, shl, shift); /* shl = size + 1 - shift */
3670         tcg_gen_sub_i32(shx, sz, shift); /* shx = size - shift */
3671     }
3672 
3673     /* reg = (reg << shl) | (reg >> shr) | (x << shx); */
3674 
3675     tcg_gen_shl_i32(shl, reg, shl);
3676     tcg_gen_shr_i32(shr, reg, shr);
3677     tcg_gen_or_i32(reg, shl, shr);
3678     tcg_temp_free(shl);
3679     tcg_temp_free(shr);
3680     tcg_gen_shl_i32(shx, QREG_CC_X, shx);
3681     tcg_gen_or_i32(reg, reg, shx);
3682     tcg_temp_free(shx);
3683 
3684     /* X = (reg >> size) & 1 */
3685 
3686     X = tcg_temp_new();
3687     tcg_gen_shr_i32(X, reg, sz);
3688     tcg_gen_andi_i32(X, X, 1);
3689     tcg_temp_free(sz);
3690 
3691     return X;
3692 }
3693 
3694 /* Result of rotate32_x() is valid if 0 <= shift < 33 */
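/* The 33-bit quantity formed by the operand and X is widened to 64 bits
   so that a single 64-bit rotate can be used; the result and the new X
   are then extracted from the two halves.  */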
3695 static TCGv rotate32_x(TCGv reg, TCGv shift, int left)
3696 {
3697     TCGv_i64 t0, shift64;
3698     TCGv X, lo, hi, zero;
3699 
3700     shift64 = tcg_temp_new_i64();
3701     tcg_gen_extu_i32_i64(shift64, shift);
3702 
3703     t0 = tcg_temp_new_i64();
3704 
3705     X = tcg_temp_new();
3706     lo = tcg_temp_new();
3707     hi = tcg_temp_new();
3708 
3709     if (left) {
3710         /* create [reg:X:..] */
3711 
3712         tcg_gen_shli_i32(lo, QREG_CC_X, 31);
3713         tcg_gen_concat_i32_i64(t0, lo, reg);
3714 
3715         /* rotate */
3716 
3717         tcg_gen_rotl_i64(t0, t0, shift64);
3718         tcg_temp_free_i64(shift64);
3719 
3720         /* result is [reg:..:reg:X] */
3721 
3722         tcg_gen_extr_i64_i32(lo, hi, t0);
3723         tcg_gen_andi_i32(X, lo, 1);
3724 
3725         tcg_gen_shri_i32(lo, lo, 1);
3726     } else {
3727         /* create [..:X:reg] */
3728 
3729         tcg_gen_concat_i32_i64(t0, reg, QREG_CC_X);
3730 
3731         tcg_gen_rotr_i64(t0, t0, shift64);
3732         tcg_temp_free_i64(shift64);
3733 
3734         /* result is value: [X:reg:..:reg] */
3735 
3736         tcg_gen_extr_i64_i32(lo, hi, t0);
3737 
3738         /* extract X */
3739 
3740         tcg_gen_shri_i32(X, hi, 31);
3741 
3742         /* extract result */
3743 
3744         tcg_gen_shli_i32(hi, hi, 1);
3745     }
3746     tcg_temp_free_i64(t0);
3747     tcg_gen_or_i32(lo, lo, hi);
3748     tcg_temp_free(hi);
3749 
3750     /* if shift == 0, register and X are not affected */
3751 
3752     zero = tcg_const_i32(0);
3753     tcg_gen_movcond_i32(TCG_COND_EQ, X, shift, zero, QREG_CC_X, X);
3754     tcg_gen_movcond_i32(TCG_COND_EQ, reg, shift, zero, reg, lo);
3755     tcg_temp_free(zero);
3756     tcg_temp_free(lo);
3757 
3758     return X;
3759 }
3760 
3761 DISAS_INSN(rotate_im)
3762 {
3763     TCGv shift;
3764     int tmp;
3765     int left = (insn & 0x100);
3766 
3767     tmp = (insn >> 9) & 7;
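    /* A count field of 0 encodes a rotate by 8.  */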
3768     if (tmp == 0) {
3769         tmp = 8;
3770     }
3771 
3772     shift = tcg_const_i32(tmp);
3773     if (insn & 8) {
3774         rotate(DREG(insn, 0), shift, left, 32);
3775     } else {
3776         TCGv X = rotate32_x(DREG(insn, 0), shift, left);
3777         rotate_x_flags(DREG(insn, 0), X, 32);
3778         tcg_temp_free(X);
3779     }
3780     tcg_temp_free(shift);
3781 
3782     set_cc_op(s, CC_OP_FLAGS);
3783 }
3784 
3785 DISAS_INSN(rotate8_im)
3786 {
3787     int left = (insn & 0x100);
3788     TCGv reg;
3789     TCGv shift;
3790     int tmp;
3791 
3792     reg = gen_extend(DREG(insn, 0), OS_BYTE, 0);
3793 
3794     tmp = (insn >> 9) & 7;
3795     if (tmp == 0) {
3796         tmp = 8;
3797     }
3798 
3799     shift = tcg_const_i32(tmp);
3800     if (insn & 8) {
3801         rotate(reg, shift, left, 8);
3802     } else {
3803         TCGv X = rotate_x(reg, shift, left, 8);
3804         rotate_x_flags(reg, X, 8);
3805         tcg_temp_free(X);
3806     }
3807     tcg_temp_free(shift);
3808     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3809     set_cc_op(s, CC_OP_FLAGS);
3810 }
3811 
3812 DISAS_INSN(rotate16_im)
3813 {
3814     int left = (insn & 0x100);
3815     TCGv reg;
3816     TCGv shift;
3817     int tmp;
3818 
3819     reg = gen_extend(DREG(insn, 0), OS_WORD, 0);
3820     tmp = (insn >> 9) & 7;
3821     if (tmp == 0) {
3822         tmp = 8;
3823     }
3824 
3825     shift = tcg_const_i32(tmp);
3826     if (insn & 8) {
3827         rotate(reg, shift, left, 16);
3828     } else {
3829         TCGv X = rotate_x(reg, shift, left, 16);
3830         rotate_x_flags(reg, X, 16);
3831         tcg_temp_free(X);
3832     }
3833     tcg_temp_free(shift);
3834     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3835     set_cc_op(s, CC_OP_FLAGS);
3836 }
3837 
3838 DISAS_INSN(rotate_reg)
3839 {
3840     TCGv reg;
3841     TCGv src;
3842     TCGv t0, t1;
3843     int left = (insn & 0x100);
3844 
3845     reg = DREG(insn, 0);
3846     src = DREG(insn, 9);
3847     /* shift in [0..63] */
3848     t0 = tcg_temp_new();
3849     tcg_gen_andi_i32(t0, src, 63);
3850     t1 = tcg_temp_new_i32();
3851     if (insn & 8) {
3852         tcg_gen_andi_i32(t1, src, 31);
3853         rotate(reg, t1, left, 32);
3854         /* if shift == 0, clear C */
3855         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3856                             t0, QREG_CC_V /* 0 */,
3857                             QREG_CC_V /* 0 */, QREG_CC_C);
3858     } else {
3859         TCGv X;
3860         /* modulo 33 */
3861         tcg_gen_movi_i32(t1, 33);
3862         tcg_gen_remu_i32(t1, t0, t1);
3863         X = rotate32_x(DREG(insn, 0), t1, left);
3864         rotate_x_flags(DREG(insn, 0), X, 32);
3865         tcg_temp_free(X);
3866     }
3867     tcg_temp_free(t1);
3868     tcg_temp_free(t0);
3869     set_cc_op(s, CC_OP_FLAGS);
3870 }
3871 
3872 DISAS_INSN(rotate8_reg)
3873 {
3874     TCGv reg;
3875     TCGv src;
3876     TCGv t0, t1;
3877     int left = (insn & 0x100);
3878 
3879     reg = gen_extend(DREG(insn, 0), OS_BYTE, 0);
3880     src = DREG(insn, 9);
3881     /* shift in [0..63] */
3882     t0 = tcg_temp_new_i32();
3883     tcg_gen_andi_i32(t0, src, 63);
3884     t1 = tcg_temp_new_i32();
3885     if (insn & 8) {
3886         tcg_gen_andi_i32(t1, src, 7);
3887         rotate(reg, t1, left, 8);
3888         /* if shift == 0, clear C */
3889         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3890                             t0, QREG_CC_V /* 0 */,
3891                             QREG_CC_V /* 0 */, QREG_CC_C);
3892     } else {
3893         TCGv X;
3894         /* modulo 9 */
3895         tcg_gen_movi_i32(t1, 9);
3896         tcg_gen_remu_i32(t1, t0, t1);
3897         X = rotate_x(reg, t1, left, 8);
3898         rotate_x_flags(reg, X, 8);
3899         tcg_temp_free(X);
3900     }
3901     tcg_temp_free(t1);
3902     tcg_temp_free(t0);
3903     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3904     set_cc_op(s, CC_OP_FLAGS);
3905 }
3906 
3907 DISAS_INSN(rotate16_reg)
3908 {
3909     TCGv reg;
3910     TCGv src;
3911     TCGv t0, t1;
3912     int left = (insn & 0x100);
3913 
3914     reg = gen_extend(DREG(insn, 0), OS_WORD, 0);
3915     src = DREG(insn, 9);
3916     /* shift in [0..63] */
3917     t0 = tcg_temp_new_i32();
3918     tcg_gen_andi_i32(t0, src, 63);
3919     t1 = tcg_temp_new_i32();
3920     if (insn & 8) {
3921         tcg_gen_andi_i32(t1, src, 15);
3922         rotate(reg, t1, left, 16);
3923         /* if shift == 0, clear C */
3924         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3925                             t0, QREG_CC_V /* 0 */,
3926                             QREG_CC_V /* 0 */, QREG_CC_C);
3927     } else {
3928         TCGv X;
3929         /* modulo 17 */
3930         tcg_gen_movi_i32(t1, 17);
3931         tcg_gen_remu_i32(t1, t0, t1);
3932         X = rotate_x(reg, t1, left, 16);
3933         rotate_x_flags(reg, X, 16);
3934         tcg_temp_free(X);
3935     }
3936     tcg_temp_free(t1);
3937     tcg_temp_free(t0);
3938     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3939     set_cc_op(s, CC_OP_FLAGS);
3940 }
3941 
3942 DISAS_INSN(rotate_mem)
3943 {
3944     TCGv src;
3945     TCGv addr;
3946     TCGv shift;
3947     int left = (insn & 0x100);
3948 
3949     SRC_EA(env, src, OS_WORD, 0, &addr);
3950 
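    /* Memory rotates operate on a word and always rotate by one bit.  */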
3951     shift = tcg_const_i32(1);
3952     if (insn & 0x0200) {
3953         rotate(src, shift, left, 16);
3954     } else {
3955         TCGv X = rotate_x(src, shift, left, 16);
3956         rotate_x_flags(src, X, 16);
3957         tcg_temp_free(X);
3958     }
3959     tcg_temp_free(shift);
3960     DEST_EA(env, insn, OS_WORD, src, &addr);
3961     set_cc_op(s, CC_OP_FLAGS);
3962 }
3963 
3964 DISAS_INSN(bfext_reg)
3965 {
3966     int ext = read_im16(env, s);
3967     int is_sign = insn & 0x200;
3968     TCGv src = DREG(insn, 0);
3969     TCGv dst = DREG(ext, 12);
3970     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
3971     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
3972     int pos = 32 - ofs - len;        /* little bit-endian */
3973     TCGv tmp = tcg_temp_new();
3974     TCGv shift;
3975 
3976     /* In general, we're going to rotate the field so that it's at the
3977        top of the word and then right-shift by the complement of the
3978        width to extend the field.  */
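    /* For example, with ofs = 4 and len = 8 a rotate left by 4 moves the
       field to bits 31..24, and an arithmetic shift right by
       (-8 & 31) = 24 then yields the sign-extended field.  */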
3979     if (ext & 0x20) {
3980         /* Variable width.  */
3981         if (ext & 0x800) {
3982             /* Variable offset.  */
3983             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3984             tcg_gen_rotl_i32(tmp, src, tmp);
3985         } else {
3986             tcg_gen_rotli_i32(tmp, src, ofs);
3987         }
3988 
3989         shift = tcg_temp_new();
3990         tcg_gen_neg_i32(shift, DREG(ext, 0));
3991         tcg_gen_andi_i32(shift, shift, 31);
3992         tcg_gen_sar_i32(QREG_CC_N, tmp, shift);
3993         if (is_sign) {
3994             tcg_gen_mov_i32(dst, QREG_CC_N);
3995         } else {
3996             tcg_gen_shr_i32(dst, tmp, shift);
3997         }
3998         tcg_temp_free(shift);
3999     } else {
4000         /* Immediate width.  */
4001         if (ext & 0x800) {
4002             /* Variable offset */
4003             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4004             tcg_gen_rotl_i32(tmp, src, tmp);
4005             src = tmp;
4006             pos = 32 - len;
4007         } else {
4008             /* Immediate offset.  If the field doesn't wrap around the
4009                end of the word, rely on (s)extract completely.  */
4010             if (pos < 0) {
4011                 tcg_gen_rotli_i32(tmp, src, ofs);
4012                 src = tmp;
4013                 pos = 32 - len;
4014             }
4015         }
4016 
4017         tcg_gen_sextract_i32(QREG_CC_N, src, pos, len);
4018         if (is_sign) {
4019             tcg_gen_mov_i32(dst, QREG_CC_N);
4020         } else {
4021             tcg_gen_extract_i32(dst, src, pos, len);
4022         }
4023     }
4024 
4025     tcg_temp_free(tmp);
4026     set_cc_op(s, CC_OP_LOGIC);
4027 }
4028 
4029 DISAS_INSN(bfext_mem)
4030 {
4031     int ext = read_im16(env, s);
4032     int is_sign = insn & 0x200;
4033     TCGv dest = DREG(ext, 12);
4034     TCGv addr, len, ofs;
4035 
4036     addr = gen_lea(env, s, insn, OS_UNSIZED);
4037     if (IS_NULL_QREG(addr)) {
4038         gen_addr_fault(s);
4039         return;
4040     }
4041 
4042     if (ext & 0x20) {
4043         len = DREG(ext, 0);
4044     } else {
4045         len = tcg_const_i32(extract32(ext, 0, 5));
4046     }
4047     if (ext & 0x800) {
4048         ofs = DREG(ext, 6);
4049     } else {
4050         ofs = tcg_const_i32(extract32(ext, 6, 5));
4051     }
4052 
4053     if (is_sign) {
4054         gen_helper_bfexts_mem(dest, cpu_env, addr, ofs, len);
4055         tcg_gen_mov_i32(QREG_CC_N, dest);
4056     } else {
4057         TCGv_i64 tmp = tcg_temp_new_i64();
4058         gen_helper_bfextu_mem(tmp, cpu_env, addr, ofs, len);
4059         tcg_gen_extr_i64_i32(dest, QREG_CC_N, tmp);
4060         tcg_temp_free_i64(tmp);
4061     }
4062     set_cc_op(s, CC_OP_LOGIC);
4063 
4064     if (!(ext & 0x20)) {
4065         tcg_temp_free(len);
4066     }
4067     if (!(ext & 0x800)) {
4068         tcg_temp_free(ofs);
4069     }
4070 }
4071 
4072 DISAS_INSN(bfop_reg)
4073 {
4074     int ext = read_im16(env, s);
4075     TCGv src = DREG(insn, 0);
4076     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4077     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4078     TCGv mask, tofs, tlen;
4079 
4080     tofs = NULL;
4081     tlen = NULL;
4082     if ((insn & 0x0f00) == 0x0d00) { /* bfffo */
4083         tofs = tcg_temp_new();
4084         tlen = tcg_temp_new();
4085     }
4086 
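    /* Build QREG_CC_N with the field left-aligned for the N and Z flags,
       and a mask with 0s inside the field and 1s outside it, so that
       bfchg/bfclr/bfset below reduce to eqv/and/orc with the mask.  */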
4087     if ((ext & 0x820) == 0) {
4088         /* Immediate width and offset.  */
4089         uint32_t maski = 0x7fffffffu >> (len - 1);
4090         if (ofs + len <= 32) {
4091             tcg_gen_shli_i32(QREG_CC_N, src, ofs);
4092         } else {
4093             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4094         }
4095         tcg_gen_andi_i32(QREG_CC_N, QREG_CC_N, ~maski);
4096         mask = tcg_const_i32(ror32(maski, ofs));
4097         if (tofs) {
4098             tcg_gen_movi_i32(tofs, ofs);
4099             tcg_gen_movi_i32(tlen, len);
4100         }
4101     } else {
4102         TCGv tmp = tcg_temp_new();
4103         if (ext & 0x20) {
4104             /* Variable width */
4105             tcg_gen_subi_i32(tmp, DREG(ext, 0), 1);
4106             tcg_gen_andi_i32(tmp, tmp, 31);
4107             mask = tcg_const_i32(0x7fffffffu);
4108             tcg_gen_shr_i32(mask, mask, tmp);
4109             if (tlen) {
4110                 tcg_gen_addi_i32(tlen, tmp, 1);
4111             }
4112         } else {
4113             /* Immediate width */
4114             mask = tcg_const_i32(0x7fffffffu >> (len - 1));
4115             if (tlen) {
4116                 tcg_gen_movi_i32(tlen, len);
4117             }
4118         }
4119         if (ext & 0x800) {
4120             /* Variable offset */
4121             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4122             tcg_gen_rotl_i32(QREG_CC_N, src, tmp);
4123             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4124             tcg_gen_rotr_i32(mask, mask, tmp);
4125             if (tofs) {
4126                 tcg_gen_mov_i32(tofs, tmp);
4127             }
4128         } else {
4129             /* Immediate offset (and variable width) */
4130             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4131             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4132             tcg_gen_rotri_i32(mask, mask, ofs);
4133             if (tofs) {
4134                 tcg_gen_movi_i32(tofs, ofs);
4135             }
4136         }
4137         tcg_temp_free(tmp);
4138     }
4139     set_cc_op(s, CC_OP_LOGIC);
4140 
4141     switch (insn & 0x0f00) {
4142     case 0x0a00: /* bfchg */
4143         tcg_gen_eqv_i32(src, src, mask);
4144         break;
4145     case 0x0c00: /* bfclr */
4146         tcg_gen_and_i32(src, src, mask);
4147         break;
4148     case 0x0d00: /* bfffo */
4149         gen_helper_bfffo_reg(DREG(ext, 12), QREG_CC_N, tofs, tlen);
4150         tcg_temp_free(tlen);
4151         tcg_temp_free(tofs);
4152         break;
4153     case 0x0e00: /* bfset */
4154         tcg_gen_orc_i32(src, src, mask);
4155         break;
4156     case 0x0800: /* bftst */
4157         /* flags already set; no other work to do.  */
4158         break;
4159     default:
4160         g_assert_not_reached();
4161     }
4162     tcg_temp_free(mask);
4163 }
4164 
4165 DISAS_INSN(bfop_mem)
4166 {
4167     int ext = read_im16(env, s);
4168     TCGv addr, len, ofs;
4169     TCGv_i64 t64;
4170 
4171     addr = gen_lea(env, s, insn, OS_UNSIZED);
4172     if (IS_NULL_QREG(addr)) {
4173         gen_addr_fault(s);
4174         return;
4175     }
4176 
4177     if (ext & 0x20) {
4178         len = DREG(ext, 0);
4179     } else {
4180         len = tcg_const_i32(extract32(ext, 0, 5));
4181     }
4182     if (ext & 0x800) {
4183         ofs = DREG(ext, 6);
4184     } else {
4185         ofs = tcg_const_i32(extract32(ext, 6, 5));
4186     }
4187 
4188     switch (insn & 0x0f00) {
4189     case 0x0a00: /* bfchg */
4190         gen_helper_bfchg_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4191         break;
4192     case 0x0c00: /* bfclr */
4193         gen_helper_bfclr_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4194         break;
4195     case 0x0d00: /* bfffo */
4196         t64 = tcg_temp_new_i64();
4197         gen_helper_bfffo_mem(t64, cpu_env, addr, ofs, len);
4198         tcg_gen_extr_i64_i32(DREG(ext, 12), QREG_CC_N, t64);
4199         tcg_temp_free_i64(t64);
4200         break;
4201     case 0x0e00: /* bfset */
4202         gen_helper_bfset_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4203         break;
4204     case 0x0800: /* bftst */
4205         gen_helper_bfexts_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4206         break;
4207     default:
4208         g_assert_not_reached();
4209     }
4210     set_cc_op(s, CC_OP_LOGIC);
4211 
4212     if (!(ext & 0x20)) {
4213         tcg_temp_free(len);
4214     }
4215     if (!(ext & 0x800)) {
4216         tcg_temp_free(ofs);
4217     }
4218 }
4219 
4220 DISAS_INSN(bfins_reg)
4221 {
4222     int ext = read_im16(env, s);
4223     TCGv dst = DREG(insn, 0);
4224     TCGv src = DREG(ext, 12);
4225     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4226     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4227     int pos = 32 - ofs - len;        /* little bit-endian */
4228     TCGv tmp;
4229 
4230     tmp = tcg_temp_new();
4231 
4232     if (ext & 0x20) {
4233         /* Variable width */
4234         tcg_gen_neg_i32(tmp, DREG(ext, 0));
4235         tcg_gen_andi_i32(tmp, tmp, 31);
4236         tcg_gen_shl_i32(QREG_CC_N, src, tmp);
4237     } else {
4238         /* Immediate width */
4239         tcg_gen_shli_i32(QREG_CC_N, src, 32 - len);
4240     }
4241     set_cc_op(s, CC_OP_LOGIC);
4242 
4243     /* Immediate width and offset */
4244     if ((ext & 0x820) == 0) {
4245         /* Check for suitability for deposit.  */
4246         if (pos >= 0) {
4247             tcg_gen_deposit_i32(dst, dst, src, pos, len);
4248         } else {
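            /* The field wraps past bit 0, so deposit cannot be used:
               mask the source, rotate it into place and merge by hand.  */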
4249             uint32_t maski = -2U << (len - 1);
4250             uint32_t roti = (ofs + len) & 31;
4251             tcg_gen_andi_i32(tmp, src, ~maski);
4252             tcg_gen_rotri_i32(tmp, tmp, roti);
4253             tcg_gen_andi_i32(dst, dst, ror32(maski, roti));
4254             tcg_gen_or_i32(dst, dst, tmp);
4255         }
4256     } else {
4257         TCGv mask = tcg_temp_new();
4258         TCGv rot = tcg_temp_new();
4259 
4260         if (ext & 0x20) {
4261             /* Variable width */
4262             tcg_gen_subi_i32(rot, DREG(ext, 0), 1);
4263             tcg_gen_andi_i32(rot, rot, 31);
4264             tcg_gen_movi_i32(mask, -2);
4265             tcg_gen_shl_i32(mask, mask, rot);
4266             tcg_gen_mov_i32(rot, DREG(ext, 0));
4267             tcg_gen_andc_i32(tmp, src, mask);
4268         } else {
4269             /* Immediate width (variable offset) */
4270             uint32_t maski = -2U << (len - 1);
4271             tcg_gen_andi_i32(tmp, src, ~maski);
4272             tcg_gen_movi_i32(mask, maski);
4273             tcg_gen_movi_i32(rot, len & 31);
4274         }
4275         if (ext & 0x800) {
4276             /* Variable offset */
4277             tcg_gen_add_i32(rot, rot, DREG(ext, 6));
4278         } else {
4279             /* Immediate offset (variable width) */
4280             tcg_gen_addi_i32(rot, rot, ofs);
4281         }
4282         tcg_gen_andi_i32(rot, rot, 31);
4283         tcg_gen_rotr_i32(mask, mask, rot);
4284         tcg_gen_rotr_i32(tmp, tmp, rot);
4285         tcg_gen_and_i32(dst, dst, mask);
4286         tcg_gen_or_i32(dst, dst, tmp);
4287 
4288         tcg_temp_free(rot);
4289         tcg_temp_free(mask);
4290     }
4291     tcg_temp_free(tmp);
4292 }
4293 
4294 DISAS_INSN(bfins_mem)
4295 {
4296     int ext = read_im16(env, s);
4297     TCGv src = DREG(ext, 12);
4298     TCGv addr, len, ofs;
4299 
4300     addr = gen_lea(env, s, insn, OS_UNSIZED);
4301     if (IS_NULL_QREG(addr)) {
4302         gen_addr_fault(s);
4303         return;
4304     }
4305 
4306     if (ext & 0x20) {
4307         len = DREG(ext, 0);
4308     } else {
4309         len = tcg_const_i32(extract32(ext, 0, 5));
4310     }
4311     if (ext & 0x800) {
4312         ofs = DREG(ext, 6);
4313     } else {
4314         ofs = tcg_const_i32(extract32(ext, 6, 5));
4315     }
4316 
4317     gen_helper_bfins_mem(QREG_CC_N, cpu_env, addr, src, ofs, len);
4318     set_cc_op(s, CC_OP_LOGIC);
4319 
4320     if (!(ext & 0x20)) {
4321         tcg_temp_free(len);
4322     }
4323     if (!(ext & 0x800)) {
4324         tcg_temp_free(ofs);
4325     }
4326 }
4327 
4328 DISAS_INSN(ff1)
4329 {
4330     TCGv reg;
4331     reg = DREG(insn, 0);
4332     gen_logic_cc(s, reg, OS_LONG);
4333     gen_helper_ff1(reg, reg);
4334 }
4335 
4336 DISAS_INSN(chk)
4337 {
4338     TCGv src, reg;
4339     int opsize;
4340 
4341     switch ((insn >> 7) & 3) {
4342     case 3:
4343         opsize = OS_WORD;
4344         break;
4345     case 2:
4346         if (m68k_feature(env, M68K_FEATURE_CHK2)) {
4347             opsize = OS_LONG;
4348             break;
4349         }
4350         /* fallthru */
4351     default:
4352         gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4353         return;
4354     }
4355     SRC_EA(env, src, opsize, 1, NULL);
4356     reg = gen_extend(DREG(insn, 9), opsize, 1);
4357 
4358     gen_flush_flags(s);
4359     gen_helper_chk(cpu_env, reg, src);
4360 }
4361 
4362 DISAS_INSN(chk2)
4363 {
4364     uint16_t ext;
4365     TCGv addr1, addr2, bound1, bound2, reg;
4366     int opsize;
4367 
4368     switch ((insn >> 9) & 3) {
4369     case 0:
4370         opsize = OS_BYTE;
4371         break;
4372     case 1:
4373         opsize = OS_WORD;
4374         break;
4375     case 2:
4376         opsize = OS_LONG;
4377         break;
4378     default:
4379         gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4380         return;
4381     }
4382 
4383     ext = read_im16(env, s);
4384     if ((ext & 0x0800) == 0) {
4385         gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4386         return;
4387     }
4388 
4389     addr1 = gen_lea(env, s, insn, OS_UNSIZED);
4390     addr2 = tcg_temp_new();
4391     tcg_gen_addi_i32(addr2, addr1, opsize_bytes(opsize));
4392 
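    /* The bound pair is stored as the lower bound followed by the upper
       bound.  */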
4393     bound1 = gen_load(s, opsize, addr1, 1, IS_USER(s));
4394     tcg_temp_free(addr1);
4395     bound2 = gen_load(s, opsize, addr2, 1, IS_USER(s));
4396     tcg_temp_free(addr2);
4397 
4398     reg = tcg_temp_new();
4399     if (ext & 0x8000) {
4400         tcg_gen_mov_i32(reg, AREG(ext, 12));
4401     } else {
4402         gen_ext(reg, DREG(ext, 12), opsize, 1);
4403     }
4404 
4405     gen_flush_flags(s);
4406     gen_helper_chk2(cpu_env, reg, bound1, bound2);
4407     tcg_temp_free(reg);
4408     tcg_temp_free(bound1);
4409     tcg_temp_free(bound2);
4410 }
4411 
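/* Copy one 16-byte line for MOVE16.  Both addresses are aligned down to
   a 16-byte boundary and the line is moved as two 64-bit accesses.  */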
4412 static void m68k_copy_line(TCGv dst, TCGv src, int index)
4413 {
4414     TCGv addr;
4415     TCGv_i64 t0, t1;
4416 
4417     addr = tcg_temp_new();
4418 
4419     t0 = tcg_temp_new_i64();
4420     t1 = tcg_temp_new_i64();
4421 
4422     tcg_gen_andi_i32(addr, src, ~15);
4423     tcg_gen_qemu_ld64(t0, addr, index);
4424     tcg_gen_addi_i32(addr, addr, 8);
4425     tcg_gen_qemu_ld64(t1, addr, index);
4426 
4427     tcg_gen_andi_i32(addr, dst, ~15);
4428     tcg_gen_qemu_st64(t0, addr, index);
4429     tcg_gen_addi_i32(addr, addr, 8);
4430     tcg_gen_qemu_st64(t1, addr, index);
4431 
4432     tcg_temp_free_i64(t0);
4433     tcg_temp_free_i64(t1);
4434     tcg_temp_free(addr);
4435 }
4436 
4437 DISAS_INSN(move16_reg)
4438 {
4439     int index = IS_USER(s);
4440     TCGv tmp;
4441     uint16_t ext;
4442 
4443     ext = read_im16(env, s);
4444     if ((ext & (1 << 15)) == 0) {
4445         gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4446     }
4447 
4448     m68k_copy_line(AREG(ext, 12), AREG(insn, 0), index);
4449 
4450     /* Ax can be Ay, so save Ay before incrementing Ax */
4451     tmp = tcg_temp_new();
4452     tcg_gen_mov_i32(tmp, AREG(ext, 12));
4453     tcg_gen_addi_i32(AREG(insn, 0), AREG(insn, 0), 16);
4454     tcg_gen_addi_i32(AREG(ext, 12), tmp, 16);
4455     tcg_temp_free(tmp);
4456 }
4457 
4458 DISAS_INSN(move16_mem)
4459 {
4460     int index = IS_USER(s);
4461     TCGv reg, addr;
4462 
4463     reg = AREG(insn, 0);
4464     addr = tcg_const_i32(read_im32(env, s));
4465 
4466     if ((insn >> 3) & 1) {
4467         /* MOVE16 (xxx).L, (Ay) */
4468         m68k_copy_line(reg, addr, index);
4469     } else {
4470         /* MOVE16 (Ay), (xxx).L */
4471         m68k_copy_line(addr, reg, index);
4472     }
4473 
4474     tcg_temp_free(addr);
4475 
4476     if (((insn >> 3) & 2) == 0) {
4477         /* (Ay)+ */
4478         tcg_gen_addi_i32(reg, reg, 16);
4479     }
4480 }
4481 
4482 DISAS_INSN(strldsr)
4483 {
4484     uint16_t ext;
4485     uint32_t addr;
4486 
4487     addr = s->pc - 2;
4488     ext = read_im16(env, s);
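    /* The second word of strldsr must be 0x46fc, the move-to-SR opcode.  */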
4489     if (ext != 0x46FC) {
4490         gen_exception(s, addr, EXCP_UNSUPPORTED);
4491         return;
4492     }
4493     ext = read_im16(env, s);
4494     if (IS_USER(s) || (ext & SR_S) == 0) {
4495         gen_exception(s, addr, EXCP_PRIVILEGE);
4496         return;
4497     }
4498     gen_push(s, gen_get_sr(s));
4499     gen_set_sr_im(s, ext, 0);
4500 }
4501 
4502 DISAS_INSN(move_from_sr)
4503 {
4504     TCGv sr;
4505 
4506     if (IS_USER(s) && !m68k_feature(env, M68K_FEATURE_M68000)) {
4507         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4508         return;
4509     }
4510     sr = gen_get_sr(s);
4511     DEST_EA(env, insn, OS_WORD, sr, NULL);
4512 }
4513 
4514 #if defined(CONFIG_SOFTMMU)
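/* MOVES: move to/from the alternate address space selected by SFC (loads)
   and DFC (stores).  */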
4515 DISAS_INSN(moves)
4516 {
4517     int opsize;
4518     uint16_t ext;
4519     TCGv reg;
4520     TCGv addr;
4521     int extend;
4522 
4523     if (IS_USER(s)) {
4524         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4525         return;
4526     }
4527 
4528     ext = read_im16(env, s);
4529 
4530     opsize = insn_opsize(insn);
4531 
4532     if (ext & 0x8000) {
4533         /* address register */
4534         reg = AREG(ext, 12);
4535         extend = 1;
4536     } else {
4537         /* data register */
4538         reg = DREG(ext, 12);
4539         extend = 0;
4540     }
4541 
4542     addr = gen_lea(env, s, insn, opsize);
4543     if (IS_NULL_QREG(addr)) {
4544         gen_addr_fault(s);
4545         return;
4546     }
4547 
4548     if (ext & 0x0800) {
4549         /* from reg to ea */
4550         gen_store(s, opsize, addr, reg, DFC_INDEX(s));
4551     } else {
4552         /* from ea to reg */
4553         TCGv tmp = gen_load(s, opsize, addr, 0, SFC_INDEX(s));
4554         if (extend) {
4555             gen_ext(reg, tmp, opsize, 1);
4556         } else {
4557             gen_partset_reg(opsize, reg, tmp);
4558         }
4559         tcg_temp_free(tmp);
4560     }
4561     switch (extract32(insn, 3, 3)) {
4562     case 3: /* Indirect postincrement.  */
4563         tcg_gen_addi_i32(AREG(insn, 0), addr,
4564                          REG(insn, 0) == 7 && opsize == OS_BYTE
4565                          ? 2
4566                          : opsize_bytes(opsize));
4567         break;
4568     case 4: /* Indirect predecrement.  */
4569         tcg_gen_mov_i32(AREG(insn, 0), addr);
4570         break;
4571     }
4572 }
4573 
4574 DISAS_INSN(move_to_sr)
4575 {
4576     if (IS_USER(s)) {
4577         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4578         return;
4579     }
4580     gen_move_to_sr(env, s, insn, false);
4581     gen_lookup_tb(s);
4582 }
4583 
4584 DISAS_INSN(move_from_usp)
4585 {
4586     if (IS_USER(s)) {
4587         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4588         return;
4589     }
4590     tcg_gen_ld_i32(AREG(insn, 0), cpu_env,
4591                    offsetof(CPUM68KState, sp[M68K_USP]));
4592 }
4593 
4594 DISAS_INSN(move_to_usp)
4595 {
4596     if (IS_USER(s)) {
4597         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4598         return;
4599     }
4600     tcg_gen_st_i32(AREG(insn, 0), cpu_env,
4601                    offsetof(CPUM68KState, sp[M68K_USP]));
4602 }
4603 
4604 DISAS_INSN(halt)
4605 {
4606     if (IS_USER(s)) {
4607         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4608         return;
4609     }
4610 
4611     gen_exception(s, s->pc, EXCP_HALT_INSN);
4612 }
4613 
4614 DISAS_INSN(stop)
4615 {
4616     uint16_t ext;
4617 
4618     if (IS_USER(s)) {
4619         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4620         return;
4621     }
4622 
4623     ext = read_im16(env, s);
4624 
4625     gen_set_sr_im(s, ext, 0);
4626     tcg_gen_movi_i32(cpu_halted, 1);
4627     gen_exception(s, s->pc, EXCP_HLT);
4628 }
4629 
4630 DISAS_INSN(rte)
4631 {
4632     if (IS_USER(s)) {
4633         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4634         return;
4635     }
4636     gen_exception(s, s->insn_pc, EXCP_RTE);
4637 }
4638 
4639 DISAS_INSN(cf_movec)
4640 {
4641     uint16_t ext;
4642     TCGv reg;
4643 
4644     if (IS_USER(s)) {
4645         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4646         return;
4647     }
4648 
4649     ext = read_im16(env, s);
4650 
4651     if (ext & 0x8000) {
4652         reg = AREG(ext, 12);
4653     } else {
4654         reg = DREG(ext, 12);
4655     }
4656     gen_helper_cf_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4657     gen_lookup_tb(s);
4658 }
4659 
4660 DISAS_INSN(m68k_movec)
4661 {
4662     uint16_t ext;
4663     TCGv reg;
4664 
4665     if (IS_USER(s)) {
4666         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4667         return;
4668     }
4669 
4670     ext = read_im16(env, s);
4671 
4672     if (ext & 0x8000) {
4673         reg = AREG(ext, 12);
4674     } else {
4675         reg = DREG(ext, 12);
4676     }
4677     if (insn & 1) {
4678         gen_helper_m68k_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4679     } else {
4680         gen_helper_m68k_movec_from(reg, cpu_env, tcg_const_i32(ext & 0xfff));
4681     }
4682     gen_lookup_tb(s);
4683 }
4684 
4685 DISAS_INSN(intouch)
4686 {
4687     if (IS_USER(s)) {
4688         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4689         return;
4690     }
4691     /* ICache fetch.  Implement as no-op.  */
4692 }
4693 
4694 DISAS_INSN(cpushl)
4695 {
4696     if (IS_USER(s)) {
4697         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4698         return;
4699     }
4700     /* Cache push/invalidate.  Implement as no-op.  */
4701 }
4702 
4703 DISAS_INSN(cpush)
4704 {
4705     if (IS_USER(s)) {
4706         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4707         return;
4708     }
4709     /* Cache push/invalidate.  Implement as no-op.  */
4710 }
4711 
4712 DISAS_INSN(cinv)
4713 {
4714     if (IS_USER(s)) {
4715         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4716         return;
4717     }
4718     /* Invalidate cache line.  Implement as no-op.  */
4719 }
4720 
4721 #if defined(CONFIG_SOFTMMU)
4722 DISAS_INSN(pflush)
4723 {
4724     TCGv opmode;
4725 
4726     if (IS_USER(s)) {
4727         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4728         return;
4729     }
4730 
4731     opmode = tcg_const_i32((insn >> 3) & 3);
4732     gen_helper_pflush(cpu_env, AREG(insn, 0), opmode);
4733     tcg_temp_free(opmode);
4734 }
4735 
4736 DISAS_INSN(ptest)
4737 {
4738     TCGv is_read;
4739 
4740     if (IS_USER(s)) {
4741         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4742         return;
4743     }
4744     is_read = tcg_const_i32((insn >> 5) & 1);
4745     gen_helper_ptest(cpu_env, AREG(insn, 0), is_read);
4746     tcg_temp_free(is_read);
4747 }
4748 #endif
4749 
4750 DISAS_INSN(wddata)
4751 {
4752     gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4753 }
4754 
4755 DISAS_INSN(wdebug)
4756 {
4757     M68kCPU *cpu = m68k_env_get_cpu(env);
4758 
4759     if (IS_USER(s)) {
4760         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4761         return;
4762     }
4763     /* TODO: Implement wdebug.  */
4764     cpu_abort(CPU(cpu), "WDEBUG not implemented");
4765 }
4766 #endif
4767 
4768 DISAS_INSN(trap)
4769 {
4770     gen_exception(s, s->insn_pc, EXCP_TRAP0 + (insn & 0xf));
4771 }
4772 
4773 static void gen_load_fcr(DisasContext *s, TCGv res, int reg)
4774 {
4775     switch (reg) {
4776     case M68K_FPIAR:
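        /* FPIAR is not implemented and reads as zero.  */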
4777         tcg_gen_movi_i32(res, 0);
4778         break;
4779     case M68K_FPSR:
4780         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpsr));
4781         break;
4782     case M68K_FPCR:
4783         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpcr));
4784         break;
4785     }
4786 }
4787 
4788 static void gen_store_fcr(DisasContext *s, TCGv val, int reg)
4789 {
4790     switch (reg) {
4791     case M68K_FPIAR:
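        /* Writes to FPIAR are ignored.  */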
4792         break;
4793     case M68K_FPSR:
4794         tcg_gen_st_i32(val, cpu_env, offsetof(CPUM68KState, fpsr));
4795         break;
4796     case M68K_FPCR:
4797         gen_helper_set_fpcr(cpu_env, val);
4798         break;
4799     }
4800 }
4801 
4802 static void gen_qemu_store_fcr(DisasContext *s, TCGv addr, int reg)
4803 {
4804     int index = IS_USER(s);
4805     TCGv tmp;
4806 
4807     tmp = tcg_temp_new();
4808     gen_load_fcr(s, tmp, reg);
4809     tcg_gen_qemu_st32(tmp, addr, index);
4810     tcg_temp_free(tmp);
4811 }
4812 
4813 static void gen_qemu_load_fcr(DisasContext *s, TCGv addr, int reg)
4814 {
4815     int index = IS_USER(s);
4816     TCGv tmp;
4817 
4818     tmp = tcg_temp_new();
4819     tcg_gen_qemu_ld32u(tmp, addr, index);
4820     gen_store_fcr(s, tmp, reg);
4821     tcg_temp_free(tmp);
4822 }
4823 
4824 
4825 static void gen_op_fmove_fcr(CPUM68KState *env, DisasContext *s,
4826                              uint32_t insn, uint32_t ext)
4827 {
4828     int mask = (ext >> 10) & 7;
4829     int is_write = (ext >> 13) & 1;
4830     int mode = extract32(insn, 3, 3);
4831     int i;
4832     TCGv addr, tmp;
4833 
4834     switch (mode) {
4835     case 0: /* Dn */
4836         if (mask != M68K_FPIAR && mask != M68K_FPSR && mask != M68K_FPCR) {
4837             gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4838             return;
4839         }
4840         if (is_write) {
4841             gen_load_fcr(s, DREG(insn, 0), mask);
4842         } else {
4843             gen_store_fcr(s, DREG(insn, 0), mask);
4844         }
4845         return;
4846     case 1: /* An, only with FPIAR */
4847         if (mask != M68K_FPIAR) {
4848             gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4849             return;
4850         }
4851         if (is_write) {
4852             gen_load_fcr(s, AREG(insn, 0), mask);
4853         } else {
4854             gen_store_fcr(s, AREG(insn, 0), mask);
4855         }
4856         return;
4857     default:
4858         break;
4859     }
4860 
4861     tmp = gen_lea(env, s, insn, OS_LONG);
4862     if (IS_NULL_QREG(tmp)) {
4863         gen_addr_fault(s);
4864         return;
4865     }
4866 
4867     addr = tcg_temp_new();
4868     tcg_gen_mov_i32(addr, tmp);
4869 
4870     /* mask:
4871      *
4872      * 0b100 Floating-Point Control Register
4873      * 0b010 Floating-Point Status Register
4874      * 0b001 Floating-Point Instruction Address Register
4875      *
4876      */
4877 
4878     if (is_write && mode == 4) {
4879         for (i = 2; i >= 0; i--, mask >>= 1) {
4880             if (mask & 1) {
4881                 gen_qemu_store_fcr(s, addr, 1 << i);
4882                 if (mask != 1) {
4883                     tcg_gen_subi_i32(addr, addr, opsize_bytes(OS_LONG));
4884                 }
4885             }
4886         }
4887         tcg_gen_mov_i32(AREG(insn, 0), addr);
4888     } else {
4889         for (i = 0; i < 3; i++, mask >>= 1) {
4890             if (mask & 1) {
4891                 if (is_write) {
4892                     gen_qemu_store_fcr(s, addr, 1 << i);
4893                 } else {
4894                     gen_qemu_load_fcr(s, addr, 1 << i);
4895                 }
4896                 if (mask != 1 || mode == 3) {
4897                     tcg_gen_addi_i32(addr, addr, opsize_bytes(OS_LONG));
4898                 }
4899             }
4900         }
4901         if (mode == 3) {
4902             tcg_gen_mov_i32(AREG(insn, 0), addr);
4903         }
4904     }
4905     tcg_temp_free_i32(addr);
4906 }
4907 
4908 static void gen_op_fmovem(CPUM68KState *env, DisasContext *s,
4909                           uint32_t insn, uint32_t ext)
4910 {
4911     int opsize;
4912     TCGv addr, tmp;
4913     int mode = (ext >> 11) & 0x3;
4914     int is_load = ((ext & 0x2000) == 0);
4915 
4916     if (m68k_feature(s->env, M68K_FEATURE_FPU)) {
4917         opsize = OS_EXTENDED;
4918     } else {
4919         opsize = OS_DOUBLE;  /* FIXME */
4920     }
4921 
4922     addr = gen_lea(env, s, insn, opsize);
4923     if (IS_NULL_QREG(addr)) {
4924         gen_addr_fault(s);
4925         return;
4926     }
4927 
4928     tmp = tcg_temp_new();
4929     if (mode & 0x1) {
4930         /* Dynamic register list */
4931         tcg_gen_ext8u_i32(tmp, DREG(ext, 4));
4932     } else {
4933         /* Static register list */
4934         tcg_gen_movi_i32(tmp, ext & 0xff);
4935     }
4936 
4937     if (!is_load && (mode & 2) == 0) {
4938         /* predecrement addressing mode
4939          * only available when storing registers to memory
4940          */
4941         if (opsize == OS_EXTENDED) {
4942             gen_helper_fmovemx_st_predec(tmp, cpu_env, addr, tmp);
4943         } else {
4944             gen_helper_fmovemd_st_predec(tmp, cpu_env, addr, tmp);
4945         }
4946     } else {
4947         /* postincrement addressing mode */
4948         if (opsize == OS_EXTENDED) {
4949             if (is_load) {
4950                 gen_helper_fmovemx_ld_postinc(tmp, cpu_env, addr, tmp);
4951             } else {
4952                 gen_helper_fmovemx_st_postinc(tmp, cpu_env, addr, tmp);
4953             }
4954         } else {
4955             if (is_load) {
4956                 gen_helper_fmovemd_ld_postinc(tmp, cpu_env, addr, tmp);
4957             } else {
4958                 gen_helper_fmovemd_st_postinc(tmp, cpu_env, addr, tmp);
4959             }
4960         }
4961     }
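    /* Write back the updated address for the (An)+ and -(An) modes.  */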
4962     if ((insn & 070) == 030 || (insn & 070) == 040) {
4963         tcg_gen_mov_i32(AREG(insn, 0), tmp);
4964     }
4965     tcg_temp_free(tmp);
4966 }
4967 
4968 /* ??? FP exceptions are not implemented.  Most exceptions are deferred until
4969    immediately before the next FP instruction is executed.  */
4970 DISAS_INSN(fpu)
4971 {
4972     uint16_t ext;
4973     int opmode;
4974     int opsize;
4975     TCGv_ptr cpu_src, cpu_dest;
4976 
4977     ext = read_im16(env, s);
4978     opmode = ext & 0x7f;
4979     switch ((ext >> 13) & 7) {
4980     case 0:
4981         break;
4982     case 1:
4983         goto undef;
4984     case 2:
4985         if (insn == 0xf200 && (ext & 0xfc00) == 0x5c00) {
4986             /* fmovecr */
4987             TCGv rom_offset = tcg_const_i32(opmode);
4988             cpu_dest = gen_fp_ptr(REG(ext, 7));
4989             gen_helper_fconst(cpu_env, cpu_dest, rom_offset);
4990             tcg_temp_free_ptr(cpu_dest);
4991             tcg_temp_free(rom_offset);
4992             return;
4993         }
4994         break;
4995     case 3: /* fmove out */
4996         cpu_src = gen_fp_ptr(REG(ext, 7));
4997         opsize = ext_opsize(ext, 10);
4998         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
4999                       EA_STORE, IS_USER(s)) == -1) {
5000             gen_addr_fault(s);
5001         }
5002         gen_helper_ftst(cpu_env, cpu_src);
5003         tcg_temp_free_ptr(cpu_src);
5004         return;
5005     case 4: /* fmove to control register.  */
5006     case 5: /* fmove from control register.  */
5007         gen_op_fmove_fcr(env, s, insn, ext);
5008         return;
5009     case 6: /* fmovem */
5010     case 7:
5011         if ((ext & 0x1000) == 0 && !m68k_feature(s->env, M68K_FEATURE_FPU)) {
5012             goto undef;
5013         }
5014         gen_op_fmovem(env, s, insn, ext);
5015         return;
5016     }
5017     if (ext & (1 << 14)) {
5018         /* Source effective address.  */
5019         opsize = ext_opsize(ext, 10);
5020         cpu_src = gen_fp_result_ptr();
5021         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
5022                       EA_LOADS, IS_USER(s)) == -1) {
5023             gen_addr_fault(s);
5024             return;
5025         }
5026     } else {
5027         /* Source register.  */
5028         opsize = OS_EXTENDED;
5029         cpu_src = gen_fp_ptr(REG(ext, 10));
5030     }
5031     cpu_dest = gen_fp_ptr(REG(ext, 7));
5032     switch (opmode) {
5033     case 0: /* fmove */
5034         gen_fp_move(cpu_dest, cpu_src);
5035         break;
5036     case 0x40: /* fsmove */
5037         gen_helper_fsround(cpu_env, cpu_dest, cpu_src);
5038         break;
5039     case 0x44: /* fdmove */
5040         gen_helper_fdround(cpu_env, cpu_dest, cpu_src);
5041         break;
5042     case 1: /* fint */
5043         gen_helper_firound(cpu_env, cpu_dest, cpu_src);
5044         break;
5045     case 3: /* fintrz */
5046         gen_helper_fitrunc(cpu_env, cpu_dest, cpu_src);
5047         break;
5048     case 4: /* fsqrt */
5049         gen_helper_fsqrt(cpu_env, cpu_dest, cpu_src);
5050         break;
5051     case 0x41: /* fssqrt */
5052         gen_helper_fssqrt(cpu_env, cpu_dest, cpu_src);
5053         break;
5054     case 0x45: /* fdsqrt */
5055         gen_helper_fdsqrt(cpu_env, cpu_dest, cpu_src);
5056         break;
5057     case 0x18: /* fabs */
5058         gen_helper_fabs(cpu_env, cpu_dest, cpu_src);
5059         break;
5060     case 0x58: /* fsabs */
5061         gen_helper_fsabs(cpu_env, cpu_dest, cpu_src);
5062         break;
5063     case 0x5c: /* fdabs */
5064         gen_helper_fdabs(cpu_env, cpu_dest, cpu_src);
5065         break;
5066     case 0x1a: /* fneg */
5067         gen_helper_fneg(cpu_env, cpu_dest, cpu_src);
5068         break;
5069     case 0x5a: /* fsneg */
5070         gen_helper_fsneg(cpu_env, cpu_dest, cpu_src);
5071         break;
5072     case 0x5e: /* fdneg */
5073         gen_helper_fdneg(cpu_env, cpu_dest, cpu_src);
5074         break;
5075     case 0x1e: /* fgetexp */
5076         gen_helper_fgetexp(cpu_env, cpu_dest, cpu_src);
5077         break;
5078     case 0x1f: /* fgetman */
5079         gen_helper_fgetman(cpu_env, cpu_dest, cpu_src);
5080         break;
5081     case 0x20: /* fdiv */
5082         gen_helper_fdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5083         break;
5084     case 0x60: /* fsdiv */
5085         gen_helper_fsdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5086         break;
5087     case 0x64: /* fddiv */
5088         gen_helper_fddiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5089         break;
5090     case 0x21: /* fmod */
5091         gen_helper_fmod(cpu_env, cpu_dest, cpu_src, cpu_dest);
5092         break;
5093     case 0x22: /* fadd */
5094         gen_helper_fadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5095         break;
5096     case 0x62: /* fsadd */
5097         gen_helper_fsadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5098         break;
5099     case 0x66: /* fdadd */
5100         gen_helper_fdadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5101         break;
5102     case 0x23: /* fmul */
5103         gen_helper_fmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5104         break;
5105     case 0x63: /* fsmul */
5106         gen_helper_fsmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5107         break;
5108     case 0x67: /* fdmul */
5109         gen_helper_fdmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5110         break;
5111     case 0x24: /* fsgldiv */
5112         gen_helper_fsgldiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5113         break;
5114     case 0x25: /* frem */
5115         gen_helper_frem(cpu_env, cpu_dest, cpu_src, cpu_dest);
5116         break;
5117     case 0x26: /* fscale */
5118         gen_helper_fscale(cpu_env, cpu_dest, cpu_src, cpu_dest);
5119         break;
5120     case 0x27: /* fsglmul */
5121         gen_helper_fsglmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5122         break;
5123     case 0x28: /* fsub */
5124         gen_helper_fsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5125         break;
5126     case 0x68: /* fssub */
5127         gen_helper_fssub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5128         break;
5129     case 0x6c: /* fdsub */
5130         gen_helper_fdsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5131         break;
5132     case 0x38: /* fcmp */
5133         gen_helper_fcmp(cpu_env, cpu_src, cpu_dest);
5134         return;
5135     case 0x3a: /* ftst */
5136         gen_helper_ftst(cpu_env, cpu_src);
5137         return;
5138     default:
5139         goto undef;
5140     }
5141     tcg_temp_free_ptr(cpu_src);
5142     gen_helper_ftst(cpu_env, cpu_dest);
5143     tcg_temp_free_ptr(cpu_dest);
5144     return;
5145 undef:
5146     /* FIXME: Is this right for offset addressing modes?  */
5147     s->pc -= 2;
5148     disas_undef_fpu(env, s, insn);
5149 }
5150 
5151 static void gen_fcc_cond(DisasCompare *c, DisasContext *s, int cond)
5152 {
5153     TCGv fpsr;
5154 
5155     c->g1 = 1;
5156     c->v2 = tcg_const_i32(0);
5157     c->g2 = 0;
5158     /* TODO: Raise BSUN exception.  */
5159     fpsr = tcg_temp_new();
5160     gen_load_fcr(s, fpsr, M68K_FPSR);
5161     switch (cond) {
5162     case 0:  /* False */
5163     case 16: /* Signaling False */
5164         c->v1 = c->v2;
5165         c->tcond = TCG_COND_NEVER;
5166         break;
5167     case 1:  /* EQual Z */
5168     case 17: /* Signaling EQual Z */
5169         c->v1 = tcg_temp_new();
5170         c->g1 = 0;
5171         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5172         c->tcond = TCG_COND_NE;
5173         break;
5174     case 2:  /* Ordered Greater Than !(A || Z || N) */
5175     case 18: /* Greater Than !(A || Z || N) */
5176         c->v1 = tcg_temp_new();
5177         c->g1 = 0;
5178         tcg_gen_andi_i32(c->v1, fpsr,
5179                          FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5180         c->tcond = TCG_COND_EQ;
5181         break;
5182     case 3:  /* Ordered Greater than or Equal Z || !(A || N) */
5183     case 19: /* Greater than or Equal Z || !(A || N) */
5184         c->v1 = tcg_temp_new();
5185         c->g1 = 0;
5186         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
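        /* Move A up to the N bit so the OR/XOR below compute !(A || N).  */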
5187         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5188         tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_Z | FPSR_CC_N);
5189         tcg_gen_or_i32(c->v1, c->v1, fpsr);
5190         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5191         c->tcond = TCG_COND_NE;
5192         break;
5193     case 4:  /* Ordered Less Than !(!N || A || Z) */
5194     case 20: /* Less Than !(!N || A || Z) */
5195         c->v1 = tcg_temp_new();
5196         c->g1 = 0;
5197         tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
5198         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_N | FPSR_CC_A | FPSR_CC_Z);
5199         c->tcond = TCG_COND_EQ;
5200         break;
5201     case 5:  /* Ordered Less than or Equal Z || (N && !A) */
5202     case 21: /* Less than or Equal Z || (N && !A) */
5203         c->v1 = tcg_temp_new();
5204         c->g1 = 0;
5205         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5206         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5207         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5208         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_Z | FPSR_CC_N);
5209         c->tcond = TCG_COND_NE;
5210         break;
5211     case 6:  /* Ordered Greater or Less than !(A || Z) */
5212     case 22: /* Greater or Less than !(A || Z) */
5213         c->v1 = tcg_temp_new();
5214         c->g1 = 0;
5215         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5216         c->tcond = TCG_COND_EQ;
5217         break;
5218     case 7:  /* Ordered !A */
5219     case 23: /* Greater, Less or Equal !A */
5220         c->v1 = tcg_temp_new();
5221         c->g1 = 0;
5222         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5223         c->tcond = TCG_COND_EQ;
5224         break;
5225     case 8:  /* Unordered A */
5226     case 24: /* Not Greater, Less or Equal A */
5227         c->v1 = tcg_temp_new();
5228         c->g1 = 0;
5229         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5230         c->tcond = TCG_COND_NE;
5231         break;
5232     case 9:  /* Unordered or Equal A || Z */
5233     case 25: /* Not Greater or Less than A || Z */
5234         c->v1 = tcg_temp_new();
5235         c->g1 = 0;
5236         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5237         c->tcond = TCG_COND_NE;
5238         break;
5239     case 10: /* Unordered or Greater Than A || !(N || Z) */
5240     case 26: /* Not Less or Equal A || !(N || Z) */
5241         c->v1 = tcg_temp_new();
5242         c->g1 = 0;
5243         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5244         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5245         tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_A | FPSR_CC_N);
5246         tcg_gen_or_i32(c->v1, c->v1, fpsr);
5247         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5248         c->tcond = TCG_COND_NE;
5249         break;
5250     case 11: /* Unordered or Greater or Equal A || Z || !N */
5251     case 27: /* Not Less Than A || Z || !N */
5252         c->v1 = tcg_temp_new();
5253         c->g1 = 0;
5254         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5255         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5256         c->tcond = TCG_COND_NE;
5257         break;
5258     case 12: /* Unordered or Less Than A || (N && !Z) */
5259     case 28: /* Not Greater than or Equal A || (N && !Z) */
5260         c->v1 = tcg_temp_new();
5261         c->g1 = 0;
5262         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5263         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5264         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5265         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_A | FPSR_CC_N);
5266         c->tcond = TCG_COND_NE;
5267         break;
5268     case 13: /* Unordered or Less or Equal A || Z || N */
5269     case 29: /* Not Greater Than A || Z || N */
5270         c->v1 = tcg_temp_new();
5271         c->g1 = 0;
5272         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5273         c->tcond = TCG_COND_NE;
5274         break;
5275     case 14: /* Not Equal !Z */
5276     case 30: /* Signaling Not Equal !Z */
5277         c->v1 = tcg_temp_new();
5278         c->g1 = 0;
5279         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5280         c->tcond = TCG_COND_EQ;
5281         break;
5282     case 15: /* True */
5283     case 31: /* Signaling True */
5284         c->v1 = c->v2;
5285         c->tcond = TCG_COND_ALWAYS;
5286         break;
5287     }
5288     tcg_temp_free(fpsr);
5289 }
5290 
5291 static void gen_fjmpcc(DisasContext *s, int cond, TCGLabel *l1)
5292 {
5293     DisasCompare c;
5294 
5295     gen_fcc_cond(&c, s, cond);
5296     update_cc_op(s);
5297     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
5298     free_cond(&c);
5299 }
5300 
5301 DISAS_INSN(fbcc)
5302 {
5303     uint32_t offset;
5304     uint32_t base;
5305     TCGLabel *l1;
5306 
5307     base = s->pc;
5308     offset = (int16_t)read_im16(env, s);
5309     if (insn & (1 << 6)) {
5310         offset = (offset << 16) | read_im16(env, s);
5311     }
5312 
5313     l1 = gen_new_label();
5314     update_cc_op(s);
5315     gen_fjmpcc(s, insn & 0x3f, l1);
5316     gen_jmp_tb(s, 0, s->pc);
5317     gen_set_label(l1);
5318     gen_jmp_tb(s, 1, base + offset);
5319 }
5320 
5321 DISAS_INSN(fscc)
5322 {
5323     DisasCompare c;
5324     int cond;
5325     TCGv tmp;
5326     uint16_t ext;
5327 
5328     ext = read_im16(env, s);
5329     cond = ext & 0x3f;
5330     gen_fcc_cond(&c, s, cond);
5331 
5332     tmp = tcg_temp_new();
5333     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
5334     free_cond(&c);
5335 
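    /* FScc sets the destination byte to all ones when the condition
       holds, so turn the 0/1 setcond result into 0x00/0xff.  */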
5336     tcg_gen_neg_i32(tmp, tmp);
5337     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
5338     tcg_temp_free(tmp);
5339 }
5340 
5341 #if defined(CONFIG_SOFTMMU)
5342 DISAS_INSN(frestore)
5343 {
5344     TCGv addr;
5345 
5346     if (IS_USER(s)) {
5347         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
5348         return;
5349     }
5350     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5351         SRC_EA(env, addr, OS_LONG, 0, NULL);
5352         /* FIXME: check the state frame */
5353     } else {
5354         disas_undef(env, s, insn);
5355     }
5356 }
5357 
5358 DISAS_INSN(fsave)
5359 {
5360     if (IS_USER(s)) {
5361         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
5362         return;
5363     }
5364 
5365     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5366         /* always write IDLE */
5367         TCGv idle = tcg_const_i32(0x41000000);
5368         DEST_EA(env, insn, OS_LONG, idle, NULL);
5369         tcg_temp_free(idle);
5370     } else {
5371         disas_undef(env, s, insn);
5372     }
5373 }
5374 #endif
5375 
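/* Extract the upper or lower 16-bit operand of a word-mode MAC
   instruction, honouring the MACSR fractional (FI) and signed (SU)
   modes.  */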
5376 static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
5377 {
5378     TCGv tmp = tcg_temp_new();
5379     if (s->env->macsr & MACSR_FI) {
5380         if (upper)
5381             tcg_gen_andi_i32(tmp, val, 0xffff0000);
5382         else
5383             tcg_gen_shli_i32(tmp, val, 16);
5384     } else if (s->env->macsr & MACSR_SU) {
5385         if (upper)
5386             tcg_gen_sari_i32(tmp, val, 16);
5387         else
5388             tcg_gen_ext16s_i32(tmp, val);
5389     } else {
5390         if (upper)
5391             tcg_gen_shri_i32(tmp, val, 16);
5392         else
5393             tcg_gen_ext16u_i32(tmp, val);
5394     }
5395     return tmp;
5396 }
5397 
5398 static void gen_mac_clear_flags(void)
5399 {
5400     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
5401                      ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
5402 }
5403 
5404 DISAS_INSN(mac)
5405 {
5406     TCGv rx;
5407     TCGv ry;
5408     uint16_t ext;
5409     int acc;
5410     TCGv tmp;
5411     TCGv addr;
5412     TCGv loadval;
5413     int dual;
5414     TCGv saved_flags;
5415 
5416     if (!s->done_mac) {
5417         s->mactmp = tcg_temp_new_i64();
5418         s->done_mac = 1;
5419     }
5420 
5421     ext = read_im16(env, s);
5422 
5423     acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
5424     dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
5425     if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
5426         disas_undef(env, s, insn);
5427         return;
5428     }
5429     if (insn & 0x30) {
5430         /* MAC with load.  */
5431         tmp = gen_lea(env, s, insn, OS_LONG);
5432         addr = tcg_temp_new();
5433         tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
5434         /* Load the value now to ensure correct exception behavior.
5435            Perform writeback after reading the MAC inputs.  */
5436         loadval = gen_load(s, OS_LONG, addr, 0, IS_USER(s));
5437 
5438         acc ^= 1;
5439         rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
5440         ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
5441     } else {
5442         loadval = addr = NULL_QREG;
5443         rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5444         ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5445     }
5446 
5447     gen_mac_clear_flags();
5448 #if 0
5449     l1 = -1;
5450     /* Disabled because conditional branches clobber temporary vars.  */
5451     if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
5452         /* Skip the multiply if we know we will ignore it.  */
5453         l1 = gen_new_label();
5454         tmp = tcg_temp_new();
5455         tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
5456         gen_op_jmp_nz32(tmp, l1);
5457     }
5458 #endif
5459 
5460     if ((ext & 0x0800) == 0) {
5461         /* Word.  */
5462         rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
5463         ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
5464     }
5465     if (s->env->macsr & MACSR_FI) {
5466         gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
5467     } else {
5468         if (s->env->macsr & MACSR_SU)
5469             gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
5470         else
5471             gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
5472         switch ((ext >> 9) & 3) {
5473         case 1:
5474             tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
5475             break;
5476         case 3:
5477             tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
5478             break;
5479         }
5480     }
5481 
5482     if (dual) {
5483         /* Save the overflow flag from the multiply.  */
5484         saved_flags = tcg_temp_new();
5485         tcg_gen_mov_i32(saved_flags, QREG_MACSR);
5486     } else {
5487         saved_flags = NULL_QREG;
5488     }
5489 
5490 #if 0
5491     /* Disabled because conditional branches clobber temporary vars.  */
5492     if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
5493         /* Skip the accumulate if the value is already saturated.  */
5494         l1 = gen_new_label();
5495         tmp = tcg_temp_new();
5496         gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5497         gen_op_jmp_nz32(tmp, l1);
5498     }
5499 #endif
5500 
5501     if (insn & 0x100)
5502         tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5503     else
5504         tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5505 
5506     if (s->env->macsr & MACSR_FI)
5507         gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5508     else if (s->env->macsr & MACSR_SU)
5509         gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5510     else
5511         gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5512 
5513 #if 0
5514     /* Disabled because conditional branches clobber temporary vars.  */
5515     if (l1 != -1)
5516         gen_set_label(l1);
5517 #endif
5518 
5519     if (dual) {
5520         /* Dual accumulate variant.  */
5521         acc = (ext >> 2) & 3;
5522         /* Restore the overflow flag from the multiplier.  */
5523         tcg_gen_mov_i32(QREG_MACSR, saved_flags);
5524 #if 0
5525         /* Disabled because conditional branches clobber temporary vars.  */
5526         if ((s->env->macsr & MACSR_OMC) != 0) {
5527             /* Skip the accumulate if the value is already saturated.  */
5528             l1 = gen_new_label();
5529             tmp = tcg_temp_new();
5530             gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5531             gen_op_jmp_nz32(tmp, l1);
5532         }
5533 #endif
5534         if (ext & 2)
5535             tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5536         else
5537             tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5538         if (s->env->macsr & MACSR_FI)
5539             gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5540         else if (s->env->macsr & MACSR_SU)
5541             gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5542         else
5543             gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5544 #if 0
5545         /* Disabled because conditional branches clobber temporary vars.  */
5546         if (l1 != -1)
5547             gen_set_label(l1);
5548 #endif
5549     }
5550     gen_helper_mac_set_flags(cpu_env, tcg_const_i32(acc));
5551 
5552     if (insn & 0x30) {
5553         TCGv rw;
5554         rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5555         tcg_gen_mov_i32(rw, loadval);
5556         /* FIXME: Should address writeback happen with the masked or
5557            unmasked value?  */
5558         switch ((insn >> 3) & 7) {
5559         case 3: /* Post-increment.  */
5560             tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
5561             break;
5562         case 4: /* Pre-decrement.  */
5563             tcg_gen_mov_i32(AREG(insn, 0), addr);
5564         }
5565         tcg_temp_free(loadval);
5566     }
5567 }
5568 
5569 DISAS_INSN(from_mac)
5570 {
5571     TCGv rx;
5572     TCGv_i64 acc;
5573     int accnum;
5574 
5575     rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5576     accnum = (insn >> 9) & 3;
5577     acc = MACREG(accnum);
5578     if (s->env->macsr & MACSR_FI) {
5579         gen_helper_get_macf(rx, cpu_env, acc);
5580     } else if ((s->env->macsr & MACSR_OMC) == 0) {
5581         tcg_gen_extrl_i64_i32(rx, acc);
5582     } else if (s->env->macsr & MACSR_SU) {
5583         gen_helper_get_macs(rx, acc);
5584     } else {
5585         gen_helper_get_macu(rx, acc);
5586     }
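    /* Insn bit 6 selects the move-and-clear form: after the read the
       accumulator is zeroed and its MACSR_PAV overflow flag cleared.  */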
5587     if (insn & 0x40) {
5588         tcg_gen_movi_i64(acc, 0);
5589         tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5590     }
5591 }
5592 
5593 DISAS_INSN(move_mac)
5594 {
5595     /* FIXME: This can be done without a helper.  */
5596     int src;
5597     TCGv dest;
5598     src = insn & 3;
5599     dest = tcg_const_i32((insn >> 9) & 3);
5600     gen_helper_mac_move(cpu_env, dest, tcg_const_i32(src));
5601     gen_mac_clear_flags();
5602     gen_helper_mac_set_flags(cpu_env, dest);
5603 }
5604 
5605 DISAS_INSN(from_macsr)
5606 {
5607     TCGv reg;
5608 
5609     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5610     tcg_gen_mov_i32(reg, QREG_MACSR);
5611 }
5612 
5613 DISAS_INSN(from_mask)
5614 {
5615     TCGv reg;
5616     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5617     tcg_gen_mov_i32(reg, QREG_MAC_MASK);
5618 }
5619 
5620 DISAS_INSN(from_mext)
5621 {
5622     TCGv reg;
5623     TCGv acc;
5624     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5625     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5626     if (s->env->macsr & MACSR_FI)
5627         gen_helper_get_mac_extf(reg, cpu_env, acc);
5628     else
5629         gen_helper_get_mac_exti(reg, cpu_env, acc);
5630 }
5631 
5632 DISAS_INSN(macsr_to_ccr)
5633 {
5634     TCGv tmp = tcg_temp_new();
5635     tcg_gen_andi_i32(tmp, QREG_MACSR, 0xf);
5636     gen_helper_set_sr(cpu_env, tmp);
5637     tcg_temp_free(tmp);
5638     set_cc_op(s, CC_OP_FLAGS);
5639 }
5640 
5641 DISAS_INSN(to_mac)
5642 {
5643     TCGv_i64 acc;
5644     TCGv val;
5645     int accnum;
5646     accnum = (insn >> 9) & 3;
5647     acc = MACREG(accnum);
5648     SRC_EA(env, val, OS_LONG, 0, NULL);
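    /* Fractional (MACSR_FI) mode sign-extends the value and shifts it
       left 8 bits; signed integer mode sign-extends; unsigned mode
       zero-extends.  */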
5649     if (s->env->macsr & MACSR_FI) {
5650         tcg_gen_ext_i32_i64(acc, val);
5651         tcg_gen_shli_i64(acc, acc, 8);
5652     } else if (s->env->macsr & MACSR_SU) {
5653         tcg_gen_ext_i32_i64(acc, val);
5654     } else {
5655         tcg_gen_extu_i32_i64(acc, val);
5656     }
5657     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5658     gen_mac_clear_flags();
5659     gen_helper_mac_set_flags(cpu_env, tcg_const_i32(accnum));
5660 }
5661 
5662 DISAS_INSN(to_macsr)
5663 {
5664     TCGv val;
5665     SRC_EA(env, val, OS_LONG, 0, NULL);
5666     gen_helper_set_macsr(cpu_env, val);
5667     gen_lookup_tb(s);
5668 }
5669 
5670 DISAS_INSN(to_mask)
5671 {
5672     TCGv val;
5673     SRC_EA(env, val, OS_LONG, 0, NULL);
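    /* Only the low 16 bits of MAC_MASK are programmable; the upper half
       is forced to ones so the address masking in the MAC-with-load path
       only ever clears low address bits.  */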
5674     tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
5675 }
5676 
5677 DISAS_INSN(to_mext)
5678 {
5679     TCGv val;
5680     TCGv acc;
5681     SRC_EA(env, val, OS_LONG, 0, NULL);
5682     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5683     if (s->env->macsr & MACSR_FI)
5684         gen_helper_set_mac_extf(cpu_env, val, acc);
5685     else if (s->env->macsr & MACSR_SU)
5686         gen_helper_set_mac_exts(cpu_env, val, acc);
5687     else
5688         gen_helper_set_mac_extu(cpu_env, val, acc);
5689 }
5690 
5691 static disas_proc opcode_table[65536];
5692 
5693 static void
5694 register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
5695 {
5696   int i;
5697   int from;
5698   int to;
5699 
5700   /* Sanity check.  All set bits must be included in the mask.  */
5701   if (opcode & ~mask) {
5702       fprintf(stderr,
5703               "qemu internal error: bogus opcode definition %04x/%04x\n",
5704               opcode, mask);
5705       abort();
5706   }
5707   /* This could probably be cleverer.  For now just optimize the case where
5708      the top bits are known.  */
5709   /* Find the first zero bit in the mask.  */
5710   i = 0x8000;
5711   while ((i & mask) != 0)
5712       i >>= 1;
5713   /* Iterate over all combinations of this and lower bits.  */
5714   if (i == 0)
5715       i = 1;
5716   else
5717       i <<= 1;
5718   from = opcode & ~(i - 1);
5719   to = from + i;
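  /* Illustrative example: for opcode 0x51f8 with mask 0xfff8 the first
     zero mask bit is 0x0004, so i ends up as 0x0008, from = 0x51f8 and
     to = 0x5200, and the loop fills slots 0x51f8..0x51ff.  The
     (i & mask) == opcode test matters when the mask has additional zero
     bits below the first one found.  */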
5720   for (i = from; i < to; i++) {
5721       if ((i & mask) == opcode)
5722           opcode_table[i] = proc;
5723   }
5724 }
5725 
5726 /* Register m68k opcode handlers.  Order is important.
5727    Later insns override earlier ones.  */
5728 void register_m68k_insns (CPUM68KState *env)
5729 {
5730     /* Build the opcode table only once to avoid
5731        multithreading issues. */
5732     if (opcode_table[0] != NULL) {
5733         return;
5734     }
5735 
5736     /* Use BASE() for instructions available
5737      * on both CF_ISA_A and M68000.
5738      */
5739 #define BASE(name, opcode, mask) \
5740     register_opcode(disas_##name, 0x##opcode, 0x##mask)
5741 #define INSN(name, opcode, mask, feature) do { \
5742     if (m68k_feature(env, M68K_FEATURE_##feature)) \
5743         BASE(name, opcode, mask); \
5744     } while(0)
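    /* For example, INSN(mac, a000, f100, CF_EMAC) below expands to
       register_opcode(disas_mac, 0xa000, 0xf100) guarded by a check for
       M68K_FEATURE_CF_EMAC, while BASE() registers unconditionally.  */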
5745     BASE(undef,     0000, 0000);
5746     INSN(arith_im,  0080, fff8, CF_ISA_A);
5747     INSN(arith_im,  0000, ff00, M68000);
5748     INSN(chk2,      00c0, f9c0, CHK2);
5749     INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
5750     BASE(bitop_reg, 0100, f1c0);
5751     BASE(bitop_reg, 0140, f1c0);
5752     BASE(bitop_reg, 0180, f1c0);
5753     BASE(bitop_reg, 01c0, f1c0);
5754     INSN(movep,     0108, f138, MOVEP);
5755     INSN(arith_im,  0280, fff8, CF_ISA_A);
5756     INSN(arith_im,  0200, ff00, M68000);
5757     INSN(undef,     02c0, ffc0, M68000);
5758     INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
5759     INSN(arith_im,  0480, fff8, CF_ISA_A);
5760     INSN(arith_im,  0400, ff00, M68000);
5761     INSN(undef,     04c0, ffc0, M68000);
5762     INSN(arith_im,  0600, ff00, M68000);
5763     INSN(undef,     06c0, ffc0, M68000);
5764     INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
5765     INSN(arith_im,  0680, fff8, CF_ISA_A);
5766     INSN(arith_im,  0c00, ff38, CF_ISA_A);
5767     INSN(arith_im,  0c00, ff00, M68000);
5768     BASE(bitop_im,  0800, ffc0);
5769     BASE(bitop_im,  0840, ffc0);
5770     BASE(bitop_im,  0880, ffc0);
5771     BASE(bitop_im,  08c0, ffc0);
5772     INSN(arith_im,  0a80, fff8, CF_ISA_A);
5773     INSN(arith_im,  0a00, ff00, M68000);
5774 #if defined(CONFIG_SOFTMMU)
5775     INSN(moves,     0e00, ff00, M68000);
5776 #endif
5777     INSN(cas,       0ac0, ffc0, CAS);
5778     INSN(cas,       0cc0, ffc0, CAS);
5779     INSN(cas,       0ec0, ffc0, CAS);
5780     INSN(cas2w,     0cfc, ffff, CAS);
5781     INSN(cas2l,     0efc, ffff, CAS);
5782     BASE(move,      1000, f000);
5783     BASE(move,      2000, f000);
5784     BASE(move,      3000, f000);
5785     INSN(chk,       4000, f040, M68000);
5786     INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
5787     INSN(negx,      4080, fff8, CF_ISA_A);
5788     INSN(negx,      4000, ff00, M68000);
5789     INSN(undef,     40c0, ffc0, M68000);
5790     INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
5791     INSN(move_from_sr, 40c0, ffc0, M68000);
5792     BASE(lea,       41c0, f1c0);
5793     BASE(clr,       4200, ff00);
5794     BASE(undef,     42c0, ffc0);
5795     INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
5796     INSN(move_from_ccr, 42c0, ffc0, M68000);
5797     INSN(neg,       4480, fff8, CF_ISA_A);
5798     INSN(neg,       4400, ff00, M68000);
5799     INSN(undef,     44c0, ffc0, M68000);
5800     BASE(move_to_ccr, 44c0, ffc0);
5801     INSN(not,       4680, fff8, CF_ISA_A);
5802     INSN(not,       4600, ff00, M68000);
5803 #if defined(CONFIG_SOFTMMU)
5804     BASE(move_to_sr, 46c0, ffc0);
5805 #endif
5806     INSN(nbcd,      4800, ffc0, M68000);
5807     INSN(linkl,     4808, fff8, M68000);
5808     BASE(pea,       4840, ffc0);
5809     BASE(swap,      4840, fff8);
5810     INSN(bkpt,      4848, fff8, BKPT);
5811     INSN(movem,     48d0, fbf8, CF_ISA_A);
5812     INSN(movem,     48e8, fbf8, CF_ISA_A);
5813     INSN(movem,     4880, fb80, M68000);
5814     BASE(ext,       4880, fff8);
5815     BASE(ext,       48c0, fff8);
5816     BASE(ext,       49c0, fff8);
5817     BASE(tst,       4a00, ff00);
5818     INSN(tas,       4ac0, ffc0, CF_ISA_B);
5819     INSN(tas,       4ac0, ffc0, M68000);
5820 #if defined(CONFIG_SOFTMMU)
5821     INSN(halt,      4ac8, ffff, CF_ISA_A);
5822 #endif
5823     INSN(pulse,     4acc, ffff, CF_ISA_A);
5824     BASE(illegal,   4afc, ffff);
5825     INSN(mull,      4c00, ffc0, CF_ISA_A);
5826     INSN(mull,      4c00, ffc0, LONG_MULDIV);
5827     INSN(divl,      4c40, ffc0, CF_ISA_A);
5828     INSN(divl,      4c40, ffc0, LONG_MULDIV);
5829     INSN(sats,      4c80, fff8, CF_ISA_B);
5830     BASE(trap,      4e40, fff0);
5831     BASE(link,      4e50, fff8);
5832     BASE(unlk,      4e58, fff8);
5833 #if defined(CONFIG_SOFTMMU)
5834     INSN(move_to_usp, 4e60, fff8, USP);
5835     INSN(move_from_usp, 4e68, fff8, USP);
5836     INSN(reset,     4e70, ffff, M68000);
5837     BASE(stop,      4e72, ffff);
5838     BASE(rte,       4e73, ffff);
5839     INSN(cf_movec,  4e7b, ffff, CF_ISA_A);
5840     INSN(m68k_movec, 4e7a, fffe, M68000);
5841 #endif
5842     BASE(nop,       4e71, ffff);
5843     INSN(rtd,       4e74, ffff, RTD);
5844     BASE(rts,       4e75, ffff);
5845     BASE(jump,      4e80, ffc0);
5846     BASE(jump,      4ec0, ffc0);
5847     INSN(addsubq,   5000, f080, M68000);
5848     BASE(addsubq,   5080, f0c0);
5849     INSN(scc,       50c0, f0f8, CF_ISA_A); /* Scc.B Dx   */
5850     INSN(scc,       50c0, f0c0, M68000);   /* Scc.B <EA> */
5851     INSN(dbcc,      50c8, f0f8, M68000);
5852     INSN(tpf,       51f8, fff8, CF_ISA_A);
5853 
5854     /* Branch instructions.  */
5855     BASE(branch,    6000, f000);
5856     /* Disable long branch instructions, then add back the ones we want.  */
5857     BASE(undef,     60ff, f0ff); /* All long branches.  */
5858     INSN(branch,    60ff, f0ff, CF_ISA_B);
5859     INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
5860     INSN(branch,    60ff, ffff, BRAL);
5861     INSN(branch,    60ff, f0ff, BCCL);
5862 
5863     BASE(moveq,     7000, f100);
5864     INSN(mvzs,      7100, f100, CF_ISA_B);
5865     BASE(or,        8000, f000);
5866     BASE(divw,      80c0, f0c0);
5867     INSN(sbcd_reg,  8100, f1f8, M68000);
5868     INSN(sbcd_mem,  8108, f1f8, M68000);
5869     BASE(addsub,    9000, f000);
5870     INSN(undef,     90c0, f0c0, CF_ISA_A);
5871     INSN(subx_reg,  9180, f1f8, CF_ISA_A);
5872     INSN(subx_reg,  9100, f138, M68000);
5873     INSN(subx_mem,  9108, f138, M68000);
5874     INSN(suba,      91c0, f1c0, CF_ISA_A);
5875     INSN(suba,      90c0, f0c0, M68000);
5876 
5877     BASE(undef_mac, a000, f000);
5878     INSN(mac,       a000, f100, CF_EMAC);
5879     INSN(from_mac,  a180, f9b0, CF_EMAC);
5880     INSN(move_mac,  a110, f9fc, CF_EMAC);
5881     INSN(from_macsr,a980, f9f0, CF_EMAC);
5882     INSN(from_mask, ad80, fff0, CF_EMAC);
5883     INSN(from_mext, ab80, fbf0, CF_EMAC);
5884     INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
5885     INSN(to_mac,    a100, f9c0, CF_EMAC);
5886     INSN(to_macsr,  a900, ffc0, CF_EMAC);
5887     INSN(to_mext,   ab00, fbc0, CF_EMAC);
5888     INSN(to_mask,   ad00, ffc0, CF_EMAC);
5889 
5890     INSN(mov3q,     a140, f1c0, CF_ISA_B);
5891     INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
5892     INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
5893     INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
5894     INSN(cmp,       b080, f1c0, CF_ISA_A);
5895     INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
5896     INSN(cmp,       b000, f100, M68000);
5897     INSN(eor,       b100, f100, M68000);
5898     INSN(cmpm,      b108, f138, M68000);
5899     INSN(cmpa,      b0c0, f0c0, M68000);
5900     INSN(eor,       b180, f1c0, CF_ISA_A);
5901     BASE(and,       c000, f000);
5902     INSN(exg_dd,    c140, f1f8, M68000);
5903     INSN(exg_aa,    c148, f1f8, M68000);
5904     INSN(exg_da,    c188, f1f8, M68000);
5905     BASE(mulw,      c0c0, f0c0);
5906     INSN(abcd_reg,  c100, f1f8, M68000);
5907     INSN(abcd_mem,  c108, f1f8, M68000);
5908     BASE(addsub,    d000, f000);
5909     INSN(undef,     d0c0, f0c0, CF_ISA_A);
5910     INSN(addx_reg,  d180, f1f8, CF_ISA_A);
5911     INSN(addx_reg,  d100, f138, M68000);
5912     INSN(addx_mem,  d108, f138, M68000);
5913     INSN(adda,      d1c0, f1c0, CF_ISA_A);
5914     INSN(adda,      d0c0, f0c0, M68000);
5915     INSN(shift_im,  e080, f0f0, CF_ISA_A);
5916     INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
5917     INSN(shift8_im, e000, f0f0, M68000);
5918     INSN(shift16_im, e040, f0f0, M68000);
5919     INSN(shift_im,  e080, f0f0, M68000);
5920     INSN(shift8_reg, e020, f0f0, M68000);
5921     INSN(shift16_reg, e060, f0f0, M68000);
5922     INSN(shift_reg, e0a0, f0f0, M68000);
5923     INSN(shift_mem, e0c0, fcc0, M68000);
5924     INSN(rotate_im, e090, f0f0, M68000);
5925     INSN(rotate8_im, e010, f0f0, M68000);
5926     INSN(rotate16_im, e050, f0f0, M68000);
5927     INSN(rotate_reg, e0b0, f0f0, M68000);
5928     INSN(rotate8_reg, e030, f0f0, M68000);
5929     INSN(rotate16_reg, e070, f0f0, M68000);
5930     INSN(rotate_mem, e4c0, fcc0, M68000);
5931     INSN(bfext_mem, e9c0, fdc0, BITFIELD);  /* bfextu & bfexts */
5932     INSN(bfext_reg, e9c0, fdf8, BITFIELD);
5933     INSN(bfins_mem, efc0, ffc0, BITFIELD);
5934     INSN(bfins_reg, efc0, fff8, BITFIELD);
5935     INSN(bfop_mem, eac0, ffc0, BITFIELD);   /* bfchg */
5936     INSN(bfop_reg, eac0, fff8, BITFIELD);   /* bfchg */
5937     INSN(bfop_mem, ecc0, ffc0, BITFIELD);   /* bfclr */
5938     INSN(bfop_reg, ecc0, fff8, BITFIELD);   /* bfclr */
5939     INSN(bfop_mem, edc0, ffc0, BITFIELD);   /* bfffo */
5940     INSN(bfop_reg, edc0, fff8, BITFIELD);   /* bfffo */
5941     INSN(bfop_mem, eec0, ffc0, BITFIELD);   /* bfset */
5942     INSN(bfop_reg, eec0, fff8, BITFIELD);   /* bfset */
5943     INSN(bfop_mem, e8c0, ffc0, BITFIELD);   /* bftst */
5944     INSN(bfop_reg, e8c0, fff8, BITFIELD);   /* bftst */
5945     BASE(undef_fpu, f000, f000);
5946     INSN(fpu,       f200, ffc0, CF_FPU);
5947     INSN(fbcc,      f280, ffc0, CF_FPU);
5948     INSN(fpu,       f200, ffc0, FPU);
5949     INSN(fscc,      f240, ffc0, FPU);
5950     INSN(fbcc,      f280, ff80, FPU);
5951 #if defined(CONFIG_SOFTMMU)
5952     INSN(frestore,  f340, ffc0, CF_FPU);
5953     INSN(fsave,     f300, ffc0, CF_FPU);
5954     INSN(frestore,  f340, ffc0, FPU);
5955     INSN(fsave,     f300, ffc0, FPU);
5956     INSN(intouch,   f340, ffc0, CF_ISA_A);
5957     INSN(cpushl,    f428, ff38, CF_ISA_A);
5958     INSN(cpush,     f420, ff20, M68040);
5959     INSN(cinv,      f400, ff20, M68040);
5960     INSN(pflush,    f500, ffe0, M68040);
5961     INSN(ptest,     f548, ffd8, M68040);
5962     INSN(wddata,    fb00, ff00, CF_ISA_A);
5963     INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
5964 #endif
5965     INSN(move16_mem, f600, ffe0, M68040);
5966     INSN(move16_reg, f620, fff8, M68040);
5967 #undef INSN
5968 }
5969 
5970 /* ??? Some of this implementation is not exception safe.  We should always
5971    write back the result to memory before setting the condition codes.  */
5972 static void disas_m68k_insn(CPUM68KState * env, DisasContext *s)
5973 {
5974     uint16_t insn = read_im16(env, s);
5975     opcode_table[insn](env, s, insn);
5976     do_writebacks(s);
5977 }
5978 
5979 /* Generate intermediate code for TranslationBlock 'tb'.  */
5980 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb)
5981 {
5982     CPUM68KState *env = cs->env_ptr;
5983     DisasContext dc1, *dc = &dc1;
5984     target_ulong pc_start;
5985     int pc_offset;
5986     int num_insns;
5987     int max_insns;
5988 
5989     /* generate intermediate code */
5990     pc_start = tb->pc;
5991 
5992     dc->tb = tb;
5993 
5994     dc->env = env;
5995     dc->is_jmp = DISAS_NEXT;
5996     dc->pc = pc_start;
5997     dc->cc_op = CC_OP_DYNAMIC;
5998     dc->cc_op_synced = 1;
5999     dc->singlestep_enabled = cs->singlestep_enabled;
6000     dc->done_mac = 0;
6001     dc->writeback_mask = 0;
6002     num_insns = 0;
6003     max_insns = tb_cflags(tb) & CF_COUNT_MASK;
6004     if (max_insns == 0) {
6005         max_insns = CF_COUNT_MASK;
6006     }
6007     if (max_insns > TCG_MAX_INSNS) {
6008         max_insns = TCG_MAX_INSNS;
6009     }
6010 
6011     gen_tb_start(tb);
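    /* Translate one instruction per iteration.  The loop below stops on a
       jump, when the TCG op buffer fills up, under single-stepping, after
       max_insns instructions, or within 32 bytes of the end of the guest
       page.  */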
6012     do {
6013         pc_offset = dc->pc - pc_start;
6014         tcg_gen_insn_start(dc->pc, dc->cc_op);
6015         num_insns++;
6016 
6017         if (unlikely(cpu_breakpoint_test(cs, dc->pc, BP_ANY))) {
6018             gen_exception(dc, dc->pc, EXCP_DEBUG);
6019             dc->is_jmp = DISAS_JUMP;
6020             /* The address covered by the breakpoint must be included in
6021                [tb->pc, tb->pc + tb->size) in order for it to be
6022                properly cleared -- thus we increment the PC here so that
6023                the logic setting tb->size below does the right thing.  */
6024             dc->pc += 2;
6025             break;
6026         }
6027 
6028         if (num_insns == max_insns && (tb_cflags(tb) & CF_LAST_IO)) {
6029             gen_io_start();
6030         }
6031 
6032         dc->insn_pc = dc->pc;
6033         disas_m68k_insn(env, dc);
6034     } while (!dc->is_jmp && !tcg_op_buf_full() &&
6035              !cs->singlestep_enabled &&
6036              !singlestep &&
6037              (pc_offset) < (TARGET_PAGE_SIZE - 32) &&
6038              num_insns < max_insns);
6039 
6040     if (tb_cflags(tb) & CF_LAST_IO)
6041         gen_io_end();
6042     if (unlikely(cs->singlestep_enabled)) {
6043         /* Make sure the pc is updated, and raise a debug exception.  */
6044         if (!dc->is_jmp) {
6045             update_cc_op(dc);
6046             tcg_gen_movi_i32(QREG_PC, dc->pc);
6047         }
6048         gen_helper_raise_exception(cpu_env, tcg_const_i32(EXCP_DEBUG));
6049     } else {
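        /* Not single-stepping: choose how the TB ends.  DISAS_NEXT chains
           to the code at dc->pc, DISAS_JUMP/DISAS_UPDATE exit to the main
           loop so the next TB is looked up, and DISAS_TB_JUMP has already
           generated its jump.  */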
6050         switch (dc->is_jmp) {
6051         case DISAS_NEXT:
6052             update_cc_op(dc);
6053             gen_jmp_tb(dc, 0, dc->pc);
6054             break;
6055         default:
6056         case DISAS_JUMP:
6057         case DISAS_UPDATE:
6058             update_cc_op(dc);
6059             /* indicate that the hash table must be used to find the next TB */
6060             tcg_gen_exit_tb(0);
6061             break;
6062         case DISAS_TB_JUMP:
6063             /* nothing more to generate */
6064             break;
6065         }
6066     }
6067     gen_tb_end(tb, num_insns);
6068 
6069 #ifdef DEBUG_DISAS
6070     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
6071         && qemu_log_in_addr_range(pc_start)) {
6072         qemu_log_lock();
6073         qemu_log("----------------\n");
6074         qemu_log("IN: %s\n", lookup_symbol(pc_start));
6075         log_target_disas(cs, pc_start, dc->pc - pc_start);
6076         qemu_log("\n");
6077         qemu_log_unlock();
6078     }
6079 #endif
6080     tb->size = dc->pc - pc_start;
6081     tb->icount = num_insns;
6082 }
6083 
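/* Convert a floatx80 (16-bit sign/exponent in HIGH, 64-bit mantissa in LOW)
   to a host double for the register dump below.  The union type-puns the
   float64 bit pattern into a double, which assumes the host uses IEEE
   binary64 doubles.  */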
6084 static double floatx80_to_double(CPUM68KState *env, uint16_t high, uint64_t low)
6085 {
6086     floatx80 a = { .high = high, .low = low };
6087     union {
6088         float64 f64;
6089         double d;
6090     } u;
6091 
6092     u.f64 = floatx80_to_float64(a, &env->fp_status);
6093     return u.d;
6094 }
6095 
6096 void m68k_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
6097                          int flags)
6098 {
6099     M68kCPU *cpu = M68K_CPU(cs);
6100     CPUM68KState *env = &cpu->env;
6101     int i;
6102     uint16_t sr;
6103     for (i = 0; i < 8; i++) {
6104         cpu_fprintf(f, "D%d = %08x   A%d = %08x   "
6105                     "F%d = %04x %016"PRIx64"  (%12g)\n",
6106                     i, env->dregs[i], i, env->aregs[i],
6107                     i, env->fregs[i].l.upper, env->fregs[i].l.lower,
6108                     floatx80_to_double(env, env->fregs[i].l.upper,
6109                                        env->fregs[i].l.lower));
6110     }
6111     cpu_fprintf(f, "PC = %08x   ", env->pc);
6112     sr = env->sr | cpu_m68k_get_ccr(env);
6113     cpu_fprintf(f, "SR = %04x T:%x I:%x %c%c %c%c%c%c%c\n",
6114                 sr, (sr & SR_T) >> SR_T_SHIFT, (sr & SR_I) >> SR_I_SHIFT,
6115                 (sr & SR_S) ? 'S' : 'U', (sr & SR_M) ? '%' : 'I',
6116                 (sr & CCF_X) ? 'X' : '-', (sr & CCF_N) ? 'N' : '-',
6117                 (sr & CCF_Z) ? 'Z' : '-', (sr & CCF_V) ? 'V' : '-',
6118                 (sr & CCF_C) ? 'C' : '-');
6119     cpu_fprintf(f, "FPSR = %08x %c%c%c%c ", env->fpsr,
6120                 (env->fpsr & FPSR_CC_A) ? 'A' : '-',
6121                 (env->fpsr & FPSR_CC_I) ? 'I' : '-',
6122                 (env->fpsr & FPSR_CC_Z) ? 'Z' : '-',
6123                 (env->fpsr & FPSR_CC_N) ? 'N' : '-');
6124     cpu_fprintf(f, "\n                                "
6125                    "FPCR =     %04x ", env->fpcr);
6126     switch (env->fpcr & FPCR_PREC_MASK) {
6127     case FPCR_PREC_X:
6128         cpu_fprintf(f, "X ");
6129         break;
6130     case FPCR_PREC_S:
6131         cpu_fprintf(f, "S ");
6132         break;
6133     case FPCR_PREC_D:
6134         cpu_fprintf(f, "D ");
6135         break;
6136     }
6137     switch (env->fpcr & FPCR_RND_MASK) {
6138     case FPCR_RND_N:
6139         cpu_fprintf(f, "RN ");
6140         break;
6141     case FPCR_RND_Z:
6142         cpu_fprintf(f, "RZ ");
6143         break;
6144     case FPCR_RND_M:
6145         cpu_fprintf(f, "RM ");
6146         break;
6147     case FPCR_RND_P:
6148         cpu_fprintf(f, "RP ");
6149         break;
6150     }
6151     cpu_fprintf(f, "\n");
6152 #ifdef CONFIG_SOFTMMU
6153     cpu_fprintf(f, "%sA7(MSP) = %08x %sA7(USP) = %08x %sA7(ISP) = %08x\n",
6154                env->current_sp == M68K_SSP ? "->" : "  ", env->sp[M68K_SSP],
6155                env->current_sp == M68K_USP ? "->" : "  ", env->sp[M68K_USP],
6156                env->current_sp == M68K_ISP ? "->" : "  ", env->sp[M68K_ISP]);
6157     cpu_fprintf(f, "VBR = 0x%08x\n", env->vbr);
6158     cpu_fprintf(f, "SFC = %x DFC = %x\n", env->sfc, env->dfc);
6159     cpu_fprintf(f, "SSW %08x TCR %08x URP %08x SRP %08x\n",
6160                 env->mmu.ssw, env->mmu.tcr, env->mmu.urp, env->mmu.srp);
6161     cpu_fprintf(f, "DTTR0/1: %08x/%08x ITTR0/1: %08x/%08x\n",
6162                 env->mmu.ttr[M68K_DTTR0], env->mmu.ttr[M68K_DTTR1],
6163                 env->mmu.ttr[M68K_ITTR0], env->mmu.ttr[M68K_ITTR1]);
6164     cpu_fprintf(f, "MMUSR %08x, fault at %08x\n",
6165                 env->mmu.mmusr, env->mmu.ar);
6166 #endif
6167 }
6168 
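/* Rebuild CPU state from the values recorded by tcg_gen_insn_start() in
   gen_intermediate_code(): data[0] is the instruction PC and data[1] the
   CC op, which is only written back when it was known statically.  */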
6169 void restore_state_to_opc(CPUM68KState *env, TranslationBlock *tb,
6170                           target_ulong *data)
6171 {
6172     int cc_op = data[1];
6173     env->pc = data[0];
6174     if (cc_op != CC_OP_DYNAMIC) {
6175         env->cc_op = cc_op;
6176     }
6177 }
6178