xref: /openbmc/qemu/target/m68k/translate.c (revision 806f71ee)
1 /*
2  *  m68k translation
3  *
4  *  Copyright (c) 2005-2007 CodeSourcery
5  *  Written by Paul Brook
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "disas/disas.h"
24 #include "exec/exec-all.h"
25 #include "tcg/tcg-op.h"
26 #include "qemu/log.h"
27 #include "qemu/qemu-print.h"
28 #include "exec/translator.h"
29 
30 #include "exec/helper-proto.h"
31 #include "exec/helper-gen.h"
32 
33 #include "exec/log.h"
34 #include "fpu/softfloat.h"
35 
36 #define HELPER_H "helper.h"
37 #include "exec/helper-info.c.inc"
38 #undef  HELPER_H
39 
40 //#define DEBUG_DISPATCH 1
41 
42 #define DEFO32(name, offset) static TCGv QREG_##name;
43 #define DEFO64(name, offset) static TCGv_i64 QREG_##name;
44 #include "qregs.h.inc"
45 #undef DEFO32
46 #undef DEFO64
47 
/* TCG views of CPUState fields (see m68k_tcg_init for the offset trick). */
static TCGv_i32 cpu_halted;
static TCGv_i32 cpu_exception_index;

/* Backing storage for register global names:
   16 "Dn"/"An" strings at 3 bytes each + 4 "ACCn" strings at 5 bytes each. */
static char cpu_reg_names[2 * 8 * 3 + 5 * 4];
static TCGv cpu_dregs[8];       /* D0..D7 */
static TCGv cpu_aregs[8];       /* A0..A7 */
static TCGv_i64 cpu_macc[4];    /* MAC accumulators ACC0..ACC3 */

/* Extract the common 3-bit register fields of an insn/extension word. */
#define REG(insn, pos)  (((insn) >> (pos)) & 7)
#define DREG(insn, pos) cpu_dregs[REG(insn, pos)]
/* Address registers go through get_areg() so delayed writebacks are seen. */
#define AREG(insn, pos) get_areg(s, REG(insn, pos))
#define MACREG(acc)     cpu_macc[acc]
#define QREG_SP         get_areg(s, 7)

/* Sentinel returned by EA generators when the mode yields no address;
   used only in comparisons in this file. */
static TCGv NULL_QREG;
#define IS_NULL_QREG(t) (t == NULL_QREG)
/* Used to distinguish stores from bad addressing modes.  */
static TCGv store_dummy;
66 
/* Create all TCG globals used by the m68k translator. */
void m68k_tcg_init(void)
{
    char *p;
    int i;

    /*
     * Instantiate the QREG_* globals (declared above via the same
     * qregs.h.inc trick) for every listed env field.
     */
#define DEFO32(name, offset) \
    QREG_##name = tcg_global_mem_new_i32(tcg_env, \
        offsetof(CPUM68KState, offset), #name);
#define DEFO64(name, offset) \
    QREG_##name = tcg_global_mem_new_i64(tcg_env, \
        offsetof(CPUM68KState, offset), #name);
#include "qregs.h.inc"
#undef DEFO32
#undef DEFO64

    /*
     * halted/exception_index live in the CPUState preceding env inside
     * M68kCPU, hence the negative offset from the env base pointer.
     */
    cpu_halted = tcg_global_mem_new_i32(tcg_env,
                                        -offsetof(M68kCPU, env) +
                                        offsetof(CPUState, halted), "HALTED");
    cpu_exception_index = tcg_global_mem_new_i32(tcg_env,
                                                 -offsetof(M68kCPU, env) +
                                                 offsetof(CPUState, exception_index),
                                                 "EXCEPTION");

    /* Carve register names out of cpu_reg_names; sizes must match its
       declared capacity (3 bytes per "Dn"/"An", 5 per "ACCn"). */
    p = cpu_reg_names;
    for (i = 0; i < 8; i++) {
        sprintf(p, "D%d", i);
        cpu_dregs[i] = tcg_global_mem_new(tcg_env,
                                          offsetof(CPUM68KState, dregs[i]), p);
        p += 3;
        sprintf(p, "A%d", i);
        cpu_aregs[i] = tcg_global_mem_new(tcg_env,
                                          offsetof(CPUM68KState, aregs[i]), p);
        p += 3;
    }
    for (i = 0; i < 4; i++) {
        sprintf(p, "ACC%d", i);
        cpu_macc[i] = tcg_global_mem_new_i64(tcg_env,
                                         offsetof(CPUM68KState, macc[i]), p);
        p += 5;
    }

    /*
     * Sentinels at impossible (negative) env offsets; only ever compared
     * against in this file.  NOTE(review): both are named "NULL" --
     * presumably store_dummy was meant to get a distinct name; this only
     * affects TCG debug dumps.
     */
    NULL_QREG = tcg_global_mem_new(tcg_env, -4, "NULL");
    store_dummy = tcg_global_mem_new(tcg_env, -8, "NULL");
}
111 
/* internal defines */

/* Per-translation decoder state. */
typedef struct DisasContext {
    DisasContextBase base;
    CPUM68KState *env;
    target_ulong pc;        /* address of the next insn word to fetch
                               (advanced by read_im16) */
    target_ulong pc_prev;   /* start of the previous insn -- maintained by
                               code outside this chunk */
    CCOp cc_op; /* Current CC operation */
    int cc_op_synced;       /* nonzero when env->cc_op matches cc_op */
    TCGv_i64 mactmp;        /* scratch for MAC insns; see done_mac --
                               both managed outside this chunk */
    int done_mac;
    int writeback_mask;     /* bit n set: An has a pending delayed write */
    TCGv writeback[8];      /* pending values for delayed An writebacks */
    bool ss_active;         /* single-step/trace active -- set by the
                               translate loop (not visible here) */
} DisasContext;
126 
127 static TCGv get_areg(DisasContext *s, unsigned regno)
128 {
129     if (s->writeback_mask & (1 << regno)) {
130         return s->writeback[regno];
131     } else {
132         return cpu_aregs[regno];
133     }
134 }
135 
136 static void delay_set_areg(DisasContext *s, unsigned regno,
137                            TCGv val, bool give_temp)
138 {
139     if (s->writeback_mask & (1 << regno)) {
140         if (give_temp) {
141             s->writeback[regno] = val;
142         } else {
143             tcg_gen_mov_i32(s->writeback[regno], val);
144         }
145     } else {
146         s->writeback_mask |= 1 << regno;
147         if (give_temp) {
148             s->writeback[regno] = val;
149         } else {
150             TCGv tmp = tcg_temp_new();
151             s->writeback[regno] = tmp;
152             tcg_gen_mov_i32(tmp, val);
153         }
154     }
155 }
156 
157 static void do_writebacks(DisasContext *s)
158 {
159     unsigned mask = s->writeback_mask;
160     if (mask) {
161         s->writeback_mask = 0;
162         do {
163             unsigned regno = ctz32(mask);
164             tcg_gen_mov_i32(cpu_aregs[regno], s->writeback[regno]);
165             mask &= mask - 1;
166         } while (mask);
167     }
168 }
169 
/* is_jmp field values */
#define DISAS_JUMP      DISAS_TARGET_0 /* only pc was modified dynamically */
#define DISAS_EXIT      DISAS_TARGET_1 /* cpu state was modified dynamically */

#if defined(CONFIG_USER_ONLY)
/* User-mode emulation always runs unprivileged. */
#define IS_USER(s) 1
#else
/* Supervisor state and the source/destination function codes are encoded
   in the TB flags, so these are fixed for the whole TB. */
#define IS_USER(s)   (!(s->base.tb->flags & TB_FLAGS_MSR_S))
#define SFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_SFC_S) ? \
                      MMU_KERNEL_IDX : MMU_USER_IDX)
#define DFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_DFC_S) ? \
                      MMU_KERNEL_IDX : MMU_USER_IDX)
#endif
183 
/* Signature shared by all per-insn disassembly handlers. */
typedef void (*disas_proc)(CPUM68KState *env, DisasContext *s, uint16_t insn);

#ifdef DEBUG_DISPATCH
/* Debug variant: wrap each handler so its name is logged on dispatch. */
#define DISAS_INSN(name)                                                \
    static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
                                  uint16_t insn);                       \
    static void disas_##name(CPUM68KState *env, DisasContext *s,        \
                             uint16_t insn)                             \
    {                                                                   \
        qemu_log("Dispatch " #name "\n");                               \
        real_disas_##name(env, s, insn);                                \
    }                                                                   \
    static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
                                  uint16_t insn)
#else
#define DISAS_INSN(name)                                                \
    static void disas_##name(CPUM68KState *env, DisasContext *s,        \
                             uint16_t insn)
#endif
203 
/*
 * For each CC_OP_*, the set of flags whose cached inputs in the
 * QREG_CC_* globals are still meaningful; set_cc_op() discards anything
 * not listed when switching modes.  X and N are always live.
 */
static const uint8_t cc_op_live[CC_OP_NB] = {
    [CC_OP_DYNAMIC] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
    [CC_OP_FLAGS] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
    [CC_OP_ADDB ... CC_OP_ADDL] = CCF_X | CCF_N | CCF_V,
    [CC_OP_SUBB ... CC_OP_SUBL] = CCF_X | CCF_N | CCF_V,
    [CC_OP_CMPB ... CC_OP_CMPL] = CCF_X | CCF_N | CCF_V,
    [CC_OP_LOGIC] = CCF_X | CCF_N
};
212 
213 static void set_cc_op(DisasContext *s, CCOp op)
214 {
215     CCOp old_op = s->cc_op;
216     int dead;
217 
218     if (old_op == op) {
219         return;
220     }
221     s->cc_op = op;
222     s->cc_op_synced = 0;
223 
224     /*
225      * Discard CC computation that will no longer be used.
226      * Note that X and N are never dead.
227      */
228     dead = cc_op_live[old_op] & ~cc_op_live[op];
229     if (dead & CCF_C) {
230         tcg_gen_discard_i32(QREG_CC_C);
231     }
232     if (dead & CCF_Z) {
233         tcg_gen_discard_i32(QREG_CC_Z);
234     }
235     if (dead & CCF_V) {
236         tcg_gen_discard_i32(QREG_CC_V);
237     }
238 }
239 
240 /* Update the CPU env CC_OP state.  */
241 static void update_cc_op(DisasContext *s)
242 {
243     if (!s->cc_op_synced) {
244         s->cc_op_synced = 1;
245         tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
246     }
247 }
248 
/*
 * Generate a jump to an immediate address.
 * Syncs cc_op to env first; marks the TB as ending with DISAS_JUMP.
 */
static void gen_jmp_im(DisasContext *s, uint32_t dest)
{
    update_cc_op(s);
    tcg_gen_movi_i32(QREG_PC, dest);
    s->base.is_jmp = DISAS_JUMP;
}
256 
/*
 * Generate a jump to the address in qreg DEST.
 * Syncs cc_op to env first; marks the TB as ending with DISAS_JUMP.
 */
static void gen_jmp(DisasContext *s, TCGv dest)
{
    update_cc_op(s);
    tcg_gen_mov_i32(QREG_PC, dest);
    s->base.is_jmp = DISAS_JUMP;
}
264 
/* Emit a call to the raise_exception helper with exception number NR. */
static void gen_raise_exception(int nr)
{
    gen_helper_raise_exception(tcg_env, tcg_constant_i32(nr));
}
269 
/* Raise exception NR, recording THIS_PC for the format $2 stack frame. */
static void gen_raise_exception_format2(DisasContext *s, int nr,
                                        target_ulong this_pc)
{
    /*
     * Pass the address of the insn to the exception handler,
     * for recording in the Format $2 (6-word) stack frame.
     * Re-use mmu.ar for the purpose, since that's only valid
     * after tlb_fill.
     */
    tcg_gen_st_i32(tcg_constant_i32(this_pc), tcg_env,
                   offsetof(CPUM68KState, mmu.ar));
    gen_raise_exception(nr);
    s->base.is_jmp = DISAS_NORETURN;
}
284 
/* Set PC to DEST and raise exception NR; the TB ends (DISAS_NORETURN). */
static void gen_exception(DisasContext *s, uint32_t dest, int nr)
{
    update_cc_op(s);
    tcg_gen_movi_i32(QREG_PC, dest);

    gen_raise_exception(nr);

    s->base.is_jmp = DISAS_NORETURN;
}
294 
/* Raise an address error (EXCP_ADDRESS) at the current insn. */
static inline void gen_addr_fault(DisasContext *s)
{
    gen_exception(s, s->base.pc_next, EXCP_ADDRESS);
}
299 
300 /*
301  * Generate a load from the specified address.  Narrow values are
302  *  sign extended to full register width.
303  */
304 static inline TCGv gen_load(DisasContext *s, int opsize, TCGv addr,
305                             int sign, int index)
306 {
307     TCGv tmp = tcg_temp_new_i32();
308 
309     switch (opsize) {
310     case OS_BYTE:
311     case OS_WORD:
312     case OS_LONG:
313         tcg_gen_qemu_ld_tl(tmp, addr, index,
314                            opsize | (sign ? MO_SIGN : 0) | MO_TE);
315         break;
316     default:
317         g_assert_not_reached();
318     }
319     return tmp;
320 }
321 
322 /* Generate a store.  */
323 static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val,
324                              int index)
325 {
326     switch (opsize) {
327     case OS_BYTE:
328     case OS_WORD:
329     case OS_LONG:
330         tcg_gen_qemu_st_tl(val, addr, index, opsize | MO_TE);
331         break;
332     default:
333         g_assert_not_reached();
334     }
335 }
336 
/* Operation selector for gen_ldst()/gen_ea_mode(). */
typedef enum {
    EA_STORE,   /* write the value to the EA */
    EA_LOADU,   /* read, zero-extending narrow values */
    EA_LOADS    /* read, sign-extending narrow values */
} ea_what;
342 
343 /*
344  * Generate an unsigned load if VAL is 0 a signed load if val is -1,
345  * otherwise generate a store.
346  */
347 static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
348                      ea_what what, int index)
349 {
350     if (what == EA_STORE) {
351         gen_store(s, opsize, addr, val, index);
352         return store_dummy;
353     } else {
354         return gen_load(s, opsize, addr, what == EA_LOADS, index);
355     }
356 }
357 
/* Read a 16-bit immediate constant from the insn stream; advances s->pc. */
static inline uint16_t read_im16(CPUM68KState *env, DisasContext *s)
{
    uint16_t im;
    im = translator_lduw(env, &s->base, s->pc);
    s->pc += 2;
    return im;
}
366 
/*
 * Read an 8-bit immediate constant: it occupies a full 16-bit extension
 * word; only the low byte is returned (via the uint8_t conversion).
 */
static inline uint8_t read_im8(CPUM68KState *env, DisasContext *s)
{
    return read_im16(env, s);
}
372 
373 /* Read a 32-bit immediate constant.  */
374 static inline uint32_t read_im32(CPUM68KState *env, DisasContext *s)
375 {
376     uint32_t im;
377     im = read_im16(env, s) << 16;
378     im |= 0xffff & read_im16(env, s);
379     return im;
380 }
381 
382 /* Read a 64-bit immediate constant.  */
383 static inline uint64_t read_im64(CPUM68KState *env, DisasContext *s)
384 {
385     uint64_t im;
386     im = (uint64_t)read_im32(env, s) << 32;
387     im |= (uint64_t)read_im32(env, s);
388     return im;
389 }
390 
391 /* Calculate and address index.  */
392 static TCGv gen_addr_index(DisasContext *s, uint16_t ext, TCGv tmp)
393 {
394     TCGv add;
395     int scale;
396 
397     add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
398     if ((ext & 0x800) == 0) {
399         tcg_gen_ext16s_i32(tmp, add);
400         add = tmp;
401     }
402     scale = (ext >> 9) & 3;
403     if (scale != 0) {
404         tcg_gen_shli_i32(tmp, add, scale);
405         add = tmp;
406     }
407     return add;
408 }
409 
/*
 * Handle a base + index + displacement effective address.
 * A NULL_QREG base means pc-relative.
 * Returns NULL_QREG when the extension word requires a feature the
 * configured CPU lacks.
 */
static TCGv gen_lea_indexed(CPUM68KState *env, DisasContext *s, TCGv base)
{
    uint32_t offset;
    uint16_t ext;
    TCGv add;
    TCGv tmp;
    uint32_t bd, od;

    /* pc-relative addressing is relative to the extension word. */
    offset = s->pc;
    ext = read_im16(env, s);

    /* A word-sized index (bit 11 clear) needs the WORD_INDEX feature. */
    if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
        return NULL_QREG;

    /* Ignore the scale field on CPUs without scaled indexing. */
    if (m68k_feature(s->env, M68K_FEATURE_M68K) &&
        !m68k_feature(s->env, M68K_FEATURE_SCALED_INDEX)) {
        ext &= ~(3 << 9);
    }

    if (ext & 0x100) {
        /* full extension word format */
        if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
            return NULL_QREG;

        if ((ext & 0x30) > 0x10) {
            /* base displacement: 0x20 = word, 0x30 = long */
            if ((ext & 0x30) == 0x20) {
                bd = (int16_t)read_im16(env, s);
            } else {
                bd = read_im32(env, s);
            }
        } else {
            bd = 0;
        }
        tmp = tcg_temp_new();
        if ((ext & 0x44) == 0) {
            /* pre-index (index register not suppressed) */
            add = gen_addr_index(s, ext, tmp);
        } else {
            add = NULL_QREG;
        }
        if ((ext & 0x80) == 0) {
            /* base not suppressed */
            if (IS_NULL_QREG(base)) {
                /* pc-relative: fold pc + bd into one constant */
                base = tcg_constant_i32(offset + bd);
                bd = 0;
            }
            if (!IS_NULL_QREG(add)) {
                tcg_gen_add_i32(tmp, add, base);
                add = tmp;
            } else {
                add = base;
            }
        }
        if (!IS_NULL_QREG(add)) {
            if (bd != 0) {
                tcg_gen_addi_i32(tmp, add, bd);
                add = tmp;
            }
        } else {
            add = tcg_constant_i32(bd);
        }
        if ((ext & 3) != 0) {
            /* memory indirect: fetch the intermediate address */
            base = gen_load(s, OS_LONG, add, 0, IS_USER(s));
            if ((ext & 0x44) == 4) {
                /* post-index: add the index register after the fetch */
                add = gen_addr_index(s, ext, tmp);
                tcg_gen_add_i32(tmp, add, base);
                add = tmp;
            } else {
                add = base;
            }
            if ((ext & 3) > 1) {
                /* outer displacement: 2 = word, 3 = long */
                if ((ext & 3) == 2) {
                    od = (int16_t)read_im16(env, s);
                } else {
                    od = read_im32(env, s);
                }
            } else {
                od = 0;
            }
            if (od != 0) {
                tcg_gen_addi_i32(tmp, add, od);
                add = tmp;
            }
        }
    } else {
        /* brief extension word format: index + 8-bit displacement */
        tmp = tcg_temp_new();
        add = gen_addr_index(s, ext, tmp);
        if (!IS_NULL_QREG(base)) {
            tcg_gen_add_i32(tmp, add, base);
            if ((int8_t)ext)
                tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
        } else {
            /* pc-relative */
            tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
        }
        add = tmp;
    }
    return add;
}
516 
517 /* Sign or zero extend a value.  */
518 
519 static inline void gen_ext(TCGv res, TCGv val, int opsize, int sign)
520 {
521     switch (opsize) {
522     case OS_BYTE:
523     case OS_WORD:
524     case OS_LONG:
525         tcg_gen_ext_i32(res, val, opsize | (sign ? MO_SIGN : 0));
526         break;
527     default:
528         g_assert_not_reached();
529     }
530 }
531 
/*
 * Evaluate all the CC flags: turn the deferred computation selected by
 * s->cc_op into concrete values in the QREG_CC_* globals, leaving the
 * state as CC_OP_FLAGS.
 */

static void gen_flush_flags(DisasContext *s)
{
    TCGv t0, t1;

    switch (s->cc_op) {
    case CC_OP_FLAGS:
        /* Already concrete. */
        return;

    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
        /*
         * Per gen_update_cc_add: N holds the result, V the source
         * operand; X presumably holds the carry (set by the add
         * emitters elsewhere), since C is copied from it here.
         */
        tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
        /* Compute signed overflow for addition.  */
        t0 = tcg_temp_new();
        t1 = tcg_temp_new();
        /* Recover the original destination operand as result - source. */
        tcg_gen_sub_i32(t0, QREG_CC_N, QREG_CC_V);
        gen_ext(t0, t0, s->cc_op - CC_OP_ADDB, 1);
        tcg_gen_xor_i32(t1, QREG_CC_N, QREG_CC_V);
        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
        /* V iff the operands share a sign and the result differs. */
        tcg_gen_andc_i32(QREG_CC_V, t1, QREG_CC_V);
        break;

    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
        /* As for ADD: N = result, V = source operand, X = borrow. */
        tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
        /* Compute signed overflow for subtraction.  */
        t0 = tcg_temp_new();
        t1 = tcg_temp_new();
        /* Recover the original destination operand as result + source. */
        tcg_gen_add_i32(t0, QREG_CC_N, QREG_CC_V);
        gen_ext(t0, t0, s->cc_op - CC_OP_SUBB, 1);
        tcg_gen_xor_i32(t1, QREG_CC_N, t0);
        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
        /* V iff the operands differ in sign and the result's sign
           differs from the destination's. */
        tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t1);
        break;

    case CC_OP_CMPB:
    case CC_OP_CMPW:
    case CC_OP_CMPL:
        /* Per gen_update_cc_cmp: N holds the destination, V the source;
           compute dest - src here. */
        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_C, QREG_CC_N, QREG_CC_V);
        tcg_gen_sub_i32(QREG_CC_Z, QREG_CC_N, QREG_CC_V);
        gen_ext(QREG_CC_Z, QREG_CC_Z, s->cc_op - CC_OP_CMPB, 1);
        /* Compute signed overflow for subtraction.  */
        t0 = tcg_temp_new();
        tcg_gen_xor_i32(t0, QREG_CC_Z, QREG_CC_N);
        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, QREG_CC_N);
        tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t0);
        /* N takes the sign of the comparison result. */
        tcg_gen_mov_i32(QREG_CC_N, QREG_CC_Z);
        break;

    case CC_OP_LOGIC:
        /* N already holds the sign-extended result; C and V clear. */
        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
        tcg_gen_movi_i32(QREG_CC_C, 0);
        tcg_gen_movi_i32(QREG_CC_V, 0);
        break;

    case CC_OP_DYNAMIC:
        gen_helper_flush_flags(tcg_env, QREG_CC_OP);
        s->cc_op_synced = 1;
        break;

    default:
        gen_helper_flush_flags(tcg_env, tcg_constant_i32(s->cc_op));
        s->cc_op_synced = 1;
        break;
    }

    /* Note that flush_flags also assigned to env->cc_op.  */
    s->cc_op = CC_OP_FLAGS;
}
606 
607 static inline TCGv gen_extend(DisasContext *s, TCGv val, int opsize, int sign)
608 {
609     TCGv tmp;
610 
611     if (opsize == OS_LONG) {
612         tmp = val;
613     } else {
614         tmp = tcg_temp_new();
615         gen_ext(tmp, val, opsize, sign);
616     }
617 
618     return tmp;
619 }
620 
/* Set flags for a logical op: cache the sign-extended result in N and
   defer the rest via CC_OP_LOGIC. */
static void gen_logic_cc(DisasContext *s, TCGv val, int opsize)
{
    gen_ext(QREG_CC_N, val, opsize, 1);
    set_cc_op(s, CC_OP_LOGIC);
}
626 
/* Cache DEST (in N) and SRC (in V) for a deferred compare; the flags are
   computed later by gen_flush_flags. */
static void gen_update_cc_cmp(DisasContext *s, TCGv dest, TCGv src, int opsize)
{
    tcg_gen_mov_i32(QREG_CC_N, dest);
    tcg_gen_mov_i32(QREG_CC_V, src);
    set_cc_op(s, CC_OP_CMPB + opsize);
}
633 
/* Cache the sign-extended result (in N) and the source operand (in V) of
   an add/sub for deferred flag computation (see gen_flush_flags). */
static void gen_update_cc_add(TCGv dest, TCGv src, int opsize)
{
    gen_ext(QREG_CC_N, dest, opsize, 1);
    tcg_gen_mov_i32(QREG_CC_V, src);
}
639 
640 static inline int opsize_bytes(int opsize)
641 {
642     switch (opsize) {
643     case OS_BYTE: return 1;
644     case OS_WORD: return 2;
645     case OS_LONG: return 4;
646     case OS_SINGLE: return 4;
647     case OS_DOUBLE: return 8;
648     case OS_EXTENDED: return 12;
649     case OS_PACKED: return 12;
650     default:
651         g_assert_not_reached();
652     }
653 }
654 
655 static inline int insn_opsize(int insn)
656 {
657     switch ((insn >> 6) & 3) {
658     case 0: return OS_BYTE;
659     case 1: return OS_WORD;
660     case 2: return OS_LONG;
661     default:
662         g_assert_not_reached();
663     }
664 }
665 
666 static inline int ext_opsize(int ext, int pos)
667 {
668     switch ((ext >> pos) & 7) {
669     case 0: return OS_LONG;
670     case 1: return OS_SINGLE;
671     case 2: return OS_EXTENDED;
672     case 3: return OS_PACKED;
673     case 4: return OS_WORD;
674     case 5: return OS_DOUBLE;
675     case 6: return OS_BYTE;
676     default:
677         g_assert_not_reached();
678     }
679 }
680 
681 /*
682  * Assign value to a register.  If the width is less than the register width
683  * only the low part of the register is set.
684  */
685 static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
686 {
687     switch (opsize) {
688     case OS_BYTE:
689         tcg_gen_deposit_i32(reg, reg, val, 0, 8);
690         break;
691     case OS_WORD:
692         tcg_gen_deposit_i32(reg, reg, val, 0, 16);
693         break;
694     case OS_LONG:
695     case OS_SINGLE:
696         tcg_gen_mov_i32(reg, val);
697         break;
698     default:
699         g_assert_not_reached();
700     }
701 }
702 
/*
 * Generate code for an "effective address".  Does not adjust the base
 * register for autoincrement addressing modes.
 * Returns NULL_QREG for modes that yield no address (register direct,
 * immediate) or invalid mode/size combinations.
 */
static TCGv gen_lea_mode(CPUM68KState *env, DisasContext *s,
                         int mode, int reg0, int opsize)
{
    TCGv reg;
    TCGv tmp;
    uint16_t ext;
    uint32_t offset;

    switch (mode) {
    case 0: /* Data register direct.  */
    case 1: /* Address register direct.  */
        return NULL_QREG;
    case 3: /* Indirect postincrement.  */
        if (opsize == OS_UNSIZED) {
            return NULL_QREG;
        }
        /* fallthru */
    case 2: /* Indirect register */
        return get_areg(s, reg0);
    case 4: /* Indirect predecrement.  */
        if (opsize == OS_UNSIZED) {
            return NULL_QREG;
        }
        reg = get_areg(s, reg0);
        tmp = tcg_temp_new();
        if (reg0 == 7 && opsize == OS_BYTE &&
            m68k_feature(s->env, M68K_FEATURE_M68K)) {
            /* Byte accesses through A7 (SP) move it by 2, keeping it even. */
            tcg_gen_subi_i32(tmp, reg, 2);
        } else {
            tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
        }
        return tmp;
    case 5: /* Indirect displacement.  */
        reg = get_areg(s, reg0);
        tmp = tcg_temp_new();
        ext = read_im16(env, s);
        tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
        return tmp;
    case 6: /* Indirect index + displacement.  */
        reg = get_areg(s, reg0);
        return gen_lea_indexed(env, s, reg);
    case 7: /* Other */
        switch (reg0) {
        case 0: /* Absolute short.  */
            offset = (int16_t)read_im16(env, s);
            return tcg_constant_i32(offset);
        case 1: /* Absolute long.  */
            offset = read_im32(env, s);
            return tcg_constant_i32(offset);
        case 2: /* pc displacement  */
            offset = s->pc;
            offset += (int16_t)read_im16(env, s);
            return tcg_constant_i32(offset);
        case 3: /* pc index+displacement.  */
            return gen_lea_indexed(env, s, NULL_QREG);
        case 4: /* Immediate.  */
        default:
            return NULL_QREG;
        }
    }
    /* Should never happen.  */
    return NULL_QREG;
}
770 
/* Decode the mode/reg fields of INSN and compute its effective address. */
static TCGv gen_lea(CPUM68KState *env, DisasContext *s, uint16_t insn,
                    int opsize)
{
    int mode = extract32(insn, 3, 3);
    int reg0 = REG(insn, 0);
    return gen_lea_mode(env, s, mode, reg0, opsize);
}
778 
/*
 * Generate code to load/store a value from/into an EA.  WHAT selects the
 * operation: EA_STORE writes VAL, EA_LOADU/EA_LOADS read with zero/sign
 * extension.  Returns the loaded value (store_dummy for stores), or
 * NULL_QREG for an invalid addressing mode.  ADDRP is non-null for
 * readwrite operands: the load saves the computed address there and the
 * subsequent store reuses it.
 */
static TCGv gen_ea_mode(CPUM68KState *env, DisasContext *s, int mode, int reg0,
                        int opsize, TCGv val, TCGv *addrp, ea_what what,
                        int index)
{
    TCGv reg, tmp, result;
    int32_t offset;

    switch (mode) {
    case 0: /* Data register direct.  */
        reg = cpu_dregs[reg0];
        if (what == EA_STORE) {
            gen_partset_reg(opsize, reg, val);
            return store_dummy;
        } else {
            return gen_extend(s, reg, opsize, what == EA_LOADS);
        }
    case 1: /* Address register direct.  */
        reg = get_areg(s, reg0);
        if (what == EA_STORE) {
            tcg_gen_mov_i32(reg, val);
            return store_dummy;
        } else {
            return gen_extend(s, reg, opsize, what == EA_LOADS);
        }
    case 2: /* Indirect register */
        reg = get_areg(s, reg0);
        return gen_ldst(s, opsize, reg, val, what, index);
    case 3: /* Indirect postincrement.  */
        reg = get_areg(s, reg0);
        result = gen_ldst(s, opsize, reg, val, what, index);
        /* Only increment for the store leg (or a plain read) of a
           readwrite operand, so it happens once per insn. */
        if (what == EA_STORE || !addrp) {
            tmp = tcg_temp_new();
            if (reg0 == 7 && opsize == OS_BYTE &&
                m68k_feature(s->env, M68K_FEATURE_M68K)) {
                /* Byte accesses through A7 (SP) move it by 2. */
                tcg_gen_addi_i32(tmp, reg, 2);
            } else {
                tcg_gen_addi_i32(tmp, reg, opsize_bytes(opsize));
            }
            /* Committed by do_writebacks() when the insn completes. */
            delay_set_areg(s, reg0, tmp, true);
        }
        return result;
    case 4: /* Indirect predecrement.  */
        if (addrp && what == EA_STORE) {
            /* Readwrite store leg: reuse the address from the load. */
            tmp = *addrp;
        } else {
            tmp = gen_lea_mode(env, s, mode, reg0, opsize);
            if (IS_NULL_QREG(tmp)) {
                return tmp;
            }
            if (addrp) {
                *addrp = tmp;
            }
        }
        result = gen_ldst(s, opsize, tmp, val, what, index);
        if (what == EA_STORE || !addrp) {
            delay_set_areg(s, reg0, tmp, false);
        }
        return result;
    case 5: /* Indirect displacement.  */
    case 6: /* Indirect index + displacement.  */
    do_indirect:
        if (addrp && what == EA_STORE) {
            tmp = *addrp;
        } else {
            tmp = gen_lea_mode(env, s, mode, reg0, opsize);
            if (IS_NULL_QREG(tmp)) {
                return tmp;
            }
            if (addrp) {
                *addrp = tmp;
            }
        }
        return gen_ldst(s, opsize, tmp, val, what, index);
    case 7: /* Other */
        switch (reg0) {
        case 0: /* Absolute short.  */
        case 1: /* Absolute long.  */
        case 2: /* pc displacement  */
        case 3: /* pc index+displacement.  */
            goto do_indirect;
        case 4: /* Immediate.  */
            /* Sign extend values for consistency.  */
            switch (opsize) {
            case OS_BYTE:
                if (what == EA_LOADS) {
                    offset = (int8_t)read_im8(env, s);
                } else {
                    offset = read_im8(env, s);
                }
                break;
            case OS_WORD:
                if (what == EA_LOADS) {
                    offset = (int16_t)read_im16(env, s);
                } else {
                    offset = read_im16(env, s);
                }
                break;
            case OS_LONG:
                offset = read_im32(env, s);
                break;
            default:
                g_assert_not_reached();
            }
            return tcg_constant_i32(offset);
        default:
            return NULL_QREG;
        }
    }
    /* Should never happen.  */
    return NULL_QREG;
}
895 
/* Decode the mode/reg fields of INSN and load/store through its EA
   (see gen_ea_mode for the WHAT/ADDRP contract). */
static TCGv gen_ea(CPUM68KState *env, DisasContext *s, uint16_t insn,
                   int opsize, TCGv val, TCGv *addrp, ea_what what, int index)
{
    int mode = extract32(insn, 3, 3);
    int reg0 = REG(insn, 0);
    return gen_ea_mode(env, s, mode, reg0, opsize, val, addrp, what, index);
}
903 
/* Return a fresh pointer temp addressing env->fregs[freg]. */
static TCGv_ptr gen_fp_ptr(int freg)
{
    TCGv_ptr fp = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(fp, tcg_env, offsetof(CPUM68KState, fregs[freg]));
    return fp;
}
910 
/* Return a fresh pointer temp addressing env->fp_result. */
static TCGv_ptr gen_fp_result_ptr(void)
{
    TCGv_ptr fp = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(fp, tcg_env, offsetof(CPUM68KState, fp_result));
    return fp;
}
917 
918 static void gen_fp_move(TCGv_ptr dest, TCGv_ptr src)
919 {
920     TCGv t32;
921     TCGv_i64 t64;
922 
923     t32 = tcg_temp_new();
924     tcg_gen_ld16u_i32(t32, src, offsetof(FPReg, l.upper));
925     tcg_gen_st16_i32(t32, dest, offsetof(FPReg, l.upper));
926 
927     t64 = tcg_temp_new_i64();
928     tcg_gen_ld_i64(t64, src, offsetof(FPReg, l.lower));
929     tcg_gen_st_i64(t64, dest, offsetof(FPReg, l.lower));
930 }
931 
/*
 * Load a value of memory format OPSIZE from ADDR into the FP register
 * addressed by FP, converting to the internal representation via the
 * ext* helpers.
 */
static void gen_load_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
                        int index)
{
    TCGv tmp;
    TCGv_i64 t64;

    t64 = tcg_temp_new_i64();
    tmp = tcg_temp_new();
    switch (opsize) {
    case OS_BYTE:
    case OS_WORD:
    case OS_LONG:
        tcg_gen_qemu_ld_tl(tmp, addr, index, opsize | MO_SIGN | MO_TE);
        gen_helper_exts32(tcg_env, fp, tmp);
        break;
    case OS_SINGLE:
        tcg_gen_qemu_ld_tl(tmp, addr, index, MO_TEUL);
        gen_helper_extf32(tcg_env, fp, tmp);
        break;
    case OS_DOUBLE:
        tcg_gen_qemu_ld_i64(t64, addr, index, MO_TEUQ);
        gen_helper_extf64(tcg_env, fp, t64);
        break;
    case OS_EXTENDED:
        /* Not available on ColdFire FPUs. */
        if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
            gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
            break;
        }
        /* High 16 bits of the first longword hold l.upper; the 64-bit
           l.lower follows at addr + 4. */
        tcg_gen_qemu_ld_i32(tmp, addr, index, MO_TEUL);
        tcg_gen_shri_i32(tmp, tmp, 16);
        tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
        tcg_gen_addi_i32(tmp, addr, 4);
        tcg_gen_qemu_ld_i64(t64, tmp, index, MO_TEUQ);
        tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
        break;
    case OS_PACKED:
        /*
         * unimplemented data type on 68040/ColdFire
         * FIXME if needed for another FPU
         */
        gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
        break;
    default:
        g_assert_not_reached();
    }
}
978 
/*
 * Store the FP register pointed to by FP to memory at ADDR, converting
 * from the internal representation to the OPSIZE memory format via the
 * red* helpers.  INDEX is the MMU index used for the access.
 */
static void gen_store_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
                         int index)
{
    TCGv tmp;
    TCGv_i64 t64;

    t64 = tcg_temp_new_i64();
    tmp = tcg_temp_new();
    switch (opsize) {
    case OS_BYTE:
    case OS_WORD:
    case OS_LONG:
        /* Convert to a 32-bit integer, then store the low OPSIZE part.  */
        gen_helper_reds32(tmp, tcg_env, fp);
        tcg_gen_qemu_st_tl(tmp, addr, index, opsize | MO_TE);
        break;
    case OS_SINGLE:
        gen_helper_redf32(tmp, tcg_env, fp);
        tcg_gen_qemu_st_tl(tmp, addr, index, MO_TEUL);
        break;
    case OS_DOUBLE:
        gen_helper_redf64(t64, tcg_env, fp);
        tcg_gen_qemu_st_i64(t64, addr, index, MO_TEUQ);
        break;
    case OS_EXTENDED:
        /* ColdFire FPUs do not implement the 96-bit extended format.  */
        if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
            gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
            break;
        }
        /* First longword: l.upper goes into the high 16 bits.  */
        tcg_gen_ld16u_i32(tmp, fp, offsetof(FPReg, l.upper));
        tcg_gen_shli_i32(tmp, tmp, 16);
        tcg_gen_qemu_st_i32(tmp, addr, index, MO_TEUL);
        /* Remaining 64 bits (l.lower) follow at offset 4.  */
        tcg_gen_addi_i32(tmp, addr, 4);
        tcg_gen_ld_i64(t64, fp, offsetof(FPReg, l.lower));
        tcg_gen_qemu_st_i64(t64, tmp, index, MO_TEUQ);
        break;
    case OS_PACKED:
        /*
         * unimplemented data type on 68040/ColdFire
         * FIXME if needed for another FPU
         */
        gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
        break;
    default:
        g_assert_not_reached();
    }
}
1025 
1026 static void gen_ldst_fp(DisasContext *s, int opsize, TCGv addr,
1027                         TCGv_ptr fp, ea_what what, int index)
1028 {
1029     if (what == EA_STORE) {
1030         gen_store_fp(s, opsize, addr, fp, index);
1031     } else {
1032         gen_load_fp(s, opsize, addr, fp, index);
1033     }
1034 }
1035 
/*
 * Generate code to load or store (per WHAT) an FP value for the
 * effective address given by MODE/REG0.  Returns 0 on success, or -1
 * if the addressing mode is not valid for this kind of access.
 */
static int gen_ea_mode_fp(CPUM68KState *env, DisasContext *s, int mode,
                          int reg0, int opsize, TCGv_ptr fp, ea_what what,
                          int index)
{
    TCGv reg, addr, tmp;
    TCGv_i64 t64;

    switch (mode) {
    case 0: /* Data register direct.  */
        reg = cpu_dregs[reg0];
        if (what == EA_STORE) {
            switch (opsize) {
            case OS_BYTE:
            case OS_WORD:
            case OS_LONG:
                gen_helper_reds32(reg, tcg_env, fp);
                break;
            case OS_SINGLE:
                gen_helper_redf32(reg, tcg_env, fp);
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            tmp = tcg_temp_new();
            switch (opsize) {
            case OS_BYTE:
            case OS_WORD:
            case OS_LONG:
                tcg_gen_ext_i32(tmp, reg, opsize | MO_SIGN);
                gen_helper_exts32(tcg_env, fp, tmp);
                break;
            case OS_SINGLE:
                gen_helper_extf32(tcg_env, fp, reg);
                break;
            default:
                g_assert_not_reached();
            }
        }
        return 0;
    case 1: /* Address register direct.  */
        return -1;
    case 2: /* Indirect register */
        addr = get_areg(s, reg0);
        gen_ldst_fp(s, opsize, addr, fp, what, index);
        return 0;
    case 3: /* Indirect postincrement.  */
        addr = cpu_aregs[reg0];
        gen_ldst_fp(s, opsize, addr, fp, what, index);
        /* Bump the address register after the access.  */
        tcg_gen_addi_i32(addr, addr, opsize_bytes(opsize));
        return 0;
    case 4: /* Indirect predecrement.  */
        addr = gen_lea_mode(env, s, mode, reg0, opsize);
        if (IS_NULL_QREG(addr)) {
            return -1;
        }
        gen_ldst_fp(s, opsize, addr, fp, what, index);
        /* Commit the decremented address only after the access.  */
        tcg_gen_mov_i32(cpu_aregs[reg0], addr);
        return 0;
    case 5: /* Indirect displacement.  */
    case 6: /* Indirect index + displacement.  */
    do_indirect:
        addr = gen_lea_mode(env, s, mode, reg0, opsize);
        if (IS_NULL_QREG(addr)) {
            return -1;
        }
        gen_ldst_fp(s, opsize, addr, fp, what, index);
        return 0;
    case 7: /* Other */
        switch (reg0) {
        case 0: /* Absolute short.  */
        case 1: /* Absolute long.  */
        case 2: /* pc displacement  */
        case 3: /* pc index+displacement.  */
            goto do_indirect;
        case 4: /* Immediate.  */
            /* Immediate operands can only be loaded, never stored.  */
            if (what == EA_STORE) {
                return -1;
            }
            switch (opsize) {
            case OS_BYTE:
                tmp = tcg_constant_i32((int8_t)read_im8(env, s));
                gen_helper_exts32(tcg_env, fp, tmp);
                break;
            case OS_WORD:
                tmp = tcg_constant_i32((int16_t)read_im16(env, s));
                gen_helper_exts32(tcg_env, fp, tmp);
                break;
            case OS_LONG:
                tmp = tcg_constant_i32(read_im32(env, s));
                gen_helper_exts32(tcg_env, fp, tmp);
                break;
            case OS_SINGLE:
                tmp = tcg_constant_i32(read_im32(env, s));
                gen_helper_extf32(tcg_env, fp, tmp);
                break;
            case OS_DOUBLE:
                t64 = tcg_constant_i64(read_im64(env, s));
                gen_helper_extf64(tcg_env, fp, t64);
                break;
            case OS_EXTENDED:
                /* No 96-bit extended format on ColdFire FPUs.  */
                if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
                    gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
                    break;
                }
                /* l.upper is in the high half of the first longword.  */
                tmp = tcg_constant_i32(read_im32(env, s) >> 16);
                tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
                t64 = tcg_constant_i64(read_im64(env, s));
                tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
                break;
            case OS_PACKED:
                /*
                 * unimplemented data type on 68040/ColdFire
                 * FIXME if needed for another FPU
                 */
                gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
                break;
            default:
                g_assert_not_reached();
            }
            return 0;
        default:
            return -1;
        }
    }
    return -1;
}
1163 
1164 static int gen_ea_fp(CPUM68KState *env, DisasContext *s, uint16_t insn,
1165                        int opsize, TCGv_ptr fp, ea_what what, int index)
1166 {
1167     int mode = extract32(insn, 3, 3);
1168     int reg0 = REG(insn, 0);
1169     return gen_ea_mode_fp(env, s, mode, reg0, opsize, fp, what, index);
1170 }
1171 
/* A condition expressed as a TCG comparison, usable with brcond/setcond.  */
typedef struct {
    TCGCond tcond;  /* comparison to apply */
    TCGv v1;        /* left-hand operand */
    TCGv v2;        /* right-hand operand */
} DisasCompare;
1177 
/*
 * Fill *C with a TCG comparison equivalent to m68k condition code COND.
 * Exploits the current cc_op to avoid materializing all flags where
 * possible; otherwise flushes to CC_OP_FLAGS and tests them directly.
 * Each even condition is the negation of the following odd one, so the
 * cases below compute the odd form and invert at "done" when needed.
 */
static void gen_cc_cond(DisasCompare *c, DisasContext *s, int cond)
{
    TCGv tmp, tmp2;
    TCGCond tcond;
    CCOp op = s->cc_op;

    /* The CC_OP_CMP form can handle most normal comparisons directly.  */
    if (op == CC_OP_CMPB || op == CC_OP_CMPW || op == CC_OP_CMPL) {
        c->v1 = QREG_CC_N;
        c->v2 = QREG_CC_V;
        switch (cond) {
        case 2: /* HI */
        case 3: /* LS */
            tcond = TCG_COND_LEU;
            goto done;
        case 4: /* CC */
        case 5: /* CS */
            tcond = TCG_COND_LTU;
            goto done;
        case 6: /* NE */
        case 7: /* EQ */
            tcond = TCG_COND_EQ;
            goto done;
        case 10: /* PL */
        case 11: /* MI */
            /* Recompute the sign of the difference at operand width.  */
            c->v2 = tcg_constant_i32(0);
            c->v1 = tmp = tcg_temp_new();
            tcg_gen_sub_i32(tmp, QREG_CC_N, QREG_CC_V);
            gen_ext(tmp, tmp, op - CC_OP_CMPB, 1);
            /* fallthru */
        case 12: /* GE */
        case 13: /* LT */
            tcond = TCG_COND_LT;
            goto done;
        case 14: /* GT */
        case 15: /* LE */
            tcond = TCG_COND_LE;
            goto done;
        }
    }

    c->v2 = tcg_constant_i32(0);

    switch (cond) {
    case 0: /* T */
    case 1: /* F */
        c->v1 = c->v2;
        tcond = TCG_COND_NEVER;
        goto done;
    case 14: /* GT (!(Z || (N ^ V))) */
    case 15: /* LE (Z || (N ^ V)) */
        /*
         * Logic operations clear V, which simplifies LE to (Z || N),
         * and since Z and N are co-located, this becomes a normal
         * comparison vs N.
         */
        if (op == CC_OP_LOGIC) {
            c->v1 = QREG_CC_N;
            tcond = TCG_COND_LE;
            goto done;
        }
        break;
    case 12: /* GE (!(N ^ V)) */
    case 13: /* LT (N ^ V) */
        /* Logic operations clear V, which simplifies this to N.  */
        if (op != CC_OP_LOGIC) {
            break;
        }
        /* fallthru */
    case 10: /* PL (!N) */
    case 11: /* MI (N) */
        /* Several cases represent N normally.  */
        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
            op == CC_OP_LOGIC) {
            c->v1 = QREG_CC_N;
            tcond = TCG_COND_LT;
            goto done;
        }
        break;
    case 6: /* NE (!Z) */
    case 7: /* EQ (Z) */
        /* Some cases fold Z into N.  */
        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
            op == CC_OP_LOGIC) {
            tcond = TCG_COND_EQ;
            c->v1 = QREG_CC_N;
            goto done;
        }
        break;
    case 4: /* CC (!C) */
    case 5: /* CS (C) */
        /* Some cases fold C into X.  */
        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL) {
            tcond = TCG_COND_NE;
            c->v1 = QREG_CC_X;
            goto done;
        }
        /* fallthru */
    case 8: /* VC (!V) */
    case 9: /* VS (V) */
        /* Logic operations clear V and C.  */
        if (op == CC_OP_LOGIC) {
            tcond = TCG_COND_NEVER;
            c->v1 = c->v2;
            goto done;
        }
        break;
    }

    /* Otherwise, flush flag state to CC_OP_FLAGS.  */
    gen_flush_flags(s);

    switch (cond) {
    case 0: /* T */
    case 1: /* F */
    default:
        /* Invalid, or handled above.  */
        abort();
    case 2: /* HI (!C && !Z) -> !(C || Z)*/
    case 3: /* LS (C || Z) */
        c->v1 = tmp = tcg_temp_new();
        tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
        tcg_gen_or_i32(tmp, tmp, QREG_CC_C);
        tcond = TCG_COND_NE;
        break;
    case 4: /* CC (!C) */
    case 5: /* CS (C) */
        c->v1 = QREG_CC_C;
        tcond = TCG_COND_NE;
        break;
    case 6: /* NE (!Z) */
    case 7: /* EQ (Z) */
        c->v1 = QREG_CC_Z;
        tcond = TCG_COND_EQ;
        break;
    case 8: /* VC (!V) */
    case 9: /* VS (V) */
        c->v1 = QREG_CC_V;
        tcond = TCG_COND_LT;
        break;
    case 10: /* PL (!N) */
    case 11: /* MI (N) */
        c->v1 = QREG_CC_N;
        tcond = TCG_COND_LT;
        break;
    case 12: /* GE (!(N ^ V)) */
    case 13: /* LT (N ^ V) */
        c->v1 = tmp = tcg_temp_new();
        tcg_gen_xor_i32(tmp, QREG_CC_N, QREG_CC_V);
        tcond = TCG_COND_LT;
        break;
    case 14: /* GT (!(Z || (N ^ V))) */
    case 15: /* LE (Z || (N ^ V)) */
        c->v1 = tmp = tcg_temp_new();
        tcg_gen_negsetcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
        tmp2 = tcg_temp_new();
        tcg_gen_xor_i32(tmp2, QREG_CC_N, QREG_CC_V);
        tcg_gen_or_i32(tmp, tmp, tmp2);
        tcond = TCG_COND_LT;
        break;
    }

 done:
    /* Even condition codes are the inverse of the odd one above them.  */
    if ((cond & 1) == 0) {
        tcond = tcg_invert_cond(tcond);
    }
    c->tcond = tcond;
}
1349 
1350 static void gen_jmpcc(DisasContext *s, int cond, TCGLabel *l1)
1351 {
1352   DisasCompare c;
1353 
1354   gen_cc_cond(&c, s, cond);
1355   update_cc_op(s);
1356   tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
1357 }
1358 
/* Force a TB lookup after an instruction that changes the CPU state.  */
static void gen_exit_tb(DisasContext *s)
{
    update_cc_op(s);
    /* Point PC at the next instruction and leave the TB entirely.  */
    tcg_gen_movi_i32(QREG_PC, s->pc);
    s->base.is_jmp = DISAS_EXIT;
}
1366 
/*
 * Evaluate the source effective address of INSN into RESULT,
 * sign- or zero-extending per OP_SIGN.  On an invalid mode this
 * raises an address fault and returns from the enclosing function.
 * ADDRP, if non-NULL, receives the computed address for reuse.
 */
#define SRC_EA(env, result, opsize, op_sign, addrp) do {                \
        result = gen_ea(env, s, insn, opsize, NULL_QREG, addrp,         \
                        op_sign ? EA_LOADS : EA_LOADU, IS_USER(s));     \
        if (IS_NULL_QREG(result)) {                                     \
            gen_addr_fault(s);                                          \
            return;                                                     \
        }                                                               \
    } while (0)

/*
 * Store VAL to the destination effective address of INSN.  On an
 * invalid mode this raises an address fault and returns from the
 * enclosing function.  ADDRP may supply a previously computed address.
 */
#define DEST_EA(env, insn, opsize, val, addrp) do {                     \
        TCGv ea_result = gen_ea(env, s, insn, opsize, val, addrp,       \
                                EA_STORE, IS_USER(s));                  \
        if (IS_NULL_QREG(ea_result)) {                                  \
            gen_addr_fault(s);                                          \
            return;                                                     \
        }                                                               \
    } while (0)
1384 
/* Generate a jump to an immediate address.  */
static void gen_jmp_tb(DisasContext *s, int n, target_ulong dest,
                       target_ulong src)
{
    if (unlikely(s->ss_active)) {
        /* Single-stepping: set PC and raise a trace exception instead.  */
        update_cc_op(s);
        tcg_gen_movi_i32(QREG_PC, dest);
        gen_raise_exception_format2(s, EXCP_TRACE, src);
    } else if (translator_use_goto_tb(&s->base, dest)) {
        /* Chain directly to the destination TB (slot N).  */
        tcg_gen_goto_tb(n);
        tcg_gen_movi_i32(QREG_PC, dest);
        tcg_gen_exit_tb(s->base.tb, n);
    } else {
        /* Indirect jump: exit and let the main loop look up the TB.  */
        gen_jmp_im(s, dest);
        tcg_gen_exit_tb(NULL, 0);
    }
    s->base.is_jmp = DISAS_NORETURN;
}
1403 
/* Scc: set a byte to all-ones or all-zeroes according to a condition.  */
DISAS_INSN(scc)
{
    DisasCompare c;
    int cond;
    TCGv tmp;

    cond = (insn >> 8) & 0xf;
    gen_cc_cond(&c, s, cond);

    tmp = tcg_temp_new();
    /* negsetcond yields -1 (0xff in the low byte) when true, else 0.  */
    tcg_gen_negsetcond_i32(c.tcond, tmp, c.v1, c.v2);

    DEST_EA(env, insn, OS_BYTE, tmp, NULL);
}
1418 
/*
 * DBcc: if the condition fails, decrement the low word of Dn and
 * branch by the 16-bit displacement unless the counter reaches -1.
 */
DISAS_INSN(dbcc)
{
    TCGLabel *l1;
    TCGv reg;
    TCGv tmp;
    int16_t offset;
    uint32_t base;

    reg = DREG(insn, 0);
    base = s->pc;
    offset = (int16_t)read_im16(env, s);
    l1 = gen_new_label();
    /* Condition true: skip the decrement and fall through.  */
    gen_jmpcc(s, (insn >> 8) & 0xf, l1);

    tmp = tcg_temp_new();
    tcg_gen_ext16s_i32(tmp, reg);
    tcg_gen_addi_i32(tmp, tmp, -1);
    /* Only the low 16 bits of Dn are updated.  */
    gen_partset_reg(OS_WORD, reg, tmp);
    tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, -1, l1);
    gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
    gen_set_label(l1);
    gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
}
1442 
/* Unimplemented MAC instruction: raise a line-A exception.  */
DISAS_INSN(undef_mac)
{
    gen_exception(s, s->base.pc_next, EXCP_LINEA);
}
1447 
/* Unimplemented FPU instruction: raise a line-F exception.  */
DISAS_INSN(undef_fpu)
{
    gen_exception(s, s->base.pc_next, EXCP_LINEF);
}
1452 
/* Undefined opcode: log it and raise an illegal-instruction trap.  */
DISAS_INSN(undef)
{
    /*
     * ??? This is both instructions that are as yet unimplemented
     * for the 680x0 series, as well as those that are implemented
     * but actually illegal for CPU32 or pre-68020.
     */
    qemu_log_mask(LOG_UNIMP, "Illegal instruction: %04x @ %08x\n",
                  insn, s->base.pc_next);
    gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
}
1464 
1465 DISAS_INSN(mulw)
1466 {
1467     TCGv reg;
1468     TCGv tmp;
1469     TCGv src;
1470     int sign;
1471 
1472     sign = (insn & 0x100) != 0;
1473     reg = DREG(insn, 9);
1474     tmp = tcg_temp_new();
1475     if (sign)
1476         tcg_gen_ext16s_i32(tmp, reg);
1477     else
1478         tcg_gen_ext16u_i32(tmp, reg);
1479     SRC_EA(env, src, OS_WORD, sign, NULL);
1480     tcg_gen_mul_i32(tmp, tmp, src);
1481     tcg_gen_mov_i32(reg, tmp);
1482     gen_logic_cc(s, tmp, OS_LONG);
1483 }
1484 
/* DIVU.W / DIVS.W: 32/16 division leaving 16-bit remainder:quotient.  */
DISAS_INSN(divw)
{
    int sign;
    TCGv src;
    TCGv destr;
    TCGv ilen;

    /* divX.w <EA>,Dn    32/16 -> 16r:16q */

    sign = (insn & 0x100) != 0;

    /* dest.l / src.w */

    SRC_EA(env, src, OS_WORD, sign, NULL);
    destr = tcg_constant_i32(REG(insn, 9));
    /* ilen: bytes consumed by this insn, passed to the helper
     * (presumably for exception reporting — see helper).  */
    ilen = tcg_constant_i32(s->pc - s->base.pc_next);
    if (sign) {
        gen_helper_divsw(tcg_env, destr, src, ilen);
    } else {
        gen_helper_divuw(tcg_env, destr, src, ilen);
    }

    set_cc_op(s, CC_OP_FLAGS);
}
1509 
/*
 * DIVU.L / DIVS.L and the 64/32 quad forms.  Bit 11 of the extension
 * word selects signedness; bit 10 selects the 64-bit dividend form.
 */
DISAS_INSN(divl)
{
    TCGv num, reg, den, ilen;
    int sign;
    uint16_t ext;

    ext = read_im16(env, s);

    sign = (ext & 0x0800) != 0;

    if (ext & 0x400) {
        /* The 64-bit dividend form requires the QUAD_MULDIV feature.  */
        if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
            return;
        }

        /* divX.l <EA>, Dr:Dq    64/32 -> 32r:32q */

        SRC_EA(env, den, OS_LONG, 0, NULL);
        num = tcg_constant_i32(REG(ext, 12));
        reg = tcg_constant_i32(REG(ext, 0));
        /* ilen: bytes consumed by this insn, passed to the helper.  */
        ilen = tcg_constant_i32(s->pc - s->base.pc_next);
        if (sign) {
            gen_helper_divsll(tcg_env, num, reg, den, ilen);
        } else {
            gen_helper_divull(tcg_env, num, reg, den, ilen);
        }
        set_cc_op(s, CC_OP_FLAGS);
        return;
    }

    /* divX.l <EA>, Dq        32/32 -> 32q     */
    /* divXl.l <EA>, Dr:Dq    32/32 -> 32r:32q */

    SRC_EA(env, den, OS_LONG, 0, NULL);
    num = tcg_constant_i32(REG(ext, 12));
    reg = tcg_constant_i32(REG(ext, 0));
    ilen = tcg_constant_i32(s->pc - s->base.pc_next);
    if (sign) {
        gen_helper_divsl(tcg_env, num, reg, den, ilen);
    } else {
        gen_helper_divul(tcg_env, num, reg, den, ilen);
    }

    set_cc_op(s, CC_OP_FLAGS);
}
1556 
/*
 * Packed-BCD addition: dest = dest + src + X, computed with binary
 * arithmetic followed by a per-digit correction.
 */
static void bcd_add(TCGv dest, TCGv src)
{
    TCGv t0, t1;

    /*
     * dest10 = dest10 + src10 + X
     *
     *        t1 = src
     *        t2 = t1 + 0x066
     *        t3 = t2 + dest + X
     *        t4 = t2 ^ dest
     *        t5 = t3 ^ t4
     *        t6 = ~t5 & 0x110
     *        t7 = (t6 >> 2) | (t6 >> 3)
     *        return t3 - t7
     */

    /*
     * t1 = (src + 0x066) + dest + X
     *    = result with some possible exceeding 0x6
     */

    t0 = tcg_temp_new();
    tcg_gen_addi_i32(t0, src, 0x066);

    t1 = tcg_temp_new();
    tcg_gen_add_i32(t1, t0, dest);
    tcg_gen_add_i32(t1, t1, QREG_CC_X);

    /* we will remove exceeding 0x6 where there is no carry */

    /*
     * t0 = (src + 0x0066) ^ dest
     *    = t1 without carries
     */

    tcg_gen_xor_i32(t0, t0, dest);

    /*
     * extract the carries
     * t0 = t0 ^ t1
     *    = only the carries
     */

    tcg_gen_xor_i32(t0, t0, t1);

    /*
     * generate 0x1 where there is no carry
     * and for each 0x10, generate a 0x6
     */

    tcg_gen_shri_i32(t0, t0, 3);
    tcg_gen_not_i32(t0, t0);
    tcg_gen_andi_i32(t0, t0, 0x22);
    tcg_gen_add_i32(dest, t0, t0);
    tcg_gen_add_i32(dest, dest, t0);

    /*
     * remove the exceeding 0x6
     * for digits that have not generated a carry
     */

    tcg_gen_sub_i32(dest, t1, dest);
}
1621 
/*
 * Packed-BCD subtraction: dest = dest - src - X, expressed as a BCD
 * addition of the ten's complement (see identity below).
 */
static void bcd_sub(TCGv dest, TCGv src)
{
    TCGv t0, t1, t2;

    /*
     *  dest10 = dest10 - src10 - X
     *         = bcd_add(dest + 1 - X, 0x199 - src)
     */

    /* t0 = 0x066 + (0x199 - src) */

    t0 = tcg_temp_new();
    tcg_gen_subfi_i32(t0, 0x1ff, src);

    /* t1 = t0 + dest + 1 - X*/

    t1 = tcg_temp_new();
    tcg_gen_add_i32(t1, t0, dest);
    tcg_gen_addi_i32(t1, t1, 1);
    tcg_gen_sub_i32(t1, t1, QREG_CC_X);

    /* t2 = t0 ^ dest */

    t2 = tcg_temp_new();
    tcg_gen_xor_i32(t2, t0, dest);

    /* t0 = t1 ^ t2 */

    tcg_gen_xor_i32(t0, t1, t2);

    /*
     * t2 = ~t0 & 0x110
     * t0 = (t2 >> 2) | (t2 >> 3)
     *
     * to fit on 8bit operands, changed in:
     *
     * t2 = ~(t0 >> 3) & 0x22
     * t0 = t2 + t2
     * t0 = t0 + t2
     */

    tcg_gen_shri_i32(t2, t0, 3);
    tcg_gen_not_i32(t2, t2);
    tcg_gen_andi_i32(t2, t2, 0x22);
    tcg_gen_add_i32(t0, t2, t2);
    tcg_gen_add_i32(t0, t0, t2);

    /* return t1 - t0 */

    tcg_gen_sub_i32(dest, t1, t0);
}
1673 
/*
 * Update CC after a BCD operation: !Z is sticky (a nonzero low byte
 * clears Z and it stays cleared), while C and X come from bit 8.
 */
static void bcd_flags(TCGv val)
{
    /* Accumulate nonzero result bits into Z (clearing it if any set).  */
    tcg_gen_andi_i32(QREG_CC_C, val, 0x0ff);
    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_C);

    /* C = carry out of the byte (bit 8 of the raw result).  */
    tcg_gen_extract_i32(QREG_CC_C, val, 8, 1);

    /* X mirrors C.  */
    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
}
1683 
/* ABCD with data-register operands: Dy + Dx + X in packed BCD.  */
DISAS_INSN(abcd_reg)
{
    TCGv src;
    TCGv dest;

    gen_flush_flags(s); /* !Z is sticky */

    src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
    dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
    bcd_add(dest, src);
    /* Only the low byte of the destination register is written.  */
    gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);

    bcd_flags(dest);
}
1698 
/* ABCD with memory operands: -(Ay),-(Ax) packed-BCD addition.  */
DISAS_INSN(abcd_mem)
{
    TCGv src, dest, addr;

    gen_flush_flags(s); /* !Z is sticky */

    /* Indirect pre-decrement load (mode 4) */

    src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
                      NULL_QREG, NULL, EA_LOADU, IS_USER(s));
    dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
                       NULL_QREG, &addr, EA_LOADU, IS_USER(s));

    bcd_add(dest, src);

    /* Write the result back to the destination address.  */
    gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
                EA_STORE, IS_USER(s));

    bcd_flags(dest);
}
1719 
/* SBCD with data-register operands: Dx - Dy - X in packed BCD.  */
DISAS_INSN(sbcd_reg)
{
    TCGv src, dest;

    gen_flush_flags(s); /* !Z is sticky */

    src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
    dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);

    bcd_sub(dest, src);

    /* Only the low byte of the destination register is written.  */
    gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);

    bcd_flags(dest);
}
1735 
/* SBCD with memory operands: -(Ay),-(Ax) packed-BCD subtraction.  */
DISAS_INSN(sbcd_mem)
{
    TCGv src, dest, addr;

    gen_flush_flags(s); /* !Z is sticky */

    /* Indirect pre-decrement load (mode 4) */

    src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
                      NULL_QREG, NULL, EA_LOADU, IS_USER(s));
    dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
                       NULL_QREG, &addr, EA_LOADU, IS_USER(s));

    bcd_sub(dest, src);

    /* Write the result back to the destination address.  */
    gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
                EA_STORE, IS_USER(s));

    bcd_flags(dest);
}
1756 
/* NBCD: negate decimal with extend, i.e. 0 - <ea> - X in packed BCD.  */
DISAS_INSN(nbcd)
{
    TCGv src, dest;
    TCGv addr;

    gen_flush_flags(s); /* !Z is sticky */

    SRC_EA(env, src, OS_BYTE, 0, &addr);

    dest = tcg_temp_new();
    tcg_gen_movi_i32(dest, 0);
    /* dest = 0 - src - X.  */
    bcd_sub(dest, src);

    DEST_EA(env, insn, OS_BYTE, dest, &addr);

    bcd_flags(dest);
}
1774 
/*
 * ADD/SUB between Dn and an effective address.  Bit 14 of the insn
 * selects add vs subtract; bit 8 selects the direction (set: the EA
 * is the destination, clear: Dn is the destination).
 */
DISAS_INSN(addsub)
{
    TCGv reg;
    TCGv dest;
    TCGv src;
    TCGv tmp;
    TCGv addr;
    int add;
    int opsize;

    add = (insn & 0x4000) != 0;
    opsize = insn_opsize(insn);
    reg = gen_extend(s, DREG(insn, 9), opsize, 1);
    dest = tcg_temp_new();
    if (insn & 0x100) {
        /* <ea> op Dn -> <ea> */
        SRC_EA(env, tmp, opsize, 1, &addr);
        src = reg;
    } else {
        /* Dn op <ea> -> Dn */
        tmp = reg;
        SRC_EA(env, src, opsize, 1, NULL);
    }
    if (add) {
        tcg_gen_add_i32(dest, tmp, src);
        /* X = carry out of the addition.  */
        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, src);
        set_cc_op(s, CC_OP_ADDB + opsize);
    } else {
        /* X = borrow from the subtraction.  */
        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, tmp, src);
        tcg_gen_sub_i32(dest, tmp, src);
        set_cc_op(s, CC_OP_SUBB + opsize);
    }
    gen_update_cc_add(dest, src, opsize);
    if (insn & 0x100) {
        DEST_EA(env, insn, opsize, dest, &addr);
    } else {
        gen_partset_reg(opsize, DREG(insn, 9), dest);
    }
}
1812 
/* Reverse the order of the bits in REG.  */
DISAS_INSN(bitrev)
{
    TCGv reg;
    reg = DREG(insn, 0);
    /* In-place: the helper result replaces the register's value.  */
    gen_helper_bitrev(reg, reg);
}
1820 
1821 DISAS_INSN(bitop_reg)
1822 {
1823     int opsize;
1824     int op;
1825     TCGv src1;
1826     TCGv src2;
1827     TCGv tmp;
1828     TCGv addr;
1829     TCGv dest;
1830 
1831     if ((insn & 0x38) != 0)
1832         opsize = OS_BYTE;
1833     else
1834         opsize = OS_LONG;
1835     op = (insn >> 6) & 3;
1836     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
1837 
1838     gen_flush_flags(s);
1839     src2 = tcg_temp_new();
1840     if (opsize == OS_BYTE)
1841         tcg_gen_andi_i32(src2, DREG(insn, 9), 7);
1842     else
1843         tcg_gen_andi_i32(src2, DREG(insn, 9), 31);
1844 
1845     tmp = tcg_temp_new();
1846     tcg_gen_shl_i32(tmp, tcg_constant_i32(1), src2);
1847 
1848     tcg_gen_and_i32(QREG_CC_Z, src1, tmp);
1849 
1850     dest = tcg_temp_new();
1851     switch (op) {
1852     case 1: /* bchg */
1853         tcg_gen_xor_i32(dest, src1, tmp);
1854         break;
1855     case 2: /* bclr */
1856         tcg_gen_andc_i32(dest, src1, tmp);
1857         break;
1858     case 3: /* bset */
1859         tcg_gen_or_i32(dest, src1, tmp);
1860         break;
1861     default: /* btst */
1862         break;
1863     }
1864     if (op) {
1865         DEST_EA(env, insn, opsize, dest, &addr);
1866     }
1867 }
1868 
/* SATS: saturate Dn, with the helper deciding based on the V flag.  */
DISAS_INSN(sats)
{
    TCGv reg;
    reg = DREG(insn, 0);
    gen_flush_flags(s);
    /* QREG_CC_V tells the helper whether an overflow occurred.  */
    gen_helper_sats(reg, reg, QREG_CC_V);
    gen_logic_cc(s, reg, OS_LONG);
}
1877 
1878 static void gen_push(DisasContext *s, TCGv val)
1879 {
1880     TCGv tmp;
1881 
1882     tmp = tcg_temp_new();
1883     tcg_gen_subi_i32(tmp, QREG_SP, 4);
1884     gen_store(s, OS_LONG, tmp, val, IS_USER(s));
1885     tcg_gen_mov_i32(QREG_SP, tmp);
1886 }
1887 
1888 static TCGv mreg(int reg)
1889 {
1890     if (reg < 8) {
1891         /* Dx */
1892         return cpu_dregs[reg];
1893     }
1894     /* Ax */
1895     return cpu_aregs[reg & 7];
1896 }
1897 
/*
 * MOVEM: move multiple registers to/from memory.  Bit 10 of the insn
 * selects the direction (set = memory to registers), bit 6 the operand
 * size; the following extension word is the register mask.
 */
DISAS_INSN(movem)
{
    TCGv addr, incr, tmp, r[16];
    int is_load = (insn & 0x0400) != 0;
    int opsize = (insn & 0x40) != 0 ? OS_LONG : OS_WORD;
    uint16_t mask = read_im16(env, s);
    int mode = extract32(insn, 3, 3);
    int reg0 = REG(insn, 0);
    int i;

    tmp = cpu_aregs[reg0];

    switch (mode) {
    case 0: /* data register direct */
    case 1: /* addr register direct */
    do_addr_fault:
        gen_addr_fault(s);
        return;

    case 2: /* indirect */
        break;

    case 3: /* indirect post-increment */
        if (!is_load) {
            /* post-increment is not allowed */
            goto do_addr_fault;
        }
        break;

    case 4: /* indirect pre-decrement */
        if (is_load) {
            /* pre-decrement is not allowed */
            goto do_addr_fault;
        }
        /*
         * We want a bare copy of the address reg, without any pre-decrement
         * adjustment, as gen_lea would provide.
         */
        break;

    default:
        tmp = gen_lea_mode(env, s, mode, reg0, opsize);
        if (IS_NULL_QREG(tmp)) {
            goto do_addr_fault;
        }
        break;
    }

    addr = tcg_temp_new();
    tcg_gen_mov_i32(addr, tmp);
    incr = tcg_constant_i32(opsize_bytes(opsize));

    if (is_load) {
        /* memory to register */
        /* Load everything first, then commit, so that a fault midway
           does not leave some registers already overwritten.  */
        for (i = 0; i < 16; i++) {
            if (mask & (1 << i)) {
                r[i] = gen_load(s, opsize, addr, 1, IS_USER(s));
                tcg_gen_add_i32(addr, addr, incr);
            }
        }
        for (i = 0; i < 16; i++) {
            if (mask & (1 << i)) {
                tcg_gen_mov_i32(mreg(i), r[i]);
            }
        }
        if (mode == 3) {
            /* post-increment: movem (An)+,X */
            tcg_gen_mov_i32(cpu_aregs[reg0], addr);
        }
    } else {
        /* register to memory */
        if (mode == 4) {
            /* pre-decrement: movem X,-(An) */
            for (i = 15; i >= 0; i--) {
                if ((mask << i) & 0x8000) {
                    tcg_gen_sub_i32(addr, addr, incr);
                    if (reg0 + 8 == i &&
                        m68k_feature(s->env, M68K_FEATURE_EXT_FULL)) {
                        /*
                         * M68020+: if the addressing register is the
                         * register moved to memory, the value written
                         * is the initial value decremented by the size of
                         * the operation, regardless of how many actual
                         * stores have been performed until this point.
                         * M68000/M68010: the value is the initial value.
                         */
                        tmp = tcg_temp_new();
                        tcg_gen_sub_i32(tmp, cpu_aregs[reg0], incr);
                        gen_store(s, opsize, addr, tmp, IS_USER(s));
                    } else {
                        gen_store(s, opsize, addr, mreg(i), IS_USER(s));
                    }
                }
            }
            tcg_gen_mov_i32(cpu_aregs[reg0], addr);
        } else {
            for (i = 0; i < 16; i++) {
                if (mask & (1 << i)) {
                    gen_store(s, opsize, addr, mreg(i), IS_USER(s));
                    tcg_gen_add_i32(addr, addr, incr);
                }
            }
        }
    }
}
2003 
2004 DISAS_INSN(movep)
2005 {
2006     uint8_t i;
2007     int16_t displ;
2008     TCGv reg;
2009     TCGv addr;
2010     TCGv abuf;
2011     TCGv dbuf;
2012 
2013     displ = read_im16(env, s);
2014 
2015     addr = AREG(insn, 0);
2016     reg = DREG(insn, 9);
2017 
2018     abuf = tcg_temp_new();
2019     tcg_gen_addi_i32(abuf, addr, displ);
2020     dbuf = tcg_temp_new();
2021 
2022     if (insn & 0x40) {
2023         i = 4;
2024     } else {
2025         i = 2;
2026     }
2027 
2028     if (insn & 0x80) {
2029         for ( ; i > 0 ; i--) {
2030             tcg_gen_shri_i32(dbuf, reg, (i - 1) * 8);
2031             tcg_gen_qemu_st_i32(dbuf, abuf, IS_USER(s), MO_UB);
2032             if (i > 1) {
2033                 tcg_gen_addi_i32(abuf, abuf, 2);
2034             }
2035         }
2036     } else {
2037         for ( ; i > 0 ; i--) {
2038             tcg_gen_qemu_ld_tl(dbuf, abuf, IS_USER(s), MO_UB);
2039             tcg_gen_deposit_i32(reg, reg, dbuf, (i - 1) * 8, 8);
2040             if (i > 1) {
2041                 tcg_gen_addi_i32(abuf, abuf, 2);
2042             }
2043         }
2044     }
2045 }
2046 
2047 DISAS_INSN(bitop_im)
2048 {
2049     int opsize;
2050     int op;
2051     TCGv src1;
2052     uint32_t mask;
2053     int bitnum;
2054     TCGv tmp;
2055     TCGv addr;
2056 
2057     if ((insn & 0x38) != 0)
2058         opsize = OS_BYTE;
2059     else
2060         opsize = OS_LONG;
2061     op = (insn >> 6) & 3;
2062 
2063     bitnum = read_im16(env, s);
2064     if (m68k_feature(s->env, M68K_FEATURE_M68K)) {
2065         if (bitnum & 0xfe00) {
2066             disas_undef(env, s, insn);
2067             return;
2068         }
2069     } else {
2070         if (bitnum & 0xff00) {
2071             disas_undef(env, s, insn);
2072             return;
2073         }
2074     }
2075 
2076     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
2077 
2078     gen_flush_flags(s);
2079     if (opsize == OS_BYTE)
2080         bitnum &= 7;
2081     else
2082         bitnum &= 31;
2083     mask = 1 << bitnum;
2084 
2085    tcg_gen_andi_i32(QREG_CC_Z, src1, mask);
2086 
2087     if (op) {
2088         tmp = tcg_temp_new();
2089         switch (op) {
2090         case 1: /* bchg */
2091             tcg_gen_xori_i32(tmp, src1, mask);
2092             break;
2093         case 2: /* bclr */
2094             tcg_gen_andi_i32(tmp, src1, ~mask);
2095             break;
2096         case 3: /* bset */
2097             tcg_gen_ori_i32(tmp, src1, mask);
2098             break;
2099         default: /* btst */
2100             break;
2101         }
2102         DEST_EA(env, insn, opsize, tmp, &addr);
2103     }
2104 }
2105 
2106 static TCGv gen_get_ccr(DisasContext *s)
2107 {
2108     TCGv dest;
2109 
2110     update_cc_op(s);
2111     dest = tcg_temp_new();
2112     gen_helper_get_ccr(dest, tcg_env);
2113     return dest;
2114 }
2115 
2116 static TCGv gen_get_sr(DisasContext *s)
2117 {
2118     TCGv ccr;
2119     TCGv sr;
2120 
2121     ccr = gen_get_ccr(s);
2122     sr = tcg_temp_new();
2123     tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
2124     tcg_gen_or_i32(sr, sr, ccr);
2125     return sr;
2126 }
2127 
/*
 * Load SR (or only CCR) from an immediate value.
 * The CCR-only path writes the CC_OP_FLAGS representation directly:
 * C and X as 0/1, N and V as 0/-1, and Z inverted (CC_Z == 0 means
 * the Z flag is set).
 */
static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
{
    if (ccr_only) {
        tcg_gen_movi_i32(QREG_CC_C, val & CCF_C ? 1 : 0);
        tcg_gen_movi_i32(QREG_CC_V, val & CCF_V ? -1 : 0);
        tcg_gen_movi_i32(QREG_CC_Z, val & CCF_Z ? 0 : 1);
        tcg_gen_movi_i32(QREG_CC_N, val & CCF_N ? -1 : 0);
        tcg_gen_movi_i32(QREG_CC_X, val & CCF_X ? 1 : 0);
    } else {
        /* Must writeback before changing security state. */
        do_writebacks(s);
        gen_helper_set_sr(tcg_env, tcg_constant_i32(val));
    }
    set_cc_op(s, CC_OP_FLAGS);
}
2143 
2144 static void gen_set_sr(DisasContext *s, TCGv val, int ccr_only)
2145 {
2146     if (ccr_only) {
2147         gen_helper_set_ccr(tcg_env, val);
2148     } else {
2149         /* Must writeback before changing security state. */
2150         do_writebacks(s);
2151         gen_helper_set_sr(tcg_env, val);
2152     }
2153     set_cc_op(s, CC_OP_FLAGS);
2154 }
2155 
2156 static void gen_move_to_sr(CPUM68KState *env, DisasContext *s, uint16_t insn,
2157                            bool ccr_only)
2158 {
2159     if ((insn & 0x3f) == 0x3c) {
2160         uint16_t val;
2161         val = read_im16(env, s);
2162         gen_set_sr_im(s, val, ccr_only);
2163     } else {
2164         TCGv src;
2165         SRC_EA(env, src, OS_WORD, 0, NULL);
2166         gen_set_sr(s, src, ccr_only);
2167     }
2168 }
2169 
/*
 * Immediate arithmetic/logic group: ori/andi/subi/addi/eori/cmpi,
 * including the ori/andi/eori forms targeting CCR (byte) or SR (word).
 */
DISAS_INSN(arith_im)
{
    int op;
    TCGv im;
    TCGv src1;
    TCGv dest;
    TCGv addr;
    int opsize;
    /* EA mode/reg 0x3c selects the CCR/SR destination forms. */
    bool with_SR = ((insn & 0x3f) == 0x3c);

    op = (insn >> 9) & 7;
    opsize = insn_opsize(insn);
    /* Fetch the immediate, sign-extended to 32 bits. */
    switch (opsize) {
    case OS_BYTE:
        im = tcg_constant_i32((int8_t)read_im8(env, s));
        break;
    case OS_WORD:
        im = tcg_constant_i32((int16_t)read_im16(env, s));
        break;
    case OS_LONG:
        im = tcg_constant_i32(read_im32(env, s));
        break;
    default:
        g_assert_not_reached();
    }

    if (with_SR) {
        /* SR/CCR can only be used with andi/eori/ori */
        if (op == 2 || op == 3 || op == 6) {
            disas_undef(env, s, insn);
            return;
        }
        switch (opsize) {
        case OS_BYTE:
            src1 = gen_get_ccr(s);
            break;
        case OS_WORD:
            /* The SR form is privileged. */
            if (IS_USER(s)) {
                gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
                return;
            }
            src1 = gen_get_sr(s);
            break;
        default:
            /* OS_LONG; others already g_assert_not_reached.  */
            disas_undef(env, s, insn);
            return;
        }
    } else {
        /* cmpi (op 6) does not write back, so it needs no address. */
        SRC_EA(env, src1, opsize, 1, (op == 6) ? NULL : &addr);
    }
    dest = tcg_temp_new();
    switch (op) {
    case 0: /* ori */
        tcg_gen_or_i32(dest, src1, im);
        if (with_SR) {
            gen_set_sr(s, dest, opsize == OS_BYTE);
            gen_exit_tb(s);
        } else {
            DEST_EA(env, insn, opsize, dest, &addr);
            gen_logic_cc(s, dest, opsize);
        }
        break;
    case 1: /* andi */
        tcg_gen_and_i32(dest, src1, im);
        if (with_SR) {
            gen_set_sr(s, dest, opsize == OS_BYTE);
            gen_exit_tb(s);
        } else {
            DEST_EA(env, insn, opsize, dest, &addr);
            gen_logic_cc(s, dest, opsize);
        }
        break;
    case 2: /* subi */
        /* X is the borrow: set when src1 < im (unsigned). */
        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, src1, im);
        tcg_gen_sub_i32(dest, src1, im);
        gen_update_cc_add(dest, im, opsize);
        set_cc_op(s, CC_OP_SUBB + opsize);
        DEST_EA(env, insn, opsize, dest, &addr);
        break;
    case 3: /* addi */
        tcg_gen_add_i32(dest, src1, im);
        gen_update_cc_add(dest, im, opsize);
        /* X is the carry: set when the result wrapped below im. */
        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, im);
        set_cc_op(s, CC_OP_ADDB + opsize);
        DEST_EA(env, insn, opsize, dest, &addr);
        break;
    case 5: /* eori */
        tcg_gen_xor_i32(dest, src1, im);
        if (with_SR) {
            gen_set_sr(s, dest, opsize == OS_BYTE);
            gen_exit_tb(s);
        } else {
            DEST_EA(env, insn, opsize, dest, &addr);
            gen_logic_cc(s, dest, opsize);
        }
        break;
    case 6: /* cmpi */
        gen_update_cc_cmp(s, src1, im, opsize);
        break;
    default:
        abort();
    }
}
2274 
/*
 * cas Dc,Du,<ea>: compare-and-swap on a memory operand, implemented
 * with the host's atomic cmpxchg.  Flags are those of "cmp <ea>,Dc".
 */
DISAS_INSN(cas)
{
    int opsize;
    TCGv addr;
    uint16_t ext;
    TCGv load;
    TCGv cmp;
    MemOp opc;

    switch ((insn >> 9) & 3) {
    case 1:
        opsize = OS_BYTE;
        opc = MO_SB;
        break;
    case 2:
        opsize = OS_WORD;
        opc = MO_TESW;
        break;
    case 3:
        opsize = OS_LONG;
        opc = MO_TESL;
        break;
    default:
        g_assert_not_reached();
    }

    ext = read_im16(env, s);

    /* cas Dc,Du,<EA> */

    addr = gen_lea(env, s, insn, opsize);
    if (IS_NULL_QREG(addr)) {
        gen_addr_fault(s);
        return;
    }

    /* Dc, the compare operand, sign-extended to the operation size. */
    cmp = gen_extend(s, DREG(ext, 0), opsize, 1);

    /*
     * if  <EA> == Dc then
     *     <EA> = Du
     *     Dc = <EA> (because <EA> == Dc)
     * else
     *     Dc = <EA>
     */

    load = tcg_temp_new();
    tcg_gen_atomic_cmpxchg_i32(load, addr, cmp, DREG(ext, 6),
                               IS_USER(s), opc);
    /* update flags before setting cmp to load */
    gen_update_cc_cmp(s, load, cmp, opsize);
    gen_partset_reg(opsize, DREG(ext, 0), load);

    /* Complete the address-register writeback for (An)+ / -(An) modes. */
    switch (extract32(insn, 3, 3)) {
    case 3: /* Indirect postincrement.  */
        tcg_gen_addi_i32(AREG(insn, 0), addr, opsize_bytes(opsize));
        break;
    case 4: /* Indirect predecrememnt.  */
        tcg_gen_mov_i32(AREG(insn, 0), addr);
        break;
    }
}
2337 
/* cas2.w: dual word-sized compare-and-swap on two addresses. */
DISAS_INSN(cas2w)
{
    uint16_t ext1, ext2;
    TCGv addr1, addr2;

    /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */

    ext1 = read_im16(env, s);

    if (ext1 & 0x8000) {
        /* Address Register */
        addr1 = AREG(ext1, 12);
    } else {
        /* Data Register */
        addr1 = DREG(ext1, 12);
    }

    ext2 = read_im16(env, s);
    if (ext2 & 0x8000) {
        /* Address Register */
        addr2 = AREG(ext2, 12);
    } else {
        /* Data Register */
        addr2 = DREG(ext2, 12);
    }

    /*
     * if (R1) == Dc1 && (R2) == Dc2 then
     *     (R1) = Du1
     *     (R2) = Du2
     * else
     *     Dc1 = (R1)
     *     Dc2 = (R2)
     */

    if (tb_cflags(s->base.tb) & CF_PARALLEL) {
        /*
         * NOTE(review): unlike cas2l below, the word form has no
         * parallel helper here; fall back to serialized execution.
         */
        gen_helper_exit_atomic(tcg_env);
    } else {
        /* Pack the four register numbers into one helper argument. */
        TCGv regs = tcg_constant_i32(REG(ext2, 6) |
                                     (REG(ext1, 6) << 3) |
                                     (REG(ext2, 0) << 6) |
                                     (REG(ext1, 0) << 9));
        gen_helper_cas2w(tcg_env, regs, addr1, addr2);
    }

    /* Note that cas2w also assigned to env->cc_op.  */
    s->cc_op = CC_OP_CMPW;
    s->cc_op_synced = 1;
}
2387 
/* cas2.l: dual long-sized compare-and-swap on two addresses. */
DISAS_INSN(cas2l)
{
    uint16_t ext1, ext2;
    TCGv addr1, addr2, regs;

    /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */

    ext1 = read_im16(env, s);

    if (ext1 & 0x8000) {
        /* Address Register */
        addr1 = AREG(ext1, 12);
    } else {
        /* Data Register */
        addr1 = DREG(ext1, 12);
    }

    ext2 = read_im16(env, s);
    if (ext2 & 0x8000) {
        /* Address Register */
        addr2 = AREG(ext2, 12);
    } else {
        /* Data Register */
        addr2 = DREG(ext2, 12);
    }

    /*
     * if (R1) == Dc1 && (R2) == Dc2 then
     *     (R1) = Du1
     *     (R2) = Du2
     * else
     *     Dc1 = (R1)
     *     Dc2 = (R2)
     */

    /* Pack the four register numbers into one helper argument. */
    regs = tcg_constant_i32(REG(ext2, 6) |
                            (REG(ext1, 6) << 3) |
                            (REG(ext2, 0) << 6) |
                            (REG(ext1, 0) << 9));
    if (tb_cflags(s->base.tb) & CF_PARALLEL) {
        gen_helper_cas2l_parallel(tcg_env, regs, addr1, addr2);
    } else {
        gen_helper_cas2l(tcg_env, regs, addr1, addr2);
    }

    /* Note that cas2l also assigned to env->cc_op.  */
    s->cc_op = CC_OP_CMPL;
    s->cc_op_synced = 1;
}
2437 
2438 DISAS_INSN(byterev)
2439 {
2440     TCGv reg;
2441 
2442     reg = DREG(insn, 0);
2443     tcg_gen_bswap32_i32(reg, reg);
2444 }
2445 
2446 DISAS_INSN(move)
2447 {
2448     TCGv src;
2449     TCGv dest;
2450     int op;
2451     int opsize;
2452 
2453     switch (insn >> 12) {
2454     case 1: /* move.b */
2455         opsize = OS_BYTE;
2456         break;
2457     case 2: /* move.l */
2458         opsize = OS_LONG;
2459         break;
2460     case 3: /* move.w */
2461         opsize = OS_WORD;
2462         break;
2463     default:
2464         abort();
2465     }
2466     SRC_EA(env, src, opsize, 1, NULL);
2467     op = (insn >> 6) & 7;
2468     if (op == 1) {
2469         /* movea */
2470         /* The value will already have been sign extended.  */
2471         dest = AREG(insn, 9);
2472         tcg_gen_mov_i32(dest, src);
2473     } else {
2474         /* normal move */
2475         uint16_t dest_ea;
2476         dest_ea = ((insn >> 9) & 7) | (op << 3);
2477         DEST_EA(env, dest_ea, opsize, src, NULL);
2478         /* This will be correct because loads sign extend.  */
2479         gen_logic_cc(s, src, opsize);
2480     }
2481 }
2482 
/* negx <ea>: dest = 0 - (src + X), with full flag computation. */
DISAS_INSN(negx)
{
    TCGv z;
    TCGv src;
    TCGv addr;
    int opsize;

    opsize = insn_opsize(insn);
    SRC_EA(env, src, opsize, 1, &addr);

    gen_flush_flags(s); /* compute old Z */

    /*
     * Perform subtract with borrow.
     * (X, N) =  -(src + X);
     */

    z = tcg_constant_i32(0);
    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, z, QREG_CC_X, z);
    tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, z, z, QREG_CC_N, QREG_CC_X);
    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);

    /* Only the low bit of the 2-word subtraction is the borrow. */
    tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);

    /*
     * Compute signed-overflow for negation.  The normal formula for
     * subtraction is (res ^ src) & (src ^ dest), but with dest==0
     * this simplifies to res & src.
     */

    tcg_gen_and_i32(QREG_CC_V, QREG_CC_N, src);

    /* Copy the rest of the results into place.  */
    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
    tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);

    set_cc_op(s, CC_OP_FLAGS);

    /* result is in QREG_CC_N */

    DEST_EA(env, insn, opsize, QREG_CC_N, &addr);
}
2525 
2526 DISAS_INSN(lea)
2527 {
2528     TCGv reg;
2529     TCGv tmp;
2530 
2531     reg = AREG(insn, 9);
2532     tmp = gen_lea(env, s, insn, OS_LONG);
2533     if (IS_NULL_QREG(tmp)) {
2534         gen_addr_fault(s);
2535         return;
2536     }
2537     tcg_gen_mov_i32(reg, tmp);
2538 }
2539 
2540 DISAS_INSN(clr)
2541 {
2542     int opsize;
2543     TCGv zero;
2544 
2545     zero = tcg_constant_i32(0);
2546     opsize = insn_opsize(insn);
2547     DEST_EA(env, insn, opsize, zero, NULL);
2548     gen_logic_cc(s, zero, opsize);
2549 }
2550 
2551 DISAS_INSN(move_from_ccr)
2552 {
2553     TCGv ccr;
2554 
2555     ccr = gen_get_ccr(s);
2556     DEST_EA(env, insn, OS_WORD, ccr, NULL);
2557 }
2558 
/* neg <ea>: dest = 0 - src, with subtract-style flags. */
DISAS_INSN(neg)
{
    TCGv src1;
    TCGv dest;
    TCGv addr;
    int opsize;

    opsize = insn_opsize(insn);
    SRC_EA(env, src1, opsize, 1, &addr);
    dest = tcg_temp_new();
    tcg_gen_neg_i32(dest, src1);
    set_cc_op(s, CC_OP_SUBB + opsize);
    gen_update_cc_add(dest, src1, opsize);
    /* For negation, X (and C) are set whenever the result is non-zero. */
    tcg_gen_setcondi_i32(TCG_COND_NE, QREG_CC_X, dest, 0);
    DEST_EA(env, insn, opsize, dest, &addr);
}
2575 
/* move to ccr: shares the move-to-SR path, restricted to the CCR part. */
DISAS_INSN(move_to_ccr)
{
    gen_move_to_sr(env, s, insn, true);
}
2580 
2581 DISAS_INSN(not)
2582 {
2583     TCGv src1;
2584     TCGv dest;
2585     TCGv addr;
2586     int opsize;
2587 
2588     opsize = insn_opsize(insn);
2589     SRC_EA(env, src1, opsize, 1, &addr);
2590     dest = tcg_temp_new();
2591     tcg_gen_not_i32(dest, src1);
2592     DEST_EA(env, insn, opsize, dest, &addr);
2593     gen_logic_cc(s, dest, opsize);
2594 }
2595 
2596 DISAS_INSN(swap)
2597 {
2598     TCGv src1;
2599     TCGv src2;
2600     TCGv reg;
2601 
2602     src1 = tcg_temp_new();
2603     src2 = tcg_temp_new();
2604     reg = DREG(insn, 0);
2605     tcg_gen_shli_i32(src1, reg, 16);
2606     tcg_gen_shri_i32(src2, reg, 16);
2607     tcg_gen_or_i32(reg, src1, src2);
2608     gen_logic_cc(s, reg, OS_LONG);
2609 }
2610 
/*
 * bkpt: under user-mode emulation raise a debug exception for the
 * host debugger; otherwise treat it as an illegal instruction.
 */
DISAS_INSN(bkpt)
{
#if defined(CONFIG_USER_ONLY)
    gen_exception(s, s->base.pc_next, EXCP_DEBUG);
#else
    gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
#endif
}
2619 
2620 DISAS_INSN(pea)
2621 {
2622     TCGv tmp;
2623 
2624     tmp = gen_lea(env, s, insn, OS_LONG);
2625     if (IS_NULL_QREG(tmp)) {
2626         gen_addr_fault(s);
2627         return;
2628     }
2629     gen_push(s, tmp);
2630 }
2631 
2632 DISAS_INSN(ext)
2633 {
2634     int op;
2635     TCGv reg;
2636     TCGv tmp;
2637 
2638     reg = DREG(insn, 0);
2639     op = (insn >> 6) & 7;
2640     tmp = tcg_temp_new();
2641     if (op == 3)
2642         tcg_gen_ext16s_i32(tmp, reg);
2643     else
2644         tcg_gen_ext8s_i32(tmp, reg);
2645     if (op == 2)
2646         gen_partset_reg(OS_WORD, reg, tmp);
2647     else
2648         tcg_gen_mov_i32(reg, tmp);
2649     gen_logic_cc(s, tmp, OS_LONG);
2650 }
2651 
2652 DISAS_INSN(tst)
2653 {
2654     int opsize;
2655     TCGv tmp;
2656 
2657     opsize = insn_opsize(insn);
2658     SRC_EA(env, tmp, opsize, 1, NULL);
2659     gen_logic_cc(s, tmp, opsize);
2660 }
2661 
DISAS_INSN(pulse)
{
  /* No architectural side effects to model; implemented as a NOP.  */
}
2666 
DISAS_INSN(illegal)
{
    /* Raise an illegal-instruction exception at the current insn. */
    gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
}
2671 
/*
 * tas <ea>: test-and-set.  Flags are set from the byte operand, then
 * its top bit is set; the memory form uses an atomic fetch-or.
 */
DISAS_INSN(tas)
{
    int mode = extract32(insn, 3, 3);
    int reg0 = REG(insn, 0);

    if (mode == 0) {
        /* data register direct */
        TCGv dest = cpu_dregs[reg0];
        gen_logic_cc(s, dest, OS_BYTE);
        tcg_gen_ori_tl(dest, dest, 0x80);
    } else {
        TCGv src1, addr;

        addr = gen_lea_mode(env, s, mode, reg0, OS_BYTE);
        if (IS_NULL_QREG(addr)) {
            gen_addr_fault(s);
            return;
        }
        src1 = tcg_temp_new();
        /* Atomically set bit 7, returning the previous byte value. */
        tcg_gen_atomic_fetch_or_tl(src1, addr, tcg_constant_tl(0x80),
                                   IS_USER(s), MO_SB);
        gen_logic_cc(s, src1, OS_BYTE);

        /* Complete the address-register update for (An)+ / -(An). */
        switch (mode) {
        case 3: /* Indirect postincrement.  */
            tcg_gen_addi_i32(AREG(insn, 0), addr, 1);
            break;
        case 4: /* Indirect predecrememnt.  */
            tcg_gen_mov_i32(AREG(insn, 0), addr);
            break;
        }
    }
}
2705 
/*
 * muls.l/mulu.l: 32x32 multiply.  With ext bit 10 set this is the
 * 64-bit Dh:Dl form (requires QUAD_MULDIV); otherwise only the low
 * 32 bits are kept and overflow is reflected in V.
 */
DISAS_INSN(mull)
{
    uint16_t ext;
    TCGv src1;
    int sign;

    ext = read_im16(env, s);

    sign = ext & 0x800;

    if (ext & 0x400) {
        if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
            return;
        }

        SRC_EA(env, src1, OS_LONG, 0, NULL);

        if (sign) {
            tcg_gen_muls2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
        } else {
            tcg_gen_mulu2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
        }
        /* if Dl == Dh, 68040 returns low word */
        tcg_gen_mov_i32(DREG(ext, 0), QREG_CC_N);
        tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_Z);
        /* Z is set only when the full 64-bit result is zero. */
        tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N);

        tcg_gen_movi_i32(QREG_CC_V, 0);
        tcg_gen_movi_i32(QREG_CC_C, 0);

        set_cc_op(s, CC_OP_FLAGS);
        return;
    }
    SRC_EA(env, src1, OS_LONG, 0, NULL);
    if (m68k_feature(s->env, M68K_FEATURE_M68K)) {
        tcg_gen_movi_i32(QREG_CC_C, 0);
        if (sign) {
            tcg_gen_muls2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
            /* QREG_CC_V is -(QREG_CC_V != (QREG_CC_N >> 31)) */
            tcg_gen_sari_i32(QREG_CC_Z, QREG_CC_N, 31);
            tcg_gen_negsetcond_i32(TCG_COND_NE, QREG_CC_V,
                                   QREG_CC_V, QREG_CC_Z);
        } else {
            tcg_gen_mulu2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
            /* QREG_CC_V is -(QREG_CC_V != 0), use QREG_CC_C as 0 */
            tcg_gen_negsetcond_i32(TCG_COND_NE, QREG_CC_V,
                                   QREG_CC_V, QREG_CC_C);
        }
        tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_N);

        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);

        set_cc_op(s, CC_OP_FLAGS);
    } else {
        /*
         * The upper 32 bits of the product are discarded, so
         * muls.l and mulu.l are functionally equivalent.
         */
        tcg_gen_mul_i32(DREG(ext, 12), src1, DREG(ext, 12));
        gen_logic_cc(s, DREG(ext, 12), OS_LONG);
    }
}
2769 
/*
 * Common body for link/linkl: push An, point An at the saved value,
 * then add the (negative) offset to SP to allocate the frame.
 */
static void gen_link(DisasContext *s, uint16_t insn, int32_t offset)
{
    TCGv reg;
    TCGv tmp;

    reg = AREG(insn, 0);
    tmp = tcg_temp_new();
    tcg_gen_subi_i32(tmp, QREG_SP, 4);
    gen_store(s, OS_LONG, tmp, reg, IS_USER(s));
    /*
     * NOTE(review): when An is A7 the update is skipped; SP receives
     * its final value from the addi below either way — confirm.
     */
    if ((insn & 7) != 7) {
        tcg_gen_mov_i32(reg, tmp);
    }
    tcg_gen_addi_i32(QREG_SP, tmp, offset);
}
2784 
2785 DISAS_INSN(link)
2786 {
2787     int16_t offset;
2788 
2789     offset = read_im16(env, s);
2790     gen_link(s, insn, offset);
2791 }
2792 
2793 DISAS_INSN(linkl)
2794 {
2795     int32_t offset;
2796 
2797     offset = read_im32(env, s);
2798     gen_link(s, insn, offset);
2799 }
2800 
/* unlk An: An = mem[An]; SP = old An + 4. */
DISAS_INSN(unlk)
{
    TCGv src;
    TCGv reg;
    TCGv tmp;

    src = tcg_temp_new();
    reg = AREG(insn, 0);
    /*
     * Keep a copy of the original An: it is both the load address and
     * the base for the new SP, and reg itself is overwritten below.
     */
    tcg_gen_mov_i32(src, reg);
    tmp = gen_load(s, OS_LONG, src, 0, IS_USER(s));
    tcg_gen_mov_i32(reg, tmp);
    tcg_gen_addi_i32(QREG_SP, src, 4);
}
2814 
#if !defined(CONFIG_USER_ONLY)
/* reset: privileged; the actual effect is delegated to the helper. */
DISAS_INSN(reset)
{
    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }

    gen_helper_reset(tcg_env);
}
#endif
2826 
DISAS_INSN(nop)
{
    /* Nothing to generate. */
}
2830 
2831 DISAS_INSN(rtd)
2832 {
2833     TCGv tmp;
2834     int16_t offset = read_im16(env, s);
2835 
2836     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2837     tcg_gen_addi_i32(QREG_SP, QREG_SP, offset + 4);
2838     gen_jmp(s, tmp);
2839 }
2840 
/* rtr: pop the CCR (one word) and then the return address (long). */
DISAS_INSN(rtr)
{
    TCGv tmp;
    TCGv ccr;
    TCGv sp;

    sp = tcg_temp_new();
    ccr = gen_load(s, OS_WORD, QREG_SP, 0, IS_USER(s));
    tcg_gen_addi_i32(sp, QREG_SP, 2);
    tmp = gen_load(s, OS_LONG, sp, 0, IS_USER(s));
    tcg_gen_addi_i32(QREG_SP, sp, 4);

    /* Only the CCR portion of the popped word is restored. */
    gen_set_sr(s, ccr, true);

    gen_jmp(s, tmp);
}
2857 
2858 DISAS_INSN(rts)
2859 {
2860     TCGv tmp;
2861 
2862     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2863     tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
2864     gen_jmp(s, tmp);
2865 }
2866 
2867 DISAS_INSN(jump)
2868 {
2869     TCGv tmp;
2870 
2871     /*
2872      * Load the target address first to ensure correct exception
2873      * behavior.
2874      */
2875     tmp = gen_lea(env, s, insn, OS_LONG);
2876     if (IS_NULL_QREG(tmp)) {
2877         gen_addr_fault(s);
2878         return;
2879     }
2880     if ((insn & 0x40) == 0) {
2881         /* jsr */
2882         gen_push(s, tcg_constant_i32(s->pc));
2883     }
2884     gen_jmp(s, tmp);
2885 }
2886 
/*
 * addq/subq: add or subtract a small immediate (1..8, with 0 in the
 * encoding meaning 8) to/from the EA operand.
 */
DISAS_INSN(addsubq)
{
    TCGv src;
    TCGv dest;
    TCGv val;
    int imm;
    TCGv addr;
    int opsize;

    if ((insn & 070) == 010) {
        /* Operation on address register is always long.  */
        opsize = OS_LONG;
    } else {
        opsize = insn_opsize(insn);
    }
    SRC_EA(env, src, opsize, 1, &addr);
    imm = (insn >> 9) & 7;
    if (imm == 0) {
        imm = 8;
    }
    val = tcg_constant_i32(imm);
    dest = tcg_temp_new();
    tcg_gen_mov_i32(dest, src);
    if ((insn & 0x38) == 0x08) {
        /*
         * Don't update condition codes if the destination is an
         * address register.
         */
        if (insn & 0x0100) {
            tcg_gen_sub_i32(dest, dest, val);
        } else {
            tcg_gen_add_i32(dest, dest, val);
        }
    } else {
        if (insn & 0x0100) {
            /* subq: X is the borrow, computed before the subtraction. */
            tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
            tcg_gen_sub_i32(dest, dest, val);
            set_cc_op(s, CC_OP_SUBB + opsize);
        } else {
            /* addq: X is the carry, detected from result wraparound. */
            tcg_gen_add_i32(dest, dest, val);
            tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
            set_cc_op(s, CC_OP_ADDB + opsize);
        }
        gen_update_cc_add(dest, val, opsize);
    }
    DEST_EA(env, insn, opsize, dest, &addr);
}
2934 
/*
 * bra/bsr/Bcc: 8-bit displacement in the opcode; 0 selects a 16-bit
 * extension word and -1 (0xff) a 32-bit one.
 */
DISAS_INSN(branch)
{
    int32_t offset;
    uint32_t base;
    int op;

    /* Displacements are relative to the end of the opcode word. */
    base = s->pc;
    op = (insn >> 8) & 0xf;
    offset = (int8_t)insn;
    if (offset == 0) {
        offset = (int16_t)read_im16(env, s);
    } else if (offset == -1) {
        offset = read_im32(env, s);
    }
    if (op == 1) {
        /* bsr */
        gen_push(s, tcg_constant_i32(s->pc));
    }
    if (op > 1) {
        /* Bcc */
        TCGLabel *l1 = gen_new_label();
        /* Jump past the taken path when the condition is false. */
        gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
        gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
        gen_set_label(l1);
        gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
    } else {
        /* Unconditional branch.  */
        update_cc_op(s);
        gen_jmp_tb(s, 0, base + offset, s->base.pc_next);
    }
}
2966 
2967 DISAS_INSN(moveq)
2968 {
2969     tcg_gen_movi_i32(DREG(insn, 9), (int8_t)insn);
2970     gen_logic_cc(s, DREG(insn, 9), OS_LONG);
2971 }
2972 
2973 DISAS_INSN(mvzs)
2974 {
2975     int opsize;
2976     TCGv src;
2977     TCGv reg;
2978 
2979     if (insn & 0x40)
2980         opsize = OS_WORD;
2981     else
2982         opsize = OS_BYTE;
2983     SRC_EA(env, src, opsize, (insn & 0x80) == 0, NULL);
2984     reg = DREG(insn, 9);
2985     tcg_gen_mov_i32(reg, src);
2986     gen_logic_cc(s, src, opsize);
2987 }
2988 
2989 DISAS_INSN(or)
2990 {
2991     TCGv reg;
2992     TCGv dest;
2993     TCGv src;
2994     TCGv addr;
2995     int opsize;
2996 
2997     opsize = insn_opsize(insn);
2998     reg = gen_extend(s, DREG(insn, 9), opsize, 0);
2999     dest = tcg_temp_new();
3000     if (insn & 0x100) {
3001         SRC_EA(env, src, opsize, 0, &addr);
3002         tcg_gen_or_i32(dest, src, reg);
3003         DEST_EA(env, insn, opsize, dest, &addr);
3004     } else {
3005         SRC_EA(env, src, opsize, 0, NULL);
3006         tcg_gen_or_i32(dest, src, reg);
3007         gen_partset_reg(opsize, DREG(insn, 9), dest);
3008     }
3009     gen_logic_cc(s, dest, opsize);
3010 }
3011 
3012 DISAS_INSN(suba)
3013 {
3014     TCGv src;
3015     TCGv reg;
3016 
3017     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3018     reg = AREG(insn, 9);
3019     tcg_gen_sub_i32(reg, reg, src);
3020 }
3021 
/*
 * Common subtract-with-extend: QREG_CC_N = dest - (src + X), with
 * full flag computation.  The caller writes QREG_CC_N back.
 */
static inline void gen_subx(DisasContext *s, TCGv src, TCGv dest, int opsize)
{
    TCGv tmp, zero;

    gen_flush_flags(s); /* compute old Z */

    /*
     * Perform subtract with borrow.
     * (X, N) = dest - (src + X);
     */

    zero = tcg_constant_i32(0);
    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, zero, QREG_CC_X, zero);
    tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, dest, zero, QREG_CC_N, QREG_CC_X);
    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
    /* Only the low bit of the 2-word subtraction is the borrow. */
    tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);

    /* Compute signed-overflow for subtract.  */

    tmp = tcg_temp_new();
    tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, dest);
    tcg_gen_xor_i32(tmp, dest, src);
    tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, tmp);

    /* Copy the rest of the results into place.  */
    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
    tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);

    set_cc_op(s, CC_OP_FLAGS);

    /* result is in QREG_CC_N */
}
3054 
3055 DISAS_INSN(subx_reg)
3056 {
3057     TCGv dest;
3058     TCGv src;
3059     int opsize;
3060 
3061     opsize = insn_opsize(insn);
3062 
3063     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3064     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3065 
3066     gen_subx(s, src, dest, opsize);
3067 
3068     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3069 }
3070 
/* subx -(Ay),-(Ax): memory-to-memory subtract-with-extend. */
DISAS_INSN(subx_mem)
{
    TCGv src;
    TCGv addr_src;
    TCGv dest;
    TCGv addr_dest;
    int opsize;

    opsize = insn_opsize(insn);

    /* Predecrement Ay, then load the subtrahend. */
    addr_src = AREG(insn, 0);
    tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
    src = gen_load(s, opsize, addr_src, 1, IS_USER(s));

    /* Predecrement Ax, then load the minuend. */
    addr_dest = AREG(insn, 9);
    tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
    dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));

    gen_subx(s, src, dest, opsize);

    /* gen_subx left the result in QREG_CC_N. */
    gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
}
3093 
3094 DISAS_INSN(mov3q)
3095 {
3096     TCGv src;
3097     int val;
3098 
3099     val = (insn >> 9) & 7;
3100     if (val == 0) {
3101         val = -1;
3102     }
3103     src = tcg_constant_i32(val);
3104     gen_logic_cc(s, src, OS_LONG);
3105     DEST_EA(env, insn, OS_LONG, src, NULL);
3106 }
3107 
3108 DISAS_INSN(cmp)
3109 {
3110     TCGv src;
3111     TCGv reg;
3112     int opsize;
3113 
3114     opsize = insn_opsize(insn);
3115     SRC_EA(env, src, opsize, 1, NULL);
3116     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
3117     gen_update_cc_cmp(s, reg, src, opsize);
3118 }
3119 
3120 DISAS_INSN(cmpa)
3121 {
3122     int opsize;
3123     TCGv src;
3124     TCGv reg;
3125 
3126     if (insn & 0x100) {
3127         opsize = OS_LONG;
3128     } else {
3129         opsize = OS_WORD;
3130     }
3131     SRC_EA(env, src, opsize, 1, NULL);
3132     reg = AREG(insn, 9);
3133     gen_update_cc_cmp(s, reg, src, OS_LONG);
3134 }
3135 
/*
 * CMPM.<size> (Ay)+,(Ax)+: compare memory to memory with
 * post-increment addressing on both operands; only the flags change.
 */
DISAS_INSN(cmpm)
{
    int opsize = insn_opsize(insn);
    TCGv src, dst;

    /* Post-increment load (mode 3) from Ay.  */
    src = gen_ea_mode(env, s, 3, REG(insn, 0), opsize,
                      NULL_QREG, NULL, EA_LOADS, IS_USER(s));
    /* Post-increment load (mode 3) from Ax.  */
    dst = gen_ea_mode(env, s, 3, REG(insn, 9), opsize,
                      NULL_QREG, NULL, EA_LOADS, IS_USER(s));

    gen_update_cc_cmp(s, dst, src, opsize);
}
3150 
3151 DISAS_INSN(eor)
3152 {
3153     TCGv src;
3154     TCGv dest;
3155     TCGv addr;
3156     int opsize;
3157 
3158     opsize = insn_opsize(insn);
3159 
3160     SRC_EA(env, src, opsize, 0, &addr);
3161     dest = tcg_temp_new();
3162     tcg_gen_xor_i32(dest, src, DREG(insn, 9));
3163     gen_logic_cc(s, dest, opsize);
3164     DEST_EA(env, insn, opsize, dest, &addr);
3165 }
3166 
3167 static void do_exg(TCGv reg1, TCGv reg2)
3168 {
3169     TCGv temp = tcg_temp_new();
3170     tcg_gen_mov_i32(temp, reg1);
3171     tcg_gen_mov_i32(reg1, reg2);
3172     tcg_gen_mov_i32(reg2, temp);
3173 }
3174 
/* EXG Dx,Dy: exchange two data registers. */
DISAS_INSN(exg_dd)
{
    /* exchange Dx and Dy */
    do_exg(DREG(insn, 9), DREG(insn, 0));
}
3180 
/* EXG Ax,Ay: exchange two address registers. */
DISAS_INSN(exg_aa)
{
    /* exchange Ax and Ay */
    do_exg(AREG(insn, 9), AREG(insn, 0));
}
3186 
/* EXG Dx,Ay: exchange a data register with an address register. */
DISAS_INSN(exg_da)
{
    /* exchange Dx and Ay */
    do_exg(DREG(insn, 9), AREG(insn, 0));
}
3192 
/*
 * AND.<size> in both directions: bit 8 of the opcode selects whether
 * the result goes to the EA (Dn,<ea>) or to the data register
 * (<ea>,Dn).  N and Z are set from the result; V and C clear.
 */
DISAS_INSN(and)
{
    TCGv src;
    TCGv reg;
    TCGv dest;
    TCGv addr;
    int opsize;

    dest = tcg_temp_new();

    opsize = insn_opsize(insn);
    reg = DREG(insn, 9);
    if (insn & 0x100) {
        /* Dn AND <ea> -> <ea>: read-modify-write through the EA. */
        SRC_EA(env, src, opsize, 0, &addr);
        tcg_gen_and_i32(dest, src, reg);
        DEST_EA(env, insn, opsize, dest, &addr);
    } else {
        /* <ea> AND Dn -> Dn: only the low opsize bits of Dn change. */
        SRC_EA(env, src, opsize, 0, NULL);
        tcg_gen_and_i32(dest, src, reg);
        gen_partset_reg(opsize, reg, dest);
    }
    gen_logic_cc(s, dest, opsize);
}
3216 
3217 DISAS_INSN(adda)
3218 {
3219     TCGv src;
3220     TCGv reg;
3221 
3222     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3223     reg = AREG(insn, 9);
3224     tcg_gen_add_i32(reg, reg, src);
3225 }
3226 
/*
 * Common ADDX flag/result generation: compute dest + src + X on
 * sign-extended operands, leaving the result in QREG_CC_N and setting
 * X/N/Z/V/C.  Z is "sticky": it is only cleared, never set, across a
 * chain of ADDX instructions, so the old Z must be flushed first.
 */
static inline void gen_addx(DisasContext *s, TCGv src, TCGv dest, int opsize)
{
    TCGv tmp, zero;

    gen_flush_flags(s); /* compute old Z */

    /*
     * Perform addition with carry.
     * (X, N) = src + dest + X;
     */

    /* Two add2 steps thread the incoming X through to the outgoing X. */
    zero = tcg_constant_i32(0);
    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_X, zero, dest, zero);
    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_N, QREG_CC_X, src, zero);
    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);

    /* Compute signed-overflow for addition.  */

    /* V = (result ^ src) & ~(dest ^ src): set when both inputs share a
       sign and the result's sign differs. */
    tmp = tcg_temp_new();
    tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
    tcg_gen_xor_i32(tmp, dest, src);
    tcg_gen_andc_i32(QREG_CC_V, QREG_CC_V, tmp);

    /* Copy the rest of the results into place.  */
    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
    tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);

    set_cc_op(s, CC_OP_FLAGS);

    /* result is in QREG_CC_N */
}
3258 
/*
 * ADDX.<size> Dy,Dx (register form): Dx = Dx + Dy + X.  The flag
 * computation and result live in gen_addx()/QREG_CC_N; only the low
 * opsize bits of Dx are written back.
 */
DISAS_INSN(addx_reg)
{
    TCGv dest;
    TCGv src;
    int opsize;

    opsize = insn_opsize(insn);

    dest = gen_extend(s, DREG(insn, 9), opsize, 1);
    src = gen_extend(s, DREG(insn, 0), opsize, 1);

    gen_addx(s, src, dest, opsize);

    gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
}
3274 
/*
 * ADDX.<size> -(Ay),-(Ax) (memory form): predecrement both address
 * registers, load both operands, add with carry via gen_addx(), and
 * store the result (QREG_CC_N) back to the destination address.
 */
DISAS_INSN(addx_mem)
{
    TCGv src;
    TCGv addr_src;
    TCGv dest;
    TCGv addr_dest;
    int opsize;

    opsize = insn_opsize(insn);

    /* Predecrement Ay, then load the (sign-extended) source. */
    addr_src = AREG(insn, 0);
    tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
    src = gen_load(s, opsize, addr_src, 1, IS_USER(s));

    /* Predecrement Ax, then load the (sign-extended) destination. */
    addr_dest = AREG(insn, 9);
    tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
    dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));

    gen_addx(s, src, dest, opsize);

    gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
}
3297 
/*
 * Immediate-count shift (ASL/ASR/LSL/LSR #<n>,Dn): shift a data
 * register by a constant 1..8 (a zero count field encodes 8), setting
 * C, X, N, Z and — for arithmetic left shifts on M68000 — V.
 */
static inline void shift_im(DisasContext *s, uint16_t insn, int opsize)
{
    int count = (insn >> 9) & 7;
    int logical = insn & 8;
    int left = insn & 0x100;
    int bits = opsize_bytes(opsize) * 8;
    TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);

    /* A count field of 0 encodes a shift by 8.  */
    if (count == 0) {
        count = 8;
    }

    tcg_gen_movi_i32(QREG_CC_V, 0);
    if (left) {
        /* C holds the last bit shifted out of the top. */
        tcg_gen_shri_i32(QREG_CC_C, reg, bits - count);
        tcg_gen_shli_i32(QREG_CC_N, reg, count);

        /*
         * Note that ColdFire always clears V (done above),
         * while M68000 sets if the most significant bit is changed at
         * any time during the shift operation.
         */
        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
            /* if shift count >= bits, V is (reg != 0) */
            if (count >= bits) {
                tcg_gen_negsetcond_i32(TCG_COND_NE, QREG_CC_V, reg, QREG_CC_V);
            } else {
                /* V set if the top (count + 1) bits are not all equal. */
                TCGv t0 = tcg_temp_new();
                tcg_gen_sari_i32(QREG_CC_V, reg, bits - 1);
                tcg_gen_sari_i32(t0, reg, bits - count - 1);
                tcg_gen_negsetcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, t0);
            }
        }
    } else {
        /* C holds the last bit shifted out of the bottom. */
        tcg_gen_shri_i32(QREG_CC_C, reg, count - 1);
        if (logical) {
            tcg_gen_shri_i32(QREG_CC_N, reg, count);
        } else {
            tcg_gen_sari_i32(QREG_CC_N, reg, count);
        }
    }

    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
    tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);

    gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
    set_cc_op(s, CC_OP_FLAGS);
}
3348 
/*
 * Register-count shift (ASL/ASR/LSL/LSR Dy,Dx): shift a data register
 * by a dynamic count taken from Dy modulo 64.  The operand is widened
 * to 64 bits so the carry (the last bit shifted out) can be read
 * directly; X is only updated when the count is non-zero.
 */
static inline void shift_reg(DisasContext *s, uint16_t insn, int opsize)
{
    int logical = insn & 8;
    int left = insn & 0x100;
    int bits = opsize_bytes(opsize) * 8;
    TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
    TCGv s32;
    TCGv_i64 t64, s64;

    t64 = tcg_temp_new_i64();
    s64 = tcg_temp_new_i64();
    s32 = tcg_temp_new();

    /*
     * Note that m68k truncates the shift count modulo 64, not 32.
     * In addition, a 64-bit shift makes it easy to find "the last
     * bit shifted out", for the carry flag.
     */
    tcg_gen_andi_i32(s32, DREG(insn, 9), 63);
    tcg_gen_extu_i32_i64(s64, s32);
    tcg_gen_extu_i32_i64(t64, reg);

    /* Optimistically set V=0.  Also used as a zero source below.  */
    tcg_gen_movi_i32(QREG_CC_V, 0);
    if (left) {
        tcg_gen_shl_i64(t64, t64, s64);

        if (opsize == OS_LONG) {
            /* The high half of t64 holds the bits shifted out; its
               lsb is the carry. */
            tcg_gen_extr_i64_i32(QREG_CC_N, QREG_CC_C, t64);
            /* Note that C=0 if shift count is 0, and we get that for free.  */
        } else {
            /* For byte/word operands the carry sits just above the
               operand within the low 32 bits; force C=0 for count 0. */
            TCGv zero = tcg_constant_i32(0);
            tcg_gen_extrl_i64_i32(QREG_CC_N, t64);
            tcg_gen_shri_i32(QREG_CC_C, QREG_CC_N, bits);
            tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
                                s32, zero, zero, QREG_CC_C);
        }
        tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);

        /* X = C, but only if the shift count was non-zero.  */
        tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
                            QREG_CC_C, QREG_CC_X);

        /*
         * M68000 sets V if the most significant bit is changed at
         * any time during the shift operation.  Do this via creating
         * an extension of the sign bit, comparing, and discarding
         * the bits below the sign bit.  I.e.
         *     int64_t s = (intN_t)reg;
         *     int64_t t = (int64_t)(intN_t)reg << count;
         *     V = ((s ^ t) & (-1 << (bits - 1))) != 0
         */
        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
            TCGv_i64 tt = tcg_constant_i64(32);
            /* if shift is greater than 32, use 32 */
            tcg_gen_movcond_i64(TCG_COND_GT, s64, s64, tt, tt, s64);
            /* Sign extend the input to 64 bits; re-do the shift.  */
            tcg_gen_ext_i32_i64(t64, reg);
            tcg_gen_shl_i64(s64, t64, s64);
            /* Clear all bits that are unchanged.  */
            tcg_gen_xor_i64(t64, t64, s64);
            /* Ignore the bits below the sign bit.  */
            tcg_gen_andi_i64(t64, t64, -1ULL << (bits - 1));
            /* If any bits remain set, we have overflow.  */
            tcg_gen_negsetcond_i64(TCG_COND_NE, t64, t64, tcg_constant_i64(0));
            tcg_gen_extrl_i64_i32(QREG_CC_V, t64);
        }
    } else {
        /* Right shift: place the operand in the high half so the last
           bit shifted out lands in bit 31 of the low half. */
        tcg_gen_shli_i64(t64, t64, 32);
        if (logical) {
            tcg_gen_shr_i64(t64, t64, s64);
        } else {
            tcg_gen_sar_i64(t64, t64, s64);
        }
        tcg_gen_extr_i64_i32(QREG_CC_C, QREG_CC_N, t64);

        /* Note that C=0 if shift count is 0, and we get that for free.  */
        tcg_gen_shri_i32(QREG_CC_C, QREG_CC_C, 31);

        /* X = C, but only if the shift count was non-zero.  */
        tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
                            QREG_CC_C, QREG_CC_X);
    }
    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);

    /* Write back the result.  */
    gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
    set_cc_op(s, CC_OP_FLAGS);
}
3439 
/* Immediate-count shift, byte operand. */
DISAS_INSN(shift8_im)
{
    shift_im(s, insn, OS_BYTE);
}
3444 
/* Immediate-count shift, word operand. */
DISAS_INSN(shift16_im)
{
    shift_im(s, insn, OS_WORD);
}
3449 
/* Immediate-count shift, long operand. */
DISAS_INSN(shift_im)
{
    shift_im(s, insn, OS_LONG);
}
3454 
/* Register-count shift, byte operand. */
DISAS_INSN(shift8_reg)
{
    shift_reg(s, insn, OS_BYTE);
}
3459 
/* Register-count shift, word operand. */
DISAS_INSN(shift16_reg)
{
    shift_reg(s, insn, OS_WORD);
}
3464 
/* Register-count shift, long operand. */
DISAS_INSN(shift_reg)
{
    shift_reg(s, insn, OS_LONG);
}
3469 
/*
 * Memory shift (ASd/LSd <ea>): shift a word in memory by exactly one
 * bit, read-modify-write through the EA.
 */
DISAS_INSN(shift_mem)
{
    int logical = insn & 8;
    int left = insn & 0x100;
    TCGv src;
    TCGv addr;

    SRC_EA(env, src, OS_WORD, !logical, &addr);
    tcg_gen_movi_i32(QREG_CC_V, 0);
    if (left) {
        /* C is the bit shifted out of the top (bit 15). */
        tcg_gen_shri_i32(QREG_CC_C, src, 15);
        tcg_gen_shli_i32(QREG_CC_N, src, 1);

        /*
         * Note that ColdFire always clears V,
         * while M68000 sets if the most significant bit is changed at
         * any time during the shift operation
         */
        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
            /* For a 1-bit shift, V is simply "sign bit changed". */
            src = gen_extend(s, src, OS_WORD, 1);
            tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
        }
    } else {
        /* C is the bit shifted out of the bottom (bit 0 of src). */
        tcg_gen_mov_i32(QREG_CC_C, src);
        if (logical) {
            tcg_gen_shri_i32(QREG_CC_N, src, 1);
        } else {
            tcg_gen_sari_i32(QREG_CC_N, src, 1);
        }
    }

    gen_ext(QREG_CC_N, QREG_CC_N, OS_WORD, 1);
    tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);

    DEST_EA(env, insn, OS_WORD, QREG_CC_N, &addr);
    set_cc_op(s, CC_OP_FLAGS);
}
3509 
/*
 * Plain rotate (ROL/ROR) of a byte, word, or long value in place,
 * setting N/Z from the result and C from the bit rotated into the
 * other end.  Sub-long operands are first replicated across 32 bits
 * so a single 32-bit rotate produces the right wrap-around, then
 * sign-extended again afterwards.  X is not affected; V is cleared.
 */
static void rotate(TCGv reg, TCGv shift, int left, int size)
{
    switch (size) {
    case 8:
        /* Replicate the 8-bit input so that a 32-bit rotate works.  */
        tcg_gen_ext8u_i32(reg, reg);
        tcg_gen_muli_i32(reg, reg, 0x01010101);
        goto do_long;
    case 16:
        /* Replicate the 16-bit input so that a 32-bit rotate works.  */
        tcg_gen_deposit_i32(reg, reg, reg, 16, 16);
        goto do_long;
    do_long:
    default:
        if (left) {
            tcg_gen_rotl_i32(reg, reg, shift);
        } else {
            tcg_gen_rotr_i32(reg, reg, shift);
        }
    }

    /* compute flags */

    switch (size) {
    case 8:
        tcg_gen_ext8s_i32(reg, reg);
        break;
    case 16:
        tcg_gen_ext16s_i32(reg, reg);
        break;
    default:
        break;
    }

    /* QREG_CC_X is not affected */

    tcg_gen_mov_i32(QREG_CC_N, reg);
    tcg_gen_mov_i32(QREG_CC_Z, reg);

    /* C is the bit that wrapped around: bit 0 after a left rotate,
       the sign bit after a right rotate. */
    if (left) {
        tcg_gen_andi_i32(QREG_CC_C, reg, 1);
    } else {
        tcg_gen_shri_i32(QREG_CC_C, reg, 31);
    }

    tcg_gen_movi_i32(QREG_CC_V, 0); /* always cleared */
}
3557 
3558 static void rotate_x_flags(TCGv reg, TCGv X, int size)
3559 {
3560     switch (size) {
3561     case 8:
3562         tcg_gen_ext8s_i32(reg, reg);
3563         break;
3564     case 16:
3565         tcg_gen_ext16s_i32(reg, reg);
3566         break;
3567     default:
3568         break;
3569     }
3570     tcg_gen_mov_i32(QREG_CC_N, reg);
3571     tcg_gen_mov_i32(QREG_CC_Z, reg);
3572     tcg_gen_mov_i32(QREG_CC_X, X);
3573     tcg_gen_mov_i32(QREG_CC_C, X);
3574     tcg_gen_movi_i32(QREG_CC_V, 0);
3575 }
3576 
/* Result of rotate_x() is valid if 0 <= shift <= size */
/*
 * Rotate a byte/word value through the X bit:
 *     reg = (reg << shl) | (reg >> shr) | (X << shx)
 * with the three shift amounts arranged so the value rotates through
 * a (size + 1)-bit window that includes X.  Returns the new X value
 * (the bit that ends up just above the operand).
 */
static TCGv rotate_x(TCGv reg, TCGv shift, int left, int size)
{
    TCGv X, shl, shr, shx, sz, zero;

    sz = tcg_constant_i32(size);

    shr = tcg_temp_new();
    shl = tcg_temp_new();
    shx = tcg_temp_new();
    if (left) {
        tcg_gen_mov_i32(shl, shift);      /* shl = shift */
        tcg_gen_movi_i32(shr, size + 1);
        tcg_gen_sub_i32(shr, shr, shift); /* shr = size + 1 - shift */
        tcg_gen_subi_i32(shx, shift, 1);  /* shx = shift - 1 */
        /* shx = shx < 0 ? size : shx; */
        zero = tcg_constant_i32(0);
        tcg_gen_movcond_i32(TCG_COND_LT, shx, shx, zero, sz, shx);
    } else {
        tcg_gen_mov_i32(shr, shift);      /* shr = shift */
        tcg_gen_movi_i32(shl, size + 1);
        tcg_gen_sub_i32(shl, shl, shift); /* shl = size + 1 - shift */
        tcg_gen_sub_i32(shx, sz, shift); /* shx = size - shift */
    }

    /* reg = (reg << shl) | (reg >> shr) | (x << shx); */

    tcg_gen_shl_i32(shl, reg, shl);
    tcg_gen_shr_i32(shr, reg, shr);
    tcg_gen_or_i32(reg, shl, shr);
    tcg_gen_shl_i32(shx, QREG_CC_X, shx);
    tcg_gen_or_i32(reg, reg, shx);

    /* X = (reg >> size) & 1 */

    X = tcg_temp_new();
    tcg_gen_extract_i32(X, reg, size, 1);

    return X;
}
3617 
/* Result of rotate32_x() is valid if 0 <= shift < 33 */
/*
 * Rotate a 32-bit value through the X bit by building a 64-bit value
 * that contains both the operand and X, rotating that, and extracting
 * the new register value and new X.  If the shift count is zero,
 * neither the register nor X is modified.  Returns the new X value.
 */
static TCGv rotate32_x(TCGv reg, TCGv shift, int left)
{
    TCGv_i64 t0, shift64;
    TCGv X, lo, hi, zero;

    shift64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(shift64, shift);

    t0 = tcg_temp_new_i64();

    X = tcg_temp_new();
    lo = tcg_temp_new();
    hi = tcg_temp_new();

    if (left) {
        /* create [reg:X:..] */

        tcg_gen_shli_i32(lo, QREG_CC_X, 31);
        tcg_gen_concat_i32_i64(t0, lo, reg);

        /* rotate */

        tcg_gen_rotl_i64(t0, t0, shift64);

        /* result is [reg:..:reg:X] */

        tcg_gen_extr_i64_i32(lo, hi, t0);
        tcg_gen_andi_i32(X, lo, 1);

        tcg_gen_shri_i32(lo, lo, 1);
    } else {
        /* create [..:X:reg] */

        tcg_gen_concat_i32_i64(t0, reg, QREG_CC_X);

        tcg_gen_rotr_i64(t0, t0, shift64);

        /* result is value: [X:reg:..:reg] */

        tcg_gen_extr_i64_i32(lo, hi, t0);

        /* extract X */

        tcg_gen_shri_i32(X, hi, 31);

        /* extract result */

        tcg_gen_shli_i32(hi, hi, 1);
    }
    /* Combine the two halves into the final register value. */
    tcg_gen_or_i32(lo, lo, hi);

    /* if shift == 0, register and X are not affected */

    zero = tcg_constant_i32(0);
    tcg_gen_movcond_i32(TCG_COND_EQ, X, shift, zero, QREG_CC_X, X);
    tcg_gen_movcond_i32(TCG_COND_EQ, reg, shift, zero, reg, lo);

    return X;
}
3678 
/*
 * Immediate-count rotate, long operand (ROL/ROR/ROXL/ROXR #<n>,Dn).
 * Bit 3 of the opcode distinguishes plain rotate from rotate-through-X;
 * a count field of 0 encodes 8.
 */
DISAS_INSN(rotate_im)
{
    TCGv shift;
    int tmp;
    int left = (insn & 0x100);

    tmp = (insn >> 9) & 7;
    if (tmp == 0) {
        tmp = 8;
    }

    shift = tcg_constant_i32(tmp);
    if (insn & 8) {
        rotate(DREG(insn, 0), shift, left, 32);
    } else {
        TCGv X = rotate32_x(DREG(insn, 0), shift, left);
        rotate_x_flags(DREG(insn, 0), X, 32);
    }

    set_cc_op(s, CC_OP_FLAGS);
}
3700 
/*
 * Immediate-count rotate, byte operand.  Same encoding as rotate_im;
 * only the low byte of Dn is modified.
 */
DISAS_INSN(rotate8_im)
{
    int left = (insn & 0x100);
    TCGv reg;
    TCGv shift;
    int tmp;

    reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);

    tmp = (insn >> 9) & 7;
    if (tmp == 0) {
        tmp = 8;
    }

    shift = tcg_constant_i32(tmp);
    if (insn & 8) {
        rotate(reg, shift, left, 8);
    } else {
        TCGv X = rotate_x(reg, shift, left, 8);
        rotate_x_flags(reg, X, 8);
    }
    gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
    set_cc_op(s, CC_OP_FLAGS);
}
3725 
/*
 * Immediate-count rotate, word operand.  Same encoding as rotate_im;
 * only the low word of Dn is modified.
 */
DISAS_INSN(rotate16_im)
{
    int left = (insn & 0x100);
    TCGv reg;
    TCGv shift;
    int tmp;

    reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
    tmp = (insn >> 9) & 7;
    if (tmp == 0) {
        tmp = 8;
    }

    shift = tcg_constant_i32(tmp);
    if (insn & 8) {
        rotate(reg, shift, left, 16);
    } else {
        TCGv X = rotate_x(reg, shift, left, 16);
        rotate_x_flags(reg, X, 16);
    }
    gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
    set_cc_op(s, CC_OP_FLAGS);
}
3749 
/*
 * Register-count rotate, long operand (ROL/ROR/ROXL/ROXR Dy,Dx).
 * The count is Dy modulo 64; a plain rotate then reduces it mod 32,
 * while rotate-through-X reduces it mod 33 (the 32-bit value plus the
 * X bit form a 33-bit ring).
 */
DISAS_INSN(rotate_reg)
{
    TCGv reg;
    TCGv src;
    TCGv t0, t1;
    int left = (insn & 0x100);

    reg = DREG(insn, 0);
    src = DREG(insn, 9);
    /* shift in [0..63] */
    t0 = tcg_temp_new();
    tcg_gen_andi_i32(t0, src, 63);
    t1 = tcg_temp_new_i32();
    if (insn & 8) {
        tcg_gen_andi_i32(t1, src, 31);
        rotate(reg, t1, left, 32);
        /* if shift == 0, clear C */
        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
                            t0, QREG_CC_V /* 0 */,
                            QREG_CC_V /* 0 */, QREG_CC_C);
    } else {
        TCGv X;
        /* modulo 33 */
        tcg_gen_movi_i32(t1, 33);
        tcg_gen_remu_i32(t1, t0, t1);
        X = rotate32_x(DREG(insn, 0), t1, left);
        rotate_x_flags(DREG(insn, 0), X, 32);
    }
    set_cc_op(s, CC_OP_FLAGS);
}
3780 
/*
 * Register-count rotate, byte operand.  Plain rotates reduce the
 * count mod 8; rotate-through-X reduces it mod 9 (8 data bits + X).
 */
DISAS_INSN(rotate8_reg)
{
    TCGv reg;
    TCGv src;
    TCGv t0, t1;
    int left = (insn & 0x100);

    reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
    src = DREG(insn, 9);
    /* shift in [0..63] */
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, src, 63);
    t1 = tcg_temp_new_i32();
    if (insn & 8) {
        tcg_gen_andi_i32(t1, src, 7);
        rotate(reg, t1, left, 8);
        /* if shift == 0, clear C */
        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
                            t0, QREG_CC_V /* 0 */,
                            QREG_CC_V /* 0 */, QREG_CC_C);
    } else {
        TCGv X;
        /* modulo 9 */
        tcg_gen_movi_i32(t1, 9);
        tcg_gen_remu_i32(t1, t0, t1);
        X = rotate_x(reg, t1, left, 8);
        rotate_x_flags(reg, X, 8);
    }
    gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
    set_cc_op(s, CC_OP_FLAGS);
}
3812 
/*
 * Register-count rotate, word operand.  Plain rotates reduce the
 * count mod 16; rotate-through-X reduces it mod 17 (16 data bits + X).
 */
DISAS_INSN(rotate16_reg)
{
    TCGv reg;
    TCGv src;
    TCGv t0, t1;
    int left = (insn & 0x100);

    reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
    src = DREG(insn, 9);
    /* shift in [0..63] */
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, src, 63);
    t1 = tcg_temp_new_i32();
    if (insn & 8) {
        tcg_gen_andi_i32(t1, src, 15);
        rotate(reg, t1, left, 16);
        /* if shift == 0, clear C */
        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
                            t0, QREG_CC_V /* 0 */,
                            QREG_CC_V /* 0 */, QREG_CC_C);
    } else {
        TCGv X;
        /* modulo 17 */
        tcg_gen_movi_i32(t1, 17);
        tcg_gen_remu_i32(t1, t0, t1);
        X = rotate_x(reg, t1, left, 16);
        rotate_x_flags(reg, X, 16);
    }
    gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
    set_cc_op(s, CC_OP_FLAGS);
}
3844 
/*
 * Memory rotate (ROd/ROXd <ea>): rotate a word in memory by exactly
 * one bit, read-modify-write through the EA.  Bit 9 of the opcode
 * selects plain rotate vs rotate-through-X.
 */
DISAS_INSN(rotate_mem)
{
    TCGv src;
    TCGv addr;
    TCGv shift;
    int left = (insn & 0x100);

    SRC_EA(env, src, OS_WORD, 0, &addr);

    shift = tcg_constant_i32(1);
    if (insn & 0x0200) {
        rotate(src, shift, left, 16);
    } else {
        TCGv X = rotate_x(src, shift, left, 16);
        rotate_x_flags(src, X, 16);
    }
    DEST_EA(env, insn, OS_WORD, src, &addr);
    set_cc_op(s, CC_OP_FLAGS);
}
3864 
/*
 * BFEXTU/BFEXTS Dn{offset:width},Dm: extract a bitfield from a data
 * register.  Offsets are big bit-endian (bit 0 is the msb); width 0
 * encodes 32.  The sign-extended field is always left in QREG_CC_N
 * for the flags; the destination gets the signed or unsigned variant.
 */
DISAS_INSN(bfext_reg)
{
    int ext = read_im16(env, s);
    int is_sign = insn & 0x200;
    TCGv src = DREG(insn, 0);
    TCGv dst = DREG(ext, 12);
    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
    int pos = 32 - ofs - len;        /* little bit-endian */
    TCGv tmp = tcg_temp_new();
    TCGv shift;

    /*
     * In general, we're going to rotate the field so that it's at the
     * top of the word and then right-shift by the complement of the
     * width to extend the field.
     */
    if (ext & 0x20) {
        /* Variable width.  */
        if (ext & 0x800) {
            /* Variable offset.  */
            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
            tcg_gen_rotl_i32(tmp, src, tmp);
        } else {
            tcg_gen_rotli_i32(tmp, src, ofs);
        }

        /* shift = (32 - width) mod 32; width 0 in the register means 32. */
        shift = tcg_temp_new();
        tcg_gen_neg_i32(shift, DREG(ext, 0));
        tcg_gen_andi_i32(shift, shift, 31);
        tcg_gen_sar_i32(QREG_CC_N, tmp, shift);
        if (is_sign) {
            tcg_gen_mov_i32(dst, QREG_CC_N);
        } else {
            tcg_gen_shr_i32(dst, tmp, shift);
        }
    } else {
        /* Immediate width.  */
        if (ext & 0x800) {
            /* Variable offset */
            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
            tcg_gen_rotl_i32(tmp, src, tmp);
            src = tmp;
            pos = 32 - len;
        } else {
            /*
             * Immediate offset.  If the field doesn't wrap around the
             * end of the word, rely on (s)extract completely.
             */
            if (pos < 0) {
                tcg_gen_rotli_i32(tmp, src, ofs);
                src = tmp;
                pos = 32 - len;
            }
        }

        tcg_gen_sextract_i32(QREG_CC_N, src, pos, len);
        if (is_sign) {
            tcg_gen_mov_i32(dst, QREG_CC_N);
        } else {
            tcg_gen_extract_i32(dst, src, pos, len);
        }
    }

    set_cc_op(s, CC_OP_LOGIC);
}
3931 
/*
 * BFEXTU/BFEXTS with a memory operand: delegate the (possibly
 * byte-straddling) extraction to a helper.  The unsigned helper
 * returns a 64-bit pair so the sign-extended field can still be
 * deposited in QREG_CC_N for the flags.
 */
DISAS_INSN(bfext_mem)
{
    int ext = read_im16(env, s);
    int is_sign = insn & 0x200;
    TCGv dest = DREG(ext, 12);
    TCGv addr, len, ofs;

    addr = gen_lea(env, s, insn, OS_UNSIZED);
    if (IS_NULL_QREG(addr)) {
        gen_addr_fault(s);
        return;
    }

    /* Width and offset are each either a register or a 5-bit field. */
    if (ext & 0x20) {
        len = DREG(ext, 0);
    } else {
        len = tcg_constant_i32(extract32(ext, 0, 5));
    }
    if (ext & 0x800) {
        ofs = DREG(ext, 6);
    } else {
        ofs = tcg_constant_i32(extract32(ext, 6, 5));
    }

    if (is_sign) {
        gen_helper_bfexts_mem(dest, tcg_env, addr, ofs, len);
        tcg_gen_mov_i32(QREG_CC_N, dest);
    } else {
        TCGv_i64 tmp = tcg_temp_new_i64();
        gen_helper_bfextu_mem(tmp, tcg_env, addr, ofs, len);
        tcg_gen_extr_i64_i32(dest, QREG_CC_N, tmp);
    }
    set_cc_op(s, CC_OP_LOGIC);
}
3966 
/*
 * BFCHG/BFCLR/BFFFO/BFSET/BFTST with a register operand.  The common
 * part rotates the field to the top of the word into QREG_CC_N (for
 * the flags) and builds "mask" with a 0 in every field bit and 1
 * elsewhere, rotated into field position; the switch then applies the
 * per-insn operation using that inverted mask.  BFFFO additionally
 * needs the offset/len operands (tofs/tlen) for its helper.
 */
DISAS_INSN(bfop_reg)
{
    int ext = read_im16(env, s);
    TCGv src = DREG(insn, 0);
    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
    TCGv mask, tofs = NULL, tlen = NULL;
    bool is_bfffo = (insn & 0x0f00) == 0x0d00;

    if ((ext & 0x820) == 0) {
        /* Immediate width and offset.  */
        uint32_t maski = 0x7fffffffu >> (len - 1);
        if (ofs + len <= 32) {
            tcg_gen_shli_i32(QREG_CC_N, src, ofs);
        } else {
            tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
        }
        tcg_gen_andi_i32(QREG_CC_N, QREG_CC_N, ~maski);

        mask = tcg_constant_i32(ror32(maski, ofs));
        if (is_bfffo) {
            tofs = tcg_constant_i32(ofs);
            tlen = tcg_constant_i32(len);
        }
    } else {
        TCGv tmp = tcg_temp_new();

        mask = tcg_temp_new();
        if (ext & 0x20) {
            /* Variable width */
            tcg_gen_subi_i32(tmp, DREG(ext, 0), 1);
            tcg_gen_andi_i32(tmp, tmp, 31);
            tcg_gen_shr_i32(mask, tcg_constant_i32(0x7fffffffu), tmp);
            if (is_bfffo) {
                tlen = tcg_temp_new();
                tcg_gen_addi_i32(tlen, tmp, 1);
            }
        } else {
            /* Immediate width */
            tcg_gen_movi_i32(mask, 0x7fffffffu >> (len - 1));
            if (is_bfffo) {
                tlen = tcg_constant_i32(len);
            }
        }

        if (ext & 0x800) {
            /* Variable offset */
            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
            tcg_gen_rotl_i32(QREG_CC_N, src, tmp);
            tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
            tcg_gen_rotr_i32(mask, mask, tmp);
            if (is_bfffo) {
                tofs = tmp;
            }
        } else {
            /* Immediate offset (and variable width) */
            tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
            tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
            tcg_gen_rotri_i32(mask, mask, ofs);
            if (is_bfffo) {
                tofs = tcg_constant_i32(ofs);
            }
        }
    }
    set_cc_op(s, CC_OP_LOGIC);

    /* "mask" has a 1 in every NON-field bit here. */
    switch (insn & 0x0f00) {
    case 0x0a00: /* bfchg */
        tcg_gen_eqv_i32(src, src, mask);
        break;
    case 0x0c00: /* bfclr */
        tcg_gen_and_i32(src, src, mask);
        break;
    case 0x0d00: /* bfffo */
        gen_helper_bfffo_reg(DREG(ext, 12), QREG_CC_N, tofs, tlen);
        break;
    case 0x0e00: /* bfset */
        tcg_gen_orc_i32(src, src, mask);
        break;
    case 0x0800: /* bftst */
        /* flags already set; no other work to do.  */
        break;
    default:
        g_assert_not_reached();
    }
}
4053 
/*
 * BFCHG/BFCLR/BFFFO/BFSET/BFTST with a memory operand: each variant
 * has a dedicated helper that returns the (sign-extended) field for
 * the flags in QREG_CC_N.  BFFFO returns a 64-bit pair: found offset
 * plus the field value.
 */
DISAS_INSN(bfop_mem)
{
    int ext = read_im16(env, s);
    TCGv addr, len, ofs;
    TCGv_i64 t64;

    addr = gen_lea(env, s, insn, OS_UNSIZED);
    if (IS_NULL_QREG(addr)) {
        gen_addr_fault(s);
        return;
    }

    /* Width and offset are each either a register or a 5-bit field. */
    if (ext & 0x20) {
        len = DREG(ext, 0);
    } else {
        len = tcg_constant_i32(extract32(ext, 0, 5));
    }
    if (ext & 0x800) {
        ofs = DREG(ext, 6);
    } else {
        ofs = tcg_constant_i32(extract32(ext, 6, 5));
    }

    switch (insn & 0x0f00) {
    case 0x0a00: /* bfchg */
        gen_helper_bfchg_mem(QREG_CC_N, tcg_env, addr, ofs, len);
        break;
    case 0x0c00: /* bfclr */
        gen_helper_bfclr_mem(QREG_CC_N, tcg_env, addr, ofs, len);
        break;
    case 0x0d00: /* bfffo */
        t64 = tcg_temp_new_i64();
        gen_helper_bfffo_mem(t64, tcg_env, addr, ofs, len);
        tcg_gen_extr_i64_i32(DREG(ext, 12), QREG_CC_N, t64);
        break;
    case 0x0e00: /* bfset */
        gen_helper_bfset_mem(QREG_CC_N, tcg_env, addr, ofs, len);
        break;
    case 0x0800: /* bftst */
        gen_helper_bfexts_mem(QREG_CC_N, tcg_env, addr, ofs, len);
        break;
    default:
        g_assert_not_reached();
    }
    set_cc_op(s, CC_OP_LOGIC);
}
4100 
/*
 * BFINS Dm,Dn{offset:width}: insert the low "width" bits of the
 * source into a bitfield of the destination register.  The flags are
 * computed from the source field shifted to the top of the word
 * (QREG_CC_N).  Fields that fit without wrapping use deposit; the
 * general path builds a mask and rotates source and mask into place.
 */
DISAS_INSN(bfins_reg)
{
    int ext = read_im16(env, s);
    TCGv dst = DREG(insn, 0);
    TCGv src = DREG(ext, 12);
    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
    int pos = 32 - ofs - len;        /* little bit-endian */
    TCGv tmp;

    tmp = tcg_temp_new();

    if (ext & 0x20) {
        /* Variable width */
        tcg_gen_neg_i32(tmp, DREG(ext, 0));
        tcg_gen_andi_i32(tmp, tmp, 31);
        tcg_gen_shl_i32(QREG_CC_N, src, tmp);
    } else {
        /* Immediate width */
        tcg_gen_shli_i32(QREG_CC_N, src, 32 - len);
    }
    set_cc_op(s, CC_OP_LOGIC);

    /* Immediate width and offset */
    if ((ext & 0x820) == 0) {
        /* Check for suitability for deposit.  */
        if (pos >= 0) {
            tcg_gen_deposit_i32(dst, dst, src, pos, len);
        } else {
            /* Field wraps around bit 0: mask, rotate into place, merge. */
            uint32_t maski = -2U << (len - 1);
            uint32_t roti = (ofs + len) & 31;
            tcg_gen_andi_i32(tmp, src, ~maski);
            tcg_gen_rotri_i32(tmp, tmp, roti);
            tcg_gen_andi_i32(dst, dst, ror32(maski, roti));
            tcg_gen_or_i32(dst, dst, tmp);
        }
    } else {
        TCGv mask = tcg_temp_new();
        TCGv rot = tcg_temp_new();

        if (ext & 0x20) {
            /* Variable width */
            tcg_gen_subi_i32(rot, DREG(ext, 0), 1);
            tcg_gen_andi_i32(rot, rot, 31);
            /* mask = -2 << ((width - 1) & 31): zeros over the field. */
            tcg_gen_movi_i32(mask, -2);
            tcg_gen_shl_i32(mask, mask, rot);
            tcg_gen_mov_i32(rot, DREG(ext, 0));
            tcg_gen_andc_i32(tmp, src, mask);
        } else {
            /* Immediate width (variable offset) */
            uint32_t maski = -2U << (len - 1);
            tcg_gen_andi_i32(tmp, src, ~maski);
            tcg_gen_movi_i32(mask, maski);
            tcg_gen_movi_i32(rot, len & 31);
        }
        if (ext & 0x800) {
            /* Variable offset */
            tcg_gen_add_i32(rot, rot, DREG(ext, 6));
        } else {
            /* Immediate offset (variable width) */
            tcg_gen_addi_i32(rot, rot, ofs);
        }
        /* Rotate mask and field right by (offset + width) mod 32, merge. */
        tcg_gen_andi_i32(rot, rot, 31);
        tcg_gen_rotr_i32(mask, mask, rot);
        tcg_gen_rotr_i32(tmp, tmp, rot);
        tcg_gen_and_i32(dst, dst, mask);
        tcg_gen_or_i32(dst, dst, tmp);
    }
}
4170 
4171 DISAS_INSN(bfins_mem)
4172 {
4173     int ext = read_im16(env, s);
4174     TCGv src = DREG(ext, 12);
4175     TCGv addr, len, ofs;
4176 
4177     addr = gen_lea(env, s, insn, OS_UNSIZED);
4178     if (IS_NULL_QREG(addr)) {
4179         gen_addr_fault(s);
4180         return;
4181     }
4182 
4183     if (ext & 0x20) {
4184         len = DREG(ext, 0);
4185     } else {
4186         len = tcg_constant_i32(extract32(ext, 0, 5));
4187     }
4188     if (ext & 0x800) {
4189         ofs = DREG(ext, 6);
4190     } else {
4191         ofs = tcg_constant_i32(extract32(ext, 6, 5));
4192     }
4193 
4194     gen_helper_bfins_mem(QREG_CC_N, tcg_env, addr, src, ofs, len);
4195     set_cc_op(s, CC_OP_LOGIC);
4196 }
4197 
4198 DISAS_INSN(ff1)
4199 {
4200     TCGv reg;
4201     reg = DREG(insn, 0);
4202     gen_logic_cc(s, reg, OS_LONG);
4203     gen_helper_ff1(reg, reg);
4204 }
4205 
4206 DISAS_INSN(chk)
4207 {
4208     TCGv src, reg;
4209     int opsize;
4210 
4211     switch ((insn >> 7) & 3) {
4212     case 3:
4213         opsize = OS_WORD;
4214         break;
4215     case 2:
4216         if (m68k_feature(env, M68K_FEATURE_CHK2)) {
4217             opsize = OS_LONG;
4218             break;
4219         }
4220         /* fallthru */
4221     default:
4222         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4223         return;
4224     }
4225     SRC_EA(env, src, opsize, 1, NULL);
4226     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
4227 
4228     gen_flush_flags(s);
4229     gen_helper_chk(tcg_env, reg, src);
4230 }
4231 
/*
 * CHK2/CMP2: compare a register against a pair of bounds loaded from
 * consecutive memory locations at the effective address.
 */
DISAS_INSN(chk2)
{
    uint16_t ext;
    TCGv addr1, addr2, bound1, bound2, reg;
    int opsize;

    switch ((insn >> 9) & 3) {
    case 0:
        opsize = OS_BYTE;
        break;
    case 1:
        opsize = OS_WORD;
        break;
    case 2:
        opsize = OS_LONG;
        break;
    default:
        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
        return;
    }

    ext = read_im16(env, s);
    /* Bit 11 of the extension word must be set for CHK2. */
    if ((ext & 0x0800) == 0) {
        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
        return;
    }

    /* NOTE(review): unlike other users, addr1 is not checked for
       NULL_QREG here -- presumably the decode table only admits valid
       EA modes; verify against the insn registration. */
    addr1 = gen_lea(env, s, insn, OS_UNSIZED);
    addr2 = tcg_temp_new();
    tcg_gen_addi_i32(addr2, addr1, opsize_bytes(opsize));

    /* Lower bound, then upper bound, from consecutive locations. */
    bound1 = gen_load(s, opsize, addr1, 1, IS_USER(s));
    bound2 = gen_load(s, opsize, addr2, 1, IS_USER(s));

    reg = tcg_temp_new();
    /* Bit 15 selects An (used as-is) vs Dn (sign-extended). */
    if (ext & 0x8000) {
        tcg_gen_mov_i32(reg, AREG(ext, 12));
    } else {
        gen_ext(reg, DREG(ext, 12), opsize, 1);
    }

    /* The helper needs live flags and may raise the CHK exception. */
    gen_flush_flags(s);
    gen_helper_chk2(tcg_env, reg, bound1, bound2);
}
4276 
/*
 * Copy one aligned 16-byte line from *src to *dst (for MOVE16).
 * Both addresses are rounded down to a 16-byte boundary; the copy is
 * done as two 8-byte loads followed by two 8-byte stores.
 */
static void m68k_copy_line(TCGv dst, TCGv src, int index)
{
    TCGv addr;
    TCGv_i64 t0, t1;

    addr = tcg_temp_new();

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();

    /* Load both halves before storing, in case the lines overlap. */
    tcg_gen_andi_i32(addr, src, ~15);
    tcg_gen_qemu_ld_i64(t0, addr, index, MO_TEUQ);
    tcg_gen_addi_i32(addr, addr, 8);
    tcg_gen_qemu_ld_i64(t1, addr, index, MO_TEUQ);

    tcg_gen_andi_i32(addr, dst, ~15);
    tcg_gen_qemu_st_i64(t0, addr, index, MO_TEUQ);
    tcg_gen_addi_i32(addr, addr, 8);
    tcg_gen_qemu_st_i64(t1, addr, index, MO_TEUQ);
}
4297 
4298 DISAS_INSN(move16_reg)
4299 {
4300     int index = IS_USER(s);
4301     TCGv tmp;
4302     uint16_t ext;
4303 
4304     ext = read_im16(env, s);
4305     if ((ext & (1 << 15)) == 0) {
4306         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4307     }
4308 
4309     m68k_copy_line(AREG(ext, 12), AREG(insn, 0), index);
4310 
4311     /* Ax can be Ay, so save Ay before incrementing Ax */
4312     tmp = tcg_temp_new();
4313     tcg_gen_mov_i32(tmp, AREG(ext, 12));
4314     tcg_gen_addi_i32(AREG(insn, 0), AREG(insn, 0), 16);
4315     tcg_gen_addi_i32(AREG(ext, 12), tmp, 16);
4316 }
4317 
4318 DISAS_INSN(move16_mem)
4319 {
4320     int index = IS_USER(s);
4321     TCGv reg, addr;
4322 
4323     reg = AREG(insn, 0);
4324     addr = tcg_constant_i32(read_im32(env, s));
4325 
4326     if ((insn >> 3) & 1) {
4327         /* MOVE16 (xxx).L, (Ay) */
4328         m68k_copy_line(reg, addr, index);
4329     } else {
4330         /* MOVE16 (Ay), (xxx).L */
4331         m68k_copy_line(addr, reg, index);
4332     }
4333 
4334     if (((insn >> 3) & 2) == 0) {
4335         /* (Ay)+ */
4336         tcg_gen_addi_i32(reg, reg, 16);
4337     }
4338 }
4339 
/*
 * STRLDSR: push the current SR, then load SR from an immediate.
 * The opcode must be followed by the word 0x46FC (move-to-SR) and the
 * 16-bit SR value; anything else is an illegal instruction.
 */
DISAS_INSN(strldsr)
{
    uint16_t ext;
    uint32_t addr;

    /* PC of the opcode word itself, for exception reporting. */
    addr = s->pc - 2;
    ext = read_im16(env, s);
    if (ext != 0x46FC) {
        gen_exception(s, addr, EXCP_ILLEGAL);
        return;
    }
    ext = read_im16(env, s);
    /* Privileged, and the new SR must keep the supervisor bit set. */
    if (IS_USER(s) || (ext & SR_S) == 0) {
        gen_exception(s, addr, EXCP_PRIVILEGE);
        return;
    }
    gen_push(s, gen_get_sr(s));
    gen_set_sr_im(s, ext, 0);
    /* SR changed: end the TB so the new state takes effect. */
    gen_exit_tb(s);
}
4360 
4361 DISAS_INSN(move_from_sr)
4362 {
4363     TCGv sr;
4364 
4365     if (IS_USER(s) && m68k_feature(env, M68K_FEATURE_MOVEFROMSR_PRIV)) {
4366         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4367         return;
4368     }
4369     sr = gen_get_sr(s);
4370     DEST_EA(env, insn, OS_WORD, sr, NULL);
4371 }
4372 
4373 #if !defined(CONFIG_USER_ONLY)
/*
 * MOVES: move to/from an alternate address space, using the SFC
 * (reads) or DFC (writes) function code for the access.  Privileged.
 */
DISAS_INSN(moves)
{
    int opsize;
    uint16_t ext;
    TCGv reg;
    TCGv addr;
    int extend;

    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }

    ext = read_im16(env, s);

    opsize = insn_opsize(insn);

    /* Bit 15 of the extension word selects An vs Dn. */
    if (ext & 0x8000) {
        /* address register */
        reg = AREG(ext, 12);
        extend = 1;
    } else {
        /* data register */
        reg = DREG(ext, 12);
        extend = 0;
    }

    addr = gen_lea(env, s, insn, opsize);
    if (IS_NULL_QREG(addr)) {
        gen_addr_fault(s);
        return;
    }

    if (ext & 0x0800) {
        /* from reg to ea */
        gen_store(s, opsize, addr, reg, DFC_INDEX(s));
    } else {
        /* from ea to reg */
        TCGv tmp = gen_load(s, opsize, addr, 0, SFC_INDEX(s));
        /* An is sign-extended; Dn only has its low bits replaced. */
        if (extend) {
            gen_ext(reg, tmp, opsize, 1);
        } else {
            gen_partset_reg(opsize, reg, tmp);
        }
    }
    /* Perform any address-register writeback for (An)+ / -(An). */
    switch (extract32(insn, 3, 3)) {
    case 3: /* Indirect postincrement.  */
        /* Byte accesses through A7 keep the stack word-aligned. */
        tcg_gen_addi_i32(AREG(insn, 0), addr,
                         REG(insn, 0) == 7 && opsize == OS_BYTE
                         ? 2
                         : opsize_bytes(opsize));
        break;
    case 4: /* Indirect predecrement.  */
        tcg_gen_mov_i32(AREG(insn, 0), addr);
        break;
    }
}
4431 
4432 DISAS_INSN(move_to_sr)
4433 {
4434     if (IS_USER(s)) {
4435         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4436         return;
4437     }
4438     gen_move_to_sr(env, s, insn, false);
4439     gen_exit_tb(s);
4440 }
4441 
4442 DISAS_INSN(move_from_usp)
4443 {
4444     if (IS_USER(s)) {
4445         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4446         return;
4447     }
4448     tcg_gen_ld_i32(AREG(insn, 0), tcg_env,
4449                    offsetof(CPUM68KState, sp[M68K_USP]));
4450 }
4451 
4452 DISAS_INSN(move_to_usp)
4453 {
4454     if (IS_USER(s)) {
4455         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4456         return;
4457     }
4458     tcg_gen_st_i32(AREG(insn, 0), tcg_env,
4459                    offsetof(CPUM68KState, sp[M68K_USP]));
4460 }
4461 
4462 DISAS_INSN(halt)
4463 {
4464     if (IS_USER(s)) {
4465         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4466         return;
4467     }
4468 
4469     gen_exception(s, s->pc, EXCP_HALT_INSN);
4470 }
4471 
/*
 * STOP #imm: load SR from the immediate, mark the CPU halted, and
 * leave via EXCP_HLT.  Privileged.
 */
DISAS_INSN(stop)
{
    uint16_t ext;

    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }

    ext = read_im16(env, s);

    gen_set_sr_im(s, ext, 0);
    tcg_gen_movi_i32(cpu_halted, 1);
    gen_exception(s, s->pc, EXCP_HLT);
}
4487 
4488 DISAS_INSN(rte)
4489 {
4490     if (IS_USER(s)) {
4491         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4492         return;
4493     }
4494     gen_exception(s, s->base.pc_next, EXCP_RTE);
4495 }
4496 
4497 DISAS_INSN(cf_movec)
4498 {
4499     uint16_t ext;
4500     TCGv reg;
4501 
4502     if (IS_USER(s)) {
4503         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4504         return;
4505     }
4506 
4507     ext = read_im16(env, s);
4508 
4509     if (ext & 0x8000) {
4510         reg = AREG(ext, 12);
4511     } else {
4512         reg = DREG(ext, 12);
4513     }
4514     gen_helper_cf_movec_to(tcg_env, tcg_constant_i32(ext & 0xfff), reg);
4515     gen_exit_tb(s);
4516 }
4517 
4518 DISAS_INSN(m68k_movec)
4519 {
4520     uint16_t ext;
4521     TCGv reg, creg;
4522 
4523     if (IS_USER(s)) {
4524         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4525         return;
4526     }
4527 
4528     ext = read_im16(env, s);
4529 
4530     if (ext & 0x8000) {
4531         reg = AREG(ext, 12);
4532     } else {
4533         reg = DREG(ext, 12);
4534     }
4535     creg = tcg_constant_i32(ext & 0xfff);
4536     if (insn & 1) {
4537         gen_helper_m68k_movec_to(tcg_env, creg, reg);
4538     } else {
4539         gen_helper_m68k_movec_from(reg, tcg_env, creg);
4540     }
4541     gen_exit_tb(s);
4542 }
4543 
4544 DISAS_INSN(intouch)
4545 {
4546     if (IS_USER(s)) {
4547         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4548         return;
4549     }
4550     /* ICache fetch.  Implement as no-op.  */
4551 }
4552 
4553 DISAS_INSN(cpushl)
4554 {
4555     if (IS_USER(s)) {
4556         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4557         return;
4558     }
4559     /* Cache push/invalidate.  Implement as no-op.  */
4560 }
4561 
4562 DISAS_INSN(cpush)
4563 {
4564     if (IS_USER(s)) {
4565         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4566         return;
4567     }
4568     /* Cache push/invalidate.  Implement as no-op.  */
4569 }
4570 
4571 DISAS_INSN(cinv)
4572 {
4573     if (IS_USER(s)) {
4574         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4575         return;
4576     }
4577     /* Invalidate cache line.  Implement as no-op.  */
4578 }
4579 
4580 #if !defined(CONFIG_USER_ONLY)
4581 DISAS_INSN(pflush)
4582 {
4583     TCGv opmode;
4584 
4585     if (IS_USER(s)) {
4586         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4587         return;
4588     }
4589 
4590     opmode = tcg_constant_i32((insn >> 3) & 3);
4591     gen_helper_pflush(tcg_env, AREG(insn, 0), opmode);
4592 }
4593 
4594 DISAS_INSN(ptest)
4595 {
4596     TCGv is_read;
4597 
4598     if (IS_USER(s)) {
4599         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4600         return;
4601     }
4602     is_read = tcg_constant_i32((insn >> 5) & 1);
4603     gen_helper_ptest(tcg_env, AREG(insn, 0), is_read);
4604 }
4605 #endif
4606 
/* WDDATA: implemented as an unconditional privilege violation. */
DISAS_INSN(wddata)
{
    gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
}
4611 
/* WDEBUG: privileged; not implemented -- aborts the emulator. */
DISAS_INSN(wdebug)
{
    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }
    /* TODO: Implement wdebug.  */
    cpu_abort(env_cpu(env), "WDEBUG not implemented");
}
4621 #endif
4622 
/* TRAP #n: raise EXCP_TRAP0 plus the vector from the low 4 bits. */
DISAS_INSN(trap)
{
    gen_exception(s, s->pc, EXCP_TRAP0 + (insn & 0xf));
}
4627 
/*
 * Shared tail for TRAPcc/TRAPV: raise EXCP_TRAPCC when condition c
 * holds.  Statically-never conditions emit nothing; statically-always
 * conditions trap unconditionally.
 */
static void do_trapcc(DisasContext *s, DisasCompare *c)
{
    if (c->tcond != TCG_COND_NEVER) {
        TCGLabel *over = NULL;

        update_cc_op(s);

        if (c->tcond != TCG_COND_ALWAYS) {
            /* Jump over if !c. */
            over = gen_new_label();
            tcg_gen_brcond_i32(tcg_invert_cond(c->tcond), c->v1, c->v2, over);
        }

        /* Format-2 exception: PC of this insn, next PC as return. */
        tcg_gen_movi_i32(QREG_PC, s->pc);
        gen_raise_exception_format2(s, EXCP_TRAPCC, s->base.pc_next);

        if (over != NULL) {
            gen_set_label(over);
            /* The not-taken path falls through to the next insn. */
            s->base.is_jmp = DISAS_NEXT;
        }
    }
}
4650 
/* TRAPcc: trap on condition, with an optional unused immediate. */
DISAS_INSN(trapcc)
{
    DisasCompare c;

    /* Consume and discard the immediate operand. */
    switch (extract32(insn, 0, 3)) {
    case 2: /* trapcc.w */
        (void)read_im16(env, s);
        break;
    case 3: /* trapcc.l */
        (void)read_im32(env, s);
        break;
    case 4: /* trapcc (no operand) */
        break;
    default:
        /* trapcc registered with only valid opmodes */
        g_assert_not_reached();
    }

    /* Condition code is in bits 11:8 of the opcode. */
    gen_cc_cond(&c, s, extract32(insn, 8, 4));
    do_trapcc(s, &c);
}
4673 
4674 DISAS_INSN(trapv)
4675 {
4676     DisasCompare c;
4677 
4678     gen_cc_cond(&c, s, 9); /* V set */
4679     do_trapcc(s, &c);
4680 }
4681 
4682 static void gen_load_fcr(DisasContext *s, TCGv res, int reg)
4683 {
4684     switch (reg) {
4685     case M68K_FPIAR:
4686         tcg_gen_movi_i32(res, 0);
4687         break;
4688     case M68K_FPSR:
4689         tcg_gen_ld_i32(res, tcg_env, offsetof(CPUM68KState, fpsr));
4690         break;
4691     case M68K_FPCR:
4692         tcg_gen_ld_i32(res, tcg_env, offsetof(CPUM68KState, fpcr));
4693         break;
4694     }
4695 }
4696 
4697 static void gen_store_fcr(DisasContext *s, TCGv val, int reg)
4698 {
4699     switch (reg) {
4700     case M68K_FPIAR:
4701         break;
4702     case M68K_FPSR:
4703         tcg_gen_st_i32(val, tcg_env, offsetof(CPUM68KState, fpsr));
4704         break;
4705     case M68K_FPCR:
4706         gen_helper_set_fpcr(tcg_env, val);
4707         break;
4708     }
4709 }
4710 
4711 static void gen_qemu_store_fcr(DisasContext *s, TCGv addr, int reg)
4712 {
4713     int index = IS_USER(s);
4714     TCGv tmp;
4715 
4716     tmp = tcg_temp_new();
4717     gen_load_fcr(s, tmp, reg);
4718     tcg_gen_qemu_st_tl(tmp, addr, index, MO_TEUL);
4719 }
4720 
4721 static void gen_qemu_load_fcr(DisasContext *s, TCGv addr, int reg)
4722 {
4723     int index = IS_USER(s);
4724     TCGv tmp;
4725 
4726     tmp = tcg_temp_new();
4727     tcg_gen_qemu_ld_tl(tmp, addr, index, MO_TEUL);
4728     gen_store_fcr(s, tmp, reg);
4729 }
4730 
4731 
/*
 * FMOVE(M) of the FP control registers (FPCR/FPSR/FPIAR) for all
 * addressing modes.  `mask` selects the register set (see comment
 * below); `is_write` means transfer FROM the control register(s) to
 * the operand.
 */
static void gen_op_fmove_fcr(CPUM68KState *env, DisasContext *s,
                             uint32_t insn, uint32_t ext)
{
    int mask = (ext >> 10) & 7;
    int is_write = (ext >> 13) & 1;
    int mode = extract32(insn, 3, 3);
    int i;
    TCGv addr, tmp;

    switch (mode) {
    case 0: /* Dn */
        /* Register operands allow exactly one control register. */
        if (mask != M68K_FPIAR && mask != M68K_FPSR && mask != M68K_FPCR) {
            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
            return;
        }
        if (is_write) {
            gen_load_fcr(s, DREG(insn, 0), mask);
        } else {
            gen_store_fcr(s, DREG(insn, 0), mask);
        }
        return;
    case 1: /* An, only with FPIAR */
        if (mask != M68K_FPIAR) {
            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
            return;
        }
        if (is_write) {
            gen_load_fcr(s, AREG(insn, 0), mask);
        } else {
            gen_store_fcr(s, AREG(insn, 0), mask);
        }
        return;
    case 7: /* Immediate */
        if (REG(insn, 0) == 4) {
            /* Immediates can only be read, and only one register. */
            if (is_write ||
                (mask != M68K_FPIAR && mask != M68K_FPSR &&
                 mask != M68K_FPCR)) {
                gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
                return;
            }
            tmp = tcg_constant_i32(read_im32(env, s));
            gen_store_fcr(s, tmp, mask);
            return;
        }
        break;
    default:
        break;
    }

    /* Memory operand: each selected register is one longword. */
    tmp = gen_lea(env, s, insn, OS_LONG);
    if (IS_NULL_QREG(tmp)) {
        gen_addr_fault(s);
        return;
    }

    addr = tcg_temp_new();
    tcg_gen_mov_i32(addr, tmp);

    /*
     * mask:
     *
     * 0b100 Floating-Point Control Register
     * 0b010 Floating-Point Status Register
     * 0b001 Floating-Point Instruction Address Register
     *
     */

    if (is_write && mode == 4) {
        /* -(An) store: walk the registers in reverse order. */
        for (i = 2; i >= 0; i--, mask >>= 1) {
            if (mask & 1) {
                gen_qemu_store_fcr(s, addr, 1 << i);
                if (mask != 1) {
                    tcg_gen_subi_i32(addr, addr, opsize_bytes(OS_LONG));
                }
            }
       }
       tcg_gen_mov_i32(AREG(insn, 0), addr);
    } else {
        for (i = 0; i < 3; i++, mask >>= 1) {
            if (mask & 1) {
                if (is_write) {
                    gen_qemu_store_fcr(s, addr, 1 << i);
                } else {
                    gen_qemu_load_fcr(s, addr, 1 << i);
                }
                if (mask != 1 || mode == 3) {
                    tcg_gen_addi_i32(addr, addr, opsize_bytes(OS_LONG));
                }
            }
        }
        /* (An)+ writeback. */
        if (mode == 3) {
            tcg_gen_mov_i32(AREG(insn, 0), addr);
        }
    }
}
4827 
/*
 * FMOVEM of FP data registers.  The register list comes either from
 * the extension word (static) or from a data register (dynamic); the
 * transfer direction and addressing mode select a helper.
 */
static void gen_op_fmovem(CPUM68KState *env, DisasContext *s,
                          uint32_t insn, uint32_t ext)
{
    int opsize;
    TCGv addr, tmp;
    int mode = (ext >> 11) & 0x3;
    int is_load = ((ext & 0x2000) == 0);

    /* Full FPUs transfer 96-bit extended values; others use double. */
    if (m68k_feature(s->env, M68K_FEATURE_FPU)) {
        opsize = OS_EXTENDED;
    } else {
        opsize = OS_DOUBLE;  /* FIXME */
    }

    addr = gen_lea(env, s, insn, opsize);
    if (IS_NULL_QREG(addr)) {
        gen_addr_fault(s);
        return;
    }

    tmp = tcg_temp_new();
    if (mode & 0x1) {
        /* Dynamic register list */
        tcg_gen_ext8u_i32(tmp, DREG(ext, 4));
    } else {
        /* Static register list */
        tcg_gen_movi_i32(tmp, ext & 0xff);
    }

    if (!is_load && (mode & 2) == 0) {
        /*
         * predecrement addressing mode
         * only available to store register to memory
         */
        if (opsize == OS_EXTENDED) {
            gen_helper_fmovemx_st_predec(tmp, tcg_env, addr, tmp);
        } else {
            gen_helper_fmovemd_st_predec(tmp, tcg_env, addr, tmp);
        }
    } else {
        /* postincrement addressing mode */
        if (opsize == OS_EXTENDED) {
            if (is_load) {
                gen_helper_fmovemx_ld_postinc(tmp, tcg_env, addr, tmp);
            } else {
                gen_helper_fmovemx_st_postinc(tmp, tcg_env, addr, tmp);
            }
        } else {
            if (is_load) {
                gen_helper_fmovemd_ld_postinc(tmp, tcg_env, addr, tmp);
            } else {
                gen_helper_fmovemd_st_postinc(tmp, tcg_env, addr, tmp);
            }
        }
    }
    /* Writeback of the final address for (An)+ and -(An) modes
       (octal 030 / 040 in the opcode's EA field). */
    if ((insn & 070) == 030 || (insn & 070) == 040) {
        tcg_gen_mov_i32(AREG(insn, 0), tmp);
    }
}
4887 
4888 /*
4889  * ??? FP exceptions are not implemented.  Most exceptions are deferred until
4890  * immediately before the next FP instruction is executed.
4891  */
4892 DISAS_INSN(fpu)
4893 {
4894     uint16_t ext;
4895     int opmode;
4896     int opsize;
4897     TCGv_ptr cpu_src, cpu_dest;
4898 
4899     ext = read_im16(env, s);
4900     opmode = ext & 0x7f;
4901     switch ((ext >> 13) & 7) {
4902     case 0:
4903         break;
4904     case 1:
4905         goto undef;
4906     case 2:
4907         if (insn == 0xf200 && (ext & 0xfc00) == 0x5c00) {
4908             /* fmovecr */
4909             TCGv rom_offset = tcg_constant_i32(opmode);
4910             cpu_dest = gen_fp_ptr(REG(ext, 7));
4911             gen_helper_fconst(tcg_env, cpu_dest, rom_offset);
4912             return;
4913         }
4914         break;
4915     case 3: /* fmove out */
4916         cpu_src = gen_fp_ptr(REG(ext, 7));
4917         opsize = ext_opsize(ext, 10);
4918         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
4919                       EA_STORE, IS_USER(s)) == -1) {
4920             gen_addr_fault(s);
4921         }
4922         gen_helper_ftst(tcg_env, cpu_src);
4923         return;
4924     case 4: /* fmove to control register.  */
4925     case 5: /* fmove from control register.  */
4926         gen_op_fmove_fcr(env, s, insn, ext);
4927         return;
4928     case 6: /* fmovem */
4929     case 7:
4930         if ((ext & 0x1000) == 0 && !m68k_feature(s->env, M68K_FEATURE_FPU)) {
4931             goto undef;
4932         }
4933         gen_op_fmovem(env, s, insn, ext);
4934         return;
4935     }
4936     if (ext & (1 << 14)) {
4937         /* Source effective address.  */
4938         opsize = ext_opsize(ext, 10);
4939         cpu_src = gen_fp_result_ptr();
4940         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
4941                       EA_LOADS, IS_USER(s)) == -1) {
4942             gen_addr_fault(s);
4943             return;
4944         }
4945     } else {
4946         /* Source register.  */
4947         opsize = OS_EXTENDED;
4948         cpu_src = gen_fp_ptr(REG(ext, 10));
4949     }
4950     cpu_dest = gen_fp_ptr(REG(ext, 7));
4951     switch (opmode) {
4952     case 0: /* fmove */
4953         gen_fp_move(cpu_dest, cpu_src);
4954         break;
4955     case 0x40: /* fsmove */
4956         gen_helper_fsround(tcg_env, cpu_dest, cpu_src);
4957         break;
4958     case 0x44: /* fdmove */
4959         gen_helper_fdround(tcg_env, cpu_dest, cpu_src);
4960         break;
4961     case 1: /* fint */
4962         gen_helper_firound(tcg_env, cpu_dest, cpu_src);
4963         break;
4964     case 2: /* fsinh */
4965         gen_helper_fsinh(tcg_env, cpu_dest, cpu_src);
4966         break;
4967     case 3: /* fintrz */
4968         gen_helper_fitrunc(tcg_env, cpu_dest, cpu_src);
4969         break;
4970     case 4: /* fsqrt */
4971         gen_helper_fsqrt(tcg_env, cpu_dest, cpu_src);
4972         break;
4973     case 0x41: /* fssqrt */
4974         gen_helper_fssqrt(tcg_env, cpu_dest, cpu_src);
4975         break;
4976     case 0x45: /* fdsqrt */
4977         gen_helper_fdsqrt(tcg_env, cpu_dest, cpu_src);
4978         break;
4979     case 0x06: /* flognp1 */
4980         gen_helper_flognp1(tcg_env, cpu_dest, cpu_src);
4981         break;
4982     case 0x08: /* fetoxm1 */
4983         gen_helper_fetoxm1(tcg_env, cpu_dest, cpu_src);
4984         break;
4985     case 0x09: /* ftanh */
4986         gen_helper_ftanh(tcg_env, cpu_dest, cpu_src);
4987         break;
4988     case 0x0a: /* fatan */
4989         gen_helper_fatan(tcg_env, cpu_dest, cpu_src);
4990         break;
4991     case 0x0c: /* fasin */
4992         gen_helper_fasin(tcg_env, cpu_dest, cpu_src);
4993         break;
4994     case 0x0d: /* fatanh */
4995         gen_helper_fatanh(tcg_env, cpu_dest, cpu_src);
4996         break;
4997     case 0x0e: /* fsin */
4998         gen_helper_fsin(tcg_env, cpu_dest, cpu_src);
4999         break;
5000     case 0x0f: /* ftan */
5001         gen_helper_ftan(tcg_env, cpu_dest, cpu_src);
5002         break;
5003     case 0x10: /* fetox */
5004         gen_helper_fetox(tcg_env, cpu_dest, cpu_src);
5005         break;
5006     case 0x11: /* ftwotox */
5007         gen_helper_ftwotox(tcg_env, cpu_dest, cpu_src);
5008         break;
5009     case 0x12: /* ftentox */
5010         gen_helper_ftentox(tcg_env, cpu_dest, cpu_src);
5011         break;
5012     case 0x14: /* flogn */
5013         gen_helper_flogn(tcg_env, cpu_dest, cpu_src);
5014         break;
5015     case 0x15: /* flog10 */
5016         gen_helper_flog10(tcg_env, cpu_dest, cpu_src);
5017         break;
5018     case 0x16: /* flog2 */
5019         gen_helper_flog2(tcg_env, cpu_dest, cpu_src);
5020         break;
5021     case 0x18: /* fabs */
5022         gen_helper_fabs(tcg_env, cpu_dest, cpu_src);
5023         break;
5024     case 0x58: /* fsabs */
5025         gen_helper_fsabs(tcg_env, cpu_dest, cpu_src);
5026         break;
5027     case 0x5c: /* fdabs */
5028         gen_helper_fdabs(tcg_env, cpu_dest, cpu_src);
5029         break;
5030     case 0x19: /* fcosh */
5031         gen_helper_fcosh(tcg_env, cpu_dest, cpu_src);
5032         break;
5033     case 0x1a: /* fneg */
5034         gen_helper_fneg(tcg_env, cpu_dest, cpu_src);
5035         break;
5036     case 0x5a: /* fsneg */
5037         gen_helper_fsneg(tcg_env, cpu_dest, cpu_src);
5038         break;
5039     case 0x5e: /* fdneg */
5040         gen_helper_fdneg(tcg_env, cpu_dest, cpu_src);
5041         break;
5042     case 0x1c: /* facos */
5043         gen_helper_facos(tcg_env, cpu_dest, cpu_src);
5044         break;
5045     case 0x1d: /* fcos */
5046         gen_helper_fcos(tcg_env, cpu_dest, cpu_src);
5047         break;
5048     case 0x1e: /* fgetexp */
5049         gen_helper_fgetexp(tcg_env, cpu_dest, cpu_src);
5050         break;
5051     case 0x1f: /* fgetman */
5052         gen_helper_fgetman(tcg_env, cpu_dest, cpu_src);
5053         break;
5054     case 0x20: /* fdiv */
5055         gen_helper_fdiv(tcg_env, cpu_dest, cpu_src, cpu_dest);
5056         break;
5057     case 0x60: /* fsdiv */
5058         gen_helper_fsdiv(tcg_env, cpu_dest, cpu_src, cpu_dest);
5059         break;
5060     case 0x64: /* fddiv */
5061         gen_helper_fddiv(tcg_env, cpu_dest, cpu_src, cpu_dest);
5062         break;
5063     case 0x21: /* fmod */
5064         gen_helper_fmod(tcg_env, cpu_dest, cpu_src, cpu_dest);
5065         break;
5066     case 0x22: /* fadd */
5067         gen_helper_fadd(tcg_env, cpu_dest, cpu_src, cpu_dest);
5068         break;
5069     case 0x62: /* fsadd */
5070         gen_helper_fsadd(tcg_env, cpu_dest, cpu_src, cpu_dest);
5071         break;
5072     case 0x66: /* fdadd */
5073         gen_helper_fdadd(tcg_env, cpu_dest, cpu_src, cpu_dest);
5074         break;
5075     case 0x23: /* fmul */
5076         gen_helper_fmul(tcg_env, cpu_dest, cpu_src, cpu_dest);
5077         break;
5078     case 0x63: /* fsmul */
5079         gen_helper_fsmul(tcg_env, cpu_dest, cpu_src, cpu_dest);
5080         break;
5081     case 0x67: /* fdmul */
5082         gen_helper_fdmul(tcg_env, cpu_dest, cpu_src, cpu_dest);
5083         break;
5084     case 0x24: /* fsgldiv */
5085         gen_helper_fsgldiv(tcg_env, cpu_dest, cpu_src, cpu_dest);
5086         break;
5087     case 0x25: /* frem */
5088         gen_helper_frem(tcg_env, cpu_dest, cpu_src, cpu_dest);
5089         break;
5090     case 0x26: /* fscale */
5091         gen_helper_fscale(tcg_env, cpu_dest, cpu_src, cpu_dest);
5092         break;
5093     case 0x27: /* fsglmul */
5094         gen_helper_fsglmul(tcg_env, cpu_dest, cpu_src, cpu_dest);
5095         break;
5096     case 0x28: /* fsub */
5097         gen_helper_fsub(tcg_env, cpu_dest, cpu_src, cpu_dest);
5098         break;
5099     case 0x68: /* fssub */
5100         gen_helper_fssub(tcg_env, cpu_dest, cpu_src, cpu_dest);
5101         break;
5102     case 0x6c: /* fdsub */
5103         gen_helper_fdsub(tcg_env, cpu_dest, cpu_src, cpu_dest);
5104         break;
5105     case 0x30: case 0x31: case 0x32:
5106     case 0x33: case 0x34: case 0x35:
5107     case 0x36: case 0x37: {
5108             TCGv_ptr cpu_dest2 = gen_fp_ptr(REG(ext, 0));
5109             gen_helper_fsincos(tcg_env, cpu_dest, cpu_dest2, cpu_src);
5110         }
5111         break;
5112     case 0x38: /* fcmp */
5113         gen_helper_fcmp(tcg_env, cpu_src, cpu_dest);
5114         return;
5115     case 0x3a: /* ftst */
5116         gen_helper_ftst(tcg_env, cpu_src);
5117         return;
5118     default:
5119         goto undef;
5120     }
5121     gen_helper_ftst(tcg_env, cpu_dest);
5122     return;
5123 undef:
5124     /* FIXME: Is this right for offset addressing modes?  */
5125     s->pc -= 2;
5126     disas_undef_fpu(env, s, insn);
5127 }
5128 
/*
 * Convert FPU predicate @cond (0-31) into a DisasCompare, based on the
 * FPSR condition-code bits (A, Z, N).  Predicates 16-31 mirror 0-15;
 * they differ only in BSUN exception behaviour on unordered operands,
 * which is not implemented here (see TODO below).
 */
static void gen_fcc_cond(DisasCompare *c, DisasContext *s, int cond)
{
    TCGv fpsr;

    c->v2 = tcg_constant_i32(0);
    /* TODO: Raise BSUN exception.  */
    fpsr = tcg_temp_new();
    gen_load_fcr(s, fpsr, M68K_FPSR);
    switch (cond) {
    case 0:  /* False */
    case 16: /* Signaling False */
        c->v1 = c->v2;
        c->tcond = TCG_COND_NEVER;
        break;
    case 1:  /* EQual Z */
    case 17: /* Signaling EQual Z */
        c->v1 = tcg_temp_new();
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
        c->tcond = TCG_COND_NE;
        break;
    case 2:  /* Ordered Greater Than !(A || Z || N) */
    case 18: /* Greater Than !(A || Z || N) */
        c->v1 = tcg_temp_new();
        tcg_gen_andi_i32(c->v1, fpsr,
                         FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
        c->tcond = TCG_COND_EQ;
        break;
    case 3:  /* Ordered Greater than or Equal Z || !(A || N) */
    case 19: /* Greater than or Equal Z || !(A || N) */
        c->v1 = tcg_temp_new();
        /*
         * Shift A into the N bit position, merge with Z and N, and
         * invert the N position: nonzero iff Z || !(A || N).
         */
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
        tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_Z | FPSR_CC_N);
        tcg_gen_or_i32(c->v1, c->v1, fpsr);
        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 4:  /* Ordered Less Than !(!N || A || Z); */
    case 20: /* Less Than !(!N || A || Z); */
        c->v1 = tcg_temp_new();
        tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_N | FPSR_CC_A | FPSR_CC_Z);
        c->tcond = TCG_COND_EQ;
        break;
    case 5:  /* Ordered Less than or Equal Z || (N && !A) */
    case 21: /* Less than or Equal Z || (N && !A) */
        c->v1 = tcg_temp_new();
        /*
         * Shift A into the N bit position and clear N when A is set:
         * nonzero iff Z || (N && !A).
         */
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
        tcg_gen_andc_i32(c->v1, fpsr, c->v1);
        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_Z | FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 6:  /* Ordered Greater or Less than !(A || Z) */
    case 22: /* Greater or Less than !(A || Z) */
        c->v1 = tcg_temp_new();
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
        c->tcond = TCG_COND_EQ;
        break;
    case 7:  /* Ordered !A */
    case 23: /* Greater, Less or Equal !A */
        c->v1 = tcg_temp_new();
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
        c->tcond = TCG_COND_EQ;
        break;
    case 8:  /* Unordered A */
    case 24: /* Not Greater, Less or Equal A */
        c->v1 = tcg_temp_new();
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
        c->tcond = TCG_COND_NE;
        break;
    case 9:  /* Unordered or Equal A || Z */
    case 25: /* Not Greater or Less then A || Z */
        c->v1 = tcg_temp_new();
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
        c->tcond = TCG_COND_NE;
        break;
    case 10: /* Unordered or Greater Than A || !(N || Z)) */
    case 26: /* Not Less or Equal A || !(N || Z)) */
        c->v1 = tcg_temp_new();
        /*
         * Shift Z into the N bit position, merge with A and N, and
         * invert the N position: nonzero iff A || !(N || Z).
         */
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
        tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_A | FPSR_CC_N);
        tcg_gen_or_i32(c->v1, c->v1, fpsr);
        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 11: /* Unordered or Greater or Equal A || Z || !N */
    case 27: /* Not Less Than A || Z || !N */
        c->v1 = tcg_temp_new();
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 12: /* Unordered or Less Than A || (N && !Z) */
    case 28: /* Not Greater than or Equal A || (N && !Z) */
        c->v1 = tcg_temp_new();
        /*
         * Shift Z into the N bit position and clear N when Z is set:
         * nonzero iff A || (N && !Z).
         */
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
        tcg_gen_andc_i32(c->v1, fpsr, c->v1);
        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_A | FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 13: /* Unordered or Less or Equal A || Z || N */
    case 29: /* Not Greater Than A || Z || N */
        c->v1 = tcg_temp_new();
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 14: /* Not Equal !Z */
    case 30: /* Signaling Not Equal !Z */
        c->v1 = tcg_temp_new();
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
        c->tcond = TCG_COND_EQ;
        break;
    case 15: /* True */
    case 31: /* Signaling True */
        c->v1 = c->v2;
        c->tcond = TCG_COND_ALWAYS;
        break;
    }
}
5251 
5252 static void gen_fjmpcc(DisasContext *s, int cond, TCGLabel *l1)
5253 {
5254     DisasCompare c;
5255 
5256     gen_fcc_cond(&c, s, cond);
5257     update_cc_op(s);
5258     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
5259 }
5260 
5261 DISAS_INSN(fbcc)
5262 {
5263     uint32_t offset;
5264     uint32_t base;
5265     TCGLabel *l1;
5266 
5267     base = s->pc;
5268     offset = (int16_t)read_im16(env, s);
5269     if (insn & (1 << 6)) {
5270         offset = (offset << 16) | read_im16(env, s);
5271     }
5272 
5273     l1 = gen_new_label();
5274     update_cc_op(s);
5275     gen_fjmpcc(s, insn & 0x3f, l1);
5276     gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
5277     gen_set_label(l1);
5278     gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
5279 }
5280 
5281 DISAS_INSN(fscc)
5282 {
5283     DisasCompare c;
5284     int cond;
5285     TCGv tmp;
5286     uint16_t ext;
5287 
5288     ext = read_im16(env, s);
5289     cond = ext & 0x3f;
5290     gen_fcc_cond(&c, s, cond);
5291 
5292     tmp = tcg_temp_new();
5293     tcg_gen_negsetcond_i32(c.tcond, tmp, c.v1, c.v2);
5294 
5295     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
5296 }
5297 
5298 DISAS_INSN(ftrapcc)
5299 {
5300     DisasCompare c;
5301     uint16_t ext;
5302     int cond;
5303 
5304     ext = read_im16(env, s);
5305     cond = ext & 0x3f;
5306 
5307     /* Consume and discard the immediate operand. */
5308     switch (extract32(insn, 0, 3)) {
5309     case 2: /* ftrapcc.w */
5310         (void)read_im16(env, s);
5311         break;
5312     case 3: /* ftrapcc.l */
5313         (void)read_im32(env, s);
5314         break;
5315     case 4: /* ftrapcc (no operand) */
5316         break;
5317     default:
5318         /* ftrapcc registered with only valid opmodes */
5319         g_assert_not_reached();
5320     }
5321 
5322     gen_fcc_cond(&c, s, cond);
5323     do_trapcc(s, &c);
5324 }
5325 
5326 #if !defined(CONFIG_USER_ONLY)
5327 DISAS_INSN(frestore)
5328 {
5329     TCGv addr;
5330 
5331     if (IS_USER(s)) {
5332         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5333         return;
5334     }
5335     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5336         SRC_EA(env, addr, OS_LONG, 0, NULL);
5337         /* FIXME: check the state frame */
5338     } else {
5339         disas_undef(env, s, insn);
5340     }
5341 }
5342 
5343 DISAS_INSN(fsave)
5344 {
5345     if (IS_USER(s)) {
5346         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5347         return;
5348     }
5349 
5350     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5351         /* always write IDLE */
5352         TCGv idle = tcg_constant_i32(0x41000000);
5353         DEST_EA(env, insn, OS_LONG, idle, NULL);
5354     } else {
5355         disas_undef(env, s, insn);
5356     }
5357 }
5358 #endif
5359 
5360 static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
5361 {
5362     TCGv tmp = tcg_temp_new();
5363     if (s->env->macsr & MACSR_FI) {
5364         if (upper)
5365             tcg_gen_andi_i32(tmp, val, 0xffff0000);
5366         else
5367             tcg_gen_shli_i32(tmp, val, 16);
5368     } else if (s->env->macsr & MACSR_SU) {
5369         if (upper)
5370             tcg_gen_sari_i32(tmp, val, 16);
5371         else
5372             tcg_gen_ext16s_i32(tmp, val);
5373     } else {
5374         if (upper)
5375             tcg_gen_shri_i32(tmp, val, 16);
5376         else
5377             tcg_gen_ext16u_i32(tmp, val);
5378     }
5379     return tmp;
5380 }
5381 
/* Clear the MACSR result flags (V, Z, N, EV) before recomputing them.  */
static void gen_mac_clear_flags(void)
{
    tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
                     ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
}
5387 
/*
 * Translate the ColdFire EMAC MAC/MSAC instruction: multiply two 16- or
 * 32-bit operands and add (MAC) or subtract (MSAC) the product to/from an
 * accumulator, optionally in parallel with a memory load plus register
 * writeback ("MAC with load" form, insn bits 4-5 nonzero).  The EMAC_B
 * feature additionally permits a dual-accumulate form.
 */
DISAS_INSN(mac)
{
    TCGv rx;
    TCGv ry;
    uint16_t ext;
    int acc;
    TCGv tmp;
    TCGv addr;
    TCGv loadval;
    int dual;
    TCGv saved_flags;

    /* Lazily allocate the 64-bit product temporary, once per TB. */
    if (!s->done_mac) {
        s->mactmp = tcg_temp_new_i64();
        s->done_mac = 1;
    }

    ext = read_im16(env, s);

    acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
    dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
    if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
        disas_undef(env, s, insn);
        return;
    }
    if (insn & 0x30) {
        /* MAC with load.  */
        tmp = gen_lea(env, s, insn, OS_LONG);
        addr = tcg_temp_new();
        tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
        /*
         * Load the value now to ensure correct exception behavior.
         * Perform writeback after reading the MAC inputs.
         */
        loadval = gen_load(s, OS_LONG, addr, 0, IS_USER(s));

        /*
         * NOTE(review): the load form appears to select the other
         * accumulator of the pair -- confirm against the EMAC encoding.
         */
        acc ^= 1;
        rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
        ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
    } else {
        loadval = addr = NULL_QREG;
        rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
        ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    }

    gen_mac_clear_flags();
#if 0
    l1 = -1;
    /* Disabled because conditional branches clobber temporary vars.  */
    if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
        /* Skip the multiply if we know we will ignore it.  */
        l1 = gen_new_label();
        tmp = tcg_temp_new();
        tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
        gen_op_jmp_nz32(tmp, l1);
    }
#endif

    if ((ext & 0x0800) == 0) {
        /* Word.  */
        rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
        ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
    }
    if (s->env->macsr & MACSR_FI) {
        gen_helper_macmulf(s->mactmp, tcg_env, rx, ry);
    } else {
        if (s->env->macsr & MACSR_SU)
            gen_helper_macmuls(s->mactmp, tcg_env, rx, ry);
        else
            gen_helper_macmulu(s->mactmp, tcg_env, rx, ry);
        /* Apply the product scale factor from ext bits 9-10. */
        switch ((ext >> 9) & 3) {
        case 1:
            tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
            break;
        case 3:
            tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
            break;
        }
    }

    if (dual) {
        /* Save the overflow flag from the multiply.  */
        saved_flags = tcg_temp_new();
        tcg_gen_mov_i32(saved_flags, QREG_MACSR);
    } else {
        saved_flags = NULL_QREG;
    }

#if 0
    /* Disabled because conditional branches clobber temporary vars.  */
    if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
        /* Skip the accumulate if the value is already saturated.  */
        l1 = gen_new_label();
        tmp = tcg_temp_new();
        gen_op_and32(tmp, QREG_MACSR, tcg_constant_i32(MACSR_PAV0 << acc));
        gen_op_jmp_nz32(tmp, l1);
    }
#endif

    /* Accumulate (MAC) or subtract (MSAC) the product, then saturate.  */
    if (insn & 0x100)
        tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
    else
        tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);

    if (s->env->macsr & MACSR_FI)
        gen_helper_macsatf(tcg_env, tcg_constant_i32(acc));
    else if (s->env->macsr & MACSR_SU)
        gen_helper_macsats(tcg_env, tcg_constant_i32(acc));
    else
        gen_helper_macsatu(tcg_env, tcg_constant_i32(acc));

#if 0
    /* Disabled because conditional branches clobber temporary vars.  */
    if (l1 != -1)
        gen_set_label(l1);
#endif

    if (dual) {
        /* Dual accumulate variant.  */
        acc = (ext >> 2) & 3;
        /* Restore the overflow flag from the multiplier.  */
        tcg_gen_mov_i32(QREG_MACSR, saved_flags);
#if 0
        /* Disabled because conditional branches clobber temporary vars.  */
        if ((s->env->macsr & MACSR_OMC) != 0) {
            /* Skip the accumulate if the value is already saturated.  */
            l1 = gen_new_label();
            tmp = tcg_temp_new();
            gen_op_and32(tmp, QREG_MACSR, tcg_constant_i32(MACSR_PAV0 << acc));
            gen_op_jmp_nz32(tmp, l1);
        }
#endif
        if (ext & 2)
            tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
        else
            tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
        if (s->env->macsr & MACSR_FI)
            gen_helper_macsatf(tcg_env, tcg_constant_i32(acc));
        else if (s->env->macsr & MACSR_SU)
            gen_helper_macsats(tcg_env, tcg_constant_i32(acc));
        else
            gen_helper_macsatu(tcg_env, tcg_constant_i32(acc));
#if 0
        /* Disabled because conditional branches clobber temporary vars.  */
        if (l1 != -1)
            gen_set_label(l1);
#endif
    }
    gen_helper_mac_set_flags(tcg_env, tcg_constant_i32(acc));

    if (insn & 0x30) {
        /* Writeback for the "MAC with load" form. */
        TCGv rw;
        rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
        tcg_gen_mov_i32(rw, loadval);
        /*
         * FIXME: Should address writeback happen with the masked or
         * unmasked value?
         */
        switch ((insn >> 3) & 7) {
        case 3: /* Post-increment.  */
            tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
            break;
        case 4: /* Pre-decrement.  */
            tcg_gen_mov_i32(AREG(insn, 0), addr);
        }
    }
}
5555 
5556 DISAS_INSN(from_mac)
5557 {
5558     TCGv rx;
5559     TCGv_i64 acc;
5560     int accnum;
5561 
5562     rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5563     accnum = (insn >> 9) & 3;
5564     acc = MACREG(accnum);
5565     if (s->env->macsr & MACSR_FI) {
5566         gen_helper_get_macf(rx, tcg_env, acc);
5567     } else if ((s->env->macsr & MACSR_OMC) == 0) {
5568         tcg_gen_extrl_i64_i32(rx, acc);
5569     } else if (s->env->macsr & MACSR_SU) {
5570         gen_helper_get_macs(rx, acc);
5571     } else {
5572         gen_helper_get_macu(rx, acc);
5573     }
5574     if (insn & 0x40) {
5575         tcg_gen_movi_i64(acc, 0);
5576         tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5577     }
5578 }
5579 
5580 DISAS_INSN(move_mac)
5581 {
5582     /* FIXME: This can be done without a helper.  */
5583     int src;
5584     TCGv dest;
5585     src = insn & 3;
5586     dest = tcg_constant_i32((insn >> 9) & 3);
5587     gen_helper_mac_move(tcg_env, dest, tcg_constant_i32(src));
5588     gen_mac_clear_flags();
5589     gen_helper_mac_set_flags(tcg_env, dest);
5590 }
5591 
5592 DISAS_INSN(from_macsr)
5593 {
5594     TCGv reg;
5595 
5596     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5597     tcg_gen_mov_i32(reg, QREG_MACSR);
5598 }
5599 
5600 DISAS_INSN(from_mask)
5601 {
5602     TCGv reg;
5603     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5604     tcg_gen_mov_i32(reg, QREG_MAC_MASK);
5605 }
5606 
5607 DISAS_INSN(from_mext)
5608 {
5609     TCGv reg;
5610     TCGv acc;
5611     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5612     acc = tcg_constant_i32((insn & 0x400) ? 2 : 0);
5613     if (s->env->macsr & MACSR_FI)
5614         gen_helper_get_mac_extf(reg, tcg_env, acc);
5615     else
5616         gen_helper_get_mac_exti(reg, tcg_env, acc);
5617 }
5618 
5619 DISAS_INSN(macsr_to_ccr)
5620 {
5621     TCGv tmp = tcg_temp_new();
5622 
5623     /* Note that X and C are always cleared. */
5624     tcg_gen_andi_i32(tmp, QREG_MACSR, CCF_N | CCF_Z | CCF_V);
5625     gen_helper_set_ccr(tcg_env, tmp);
5626     set_cc_op(s, CC_OP_FLAGS);
5627 }
5628 
5629 DISAS_INSN(to_mac)
5630 {
5631     TCGv_i64 acc;
5632     TCGv val;
5633     int accnum;
5634     accnum = (insn >> 9) & 3;
5635     acc = MACREG(accnum);
5636     SRC_EA(env, val, OS_LONG, 0, NULL);
5637     if (s->env->macsr & MACSR_FI) {
5638         tcg_gen_ext_i32_i64(acc, val);
5639         tcg_gen_shli_i64(acc, acc, 8);
5640     } else if (s->env->macsr & MACSR_SU) {
5641         tcg_gen_ext_i32_i64(acc, val);
5642     } else {
5643         tcg_gen_extu_i32_i64(acc, val);
5644     }
5645     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5646     gen_mac_clear_flags();
5647     gen_helper_mac_set_flags(tcg_env, tcg_constant_i32(accnum));
5648 }
5649 
5650 DISAS_INSN(to_macsr)
5651 {
5652     TCGv val;
5653     SRC_EA(env, val, OS_LONG, 0, NULL);
5654     gen_helper_set_macsr(tcg_env, val);
5655     gen_exit_tb(s);
5656 }
5657 
5658 DISAS_INSN(to_mask)
5659 {
5660     TCGv val;
5661     SRC_EA(env, val, OS_LONG, 0, NULL);
5662     tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
5663 }
5664 
5665 DISAS_INSN(to_mext)
5666 {
5667     TCGv val;
5668     TCGv acc;
5669     SRC_EA(env, val, OS_LONG, 0, NULL);
5670     acc = tcg_constant_i32((insn & 0x400) ? 2 : 0);
5671     if (s->env->macsr & MACSR_FI)
5672         gen_helper_set_mac_extf(tcg_env, val, acc);
5673     else if (s->env->macsr & MACSR_SU)
5674         gen_helper_set_mac_exts(tcg_env, val, acc);
5675     else
5676         gen_helper_set_mac_extu(tcg_env, val, acc);
5677 }
5678 
/* Dispatch table: one disassembly handler per 16-bit opcode value.  */
static disas_proc opcode_table[65536];
5680 
5681 static void
5682 register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
5683 {
5684   int i;
5685   int from;
5686   int to;
5687 
5688   /* Sanity check.  All set bits must be included in the mask.  */
5689   if (opcode & ~mask) {
5690       fprintf(stderr,
5691               "qemu internal error: bogus opcode definition %04x/%04x\n",
5692               opcode, mask);
5693       abort();
5694   }
5695   /*
5696    * This could probably be cleverer.  For now just optimize the case where
5697    * the top bits are known.
5698    */
5699   /* Find the first zero bit in the mask.  */
5700   i = 0x8000;
5701   while ((i & mask) != 0)
5702       i >>= 1;
5703   /* Iterate over all combinations of this and lower bits.  */
5704   if (i == 0)
5705       i = 1;
5706   else
5707       i <<= 1;
5708   from = opcode & ~(i - 1);
5709   to = from + i;
5710   for (i = from; i < to; i++) {
5711       if ((i & mask) == opcode)
5712           opcode_table[i] = proc;
5713   }
5714 }
5715 
5716 /*
5717  * Register m68k opcode handlers.  Order is important.
5718  * Later insn override earlier ones.
5719  */
void register_m68k_insns (CPUM68KState *env)
{
    /*
     * Build the opcode table only once to avoid
     * multithreading issues.
     */
    if (opcode_table[0] != NULL) {
        return;
    }

    /*
     * use BASE() for instruction available
     * for CF_ISA_A and M68000.
     */
#define BASE(name, opcode, mask) \
    register_opcode(disas_##name, 0x##opcode, 0x##mask)
#define INSN(name, opcode, mask, feature) do { \
    if (m68k_feature(env, M68K_FEATURE_##feature)) \
        BASE(name, opcode, mask); \
    } while(0)
    /* 0x0xxx: bit ops, immediate arithmetic, MOVEP, CAS.  */
    BASE(undef,     0000, 0000);
    INSN(arith_im,  0080, fff8, CF_ISA_A);
    INSN(arith_im,  0000, ff00, M68K);
    INSN(chk2,      00c0, f9c0, CHK2);
    INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
    BASE(bitop_reg, 0100, f1c0);
    BASE(bitop_reg, 0140, f1c0);
    BASE(bitop_reg, 0180, f1c0);
    BASE(bitop_reg, 01c0, f1c0);
    INSN(movep,     0108, f138, MOVEP);
    INSN(arith_im,  0280, fff8, CF_ISA_A);
    INSN(arith_im,  0200, ff00, M68K);
    INSN(undef,     02c0, ffc0, M68K);
    INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
    INSN(arith_im,  0480, fff8, CF_ISA_A);
    INSN(arith_im,  0400, ff00, M68K);
    INSN(undef,     04c0, ffc0, M68K);
    INSN(arith_im,  0600, ff00, M68K);
    INSN(undef,     06c0, ffc0, M68K);
    INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
    INSN(arith_im,  0680, fff8, CF_ISA_A);
    INSN(arith_im,  0c00, ff38, CF_ISA_A);
    INSN(arith_im,  0c00, ff00, M68K);
    BASE(bitop_im,  0800, ffc0);
    BASE(bitop_im,  0840, ffc0);
    BASE(bitop_im,  0880, ffc0);
    BASE(bitop_im,  08c0, ffc0);
    INSN(arith_im,  0a80, fff8, CF_ISA_A);
    INSN(arith_im,  0a00, ff00, M68K);
#if !defined(CONFIG_USER_ONLY)
    INSN(moves,     0e00, ff00, M68K);
#endif
    INSN(cas,       0ac0, ffc0, CAS);
    INSN(cas,       0cc0, ffc0, CAS);
    INSN(cas,       0ec0, ffc0, CAS);
    INSN(cas2w,     0cfc, ffff, CAS);
    INSN(cas2l,     0efc, ffff, CAS);
    /* 0x1xxx-0x3xxx: MOVE.  */
    BASE(move,      1000, f000);
    BASE(move,      2000, f000);
    BASE(move,      3000, f000);
    /* 0x4xxx: miscellaneous.  */
    INSN(chk,       4000, f040, M68K);
    INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
    INSN(negx,      4080, fff8, CF_ISA_A);
    INSN(negx,      4000, ff00, M68K);
    INSN(undef,     40c0, ffc0, M68K);
    INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
    INSN(move_from_sr, 40c0, ffc0, M68K);
    BASE(lea,       41c0, f1c0);
    BASE(clr,       4200, ff00);
    BASE(undef,     42c0, ffc0);
    INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
    INSN(move_from_ccr, 42c0, ffc0, M68K);
    INSN(neg,       4480, fff8, CF_ISA_A);
    INSN(neg,       4400, ff00, M68K);
    INSN(undef,     44c0, ffc0, M68K);
    BASE(move_to_ccr, 44c0, ffc0);
    INSN(not,       4680, fff8, CF_ISA_A);
    INSN(not,       4600, ff00, M68K);
#if !defined(CONFIG_USER_ONLY)
    BASE(move_to_sr, 46c0, ffc0);
#endif
    INSN(nbcd,      4800, ffc0, M68K);
    INSN(linkl,     4808, fff8, M68K);
    BASE(pea,       4840, ffc0);
    BASE(swap,      4840, fff8);
    INSN(bkpt,      4848, fff8, BKPT);
    INSN(movem,     48d0, fbf8, CF_ISA_A);
    INSN(movem,     48e8, fbf8, CF_ISA_A);
    INSN(movem,     4880, fb80, M68K);
    BASE(ext,       4880, fff8);
    BASE(ext,       48c0, fff8);
    BASE(ext,       49c0, fff8);
    BASE(tst,       4a00, ff00);
    INSN(tas,       4ac0, ffc0, CF_ISA_B);
    INSN(tas,       4ac0, ffc0, M68K);
#if !defined(CONFIG_USER_ONLY)
    INSN(halt,      4ac8, ffff, CF_ISA_A);
    INSN(halt,      4ac8, ffff, M68K);
#endif
    INSN(pulse,     4acc, ffff, CF_ISA_A);
    BASE(illegal,   4afc, ffff);
    INSN(mull,      4c00, ffc0, CF_ISA_A);
    INSN(mull,      4c00, ffc0, LONG_MULDIV);
    INSN(divl,      4c40, ffc0, CF_ISA_A);
    INSN(divl,      4c40, ffc0, LONG_MULDIV);
    INSN(sats,      4c80, fff8, CF_ISA_B);
    BASE(trap,      4e40, fff0);
    BASE(link,      4e50, fff8);
    BASE(unlk,      4e58, fff8);
#if !defined(CONFIG_USER_ONLY)
    INSN(move_to_usp, 4e60, fff8, USP);
    INSN(move_from_usp, 4e68, fff8, USP);
    INSN(reset,     4e70, ffff, M68K);
    BASE(stop,      4e72, ffff);
    BASE(rte,       4e73, ffff);
    INSN(cf_movec,  4e7b, ffff, CF_ISA_A);
    INSN(m68k_movec, 4e7a, fffe, MOVEC);
#endif
    BASE(nop,       4e71, ffff);
    INSN(rtd,       4e74, ffff, RTD);
    BASE(rts,       4e75, ffff);
    INSN(trapv,     4e76, ffff, M68K);
    INSN(rtr,       4e77, ffff, M68K);
    BASE(jump,      4e80, ffc0);
    BASE(jump,      4ec0, ffc0);
    /* 0x5xxx: ADDQ/SUBQ, Scc, DBcc, TRAPcc.  */
    INSN(addsubq,   5000, f080, M68K);
    BASE(addsubq,   5080, f0c0);
    INSN(scc,       50c0, f0f8, CF_ISA_A); /* Scc.B Dx   */
    INSN(scc,       50c0, f0c0, M68K);     /* Scc.B <EA> */
    INSN(dbcc,      50c8, f0f8, M68K);
    INSN(trapcc,    50fa, f0fe, TRAPCC);   /* opmode 010, 011 */
    INSN(trapcc,    50fc, f0ff, TRAPCC);   /* opmode 100 */
    INSN(trapcc,    51fa, fffe, CF_ISA_A); /* TPF (trapf) opmode 010, 011 */
    INSN(trapcc,    51fc, ffff, CF_ISA_A); /* TPF (trapf) opmode 100 */

    /* Branch instructions.  */
    BASE(branch,    6000, f000);
    /* Disable long branch instructions, then add back the ones we want.  */
    BASE(undef,     60ff, f0ff); /* All long branches.  */
    INSN(branch,    60ff, f0ff, CF_ISA_B);
    INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
    INSN(branch,    60ff, ffff, BRAL);
    INSN(branch,    60ff, f0ff, BCCL);

    BASE(moveq,     7000, f100);
    INSN(mvzs,      7100, f100, CF_ISA_B);
    BASE(or,        8000, f000);
    BASE(divw,      80c0, f0c0);
    INSN(sbcd_reg,  8100, f1f8, M68K);
    INSN(sbcd_mem,  8108, f1f8, M68K);
    BASE(addsub,    9000, f000);
    INSN(undef,     90c0, f0c0, CF_ISA_A);
    INSN(subx_reg,  9180, f1f8, CF_ISA_A);
    INSN(subx_reg,  9100, f138, M68K);
    INSN(subx_mem,  9108, f138, M68K);
    INSN(suba,      91c0, f1c0, CF_ISA_A);
    INSN(suba,      90c0, f0c0, M68K);

    /* 0xaxxx: ColdFire EMAC.  */
    BASE(undef_mac, a000, f000);
    INSN(mac,       a000, f100, CF_EMAC);
    INSN(from_mac,  a180, f9b0, CF_EMAC);
    INSN(move_mac,  a110, f9fc, CF_EMAC);
    INSN(from_macsr,a980, f9f0, CF_EMAC);
    INSN(from_mask, ad80, fff0, CF_EMAC);
    INSN(from_mext, ab80, fbf0, CF_EMAC);
    INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
    INSN(to_mac,    a100, f9c0, CF_EMAC);
    INSN(to_macsr,  a900, ffc0, CF_EMAC);
    INSN(to_mext,   ab00, fbc0, CF_EMAC);
    INSN(to_mask,   ad00, ffc0, CF_EMAC);

    INSN(mov3q,     a140, f1c0, CF_ISA_B);
    INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
    INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
    INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
    INSN(cmp,       b080, f1c0, CF_ISA_A);
    INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
    INSN(cmp,       b000, f100, M68K);
    INSN(eor,       b100, f100, M68K);
    INSN(cmpm,      b108, f138, M68K);
    INSN(cmpa,      b0c0, f0c0, M68K);
    INSN(eor,       b180, f1c0, CF_ISA_A);
    BASE(and,       c000, f000);
    INSN(exg_dd,    c140, f1f8, M68K);
    INSN(exg_aa,    c148, f1f8, M68K);
    INSN(exg_da,    c188, f1f8, M68K);
    BASE(mulw,      c0c0, f0c0);
    INSN(abcd_reg,  c100, f1f8, M68K);
    INSN(abcd_mem,  c108, f1f8, M68K);
    BASE(addsub,    d000, f000);
    INSN(undef,     d0c0, f0c0, CF_ISA_A);
    INSN(addx_reg,      d180, f1f8, CF_ISA_A);
    INSN(addx_reg,  d100, f138, M68K);
    INSN(addx_mem,  d108, f138, M68K);
    INSN(adda,      d1c0, f1c0, CF_ISA_A);
    INSN(adda,      d0c0, f0c0, M68K);
    /* 0xexxx: shifts, rotates, bitfields.  */
    INSN(shift_im,  e080, f0f0, CF_ISA_A);
    INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
    INSN(shift8_im, e000, f0f0, M68K);
    INSN(shift16_im, e040, f0f0, M68K);
    INSN(shift_im,  e080, f0f0, M68K);
    INSN(shift8_reg, e020, f0f0, M68K);
    INSN(shift16_reg, e060, f0f0, M68K);
    INSN(shift_reg, e0a0, f0f0, M68K);
    INSN(shift_mem, e0c0, fcc0, M68K);
    INSN(rotate_im, e090, f0f0, M68K);
    INSN(rotate8_im, e010, f0f0, M68K);
    INSN(rotate16_im, e050, f0f0, M68K);
    INSN(rotate_reg, e0b0, f0f0, M68K);
    INSN(rotate8_reg, e030, f0f0, M68K);
    INSN(rotate16_reg, e070, f0f0, M68K);
    INSN(rotate_mem, e4c0, fcc0, M68K);
    INSN(bfext_mem, e9c0, fdc0, BITFIELD);  /* bfextu & bfexts */
    INSN(bfext_reg, e9c0, fdf8, BITFIELD);
    INSN(bfins_mem, efc0, ffc0, BITFIELD);
    INSN(bfins_reg, efc0, fff8, BITFIELD);
    INSN(bfop_mem, eac0, ffc0, BITFIELD);   /* bfchg */
    INSN(bfop_reg, eac0, fff8, BITFIELD);   /* bfchg */
    INSN(bfop_mem, ecc0, ffc0, BITFIELD);   /* bfclr */
    INSN(bfop_reg, ecc0, fff8, BITFIELD);   /* bfclr */
    INSN(bfop_mem, edc0, ffc0, BITFIELD);   /* bfffo */
    INSN(bfop_reg, edc0, fff8, BITFIELD);   /* bfffo */
    INSN(bfop_mem, eec0, ffc0, BITFIELD);   /* bfset */
    INSN(bfop_reg, eec0, fff8, BITFIELD);   /* bfset */
    INSN(bfop_mem, e8c0, ffc0, BITFIELD);   /* bftst */
    INSN(bfop_reg, e8c0, fff8, BITFIELD);   /* bftst */
    /* 0xfxxx: coprocessor (FPU) and system instructions.  */
    BASE(undef_fpu, f000, f000);
    INSN(fpu,       f200, ffc0, CF_FPU);
    INSN(fbcc,      f280, ffc0, CF_FPU);
    INSN(fpu,       f200, ffc0, FPU);
    INSN(fscc,      f240, ffc0, FPU);
    INSN(ftrapcc,   f27a, fffe, FPU);       /* opmode 010, 011 */
    INSN(ftrapcc,   f27c, ffff, FPU);       /* opmode 100 */
    INSN(fbcc,      f280, ff80, FPU);
#if !defined(CONFIG_USER_ONLY)
    INSN(frestore,  f340, ffc0, CF_FPU);
    INSN(fsave,     f300, ffc0, CF_FPU);
    INSN(frestore,  f340, ffc0, FPU);
    INSN(fsave,     f300, ffc0, FPU);
    INSN(intouch,   f340, ffc0, CF_ISA_A);
    INSN(cpushl,    f428, ff38, CF_ISA_A);
    INSN(cpush,     f420, ff20, M68040);
    INSN(cinv,      f400, ff20, M68040);
    INSN(pflush,    f500, ffe0, M68040);
    INSN(ptest,     f548, ffd8, M68040);
    INSN(wddata,    fb00, ff00, CF_ISA_A);
    INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
#endif
    INSN(move16_mem, f600, ffe0, M68040);
    INSN(move16_reg, f620, fff8, M68040);
#undef INSN
}
5971 }
5972 
/* Initialize per-TB translation state before the first instruction.  */
static void m68k_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    CPUM68KState *env = cpu_env(cpu);

    dc->env = env;
    dc->pc = dc->base.pc_first;
    /* This value will always be filled in properly before m68k_tr_tb_stop. */
    dc->pc_prev = 0xdeadbeef;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cc_op_synced = 1;
    dc->done_mac = 0;
    dc->writeback_mask = 0;

    dc->ss_active = (M68K_SR_TRACE(env->sr) == M68K_SR_TRACE_ANY_INS);
    /* If architectural single step active, limit to 1 */
    if (dc->ss_active) {
        dc->base.max_insns = 1;
    }
}
5993 
static void m68k_tr_tb_start(DisasContextBase *dcbase, CPUState *cpu)
{
    /* No per-TB startup work is needed for m68k.  */
}
5997 
/* Mark the start of an instruction, logging its pc and current cc_op.  */
static void m68k_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    tcg_gen_insn_start(dc->base.pc_next, dc->cc_op);
}
6003 
/* Fetch, dispatch and translate one instruction, then decide whether
 * translation of this TB should continue. */
static void m68k_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    CPUM68KState *env = cpu_env(cpu);
    uint16_t insn = read_im16(env, dc);

    /* Dispatch on the full 16-bit opcode word, then flush any
     * deferred address-register writebacks. */
    opcode_table[insn](env, dc, insn);
    do_writebacks(dc);

    dc->pc_prev = dc->base.pc_next;
    dc->base.pc_next = dc->pc;

    if (dc->base.is_jmp == DISAS_NEXT) {
        /*
         * Stop translation when the next insn might touch a new page.
         * This ensures that prefetch aborts at the right place.
         *
         * We cannot determine the size of the next insn without
         * completely decoding it.  However, the maximum insn size
         * is 32 bytes, so end if we do not have that much remaining.
         * This may produce several small TBs at the end of each page,
         * but they will all be linked with goto_tb.
         *
         * ??? ColdFire maximum is 4 bytes; MC68000's maximum is also
         * smaller than MC68020's.
         */
        target_ulong start_page_offset
            = dc->pc - (dc->base.pc_first & TARGET_PAGE_MASK);

        if (start_page_offset >= TARGET_PAGE_SIZE - 32) {
            dc->base.is_jmp = DISAS_TOO_MANY;
        }
    }
}
6038 
/* Emit the closing code for the TB, according to how translation ended.  */
static void m68k_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);

    switch (dc->base.is_jmp) {
    case DISAS_NORETURN:
        /* An exception or similar already ended the TB; nothing to emit. */
        break;
    case DISAS_TOO_MANY:
        update_cc_op(dc);
        gen_jmp_tb(dc, 0, dc->pc, dc->pc_prev);
        break;
    case DISAS_JUMP:
        /* We updated CC_OP and PC in gen_jmp/gen_jmp_im.  */
        if (dc->ss_active) {
            gen_raise_exception_format2(dc, EXCP_TRACE, dc->pc_prev);
        } else {
            tcg_gen_lookup_and_goto_ptr();
        }
        break;
    case DISAS_EXIT:
        /*
         * We updated CC_OP and PC in gen_exit_tb, but also modified
         * other state that may require returning to the main loop.
         */
        if (dc->ss_active) {
            gen_raise_exception_format2(dc, EXCP_TRACE, dc->pc_prev);
        } else {
            tcg_gen_exit_tb(NULL, 0);
        }
        break;
    default:
        g_assert_not_reached();
    }
}
6073 
6074 static void m68k_tr_disas_log(const DisasContextBase *dcbase,
6075                               CPUState *cpu, FILE *logfile)
6076 {
6077     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
6078     target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
6079 }
6080 
/* Hooks through which the generic translator_loop() drives m68k translation. */
static const TranslatorOps m68k_tr_ops = {
    .init_disas_context = m68k_tr_init_disas_context,
    .tb_start           = m68k_tr_tb_start,
    .insn_start         = m68k_tr_insn_start,
    .translate_insn     = m68k_tr_translate_insn,
    .tb_stop            = m68k_tr_tb_stop,
    .disas_log          = m68k_tr_disas_log,
};
6089 
/*
 * Target entry point for TB translation: run the generic translator
 * loop over the guest code starting at @pc, using the m68k hooks above.
 */
void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int *max_insns,
                           target_ulong pc, void *host_pc)
{
    DisasContext dc;
    translator_loop(cpu, tb, max_insns, pc, host_pc, &m68k_tr_ops, &dc.base);
}
6096 
6097 static double floatx80_to_double(CPUM68KState *env, uint16_t high, uint64_t low)
6098 {
6099     floatx80 a = { .high = high, .low = low };
6100     union {
6101         float64 f64;
6102         double d;
6103     } u;
6104 
6105     u.f64 = floatx80_to_float64(a, &env->fp_status);
6106     return u.d;
6107 }
6108 
/*
 * Dump the architectural state (data/address/FP registers, PC, SR/CCR,
 * FPSR/FPCR and, for system emulation, stack pointers and MMU state)
 * to stream @f.  @flags is unused here.
 */
void m68k_cpu_dump_state(CPUState *cs, FILE *f, int flags)
{
    M68kCPU *cpu = M68K_CPU(cs);
    CPUM68KState *env = &cpu->env;
    int i;
    uint16_t sr;
    for (i = 0; i < 8; i++) {
        /* Each FP reg is shown raw (exponent:mantissa) and as a double. */
        qemu_fprintf(f, "D%d = %08x   A%d = %08x   "
                     "F%d = %04x %016"PRIx64"  (%12g)\n",
                     i, env->dregs[i], i, env->aregs[i],
                     i, env->fregs[i].l.upper, env->fregs[i].l.lower,
                     floatx80_to_double(env, env->fregs[i].l.upper,
                                        env->fregs[i].l.lower));
    }
    qemu_fprintf(f, "PC = %08x   ", env->pc);
    /* Fold the lazily-evaluated condition codes into the stored SR. */
    sr = env->sr | cpu_m68k_get_ccr(env);
    /* S/U = supervisor/user; '%'/'I' reflect the M (stack-select) bit. */
    qemu_fprintf(f, "SR = %04x T:%x I:%x %c%c %c%c%c%c%c\n",
                 sr, (sr & SR_T) >> SR_T_SHIFT, (sr & SR_I) >> SR_I_SHIFT,
                 (sr & SR_S) ? 'S' : 'U', (sr & SR_M) ? '%' : 'I',
                 (sr & CCF_X) ? 'X' : '-', (sr & CCF_N) ? 'N' : '-',
                 (sr & CCF_Z) ? 'Z' : '-', (sr & CCF_V) ? 'V' : '-',
                 (sr & CCF_C) ? 'C' : '-');
    qemu_fprintf(f, "FPSR = %08x %c%c%c%c ", env->fpsr,
                 (env->fpsr & FPSR_CC_A) ? 'A' : '-',
                 (env->fpsr & FPSR_CC_I) ? 'I' : '-',
                 (env->fpsr & FPSR_CC_Z) ? 'Z' : '-',
                 (env->fpsr & FPSR_CC_N) ? 'N' : '-');
    qemu_fprintf(f, "\n                                "
                 "FPCR =     %04x ", env->fpcr);
    /* FPCR rounding precision; unknown encodings print nothing. */
    switch (env->fpcr & FPCR_PREC_MASK) {
    case FPCR_PREC_X:
        qemu_fprintf(f, "X ");
        break;
    case FPCR_PREC_S:
        qemu_fprintf(f, "S ");
        break;
    case FPCR_PREC_D:
        qemu_fprintf(f, "D ");
        break;
    }
    /* FPCR rounding mode: to-nearest, toward zero, minus or plus infinity. */
    switch (env->fpcr & FPCR_RND_MASK) {
    case FPCR_RND_N:
        qemu_fprintf(f, "RN ");
        break;
    case FPCR_RND_Z:
        qemu_fprintf(f, "RZ ");
        break;
    case FPCR_RND_M:
        qemu_fprintf(f, "RM ");
        break;
    case FPCR_RND_P:
        qemu_fprintf(f, "RP ");
        break;
    }
    qemu_fprintf(f, "\n");
#ifndef CONFIG_USER_ONLY
    /* System emulation only: mark the active A7 bank with "->". */
    qemu_fprintf(f, "%sA7(MSP) = %08x %sA7(USP) = %08x %sA7(ISP) = %08x\n",
                 env->current_sp == M68K_SSP ? "->" : "  ", env->sp[M68K_SSP],
                 env->current_sp == M68K_USP ? "->" : "  ", env->sp[M68K_USP],
                 env->current_sp == M68K_ISP ? "->" : "  ", env->sp[M68K_ISP]);
    qemu_fprintf(f, "VBR = 0x%08x\n", env->vbr);
    qemu_fprintf(f, "SFC = %x DFC %x\n", env->sfc, env->dfc);
    qemu_fprintf(f, "SSW %08x TCR %08x URP %08x SRP %08x\n",
                 env->mmu.ssw, env->mmu.tcr, env->mmu.urp, env->mmu.srp);
    qemu_fprintf(f, "DTTR0/1: %08x/%08x ITTR0/1: %08x/%08x\n",
                 env->mmu.ttr[M68K_DTTR0], env->mmu.ttr[M68K_DTTR1],
                 env->mmu.ttr[M68K_ITTR0], env->mmu.ttr[M68K_ITTR1]);
    qemu_fprintf(f, "MMUSR %08x, fault at %08x\n",
                 env->mmu.mmusr, env->mmu.ar);
#endif /* !CONFIG_USER_ONLY */
}
6180