xref: /openbmc/qemu/target/m68k/translate.c (revision d3860a57)
1 /*
2  *  m68k translation
3  *
4  *  Copyright (c) 2005-2007 CodeSourcery
5  *  Written by Paul Brook
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "disas/disas.h"
24 #include "exec/exec-all.h"
25 #include "tcg/tcg-op.h"
26 #include "qemu/log.h"
27 #include "qemu/qemu-print.h"
28 #include "exec/cpu_ldst.h"
29 #include "exec/translator.h"
30 
31 #include "exec/helper-proto.h"
32 #include "exec/helper-gen.h"
33 
34 #include "exec/log.h"
35 #include "fpu/softfloat.h"
36 
37 
38 //#define DEBUG_DISPATCH 1
39 
40 #define DEFO32(name, offset) static TCGv QREG_##name;
41 #define DEFO64(name, offset) static TCGv_i64 QREG_##name;
42 #include "qregs.h.inc"
43 #undef DEFO32
44 #undef DEFO64
45 
46 static TCGv_i32 cpu_halted;
47 static TCGv_i32 cpu_exception_index;
48 
49 static char cpu_reg_names[2 * 8 * 3 + 5 * 4];
50 static TCGv cpu_dregs[8];
51 static TCGv cpu_aregs[8];
52 static TCGv_i64 cpu_macc[4];
53 
54 #define REG(insn, pos)  (((insn) >> (pos)) & 7)
55 #define DREG(insn, pos) cpu_dregs[REG(insn, pos)]
56 #define AREG(insn, pos) get_areg(s, REG(insn, pos))
57 #define MACREG(acc)     cpu_macc[acc]
58 #define QREG_SP         get_areg(s, 7)
59 
60 static TCGv NULL_QREG;
61 #define IS_NULL_QREG(t) (t == NULL_QREG)
62 /* Used to distinguish stores from bad addressing modes.  */
63 static TCGv store_dummy;
64 
65 #include "exec/gen-icount.h"
66 
67 void m68k_tcg_init(void)
68 {
69     char *p;
70     int i;
71 
72 #define DEFO32(name, offset) \
73     QREG_##name = tcg_global_mem_new_i32(cpu_env, \
74         offsetof(CPUM68KState, offset), #name);
75 #define DEFO64(name, offset) \
76     QREG_##name = tcg_global_mem_new_i64(cpu_env, \
77         offsetof(CPUM68KState, offset), #name);
78 #include "qregs.h.inc"
79 #undef DEFO32
80 #undef DEFO64
81 
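    /*
     * cpu->halted and cpu->exception_index live in the CPUState parent of
     * M68kCPU, which precedes env, hence the negative offset adjustment
     * relative to cpu_env.
     */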
82     cpu_halted = tcg_global_mem_new_i32(cpu_env,
83                                         -offsetof(M68kCPU, env) +
84                                         offsetof(CPUState, halted), "HALTED");
85     cpu_exception_index = tcg_global_mem_new_i32(cpu_env,
86                                                  -offsetof(M68kCPU, env) +
87                                                  offsetof(CPUState, exception_index),
88                                                  "EXCEPTION");
89 
90     p = cpu_reg_names;
91     for (i = 0; i < 8; i++) {
92         sprintf(p, "D%d", i);
93         cpu_dregs[i] = tcg_global_mem_new(cpu_env,
94                                           offsetof(CPUM68KState, dregs[i]), p);
95         p += 3;
96         sprintf(p, "A%d", i);
97         cpu_aregs[i] = tcg_global_mem_new(cpu_env,
98                                           offsetof(CPUM68KState, aregs[i]), p);
99         p += 3;
100     }
101     for (i = 0; i < 4; i++) {
102         sprintf(p, "ACC%d", i);
103         cpu_macc[i] = tcg_global_mem_new_i64(cpu_env,
104                                          offsetof(CPUM68KState, macc[i]), p);
105         p += 5;
106     }
107 
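    /*
     * Dummy TCG globals at impossible (negative) env offsets, used only as
     * sentinel values: NULL_QREG marks an invalid addressing mode and
     * store_dummy is what gen_ldst() returns for stores, so callers can
     * tell stores apart from loads.
     */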
108     NULL_QREG = tcg_global_mem_new(cpu_env, -4, "NULL");
109     store_dummy = tcg_global_mem_new(cpu_env, -8, "NULL");
110 }
111 
112 /* internal defines */
113 typedef struct DisasContext {
114     DisasContextBase base;
115     CPUM68KState *env;
116     target_ulong pc;
117     target_ulong pc_prev;
118     CCOp cc_op; /* Current CC operation */
119     int cc_op_synced;
120     TCGv_i64 mactmp;
121     int done_mac;
122     int writeback_mask;
123     TCGv writeback[8];
124     bool ss_active;
125 } DisasContext;
126 
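/*
 * Address register updates performed as part of an addressing mode
 * (postincrement/predecrement) are not written to cpu_aregs[] right away:
 * they are staged in s->writeback[], tracked by s->writeback_mask, and
 * committed to cpu_aregs[] by do_writebacks().  get_areg() returns the
 * staged value when one exists.
 */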
127 static TCGv get_areg(DisasContext *s, unsigned regno)
128 {
129     if (s->writeback_mask & (1 << regno)) {
130         return s->writeback[regno];
131     } else {
132         return cpu_aregs[regno];
133     }
134 }
135 
136 static void delay_set_areg(DisasContext *s, unsigned regno,
137                            TCGv val, bool give_temp)
138 {
139     if (s->writeback_mask & (1 << regno)) {
140         if (give_temp) {
141             s->writeback[regno] = val;
142         } else {
143             tcg_gen_mov_i32(s->writeback[regno], val);
144         }
145     } else {
146         s->writeback_mask |= 1 << regno;
147         if (give_temp) {
148             s->writeback[regno] = val;
149         } else {
150             TCGv tmp = tcg_temp_new();
151             s->writeback[regno] = tmp;
152             tcg_gen_mov_i32(tmp, val);
153         }
154     }
155 }
156 
157 static void do_writebacks(DisasContext *s)
158 {
159     unsigned mask = s->writeback_mask;
160     if (mask) {
161         s->writeback_mask = 0;
162         do {
163             unsigned regno = ctz32(mask);
164             tcg_gen_mov_i32(cpu_aregs[regno], s->writeback[regno]);
165             mask &= mask - 1;
166         } while (mask);
167     }
168 }
169 
170 /* is_jmp field values */
171 #define DISAS_JUMP      DISAS_TARGET_0 /* only pc was modified dynamically */
172 #define DISAS_EXIT      DISAS_TARGET_1 /* cpu state was modified dynamically */
173 
174 #if defined(CONFIG_USER_ONLY)
175 #define IS_USER(s) 1
176 #else
177 #define IS_USER(s)   (!(s->base.tb->flags & TB_FLAGS_MSR_S))
178 #define SFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_SFC_S) ? \
179                       MMU_KERNEL_IDX : MMU_USER_IDX)
180 #define DFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_DFC_S) ? \
181                       MMU_KERNEL_IDX : MMU_USER_IDX)
182 #endif
183 
184 typedef void (*disas_proc)(CPUM68KState *env, DisasContext *s, uint16_t insn);
185 
186 #ifdef DEBUG_DISPATCH
187 #define DISAS_INSN(name)                                                \
188     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
189                                   uint16_t insn);                       \
190     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
191                              uint16_t insn)                             \
192     {                                                                   \
193         qemu_log("Dispatch " #name "\n");                               \
194         real_disas_##name(env, s, insn);                                \
195     }                                                                   \
196     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
197                                   uint16_t insn)
198 #else
199 #define DISAS_INSN(name)                                                \
200     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
201                              uint16_t insn)
202 #endif
203 
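/*
 * For each CC_OP encoding, the set of flags whose computation is held
 * live in the QREG_CC_* globals.  set_cc_op() uses this table to discard
 * values that the new CC_OP no longer needs.
 */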
204 static const uint8_t cc_op_live[CC_OP_NB] = {
205     [CC_OP_DYNAMIC] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
206     [CC_OP_FLAGS] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
207     [CC_OP_ADDB ... CC_OP_ADDL] = CCF_X | CCF_N | CCF_V,
208     [CC_OP_SUBB ... CC_OP_SUBL] = CCF_X | CCF_N | CCF_V,
209     [CC_OP_CMPB ... CC_OP_CMPL] = CCF_X | CCF_N | CCF_V,
210     [CC_OP_LOGIC] = CCF_X | CCF_N
211 };
212 
213 static void set_cc_op(DisasContext *s, CCOp op)
214 {
215     CCOp old_op = s->cc_op;
216     int dead;
217 
218     if (old_op == op) {
219         return;
220     }
221     s->cc_op = op;
222     s->cc_op_synced = 0;
223 
224     /*
225      * Discard CC computation that will no longer be used.
226      * Note that X and N are never dead.
227      */
228     dead = cc_op_live[old_op] & ~cc_op_live[op];
229     if (dead & CCF_C) {
230         tcg_gen_discard_i32(QREG_CC_C);
231     }
232     if (dead & CCF_Z) {
233         tcg_gen_discard_i32(QREG_CC_Z);
234     }
235     if (dead & CCF_V) {
236         tcg_gen_discard_i32(QREG_CC_V);
237     }
238 }
239 
240 /* Update the CPU env CC_OP state.  */
241 static void update_cc_op(DisasContext *s)
242 {
243     if (!s->cc_op_synced) {
244         s->cc_op_synced = 1;
245         tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
246     }
247 }
248 
249 /* Generate a jump to an immediate address.  */
250 static void gen_jmp_im(DisasContext *s, uint32_t dest)
251 {
252     update_cc_op(s);
253     tcg_gen_movi_i32(QREG_PC, dest);
254     s->base.is_jmp = DISAS_JUMP;
255 }
256 
257 /* Generate a jump to the address in qreg DEST.  */
258 static void gen_jmp(DisasContext *s, TCGv dest)
259 {
260     update_cc_op(s);
261     tcg_gen_mov_i32(QREG_PC, dest);
262     s->base.is_jmp = DISAS_JUMP;
263 }
264 
265 static void gen_raise_exception(int nr)
266 {
267     TCGv_i32 tmp;
268 
269     tmp = tcg_const_i32(nr);
270     gen_helper_raise_exception(cpu_env, tmp);
271 }
272 
273 static void gen_raise_exception_format2(DisasContext *s, int nr,
274                                         target_ulong this_pc)
275 {
276     /*
277      * Pass the address of the insn to the exception handler,
278      * for recording in the Format $2 (6-word) stack frame.
279      * Re-use mmu.ar for the purpose, since that's only valid
280      * after tlb_fill.
281      */
282     tcg_gen_st_i32(tcg_constant_i32(this_pc), cpu_env,
283                    offsetof(CPUM68KState, mmu.ar));
284     gen_raise_exception(nr);
285     s->base.is_jmp = DISAS_NORETURN;
286 }
287 
288 static void gen_exception(DisasContext *s, uint32_t dest, int nr)
289 {
290     update_cc_op(s);
291     tcg_gen_movi_i32(QREG_PC, dest);
292 
293     gen_raise_exception(nr);
294 
295     s->base.is_jmp = DISAS_NORETURN;
296 }
297 
298 static inline void gen_addr_fault(DisasContext *s)
299 {
300     gen_exception(s, s->base.pc_next, EXCP_ADDRESS);
301 }
302 
303 /*
304  * Generate a load from the specified address.  Narrow values are
305  *  sign extended to full register width.
306  */
307 static inline TCGv gen_load(DisasContext *s, int opsize, TCGv addr,
308                             int sign, int index)
309 {
310     TCGv tmp;
311     tmp = tcg_temp_new_i32();
312     switch (opsize) {
313     case OS_BYTE:
314         if (sign)
315             tcg_gen_qemu_ld8s(tmp, addr, index);
316         else
317             tcg_gen_qemu_ld8u(tmp, addr, index);
318         break;
319     case OS_WORD:
320         if (sign)
321             tcg_gen_qemu_ld16s(tmp, addr, index);
322         else
323             tcg_gen_qemu_ld16u(tmp, addr, index);
324         break;
325     case OS_LONG:
326         tcg_gen_qemu_ld32u(tmp, addr, index);
327         break;
328     default:
329         g_assert_not_reached();
330     }
331     return tmp;
332 }
333 
334 /* Generate a store.  */
335 static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val,
336                              int index)
337 {
338     switch (opsize) {
339     case OS_BYTE:
340         tcg_gen_qemu_st8(val, addr, index);
341         break;
342     case OS_WORD:
343         tcg_gen_qemu_st16(val, addr, index);
344         break;
345     case OS_LONG:
346         tcg_gen_qemu_st32(val, addr, index);
347         break;
348     default:
349         g_assert_not_reached();
350     }
351 }
352 
353 typedef enum {
354     EA_STORE,
355     EA_LOADU,
356     EA_LOADS
357 } ea_what;
358 
359 /*
360  * Generate an unsigned load if WHAT is EA_LOADU, a signed load if WHAT
361  * is EA_LOADS, otherwise generate a store.
362  */
363 static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
364                      ea_what what, int index)
365 {
366     if (what == EA_STORE) {
367         gen_store(s, opsize, addr, val, index);
368         return store_dummy;
369     } else {
370         return gen_load(s, opsize, addr, what == EA_LOADS, index);
371     }
372 }
373 
374 /* Read a 16-bit immediate constant */
375 static inline uint16_t read_im16(CPUM68KState *env, DisasContext *s)
376 {
377     uint16_t im;
378     im = translator_lduw(env, &s->base, s->pc);
379     s->pc += 2;
380     return im;
381 }
382 
383 /* Read an 8-bit immediate constant */
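/*
 * Byte immediates occupy a full 16-bit extension word with the operand in
 * the low byte, so reading a word and truncating yields the value.
 */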
384 static inline uint8_t read_im8(CPUM68KState *env, DisasContext *s)
385 {
386     return read_im16(env, s);
387 }
388 
389 /* Read a 32-bit immediate constant.  */
390 static inline uint32_t read_im32(CPUM68KState *env, DisasContext *s)
391 {
392     uint32_t im;
393     im = read_im16(env, s) << 16;
394     im |= 0xffff & read_im16(env, s);
395     return im;
396 }
397 
398 /* Read a 64-bit immediate constant.  */
399 static inline uint64_t read_im64(CPUM68KState *env, DisasContext *s)
400 {
401     uint64_t im;
402     im = (uint64_t)read_im32(env, s) << 32;
403     im |= (uint64_t)read_im32(env, s);
404     return im;
405 }
406 
407 /* Calculate an address index.  */
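/*
 * EXT is a (brief or full) extension word: bit 15 selects an address or
 * data register, bits 14-12 the register number, bit 11 long vs.
 * sign-extended word index, and bits 10-9 the scale (as a shift count).
 * TMP is used as scratch when extension or scaling is needed.
 */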
408 static TCGv gen_addr_index(DisasContext *s, uint16_t ext, TCGv tmp)
409 {
410     TCGv add;
411     int scale;
412 
413     add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
414     if ((ext & 0x800) == 0) {
415         tcg_gen_ext16s_i32(tmp, add);
416         add = tmp;
417     }
418     scale = (ext >> 9) & 3;
419     if (scale != 0) {
420         tcg_gen_shli_i32(tmp, add, scale);
421         add = tmp;
422     }
423     return add;
424 }
425 
426 /*
427  * Handle a base + index + displacement effective address.
428  * A NULL_QREG base means pc-relative.
429  */
430 static TCGv gen_lea_indexed(CPUM68KState *env, DisasContext *s, TCGv base)
431 {
432     uint32_t offset;
433     uint16_t ext;
434     TCGv add;
435     TCGv tmp;
436     uint32_t bd, od;
437 
438     offset = s->pc;
439     ext = read_im16(env, s);
440 
441     if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
442         return NULL_QREG;
443 
444     if (m68k_feature(s->env, M68K_FEATURE_M68K) &&
445         !m68k_feature(s->env, M68K_FEATURE_SCALED_INDEX)) {
446         ext &= ~(3 << 9);
447     }
448 
449     if (ext & 0x100) {
450         /* full extension word format */
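        /*
         * Bit 7 suppresses the base register and bit 6 the index register;
         * bits 5-4 give the base displacement size, and bits 2-0 select
         * memory indirection and the outer displacement size.
         */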
451         if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
452             return NULL_QREG;
453 
454         if ((ext & 0x30) > 0x10) {
455             /* base displacement */
456             if ((ext & 0x30) == 0x20) {
457                 bd = (int16_t)read_im16(env, s);
458             } else {
459                 bd = read_im32(env, s);
460             }
461         } else {
462             bd = 0;
463         }
464         tmp = tcg_temp_new();
465         if ((ext & 0x44) == 0) {
466             /* pre-index */
467             add = gen_addr_index(s, ext, tmp);
468         } else {
469             add = NULL_QREG;
470         }
471         if ((ext & 0x80) == 0) {
472             /* base not suppressed */
473             if (IS_NULL_QREG(base)) {
474                 base = tcg_const_i32(offset + bd);
475                 bd = 0;
476             }
477             if (!IS_NULL_QREG(add)) {
478                 tcg_gen_add_i32(tmp, add, base);
479                 add = tmp;
480             } else {
481                 add = base;
482             }
483         }
484         if (!IS_NULL_QREG(add)) {
485             if (bd != 0) {
486                 tcg_gen_addi_i32(tmp, add, bd);
487                 add = tmp;
488             }
489         } else {
490             add = tcg_const_i32(bd);
491         }
492         if ((ext & 3) != 0) {
493             /* memory indirect */
494             base = gen_load(s, OS_LONG, add, 0, IS_USER(s));
495             if ((ext & 0x44) == 4) {
496                 add = gen_addr_index(s, ext, tmp);
497                 tcg_gen_add_i32(tmp, add, base);
498                 add = tmp;
499             } else {
500                 add = base;
501             }
502             if ((ext & 3) > 1) {
503                 /* outer displacement */
504                 if ((ext & 3) == 2) {
505                     od = (int16_t)read_im16(env, s);
506                 } else {
507                     od = read_im32(env, s);
508                 }
509             } else {
510                 od = 0;
511             }
512             if (od != 0) {
513                 tcg_gen_addi_i32(tmp, add, od);
514                 add = tmp;
515             }
516         }
517     } else {
518         /* brief extension word format */
519         tmp = tcg_temp_new();
520         add = gen_addr_index(s, ext, tmp);
521         if (!IS_NULL_QREG(base)) {
522             tcg_gen_add_i32(tmp, add, base);
523             if ((int8_t)ext)
524                 tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
525         } else {
526             tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
527         }
528         add = tmp;
529     }
530     return add;
531 }
532 
533 /* Sign or zero extend a value.  */
534 
535 static inline void gen_ext(TCGv res, TCGv val, int opsize, int sign)
536 {
537     switch (opsize) {
538     case OS_BYTE:
539         if (sign) {
540             tcg_gen_ext8s_i32(res, val);
541         } else {
542             tcg_gen_ext8u_i32(res, val);
543         }
544         break;
545     case OS_WORD:
546         if (sign) {
547             tcg_gen_ext16s_i32(res, val);
548         } else {
549             tcg_gen_ext16u_i32(res, val);
550         }
551         break;
552     case OS_LONG:
553         tcg_gen_mov_i32(res, val);
554         break;
555     default:
556         g_assert_not_reached();
557     }
558 }
559 
560 /* Evaluate all the CC flags.  */
561 
562 static void gen_flush_flags(DisasContext *s)
563 {
564     TCGv t0, t1;
565 
566     switch (s->cc_op) {
567     case CC_OP_FLAGS:
568         return;
569 
570     case CC_OP_ADDB:
571     case CC_OP_ADDW:
572     case CC_OP_ADDL:
573         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
574         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
575         /* Compute signed overflow for addition.  */
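        /*
         * QREG_CC_N holds the result and QREG_CC_V the second operand;
         * the first operand is recovered as N - V, and overflow is set
         * when both operands share a sign that differs from the result's.
         */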
576         t0 = tcg_temp_new();
577         t1 = tcg_temp_new();
578         tcg_gen_sub_i32(t0, QREG_CC_N, QREG_CC_V);
579         gen_ext(t0, t0, s->cc_op - CC_OP_ADDB, 1);
580         tcg_gen_xor_i32(t1, QREG_CC_N, QREG_CC_V);
581         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
582         tcg_gen_andc_i32(QREG_CC_V, t1, QREG_CC_V);
583         break;
584 
585     case CC_OP_SUBB:
586     case CC_OP_SUBW:
587     case CC_OP_SUBL:
588         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
589         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
590         /* Compute signed overflow for subtraction.  */
591         t0 = tcg_temp_new();
592         t1 = tcg_temp_new();
593         tcg_gen_add_i32(t0, QREG_CC_N, QREG_CC_V);
594         gen_ext(t0, t0, s->cc_op - CC_OP_SUBB, 1);
595         tcg_gen_xor_i32(t1, QREG_CC_N, t0);
596         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
597         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t1);
598         break;
599 
600     case CC_OP_CMPB:
601     case CC_OP_CMPW:
602     case CC_OP_CMPL:
603         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_C, QREG_CC_N, QREG_CC_V);
604         tcg_gen_sub_i32(QREG_CC_Z, QREG_CC_N, QREG_CC_V);
605         gen_ext(QREG_CC_Z, QREG_CC_Z, s->cc_op - CC_OP_CMPB, 1);
606         /* Compute signed overflow for subtraction.  */
607         t0 = tcg_temp_new();
608         tcg_gen_xor_i32(t0, QREG_CC_Z, QREG_CC_N);
609         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, QREG_CC_N);
610         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t0);
611         tcg_gen_mov_i32(QREG_CC_N, QREG_CC_Z);
612         break;
613 
614     case CC_OP_LOGIC:
615         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
616         tcg_gen_movi_i32(QREG_CC_C, 0);
617         tcg_gen_movi_i32(QREG_CC_V, 0);
618         break;
619 
620     case CC_OP_DYNAMIC:
621         gen_helper_flush_flags(cpu_env, QREG_CC_OP);
622         s->cc_op_synced = 1;
623         break;
624 
625     default:
626         t0 = tcg_const_i32(s->cc_op);
627         gen_helper_flush_flags(cpu_env, t0);
628         s->cc_op_synced = 1;
629         break;
630     }
631 
632     /* Note that flush_flags also assigns to env->cc_op.  */
633     s->cc_op = CC_OP_FLAGS;
634 }
635 
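/*
 * Like gen_ext(), but allocates the destination itself.  Note that for
 * OS_LONG the source is returned unchanged, so the result may alias VAL.
 */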
636 static inline TCGv gen_extend(DisasContext *s, TCGv val, int opsize, int sign)
637 {
638     TCGv tmp;
639 
640     if (opsize == OS_LONG) {
641         tmp = val;
642     } else {
643         tmp = tcg_temp_new();
644         gen_ext(tmp, val, opsize, sign);
645     }
646 
647     return tmp;
648 }
649 
650 static void gen_logic_cc(DisasContext *s, TCGv val, int opsize)
651 {
652     gen_ext(QREG_CC_N, val, opsize, 1);
653     set_cc_op(s, CC_OP_LOGIC);
654 }
655 
656 static void gen_update_cc_cmp(DisasContext *s, TCGv dest, TCGv src, int opsize)
657 {
658     tcg_gen_mov_i32(QREG_CC_N, dest);
659     tcg_gen_mov_i32(QREG_CC_V, src);
660     set_cc_op(s, CC_OP_CMPB + opsize);
661 }
662 
663 static void gen_update_cc_add(TCGv dest, TCGv src, int opsize)
664 {
665     gen_ext(QREG_CC_N, dest, opsize, 1);
666     tcg_gen_mov_i32(QREG_CC_V, src);
667 }
668 
669 static inline int opsize_bytes(int opsize)
670 {
671     switch (opsize) {
672     case OS_BYTE: return 1;
673     case OS_WORD: return 2;
674     case OS_LONG: return 4;
675     case OS_SINGLE: return 4;
676     case OS_DOUBLE: return 8;
677     case OS_EXTENDED: return 12;
678     case OS_PACKED: return 12;
679     default:
680         g_assert_not_reached();
681     }
682 }
683 
684 static inline int insn_opsize(int insn)
685 {
686     switch ((insn >> 6) & 3) {
687     case 0: return OS_BYTE;
688     case 1: return OS_WORD;
689     case 2: return OS_LONG;
690     default:
691         g_assert_not_reached();
692     }
693 }
694 
695 static inline int ext_opsize(int ext, int pos)
696 {
697     switch ((ext >> pos) & 7) {
698     case 0: return OS_LONG;
699     case 1: return OS_SINGLE;
700     case 2: return OS_EXTENDED;
701     case 3: return OS_PACKED;
702     case 4: return OS_WORD;
703     case 5: return OS_DOUBLE;
704     case 6: return OS_BYTE;
705     default:
706         g_assert_not_reached();
707     }
708 }
709 
710 /*
711  * Assign value to a register.  If the width is less than the register width
712  * only the low part of the register is set.
713  */
714 static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
715 {
716     TCGv tmp;
717     switch (opsize) {
718     case OS_BYTE:
719         tcg_gen_andi_i32(reg, reg, 0xffffff00);
720         tmp = tcg_temp_new();
721         tcg_gen_ext8u_i32(tmp, val);
722         tcg_gen_or_i32(reg, reg, tmp);
723         break;
724     case OS_WORD:
725         tcg_gen_andi_i32(reg, reg, 0xffff0000);
726         tmp = tcg_temp_new();
727         tcg_gen_ext16u_i32(tmp, val);
728         tcg_gen_or_i32(reg, reg, tmp);
729         break;
730     case OS_LONG:
731     case OS_SINGLE:
732         tcg_gen_mov_i32(reg, val);
733         break;
734     default:
735         g_assert_not_reached();
736     }
737 }
738 
739 /*
740  * Generate code for an "effective address".  Does not adjust the base
741  * register for autoincrement addressing modes.
742  */
743 static TCGv gen_lea_mode(CPUM68KState *env, DisasContext *s,
744                          int mode, int reg0, int opsize)
745 {
746     TCGv reg;
747     TCGv tmp;
748     uint16_t ext;
749     uint32_t offset;
750 
751     switch (mode) {
752     case 0: /* Data register direct.  */
753     case 1: /* Address register direct.  */
754         return NULL_QREG;
755     case 3: /* Indirect postincrement.  */
756         if (opsize == OS_UNSIZED) {
757             return NULL_QREG;
758         }
759         /* fallthru */
760     case 2: /* Indirect register */
761         return get_areg(s, reg0);
762     case 4: /* Indirect predecrement.  */
763         if (opsize == OS_UNSIZED) {
764             return NULL_QREG;
765         }
766         reg = get_areg(s, reg0);
767         tmp = tcg_temp_new();
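        /*
         * A byte push on the 680x0 keeps the stack pointer word-aligned,
         * so predecrementing A7 for a byte operand moves it by 2, not 1.
         */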
768         if (reg0 == 7 && opsize == OS_BYTE &&
769             m68k_feature(s->env, M68K_FEATURE_M68K)) {
770             tcg_gen_subi_i32(tmp, reg, 2);
771         } else {
772             tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
773         }
774         return tmp;
775     case 5: /* Indirect displacement.  */
776         reg = get_areg(s, reg0);
777         tmp = tcg_temp_new();
778         ext = read_im16(env, s);
779         tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
780         return tmp;
781     case 6: /* Indirect index + displacement.  */
782         reg = get_areg(s, reg0);
783         return gen_lea_indexed(env, s, reg);
784     case 7: /* Other */
785         switch (reg0) {
786         case 0: /* Absolute short.  */
787             offset = (int16_t)read_im16(env, s);
788             return tcg_const_i32(offset);
789         case 1: /* Absolute long.  */
790             offset = read_im32(env, s);
791             return tcg_const_i32(offset);
792         case 2: /* pc displacement  */
793             offset = s->pc;
794             offset += (int16_t)read_im16(env, s);
795             return tcg_const_i32(offset);
796         case 3: /* pc index+displacement.  */
797             return gen_lea_indexed(env, s, NULL_QREG);
798         case 4: /* Immediate.  */
799         default:
800             return NULL_QREG;
801         }
802     }
803     /* Should never happen.  */
804     return NULL_QREG;
805 }
806 
807 static TCGv gen_lea(CPUM68KState *env, DisasContext *s, uint16_t insn,
808                     int opsize)
809 {
810     int mode = extract32(insn, 3, 3);
811     int reg0 = REG(insn, 0);
812     return gen_lea_mode(env, s, mode, reg0, opsize);
813 }
814 
815 /*
816  * Generate code to load/store a value from/into an EA.  If WHAT is
817  * EA_STORE this is a write, otherwise it is a read (EA_LOADS sign-extends,
818  * EA_LOADU zero-extends).  ADDRP is non-null for read-modify-write operands.
819  */
820 static TCGv gen_ea_mode(CPUM68KState *env, DisasContext *s, int mode, int reg0,
821                         int opsize, TCGv val, TCGv *addrp, ea_what what,
822                         int index)
823 {
824     TCGv reg, tmp, result;
825     int32_t offset;
826 
827     switch (mode) {
828     case 0: /* Data register direct.  */
829         reg = cpu_dregs[reg0];
830         if (what == EA_STORE) {
831             gen_partset_reg(opsize, reg, val);
832             return store_dummy;
833         } else {
834             return gen_extend(s, reg, opsize, what == EA_LOADS);
835         }
836     case 1: /* Address register direct.  */
837         reg = get_areg(s, reg0);
838         if (what == EA_STORE) {
839             tcg_gen_mov_i32(reg, val);
840             return store_dummy;
841         } else {
842             return gen_extend(s, reg, opsize, what == EA_LOADS);
843         }
844     case 2: /* Indirect register */
845         reg = get_areg(s, reg0);
846         return gen_ldst(s, opsize, reg, val, what, index);
847     case 3: /* Indirect postincrement.  */
848         reg = get_areg(s, reg0);
849         result = gen_ldst(s, opsize, reg, val, what, index);
850         if (what == EA_STORE || !addrp) {
851             TCGv tmp = tcg_temp_new();
852             if (reg0 == 7 && opsize == OS_BYTE &&
853                 m68k_feature(s->env, M68K_FEATURE_M68K)) {
854                 tcg_gen_addi_i32(tmp, reg, 2);
855             } else {
856                 tcg_gen_addi_i32(tmp, reg, opsize_bytes(opsize));
857             }
858             delay_set_areg(s, reg0, tmp, true);
859         }
860         return result;
861     case 4: /* Indirect predecrement.  */
862         if (addrp && what == EA_STORE) {
863             tmp = *addrp;
864         } else {
865             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
866             if (IS_NULL_QREG(tmp)) {
867                 return tmp;
868             }
869             if (addrp) {
870                 *addrp = tmp;
871             }
872         }
873         result = gen_ldst(s, opsize, tmp, val, what, index);
874         if (what == EA_STORE || !addrp) {
875             delay_set_areg(s, reg0, tmp, false);
876         }
877         return result;
878     case 5: /* Indirect displacement.  */
879     case 6: /* Indirect index + displacement.  */
880     do_indirect:
881         if (addrp && what == EA_STORE) {
882             tmp = *addrp;
883         } else {
884             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
885             if (IS_NULL_QREG(tmp)) {
886                 return tmp;
887             }
888             if (addrp) {
889                 *addrp = tmp;
890             }
891         }
892         return gen_ldst(s, opsize, tmp, val, what, index);
893     case 7: /* Other */
894         switch (reg0) {
895         case 0: /* Absolute short.  */
896         case 1: /* Absolute long.  */
897         case 2: /* pc displacement  */
898         case 3: /* pc index+displacement.  */
899             goto do_indirect;
900         case 4: /* Immediate.  */
901             /* Sign extend values for consistency.  */
902             switch (opsize) {
903             case OS_BYTE:
904                 if (what == EA_LOADS) {
905                     offset = (int8_t)read_im8(env, s);
906                 } else {
907                     offset = read_im8(env, s);
908                 }
909                 break;
910             case OS_WORD:
911                 if (what == EA_LOADS) {
912                     offset = (int16_t)read_im16(env, s);
913                 } else {
914                     offset = read_im16(env, s);
915                 }
916                 break;
917             case OS_LONG:
918                 offset = read_im32(env, s);
919                 break;
920             default:
921                 g_assert_not_reached();
922             }
923             return tcg_const_i32(offset);
924         default:
925             return NULL_QREG;
926         }
927     }
928     /* Should never happen.  */
929     return NULL_QREG;
930 }
931 
932 static TCGv gen_ea(CPUM68KState *env, DisasContext *s, uint16_t insn,
933                    int opsize, TCGv val, TCGv *addrp, ea_what what, int index)
934 {
935     int mode = extract32(insn, 3, 3);
936     int reg0 = REG(insn, 0);
937     return gen_ea_mode(env, s, mode, reg0, opsize, val, addrp, what, index);
938 }
939 
940 static TCGv_ptr gen_fp_ptr(int freg)
941 {
942     TCGv_ptr fp = tcg_temp_new_ptr();
943     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fregs[freg]));
944     return fp;
945 }
946 
947 static TCGv_ptr gen_fp_result_ptr(void)
948 {
949     TCGv_ptr fp = tcg_temp_new_ptr();
950     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fp_result));
951     return fp;
952 }
953 
954 static void gen_fp_move(TCGv_ptr dest, TCGv_ptr src)
955 {
956     TCGv t32;
957     TCGv_i64 t64;
958 
959     t32 = tcg_temp_new();
960     tcg_gen_ld16u_i32(t32, src, offsetof(FPReg, l.upper));
961     tcg_gen_st16_i32(t32, dest, offsetof(FPReg, l.upper));
962 
963     t64 = tcg_temp_new_i64();
964     tcg_gen_ld_i64(t64, src, offsetof(FPReg, l.lower));
965     tcg_gen_st_i64(t64, dest, offsetof(FPReg, l.lower));
966 }
967 
968 static void gen_load_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
969                         int index)
970 {
971     TCGv tmp;
972     TCGv_i64 t64;
973 
974     t64 = tcg_temp_new_i64();
975     tmp = tcg_temp_new();
976     switch (opsize) {
977     case OS_BYTE:
978         tcg_gen_qemu_ld8s(tmp, addr, index);
979         gen_helper_exts32(cpu_env, fp, tmp);
980         break;
981     case OS_WORD:
982         tcg_gen_qemu_ld16s(tmp, addr, index);
983         gen_helper_exts32(cpu_env, fp, tmp);
984         break;
985     case OS_LONG:
986         tcg_gen_qemu_ld32u(tmp, addr, index);
987         gen_helper_exts32(cpu_env, fp, tmp);
988         break;
989     case OS_SINGLE:
990         tcg_gen_qemu_ld32u(tmp, addr, index);
991         gen_helper_extf32(cpu_env, fp, tmp);
992         break;
993     case OS_DOUBLE:
994         tcg_gen_qemu_ld64(t64, addr, index);
995         gen_helper_extf64(cpu_env, fp, t64);
996         break;
997     case OS_EXTENDED:
998         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
999             gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1000             break;
1001         }
1002         tcg_gen_qemu_ld32u(tmp, addr, index);
1003         tcg_gen_shri_i32(tmp, tmp, 16);
1004         tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1005         tcg_gen_addi_i32(tmp, addr, 4);
1006         tcg_gen_qemu_ld64(t64, tmp, index);
1007         tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1008         break;
1009     case OS_PACKED:
1010         /*
1011          * unimplemented data type on 68040/ColdFire
1012          * FIXME if needed for another FPU
1013          */
1014         gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1015         break;
1016     default:
1017         g_assert_not_reached();
1018     }
1019 }
1020 
1021 static void gen_store_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
1022                          int index)
1023 {
1024     TCGv tmp;
1025     TCGv_i64 t64;
1026 
1027     t64 = tcg_temp_new_i64();
1028     tmp = tcg_temp_new();
1029     switch (opsize) {
1030     case OS_BYTE:
1031         gen_helper_reds32(tmp, cpu_env, fp);
1032         tcg_gen_qemu_st8(tmp, addr, index);
1033         break;
1034     case OS_WORD:
1035         gen_helper_reds32(tmp, cpu_env, fp);
1036         tcg_gen_qemu_st16(tmp, addr, index);
1037         break;
1038     case OS_LONG:
1039         gen_helper_reds32(tmp, cpu_env, fp);
1040         tcg_gen_qemu_st32(tmp, addr, index);
1041         break;
1042     case OS_SINGLE:
1043         gen_helper_redf32(tmp, cpu_env, fp);
1044         tcg_gen_qemu_st32(tmp, addr, index);
1045         break;
1046     case OS_DOUBLE:
1047         gen_helper_redf64(t64, cpu_env, fp);
1048         tcg_gen_qemu_st64(t64, addr, index);
1049         break;
1050     case OS_EXTENDED:
1051         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1052             gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1053             break;
1054         }
1055         tcg_gen_ld16u_i32(tmp, fp, offsetof(FPReg, l.upper));
1056         tcg_gen_shli_i32(tmp, tmp, 16);
1057         tcg_gen_qemu_st32(tmp, addr, index);
1058         tcg_gen_addi_i32(tmp, addr, 4);
1059         tcg_gen_ld_i64(t64, fp, offsetof(FPReg, l.lower));
1060         tcg_gen_qemu_st64(t64, tmp, index);
1061         break;
1062     case OS_PACKED:
1063         /*
1064          * unimplemented data type on 68040/ColdFire
1065          * FIXME if needed for another FPU
1066          */
1067         gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1068         break;
1069     default:
1070         g_assert_not_reached();
1071     }
1072 }
1073 
1074 static void gen_ldst_fp(DisasContext *s, int opsize, TCGv addr,
1075                         TCGv_ptr fp, ea_what what, int index)
1076 {
1077     if (what == EA_STORE) {
1078         gen_store_fp(s, opsize, addr, fp, index);
1079     } else {
1080         gen_load_fp(s, opsize, addr, fp, index);
1081     }
1082 }
1083 
1084 static int gen_ea_mode_fp(CPUM68KState *env, DisasContext *s, int mode,
1085                           int reg0, int opsize, TCGv_ptr fp, ea_what what,
1086                           int index)
1087 {
1088     TCGv reg, addr, tmp;
1089     TCGv_i64 t64;
1090 
1091     switch (mode) {
1092     case 0: /* Data register direct.  */
1093         reg = cpu_dregs[reg0];
1094         if (what == EA_STORE) {
1095             switch (opsize) {
1096             case OS_BYTE:
1097             case OS_WORD:
1098             case OS_LONG:
1099                 gen_helper_reds32(reg, cpu_env, fp);
1100                 break;
1101             case OS_SINGLE:
1102                 gen_helper_redf32(reg, cpu_env, fp);
1103                 break;
1104             default:
1105                 g_assert_not_reached();
1106             }
1107         } else {
1108             tmp = tcg_temp_new();
1109             switch (opsize) {
1110             case OS_BYTE:
1111                 tcg_gen_ext8s_i32(tmp, reg);
1112                 gen_helper_exts32(cpu_env, fp, tmp);
1113                 break;
1114             case OS_WORD:
1115                 tcg_gen_ext16s_i32(tmp, reg);
1116                 gen_helper_exts32(cpu_env, fp, tmp);
1117                 break;
1118             case OS_LONG:
1119                 gen_helper_exts32(cpu_env, fp, reg);
1120                 break;
1121             case OS_SINGLE:
1122                 gen_helper_extf32(cpu_env, fp, reg);
1123                 break;
1124             default:
1125                 g_assert_not_reached();
1126             }
1127         }
1128         return 0;
1129     case 1: /* Address register direct.  */
1130         return -1;
1131     case 2: /* Indirect register */
1132         addr = get_areg(s, reg0);
1133         gen_ldst_fp(s, opsize, addr, fp, what, index);
1134         return 0;
1135     case 3: /* Indirect postincrement.  */
1136         addr = cpu_aregs[reg0];
1137         gen_ldst_fp(s, opsize, addr, fp, what, index);
1138         tcg_gen_addi_i32(addr, addr, opsize_bytes(opsize));
1139         return 0;
1140     case 4: /* Indirect predecrement.  */
1141         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1142         if (IS_NULL_QREG(addr)) {
1143             return -1;
1144         }
1145         gen_ldst_fp(s, opsize, addr, fp, what, index);
1146         tcg_gen_mov_i32(cpu_aregs[reg0], addr);
1147         return 0;
1148     case 5: /* Indirect displacement.  */
1149     case 6: /* Indirect index + displacement.  */
1150     do_indirect:
1151         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1152         if (IS_NULL_QREG(addr)) {
1153             return -1;
1154         }
1155         gen_ldst_fp(s, opsize, addr, fp, what, index);
1156         return 0;
1157     case 7: /* Other */
1158         switch (reg0) {
1159         case 0: /* Absolute short.  */
1160         case 1: /* Absolute long.  */
1161         case 2: /* pc displacement  */
1162         case 3: /* pc index+displacement.  */
1163             goto do_indirect;
1164         case 4: /* Immediate.  */
1165             if (what == EA_STORE) {
1166                 return -1;
1167             }
1168             switch (opsize) {
1169             case OS_BYTE:
1170                 tmp = tcg_const_i32((int8_t)read_im8(env, s));
1171                 gen_helper_exts32(cpu_env, fp, tmp);
1172                 break;
1173             case OS_WORD:
1174                 tmp = tcg_const_i32((int16_t)read_im16(env, s));
1175                 gen_helper_exts32(cpu_env, fp, tmp);
1176                 break;
1177             case OS_LONG:
1178                 tmp = tcg_const_i32(read_im32(env, s));
1179                 gen_helper_exts32(cpu_env, fp, tmp);
1180                 break;
1181             case OS_SINGLE:
1182                 tmp = tcg_const_i32(read_im32(env, s));
1183                 gen_helper_extf32(cpu_env, fp, tmp);
1184                 break;
1185             case OS_DOUBLE:
1186                 t64 = tcg_const_i64(read_im64(env, s));
1187                 gen_helper_extf64(cpu_env, fp, t64);
1188                 break;
1189             case OS_EXTENDED:
1190                 if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1191                     gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1192                     break;
1193                 }
1194                 tmp = tcg_const_i32(read_im32(env, s) >> 16);
1195                 tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1196                 t64 = tcg_const_i64(read_im64(env, s));
1197                 tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1198                 break;
1199             case OS_PACKED:
1200                 /*
1201                  * unimplemented data type on 68040/ColdFire
1202                  * FIXME if needed for another FPU
1203                  */
1204                 gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1205                 break;
1206             default:
1207                 g_assert_not_reached();
1208             }
1209             return 0;
1210         default:
1211             return -1;
1212         }
1213     }
1214     return -1;
1215 }
1216 
1217 static int gen_ea_fp(CPUM68KState *env, DisasContext *s, uint16_t insn,
1218                        int opsize, TCGv_ptr fp, ea_what what, int index)
1219 {
1220     int mode = extract32(insn, 3, 3);
1221     int reg0 = REG(insn, 0);
1222     return gen_ea_mode_fp(env, s, mode, reg0, opsize, fp, what, index);
1223 }
1224 
1225 typedef struct {
1226     TCGCond tcond;
1227     TCGv v1;
1228     TCGv v2;
1229 } DisasCompare;
1230 
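/*
 * Translate one of the 16 m68k condition codes into a TCG comparison:
 * c->tcond applied to c->v1 and c->v2.  Where possible the test is
 * derived from the current CC_OP without materialising the flag values;
 * otherwise the flags are flushed and tested directly.  Conditions come
 * in complementary pairs, so a clear low bit of COND inverts the result.
 */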
1231 static void gen_cc_cond(DisasCompare *c, DisasContext *s, int cond)
1232 {
1233     TCGv tmp, tmp2;
1234     TCGCond tcond;
1235     CCOp op = s->cc_op;
1236 
1237     /* The CC_OP_CMP form can handle most normal comparisons directly.  */
1238     if (op == CC_OP_CMPB || op == CC_OP_CMPW || op == CC_OP_CMPL) {
1239         c->v1 = QREG_CC_N;
1240         c->v2 = QREG_CC_V;
1241         switch (cond) {
1242         case 2: /* HI */
1243         case 3: /* LS */
1244             tcond = TCG_COND_LEU;
1245             goto done;
1246         case 4: /* CC */
1247         case 5: /* CS */
1248             tcond = TCG_COND_LTU;
1249             goto done;
1250         case 6: /* NE */
1251         case 7: /* EQ */
1252             tcond = TCG_COND_EQ;
1253             goto done;
1254         case 10: /* PL */
1255         case 11: /* MI */
1256             c->v2 = tcg_const_i32(0);
1257             c->v1 = tmp = tcg_temp_new();
1258             tcg_gen_sub_i32(tmp, QREG_CC_N, QREG_CC_V);
1259             gen_ext(tmp, tmp, op - CC_OP_CMPB, 1);
1260             /* fallthru */
1261         case 12: /* GE */
1262         case 13: /* LT */
1263             tcond = TCG_COND_LT;
1264             goto done;
1265         case 14: /* GT */
1266         case 15: /* LE */
1267             tcond = TCG_COND_LE;
1268             goto done;
1269         }
1270     }
1271 
1272     c->v2 = tcg_const_i32(0);
1273 
1274     switch (cond) {
1275     case 0: /* T */
1276     case 1: /* F */
1277         c->v1 = c->v2;
1278         tcond = TCG_COND_NEVER;
1279         goto done;
1280     case 14: /* GT (!(Z || (N ^ V))) */
1281     case 15: /* LE (Z || (N ^ V)) */
1282         /*
1283          * Logic operations clear V, which simplifies LE to (Z || N),
1284          * and since Z and N are co-located, this becomes a normal
1285          * comparison vs N.
1286          */
1287         if (op == CC_OP_LOGIC) {
1288             c->v1 = QREG_CC_N;
1289             tcond = TCG_COND_LE;
1290             goto done;
1291         }
1292         break;
1293     case 12: /* GE (!(N ^ V)) */
1294     case 13: /* LT (N ^ V) */
1295         /* Logic operations clear V, which simplifies this to N.  */
1296         if (op != CC_OP_LOGIC) {
1297             break;
1298         }
1299         /* fallthru */
1300     case 10: /* PL (!N) */
1301     case 11: /* MI (N) */
1302         /* Several cases represent N normally.  */
1303         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1304             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1305             op == CC_OP_LOGIC) {
1306             c->v1 = QREG_CC_N;
1307             tcond = TCG_COND_LT;
1308             goto done;
1309         }
1310         break;
1311     case 6: /* NE (!Z) */
1312     case 7: /* EQ (Z) */
1313         /* Some cases fold Z into N.  */
1314         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1315             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1316             op == CC_OP_LOGIC) {
1317             tcond = TCG_COND_EQ;
1318             c->v1 = QREG_CC_N;
1319             goto done;
1320         }
1321         break;
1322     case 4: /* CC (!C) */
1323     case 5: /* CS (C) */
1324         /* Some cases fold C into X.  */
1325         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1326             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL) {
1327             tcond = TCG_COND_NE;
1328             c->v1 = QREG_CC_X;
1329             goto done;
1330         }
1331         /* fallthru */
1332     case 8: /* VC (!V) */
1333     case 9: /* VS (V) */
1334         /* Logic operations clear V and C.  */
1335         if (op == CC_OP_LOGIC) {
1336             tcond = TCG_COND_NEVER;
1337             c->v1 = c->v2;
1338             goto done;
1339         }
1340         break;
1341     }
1342 
1343     /* Otherwise, flush flag state to CC_OP_FLAGS.  */
1344     gen_flush_flags(s);
1345 
1346     switch (cond) {
1347     case 0: /* T */
1348     case 1: /* F */
1349     default:
1350         /* Invalid, or handled above.  */
1351         abort();
1352     case 2: /* HI (!C && !Z) -> !(C || Z) */
1353     case 3: /* LS (C || Z) */
1354         c->v1 = tmp = tcg_temp_new();
1355         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1356         tcg_gen_or_i32(tmp, tmp, QREG_CC_C);
1357         tcond = TCG_COND_NE;
1358         break;
1359     case 4: /* CC (!C) */
1360     case 5: /* CS (C) */
1361         c->v1 = QREG_CC_C;
1362         tcond = TCG_COND_NE;
1363         break;
1364     case 6: /* NE (!Z) */
1365     case 7: /* EQ (Z) */
1366         c->v1 = QREG_CC_Z;
1367         tcond = TCG_COND_EQ;
1368         break;
1369     case 8: /* VC (!V) */
1370     case 9: /* VS (V) */
1371         c->v1 = QREG_CC_V;
1372         tcond = TCG_COND_LT;
1373         break;
1374     case 10: /* PL (!N) */
1375     case 11: /* MI (N) */
1376         c->v1 = QREG_CC_N;
1377         tcond = TCG_COND_LT;
1378         break;
1379     case 12: /* GE (!(N ^ V)) */
1380     case 13: /* LT (N ^ V) */
1381         c->v1 = tmp = tcg_temp_new();
1382         tcg_gen_xor_i32(tmp, QREG_CC_N, QREG_CC_V);
1383         tcond = TCG_COND_LT;
1384         break;
1385     case 14: /* GT (!(Z || (N ^ V))) */
1386     case 15: /* LE (Z || (N ^ V)) */
1387         c->v1 = tmp = tcg_temp_new();
1388         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1389         tcg_gen_neg_i32(tmp, tmp);
1390         tmp2 = tcg_temp_new();
1391         tcg_gen_xor_i32(tmp2, QREG_CC_N, QREG_CC_V);
1392         tcg_gen_or_i32(tmp, tmp, tmp2);
1393         tcond = TCG_COND_LT;
1394         break;
1395     }
1396 
1397  done:
1398     if ((cond & 1) == 0) {
1399         tcond = tcg_invert_cond(tcond);
1400     }
1401     c->tcond = tcond;
1402 }
1403 
1404 static void gen_jmpcc(DisasContext *s, int cond, TCGLabel *l1)
1405 {
1406     DisasCompare c;
1407 
1408     gen_cc_cond(&c, s, cond);
1409     update_cc_op(s);
1410     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
1411 }
1412 
1413 /* Force a TB lookup after an instruction that changes the CPU state.  */
1414 static void gen_exit_tb(DisasContext *s)
1415 {
1416     update_cc_op(s);
1417     tcg_gen_movi_i32(QREG_PC, s->pc);
1418     s->base.is_jmp = DISAS_EXIT;
1419 }
1420 
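/*
 * Convenience wrappers around gen_ea(): SRC_EA loads and DEST_EA stores
 * the operand designated by the instruction's effective address field,
 * returning from the caller with an address error if the mode is invalid.
 */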
1421 #define SRC_EA(env, result, opsize, op_sign, addrp) do {                \
1422         result = gen_ea(env, s, insn, opsize, NULL_QREG, addrp,         \
1423                         op_sign ? EA_LOADS : EA_LOADU, IS_USER(s));     \
1424         if (IS_NULL_QREG(result)) {                                     \
1425             gen_addr_fault(s);                                          \
1426             return;                                                     \
1427         }                                                               \
1428     } while (0)
1429 
1430 #define DEST_EA(env, insn, opsize, val, addrp) do {                     \
1431         TCGv ea_result = gen_ea(env, s, insn, opsize, val, addrp,       \
1432                                 EA_STORE, IS_USER(s));                  \
1433         if (IS_NULL_QREG(ea_result)) {                                  \
1434             gen_addr_fault(s);                                          \
1435             return;                                                     \
1436         }                                                               \
1437     } while (0)
1438 
1439 /* Generate a jump to immediate address DEST, chaining TBs when possible.  */
1440 static void gen_jmp_tb(DisasContext *s, int n, target_ulong dest,
1441                        target_ulong src)
1442 {
1443     if (unlikely(s->ss_active)) {
1444         update_cc_op(s);
1445         tcg_gen_movi_i32(QREG_PC, dest);
1446         gen_raise_exception_format2(s, EXCP_TRACE, src);
1447     } else if (translator_use_goto_tb(&s->base, dest)) {
1448         tcg_gen_goto_tb(n);
1449         tcg_gen_movi_i32(QREG_PC, dest);
1450         tcg_gen_exit_tb(s->base.tb, n);
1451     } else {
1452         gen_jmp_im(s, dest);
1453         tcg_gen_exit_tb(NULL, 0);
1454     }
1455     s->base.is_jmp = DISAS_NORETURN;
1456 }
1457 
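/*
 * Scc: set the destination byte to 0xff if the condition holds and to
 * 0x00 otherwise; setcond yields 0/1, which is negated to 0/-1 before
 * the byte-sized store.
 */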
1458 DISAS_INSN(scc)
1459 {
1460     DisasCompare c;
1461     int cond;
1462     TCGv tmp;
1463 
1464     cond = (insn >> 8) & 0xf;
1465     gen_cc_cond(&c, s, cond);
1466 
1467     tmp = tcg_temp_new();
1468     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
1469 
1470     tcg_gen_neg_i32(tmp, tmp);
1471     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
1472 }
1473 
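/*
 * DBcc: if the condition is false, decrement the low word of Dn and,
 * unless the counter has reached -1, branch back to the displacement
 * target; otherwise fall through to the next instruction.
 */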
1474 DISAS_INSN(dbcc)
1475 {
1476     TCGLabel *l1;
1477     TCGv reg;
1478     TCGv tmp;
1479     int16_t offset;
1480     uint32_t base;
1481 
1482     reg = DREG(insn, 0);
1483     base = s->pc;
1484     offset = (int16_t)read_im16(env, s);
1485     l1 = gen_new_label();
1486     gen_jmpcc(s, (insn >> 8) & 0xf, l1);
1487 
1488     tmp = tcg_temp_new();
1489     tcg_gen_ext16s_i32(tmp, reg);
1490     tcg_gen_addi_i32(tmp, tmp, -1);
1491     gen_partset_reg(OS_WORD, reg, tmp);
1492     tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, -1, l1);
1493     gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
1494     gen_set_label(l1);
1495     gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
1496 }
1497 
1498 DISAS_INSN(undef_mac)
1499 {
1500     gen_exception(s, s->base.pc_next, EXCP_LINEA);
1501 }
1502 
1503 DISAS_INSN(undef_fpu)
1504 {
1505     gen_exception(s, s->base.pc_next, EXCP_LINEF);
1506 }
1507 
1508 DISAS_INSN(undef)
1509 {
1510     /*
1511      * ??? This covers both instructions that are as yet unimplemented
1512      * for the 680x0 series and those that are implemented but actually
1513      * illegal for CPU32 or pre-68020.
1514      */
1515     qemu_log_mask(LOG_UNIMP, "Illegal instruction: %04x @ %08x\n",
1516                   insn, s->base.pc_next);
1517     gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
1518 }
1519 
1520 DISAS_INSN(mulw)
1521 {
1522     TCGv reg;
1523     TCGv tmp;
1524     TCGv src;
1525     int sign;
1526 
1527     sign = (insn & 0x100) != 0;
1528     reg = DREG(insn, 9);
1529     tmp = tcg_temp_new();
1530     if (sign)
1531         tcg_gen_ext16s_i32(tmp, reg);
1532     else
1533         tcg_gen_ext16u_i32(tmp, reg);
1534     SRC_EA(env, src, OS_WORD, sign, NULL);
1535     tcg_gen_mul_i32(tmp, tmp, src);
1536     tcg_gen_mov_i32(reg, tmp);
1537     gen_logic_cc(s, tmp, OS_LONG);
1538 }
1539 
1540 DISAS_INSN(divw)
1541 {
1542     int sign;
1543     TCGv src;
1544     TCGv destr;
1545     TCGv ilen;
1546 
1547     /* divX.w <EA>,Dn    32/16 -> 16r:16q */
1548 
1549     sign = (insn & 0x100) != 0;
1550 
1551     /* dest.l / src.w */
1552 
1553     SRC_EA(env, src, OS_WORD, sign, NULL);
1554     destr = tcg_constant_i32(REG(insn, 9));
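    /*
     * Number of instruction bytes consumed so far, needed by the helper
     * if it has to raise a divide-by-zero exception.
     */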
1555     ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1556     if (sign) {
1557         gen_helper_divsw(cpu_env, destr, src, ilen);
1558     } else {
1559         gen_helper_divuw(cpu_env, destr, src, ilen);
1560     }
1561 
1562     set_cc_op(s, CC_OP_FLAGS);
1563 }
1564 
1565 DISAS_INSN(divl)
1566 {
1567     TCGv num, reg, den, ilen;
1568     int sign;
1569     uint16_t ext;
1570 
1571     ext = read_im16(env, s);
1572 
1573     sign = (ext & 0x0800) != 0;
1574 
1575     if (ext & 0x400) {
1576         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
1577             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
1578             return;
1579         }
1580 
1581         /* divX.l <EA>, Dr:Dq    64/32 -> 32r:32q */
1582 
1583         SRC_EA(env, den, OS_LONG, 0, NULL);
1584         num = tcg_constant_i32(REG(ext, 12));
1585         reg = tcg_constant_i32(REG(ext, 0));
1586         ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1587         if (sign) {
1588             gen_helper_divsll(cpu_env, num, reg, den, ilen);
1589         } else {
1590             gen_helper_divull(cpu_env, num, reg, den, ilen);
1591         }
1592         set_cc_op(s, CC_OP_FLAGS);
1593         return;
1594     }
1595 
1596     /* divX.l <EA>, Dq        32/32 -> 32q     */
1597     /* divXl.l <EA>, Dr:Dq    32/32 -> 32r:32q */
1598 
1599     SRC_EA(env, den, OS_LONG, 0, NULL);
1600     num = tcg_constant_i32(REG(ext, 12));
1601     reg = tcg_constant_i32(REG(ext, 0));
1602     ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1603     if (sign) {
1604         gen_helper_divsl(cpu_env, num, reg, den, ilen);
1605     } else {
1606         gen_helper_divul(cpu_env, num, reg, den, ilen);
1607     }
1608 
1609     set_cc_op(s, CC_OP_FLAGS);
1610 }
1611 
1612 static void bcd_add(TCGv dest, TCGv src)
1613 {
1614     TCGv t0, t1;
1615 
1616     /*
1617      * dest10 = dest10 + src10 + X
1618      *
1619      *        t1 = src
1620      *        t2 = t1 + 0x066
1621      *        t3 = t2 + dest + X
1622      *        t4 = t2 ^ dest
1623      *        t5 = t3 ^ t4
1624      *        t6 = ~t5 & 0x110
1625      *        t7 = (t6 >> 2) | (t6 >> 3)
1626      *        return t3 - t7
1627      */
1628 
1629     /*
1630      * t1 = (src + 0x066) + dest + X
1631      *    = result, with a possible excess of 0x6 in each digit
1632      */
1633 
1634     t0 = tcg_const_i32(0x066);
1635     tcg_gen_add_i32(t0, t0, src);
1636 
1637     t1 = tcg_temp_new();
1638     tcg_gen_add_i32(t1, t0, dest);
1639     tcg_gen_add_i32(t1, t1, QREG_CC_X);
1640 
1641     /* we will remove the excess 0x6 from digits that produced no carry */
1642 
1643     /*
1644      * t0 = (src + 0x0066) ^ dest
1645      *    = t1 without carries
1646      */
1647 
1648     tcg_gen_xor_i32(t0, t0, dest);
1649 
1650     /*
1651      * extract the carries
1652      * t0 = t0 ^ t1
1653      *    = only the carries
1654      */
1655 
1656     tcg_gen_xor_i32(t0, t0, t1);
1657 
1658     /*
1659      * put 0x2 in each digit that produced no carry (invert the carries,
1660      * shifted from 0x110 down to 0x22), then triple it to get the 0x6
1661      */
1662 
1663     tcg_gen_shri_i32(t0, t0, 3);
1664     tcg_gen_not_i32(t0, t0);
1665     tcg_gen_andi_i32(t0, t0, 0x22);
1666     tcg_gen_add_i32(dest, t0, t0);
1667     tcg_gen_add_i32(dest, dest, t0);
1668 
1669     /*
1670      * remove the excess 0x6
1671      * from digits that did not generate a carry
1672      */
1673 
1674     tcg_gen_sub_i32(dest, t1, dest);
1675 }
1676 
1677 static void bcd_sub(TCGv dest, TCGv src)
1678 {
1679     TCGv t0, t1, t2;
1680 
1681     /*
1682      *  dest10 = dest10 - src10 - X
1683      *         = bcd_add(dest + 1 - X, 0x199 - src)
1684      */
1685 
1686     /* t0 = 0x066 + (0x199 - src) */
1687 
1688     t0 = tcg_temp_new();
1689     tcg_gen_subfi_i32(t0, 0x1ff, src);
1690 
1691     /* t1 = t0 + dest + 1 - X */
1692 
1693     t1 = tcg_temp_new();
1694     tcg_gen_add_i32(t1, t0, dest);
1695     tcg_gen_addi_i32(t1, t1, 1);
1696     tcg_gen_sub_i32(t1, t1, QREG_CC_X);
1697 
1698     /* t2 = t0 ^ dest */
1699 
1700     t2 = tcg_temp_new();
1701     tcg_gen_xor_i32(t2, t0, dest);
1702 
1703     /* t0 = t1 ^ t2 */
1704 
1705     tcg_gen_xor_i32(t0, t1, t2);
1706 
1707     /*
1708      * t2 = ~t0 & 0x110
1709      * t0 = (t2 >> 2) | (t2 >> 3)
1710      *
1711      * rewritten to fit in 8-bit operands as:
1712      *
1713      * t2 = ~(t0 >> 3) & 0x22
1714      * t0 = t2 + t2
1715      * t0 = t0 + t2
1716      */
1717 
1718     tcg_gen_shri_i32(t2, t0, 3);
1719     tcg_gen_not_i32(t2, t2);
1720     tcg_gen_andi_i32(t2, t2, 0x22);
1721     tcg_gen_add_i32(t0, t2, t2);
1722     tcg_gen_add_i32(t0, t0, t2);
1723 
1724     /* return t1 - t0 */
1725 
1726     tcg_gen_sub_i32(dest, t1, t0);
1727 }
1728 
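/*
 * Update CC for a BCD result in VAL: C and X come from the carry out of
 * the low byte (bit 8), while Z is updated "stickily" by ORing the byte
 * result into CC_Z, which can clear Z (nonzero result) but never set it.
 */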
1729 static void bcd_flags(TCGv val)
1730 {
1731     tcg_gen_andi_i32(QREG_CC_C, val, 0x0ff);
1732     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_C);
1733 
1734     tcg_gen_extract_i32(QREG_CC_C, val, 8, 1);
1735 
1736     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
1737 }
1738 
1739 DISAS_INSN(abcd_reg)
1740 {
1741     TCGv src;
1742     TCGv dest;
1743 
1744     gen_flush_flags(s); /* !Z is sticky */
1745 
1746     src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
1747     dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
1748     bcd_add(dest, src);
1749     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1750 
1751     bcd_flags(dest);
1752 }
1753 
1754 DISAS_INSN(abcd_mem)
1755 {
1756     TCGv src, dest, addr;
1757 
1758     gen_flush_flags(s); /* !Z is sticky */
1759 
1760     /* Indirect pre-decrement load (mode 4) */
1761 
1762     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1763                       NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1764     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1765                        NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1766 
1767     bcd_add(dest, src);
1768 
1769     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1770                 EA_STORE, IS_USER(s));
1771 
1772     bcd_flags(dest);
1773 }
1774 
1775 DISAS_INSN(sbcd_reg)
1776 {
1777     TCGv src, dest;
1778 
1779     gen_flush_flags(s); /* !Z is sticky */
1780 
1781     src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
1782     dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
1783 
1784     bcd_sub(dest, src);
1785 
1786     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1787 
1788     bcd_flags(dest);
1789 }
1790 
1791 DISAS_INSN(sbcd_mem)
1792 {
1793     TCGv src, dest, addr;
1794 
1795     gen_flush_flags(s); /* !Z is sticky */
1796 
1797     /* Indirect pre-decrement load (mode 4) */
1798 
1799     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1800                       NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1801     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1802                        NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1803 
1804     bcd_sub(dest, src);
1805 
1806     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1807                 EA_STORE, IS_USER(s));
1808 
1809     bcd_flags(dest);
1810 }
1811 
1812 DISAS_INSN(nbcd)
1813 {
1814     TCGv src, dest;
1815     TCGv addr;
1816 
1817     gen_flush_flags(s); /* !Z is sticky */
1818 
1819     SRC_EA(env, src, OS_BYTE, 0, &addr);
1820 
1821     dest = tcg_const_i32(0);
1822     bcd_sub(dest, src);
1823 
1824     DEST_EA(env, insn, OS_BYTE, dest, &addr);
1825 
1826     bcd_flags(dest);
1827 }
1828 
1829 DISAS_INSN(addsub)
1830 {
1831     TCGv reg;
1832     TCGv dest;
1833     TCGv src;
1834     TCGv tmp;
1835     TCGv addr;
1836     int add;
1837     int opsize;
1838 
1839     add = (insn & 0x4000) != 0;
1840     opsize = insn_opsize(insn);
1841     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
1842     dest = tcg_temp_new();
1843     if (insn & 0x100) {
1844         SRC_EA(env, tmp, opsize, 1, &addr);
1845         src = reg;
1846     } else {
1847         tmp = reg;
1848         SRC_EA(env, src, opsize, 1, NULL);
1849     }
1850     if (add) {
1851         tcg_gen_add_i32(dest, tmp, src);
1852         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, src);
1853         set_cc_op(s, CC_OP_ADDB + opsize);
1854     } else {
1855         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, tmp, src);
1856         tcg_gen_sub_i32(dest, tmp, src);
1857         set_cc_op(s, CC_OP_SUBB + opsize);
1858     }
1859     gen_update_cc_add(dest, src, opsize);
1860     if (insn & 0x100) {
1861         DEST_EA(env, insn, opsize, dest, &addr);
1862     } else {
1863         gen_partset_reg(opsize, DREG(insn, 9), dest);
1864     }
1865 }
1866 
1867 /* Reverse the order of the bits in REG.  */
1868 DISAS_INSN(bitrev)
1869 {
1870     TCGv reg;
1871     reg = DREG(insn, 0);
1872     gen_helper_bitrev(reg, reg);
1873 }
1874 
1875 DISAS_INSN(bitop_reg)
1876 {
1877     int opsize;
1878     int op;
1879     TCGv src1;
1880     TCGv src2;
1881     TCGv tmp;
1882     TCGv addr;
1883     TCGv dest;
1884 
1885     if ((insn & 0x38) != 0)
1886         opsize = OS_BYTE;
1887     else
1888         opsize = OS_LONG;
1889     op = (insn >> 6) & 3;
1890     SRC_EA(env, src1, opsize, 0, op ? &addr : NULL);
1891 
1892     gen_flush_flags(s);
1893     src2 = tcg_temp_new();
1894     if (opsize == OS_BYTE)
1895         tcg_gen_andi_i32(src2, DREG(insn, 9), 7);
1896     else
1897         tcg_gen_andi_i32(src2, DREG(insn, 9), 31);
1898 
1899     tmp = tcg_const_i32(1);
1900     tcg_gen_shl_i32(tmp, tmp, src2);
1901 
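    /*
     * BTST-style Z: the flag is set iff the tested bit is clear, so the
     * masked bit can be stored directly in QREG_CC_Z (zero means Z set).
     */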
1902     tcg_gen_and_i32(QREG_CC_Z, src1, tmp);
1903 
1904     dest = tcg_temp_new();
1905     switch (op) {
1906     case 1: /* bchg */
1907         tcg_gen_xor_i32(dest, src1, tmp);
1908         break;
1909     case 2: /* bclr */
1910         tcg_gen_andc_i32(dest, src1, tmp);
1911         break;
1912     case 3: /* bset */
1913         tcg_gen_or_i32(dest, src1, tmp);
1914         break;
1915     default: /* btst */
1916         break;
1917     }
1918     if (op) {
1919         DEST_EA(env, insn, opsize, dest, &addr);
1920     }
1921 }
1922 
1923 DISAS_INSN(sats)
1924 {
1925     TCGv reg;
1926     reg = DREG(insn, 0);
1927     gen_flush_flags(s);
1928     gen_helper_sats(reg, reg, QREG_CC_V);
1929     gen_logic_cc(s, reg, OS_LONG);
1930 }
1931 
1932 static void gen_push(DisasContext *s, TCGv val)
1933 {
1934     TCGv tmp;
1935 
1936     tmp = tcg_temp_new();
1937     tcg_gen_subi_i32(tmp, QREG_SP, 4);
1938     gen_store(s, OS_LONG, tmp, val, IS_USER(s));
1939     tcg_gen_mov_i32(QREG_SP, tmp);
1940 }
1941 
1942 static TCGv mreg(int reg)
1943 {
1944     if (reg < 8) {
1945         /* Dx */
1946         return cpu_dregs[reg];
1947     }
1948     /* Ax */
1949     return cpu_aregs[reg & 7];
1950 }
1951 
1952 DISAS_INSN(movem)
1953 {
1954     TCGv addr, incr, tmp, r[16];
1955     int is_load = (insn & 0x0400) != 0;
1956     int opsize = (insn & 0x40) != 0 ? OS_LONG : OS_WORD;
1957     uint16_t mask = read_im16(env, s);
1958     int mode = extract32(insn, 3, 3);
1959     int reg0 = REG(insn, 0);
1960     int i;
1961 
1962     tmp = cpu_aregs[reg0];
1963 
1964     switch (mode) {
1965     case 0: /* data register direct */
1966     case 1: /* addr register direct */
1967     do_addr_fault:
1968         gen_addr_fault(s);
1969         return;
1970 
1971     case 2: /* indirect */
1972         break;
1973 
1974     case 3: /* indirect post-increment */
1975         if (!is_load) {
1976             /* post-increment is not allowed */
1977             goto do_addr_fault;
1978         }
1979         break;
1980 
1981     case 4: /* indirect pre-decrement */
1982         if (is_load) {
1983             /* pre-decrement is not allowed */
1984             goto do_addr_fault;
1985         }
1986         /*
1987          * We want a bare copy of the address reg, without any pre-decrement
1988          * adjustment, as gen_lea would provide.
1989          */
1990         break;
1991 
1992     default:
1993         tmp = gen_lea_mode(env, s, mode, reg0, opsize);
1994         if (IS_NULL_QREG(tmp)) {
1995             goto do_addr_fault;
1996         }
1997         break;
1998     }
1999 
2000     addr = tcg_temp_new();
2001     tcg_gen_mov_i32(addr, tmp);
2002     incr = tcg_const_i32(opsize_bytes(opsize));
2003 
2004     if (is_load) {
2005         /* memory to register */
2006         for (i = 0; i < 16; i++) {
2007             if (mask & (1 << i)) {
2008                 r[i] = gen_load(s, opsize, addr, 1, IS_USER(s));
2009                 tcg_gen_add_i32(addr, addr, incr);
2010             }
2011         }
2012         for (i = 0; i < 16; i++) {
2013             if (mask & (1 << i)) {
2014                 tcg_gen_mov_i32(mreg(i), r[i]);
2015             }
2016         }
2017         if (mode == 3) {
2018             /* post-increment: movem (An)+,X */
2019             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2020         }
2021     } else {
2022         /* register to memory */
2023         if (mode == 4) {
2024             /* pre-decrement: movem X,-(An) */
2025             for (i = 15; i >= 0; i--) {
2026                 if ((mask << i) & 0x8000) {
2027                     tcg_gen_sub_i32(addr, addr, incr);
2028                     if (reg0 + 8 == i &&
2029                         m68k_feature(s->env, M68K_FEATURE_EXT_FULL)) {
2030                         /*
2031                          * M68020+: if the addressing register is the
2032                          * register moved to memory, the value written
2033                          * is the initial value decremented by the size of
2034                          * the operation, regardless of how many actual
2035                          * stores have been performed until this point.
2036                          * M68000/M68010: the value is the initial value.
2037                          */
2038                         tmp = tcg_temp_new();
2039                         tcg_gen_sub_i32(tmp, cpu_aregs[reg0], incr);
2040                         gen_store(s, opsize, addr, tmp, IS_USER(s));
2041                     } else {
2042                         gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2043                     }
2044                 }
2045             }
2046             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2047         } else {
2048             for (i = 0; i < 16; i++) {
2049                 if (mask & (1 << i)) {
2050                     gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2051                     tcg_gen_add_i32(addr, addr, incr);
2052                 }
2053             }
2054         }
2055     }
2056 }
2057 
2058 DISAS_INSN(movep)
2059 {
2060     uint8_t i;
2061     int16_t displ;
2062     TCGv reg;
2063     TCGv addr;
2064     TCGv abuf;
2065     TCGv dbuf;
2066 
2067     displ = read_im16(env, s);
2068 
2069     addr = AREG(insn, 0);
2070     reg = DREG(insn, 9);
2071 
2072     abuf = tcg_temp_new();
2073     tcg_gen_addi_i32(abuf, addr, displ);
2074     dbuf = tcg_temp_new();
2075 
2076     if (insn & 0x40) {
2077         i = 4;
2078     } else {
2079         i = 2;
2080     }
2081 
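    /*
     * Bit 6 selects word vs long (2 or 4 bytes), bit 7 the direction
     * (set = register to memory).  Bytes go most-significant first,
     * one per alternate memory byte (addr, addr + 2, ...).
     */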
2082     if (insn & 0x80) {
2083         for ( ; i > 0 ; i--) {
2084             tcg_gen_shri_i32(dbuf, reg, (i - 1) * 8);
2085             tcg_gen_qemu_st8(dbuf, abuf, IS_USER(s));
2086             if (i > 1) {
2087                 tcg_gen_addi_i32(abuf, abuf, 2);
2088             }
2089         }
2090     } else {
2091         for ( ; i > 0 ; i--) {
2092             tcg_gen_qemu_ld8u(dbuf, abuf, IS_USER(s));
2093             tcg_gen_deposit_i32(reg, reg, dbuf, (i - 1) * 8, 8);
2094             if (i > 1) {
2095                 tcg_gen_addi_i32(abuf, abuf, 2);
2096             }
2097         }
2098     }
2099 }
2100 
2101 DISAS_INSN(bitop_im)
2102 {
2103     int opsize;
2104     int op;
2105     TCGv src1;
2106     uint32_t mask;
2107     int bitnum;
2108     TCGv tmp;
2109     TCGv addr;
2110 
2111     if ((insn & 0x38) != 0)
2112         opsize = OS_BYTE;
2113     else
2114         opsize = OS_LONG;
2115     op = (insn >> 6) & 3;
2116 
2117     bitnum = read_im16(env, s);
2118     if (m68k_feature(s->env, M68K_FEATURE_M68K)) {
2119         if (bitnum & 0xfe00) {
2120             disas_undef(env, s, insn);
2121             return;
2122         }
2123     } else {
2124         if (bitnum & 0xff00) {
2125             disas_undef(env, s, insn);
2126             return;
2127         }
2128     }
2129 
2130     SRC_EA(env, src1, opsize, 0, op ? &addr : NULL);
2131 
2132     gen_flush_flags(s);
2133     if (opsize == OS_BYTE)
2134         bitnum &= 7;
2135     else
2136         bitnum &= 31;
2137     mask = 1 << bitnum;
2138 
2139     tcg_gen_andi_i32(QREG_CC_Z, src1, mask);
2140 
2141     if (op) {
2142         tmp = tcg_temp_new();
2143         switch (op) {
2144         case 1: /* bchg */
2145             tcg_gen_xori_i32(tmp, src1, mask);
2146             break;
2147         case 2: /* bclr */
2148             tcg_gen_andi_i32(tmp, src1, ~mask);
2149             break;
2150         case 3: /* bset */
2151             tcg_gen_ori_i32(tmp, src1, mask);
2152             break;
2153         default: /* btst */
2154             break;
2155         }
2156         DEST_EA(env, insn, opsize, tmp, &addr);
2157     }
2158 }
2159 
2160 static TCGv gen_get_ccr(DisasContext *s)
2161 {
2162     TCGv dest;
2163 
2164     update_cc_op(s);
2165     dest = tcg_temp_new();
2166     gen_helper_get_ccr(dest, cpu_env);
2167     return dest;
2168 }
2169 
2170 static TCGv gen_get_sr(DisasContext *s)
2171 {
2172     TCGv ccr;
2173     TCGv sr;
2174 
2175     ccr = gen_get_ccr(s);
2176     sr = tcg_temp_new();
2177     tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
2178     tcg_gen_or_i32(sr, sr, ccr);
2179     return sr;
2180 }
2181 
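/*
 * Write an immediate CCR/SR value.  In the CCR-only case the bits are
 * spread into the lazy flag variables directly: C and X live in bit 0,
 * N and V in the sign bit, and Z is kept inverted (QREG_CC_Z == 0 means
 * the Z flag is set).
 */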
2182 static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
2183 {
2184     if (ccr_only) {
2185         tcg_gen_movi_i32(QREG_CC_C, val & CCF_C ? 1 : 0);
2186         tcg_gen_movi_i32(QREG_CC_V, val & CCF_V ? -1 : 0);
2187         tcg_gen_movi_i32(QREG_CC_Z, val & CCF_Z ? 0 : 1);
2188         tcg_gen_movi_i32(QREG_CC_N, val & CCF_N ? -1 : 0);
2189         tcg_gen_movi_i32(QREG_CC_X, val & CCF_X ? 1 : 0);
2190     } else {
2191         /* Must write back before changing security state. */
2192         do_writebacks(s);
2193         gen_helper_set_sr(cpu_env, tcg_constant_i32(val));
2194     }
2195     set_cc_op(s, CC_OP_FLAGS);
2196 }
2197 
2198 static void gen_set_sr(DisasContext *s, TCGv val, int ccr_only)
2199 {
2200     if (ccr_only) {
2201         gen_helper_set_ccr(cpu_env, val);
2202     } else {
2203         /* Must write back before changing security state. */
2204         do_writebacks(s);
2205         gen_helper_set_sr(cpu_env, val);
2206     }
2207     set_cc_op(s, CC_OP_FLAGS);
2208 }
2209 
2210 static void gen_move_to_sr(CPUM68KState *env, DisasContext *s, uint16_t insn,
2211                            bool ccr_only)
2212 {
2213     if ((insn & 0x3f) == 0x3c) {
2214         uint16_t val;
2215         val = read_im16(env, s);
2216         gen_set_sr_im(s, val, ccr_only);
2217     } else {
2218         TCGv src;
2219         SRC_EA(env, src, OS_WORD, 0, NULL);
2220         gen_set_sr(s, src, ccr_only);
2221     }
2222 }
2223 
2224 DISAS_INSN(arith_im)
2225 {
2226     int op;
2227     TCGv im;
2228     TCGv src1;
2229     TCGv dest;
2230     TCGv addr;
2231     int opsize;
2232     bool with_SR = ((insn & 0x3f) == 0x3c);
2233 
2234     op = (insn >> 9) & 7;
2235     opsize = insn_opsize(insn);
2236     switch (opsize) {
2237     case OS_BYTE:
2238         im = tcg_const_i32((int8_t)read_im8(env, s));
2239         break;
2240     case OS_WORD:
2241         im = tcg_const_i32((int16_t)read_im16(env, s));
2242         break;
2243     case OS_LONG:
2244         im = tcg_const_i32(read_im32(env, s));
2245         break;
2246     default:
2247         g_assert_not_reached();
2248     }
2249 
2250     if (with_SR) {
2251         /* SR/CCR can only be used with andi/eori/ori */
2252         if (op == 2 || op == 3 || op == 6) {
2253             disas_undef(env, s, insn);
2254             return;
2255         }
2256         switch (opsize) {
2257         case OS_BYTE:
2258             src1 = gen_get_ccr(s);
2259             break;
2260         case OS_WORD:
2261             if (IS_USER(s)) {
2262                 gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2263                 return;
2264             }
2265             src1 = gen_get_sr(s);
2266             break;
2267         default:
2268             /* OS_LONG; other sizes already hit g_assert_not_reached.  */
2269             disas_undef(env, s, insn);
2270             return;
2271         }
2272     } else {
2273         SRC_EA(env, src1, opsize, 1, (op == 6) ? NULL : &addr);
2274     }
2275     dest = tcg_temp_new();
2276     switch (op) {
2277     case 0: /* ori */
2278         tcg_gen_or_i32(dest, src1, im);
2279         if (with_SR) {
2280             gen_set_sr(s, dest, opsize == OS_BYTE);
2281             gen_exit_tb(s);
2282         } else {
2283             DEST_EA(env, insn, opsize, dest, &addr);
2284             gen_logic_cc(s, dest, opsize);
2285         }
2286         break;
2287     case 1: /* andi */
2288         tcg_gen_and_i32(dest, src1, im);
2289         if (with_SR) {
2290             gen_set_sr(s, dest, opsize == OS_BYTE);
2291             gen_exit_tb(s);
2292         } else {
2293             DEST_EA(env, insn, opsize, dest, &addr);
2294             gen_logic_cc(s, dest, opsize);
2295         }
2296         break;
2297     case 2: /* subi */
2298         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, src1, im);
2299         tcg_gen_sub_i32(dest, src1, im);
2300         gen_update_cc_add(dest, im, opsize);
2301         set_cc_op(s, CC_OP_SUBB + opsize);
2302         DEST_EA(env, insn, opsize, dest, &addr);
2303         break;
2304     case 3: /* addi */
2305         tcg_gen_add_i32(dest, src1, im);
2306         gen_update_cc_add(dest, im, opsize);
2307         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, im);
2308         set_cc_op(s, CC_OP_ADDB + opsize);
2309         DEST_EA(env, insn, opsize, dest, &addr);
2310         break;
2311     case 5: /* eori */
2312         tcg_gen_xor_i32(dest, src1, im);
2313         if (with_SR) {
2314             gen_set_sr(s, dest, opsize == OS_BYTE);
2315             gen_exit_tb(s);
2316         } else {
2317             DEST_EA(env, insn, opsize, dest, &addr);
2318             gen_logic_cc(s, dest, opsize);
2319         }
2320         break;
2321     case 6: /* cmpi */
2322         gen_update_cc_cmp(s, src1, im, opsize);
2323         break;
2324     default:
2325         abort();
2326     }
2327 }
2328 
2329 DISAS_INSN(cas)
2330 {
2331     int opsize;
2332     TCGv addr;
2333     uint16_t ext;
2334     TCGv load;
2335     TCGv cmp;
2336     MemOp opc;
2337 
2338     switch ((insn >> 9) & 3) {
2339     case 1:
2340         opsize = OS_BYTE;
2341         opc = MO_SB;
2342         break;
2343     case 2:
2344         opsize = OS_WORD;
2345         opc = MO_TESW;
2346         break;
2347     case 3:
2348         opsize = OS_LONG;
2349         opc = MO_TESL;
2350         break;
2351     default:
2352         g_assert_not_reached();
2353     }
2354 
2355     ext = read_im16(env, s);
2356 
2357     /* cas Dc,Du,<EA> */
2358 
2359     addr = gen_lea(env, s, insn, opsize);
2360     if (IS_NULL_QREG(addr)) {
2361         gen_addr_fault(s);
2362         return;
2363     }
2364 
2365     cmp = gen_extend(s, DREG(ext, 0), opsize, 1);
2366 
2367     /*
2368      * if  <EA> == Dc then
2369      *     <EA> = Du
2370      *     Dc = <EA> (because <EA> == Dc)
2371      * else
2372      *     Dc = <EA>
2373      */
2374 
2375     load = tcg_temp_new();
2376     tcg_gen_atomic_cmpxchg_i32(load, addr, cmp, DREG(ext, 6),
2377                                IS_USER(s), opc);
2378     /* update flags before setting cmp to load */
2379     gen_update_cc_cmp(s, load, cmp, opsize);
2380     gen_partset_reg(opsize, DREG(ext, 0), load);
2381 
2382     switch (extract32(insn, 3, 3)) {
2383     case 3: /* Indirect postincrement.  */
2384         tcg_gen_addi_i32(AREG(insn, 0), addr, opsize_bytes(opsize));
2385         break;
2386     case 4: /* Indirect predecrement.  */
2387         tcg_gen_mov_i32(AREG(insn, 0), addr);
2388         break;
2389     }
2390 }
2391 
2392 DISAS_INSN(cas2w)
2393 {
2394     uint16_t ext1, ext2;
2395     TCGv addr1, addr2;
2396     TCGv regs;
2397 
2398     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2399 
2400     ext1 = read_im16(env, s);
2401 
2402     if (ext1 & 0x8000) {
2403         /* Address Register */
2404         addr1 = AREG(ext1, 12);
2405     } else {
2406         /* Data Register */
2407         addr1 = DREG(ext1, 12);
2408     }
2409 
2410     ext2 = read_im16(env, s);
2411     if (ext2 & 0x8000) {
2412         /* Address Register */
2413         addr2 = AREG(ext2, 12);
2414     } else {
2415         /* Data Register */
2416         addr2 = DREG(ext2, 12);
2417     }
2418 
2419     /*
2420      * if (R1) == Dc1 && (R2) == Dc2 then
2421      *     (R1) = Du1
2422      *     (R2) = Du2
2423      * else
2424      *     Dc1 = (R1)
2425      *     Dc2 = (R2)
2426      */
2427 
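    /*
     * Pack the four register numbers into one word for the helper:
     * bits 2:0 = Du2, 5:3 = Du1, 8:6 = Dc2, 11:9 = Dc1.
     */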
2428     regs = tcg_const_i32(REG(ext2, 6) |
2429                          (REG(ext1, 6) << 3) |
2430                          (REG(ext2, 0) << 6) |
2431                          (REG(ext1, 0) << 9));
2432     if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2433         gen_helper_exit_atomic(cpu_env);
2434     } else {
2435         gen_helper_cas2w(cpu_env, regs, addr1, addr2);
2436     }
2437 
2438     /* Note that cas2w also assigns to env->cc_op.  */
2439     s->cc_op = CC_OP_CMPW;
2440     s->cc_op_synced = 1;
2441 }
2442 
2443 DISAS_INSN(cas2l)
2444 {
2445     uint16_t ext1, ext2;
2446     TCGv addr1, addr2, regs;
2447 
2448     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2449 
2450     ext1 = read_im16(env, s);
2451 
2452     if (ext1 & 0x8000) {
2453         /* Address Register */
2454         addr1 = AREG(ext1, 12);
2455     } else {
2456         /* Data Register */
2457         addr1 = DREG(ext1, 12);
2458     }
2459 
2460     ext2 = read_im16(env, s);
2461     if (ext2 & 0x8000) {
2462         /* Address Register */
2463         addr2 = AREG(ext2, 12);
2464     } else {
2465         /* Data Register */
2466         addr2 = DREG(ext2, 12);
2467     }
2468 
2469     /*
2470      * if (R1) == Dc1 && (R2) == Dc2 then
2471      *     (R1) = Du1
2472      *     (R2) = Du2
2473      * else
2474      *     Dc1 = (R1)
2475      *     Dc2 = (R2)
2476      */
2477 
2478     regs = tcg_const_i32(REG(ext2, 6) |
2479                          (REG(ext1, 6) << 3) |
2480                          (REG(ext2, 0) << 6) |
2481                          (REG(ext1, 0) << 9));
2482     if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2483         gen_helper_cas2l_parallel(cpu_env, regs, addr1, addr2);
2484     } else {
2485         gen_helper_cas2l(cpu_env, regs, addr1, addr2);
2486     }
2487 
2488     /* Note that cas2l also assigns to env->cc_op.  */
2489     s->cc_op = CC_OP_CMPL;
2490     s->cc_op_synced = 1;
2491 }
2492 
2493 DISAS_INSN(byterev)
2494 {
2495     TCGv reg;
2496 
2497     reg = DREG(insn, 0);
2498     tcg_gen_bswap32_i32(reg, reg);
2499 }
2500 
2501 DISAS_INSN(move)
2502 {
2503     TCGv src;
2504     TCGv dest;
2505     int op;
2506     int opsize;
2507 
2508     switch (insn >> 12) {
2509     case 1: /* move.b */
2510         opsize = OS_BYTE;
2511         break;
2512     case 2: /* move.l */
2513         opsize = OS_LONG;
2514         break;
2515     case 3: /* move.w */
2516         opsize = OS_WORD;
2517         break;
2518     default:
2519         abort();
2520     }
2521     SRC_EA(env, src, opsize, 1, NULL);
2522     op = (insn >> 6) & 7;
2523     if (op == 1) {
2524         /* movea */
2525         /* The value will already have been sign extended.  */
2526         dest = AREG(insn, 9);
2527         tcg_gen_mov_i32(dest, src);
2528     } else {
2529         /* normal move */
2530         uint16_t dest_ea;
2531         dest_ea = ((insn >> 9) & 7) | (op << 3);
2532         DEST_EA(env, dest_ea, opsize, src, NULL);
2533         /* This will be correct because loads sign extend.  */
2534         gen_logic_cc(s, src, opsize);
2535     }
2536 }
2537 
2538 DISAS_INSN(negx)
2539 {
2540     TCGv z;
2541     TCGv src;
2542     TCGv addr;
2543     int opsize;
2544 
2545     opsize = insn_opsize(insn);
2546     SRC_EA(env, src, opsize, 1, &addr);
2547 
2548     gen_flush_flags(s); /* compute old Z */
2549 
2550     /*
2551      * Perform subtract with borrow.
2552      * (X, N) = -(src + X);
2553      */
2554 
2555     z = tcg_const_i32(0);
2556     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, z, QREG_CC_X, z);
2557     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, z, z, QREG_CC_N, QREG_CC_X);
2558     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
2559 
2560     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
2561 
2562     /*
2563      * Compute signed-overflow for negation.  The normal formula for
2564      * subtraction is (res ^ dest) & (dest ^ src), but with dest==0
2565      * this simplifies to res & src.
2566      */
2567 
2568     tcg_gen_and_i32(QREG_CC_V, QREG_CC_N, src);
2569 
2570     /* Copy the rest of the results into place.  */
2571     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
2572     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
2573 
2574     set_cc_op(s, CC_OP_FLAGS);
2575 
2576     /* result is in QREG_CC_N */
2577 
2578     DEST_EA(env, insn, opsize, QREG_CC_N, &addr);
2579 }
2580 
2581 DISAS_INSN(lea)
2582 {
2583     TCGv reg;
2584     TCGv tmp;
2585 
2586     reg = AREG(insn, 9);
2587     tmp = gen_lea(env, s, insn, OS_LONG);
2588     if (IS_NULL_QREG(tmp)) {
2589         gen_addr_fault(s);
2590         return;
2591     }
2592     tcg_gen_mov_i32(reg, tmp);
2593 }
2594 
2595 DISAS_INSN(clr)
2596 {
2597     int opsize;
2598     TCGv zero;
2599 
2600     zero = tcg_const_i32(0);
2601 
2602     opsize = insn_opsize(insn);
2603     DEST_EA(env, insn, opsize, zero, NULL);
2604     gen_logic_cc(s, zero, opsize);
2605 }
2606 
2607 DISAS_INSN(move_from_ccr)
2608 {
2609     TCGv ccr;
2610 
2611     ccr = gen_get_ccr(s);
2612     DEST_EA(env, insn, OS_WORD, ccr, NULL);
2613 }
2614 
2615 DISAS_INSN(neg)
2616 {
2617     TCGv src1;
2618     TCGv dest;
2619     TCGv addr;
2620     int opsize;
2621 
2622     opsize = insn_opsize(insn);
2623     SRC_EA(env, src1, opsize, 1, &addr);
2624     dest = tcg_temp_new();
2625     tcg_gen_neg_i32(dest, src1);
2626     set_cc_op(s, CC_OP_SUBB + opsize);
2627     gen_update_cc_add(dest, src1, opsize);
2628     tcg_gen_setcondi_i32(TCG_COND_NE, QREG_CC_X, dest, 0);
2629     DEST_EA(env, insn, opsize, dest, &addr);
2630 }
2631 
2632 DISAS_INSN(move_to_ccr)
2633 {
2634     gen_move_to_sr(env, s, insn, true);
2635 }
2636 
2637 DISAS_INSN(not)
2638 {
2639     TCGv src1;
2640     TCGv dest;
2641     TCGv addr;
2642     int opsize;
2643 
2644     opsize = insn_opsize(insn);
2645     SRC_EA(env, src1, opsize, 1, &addr);
2646     dest = tcg_temp_new();
2647     tcg_gen_not_i32(dest, src1);
2648     DEST_EA(env, insn, opsize, dest, &addr);
2649     gen_logic_cc(s, dest, opsize);
2650 }
2651 
2652 DISAS_INSN(swap)
2653 {
2654     TCGv src1;
2655     TCGv src2;
2656     TCGv reg;
2657 
2658     src1 = tcg_temp_new();
2659     src2 = tcg_temp_new();
2660     reg = DREG(insn, 0);
2661     tcg_gen_shli_i32(src1, reg, 16);
2662     tcg_gen_shri_i32(src2, reg, 16);
2663     tcg_gen_or_i32(reg, src1, src2);
2664     gen_logic_cc(s, reg, OS_LONG);
2665 }
2666 
2667 DISAS_INSN(bkpt)
2668 {
2669 #if defined(CONFIG_SOFTMMU)
2670     gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2671 #else
2672     gen_exception(s, s->base.pc_next, EXCP_DEBUG);
2673 #endif
2674 }
2675 
2676 DISAS_INSN(pea)
2677 {
2678     TCGv tmp;
2679 
2680     tmp = gen_lea(env, s, insn, OS_LONG);
2681     if (IS_NULL_QREG(tmp)) {
2682         gen_addr_fault(s);
2683         return;
2684     }
2685     gen_push(s, tmp);
2686 }
2687 
2688 DISAS_INSN(ext)
2689 {
2690     int op;
2691     TCGv reg;
2692     TCGv tmp;
2693 
2694     reg = DREG(insn, 0);
2695     op = (insn >> 6) & 7;
2696     tmp = tcg_temp_new();
2697     if (op == 3)
2698         tcg_gen_ext16s_i32(tmp, reg);
2699     else
2700         tcg_gen_ext8s_i32(tmp, reg);
2701     if (op == 2)
2702         gen_partset_reg(OS_WORD, reg, tmp);
2703     else
2704         tcg_gen_mov_i32(reg, tmp);
2705     gen_logic_cc(s, tmp, OS_LONG);
2706 }
2707 
2708 DISAS_INSN(tst)
2709 {
2710     int opsize;
2711     TCGv tmp;
2712 
2713     opsize = insn_opsize(insn);
2714     SRC_EA(env, tmp, opsize, 1, NULL);
2715     gen_logic_cc(s, tmp, opsize);
2716 }
2717 
2718 DISAS_INSN(pulse)
2719 {
2720   /* Implemented as a NOP.  */
2721     /* Implemented as a NOP.  */
2722 
2723 DISAS_INSN(illegal)
2724 {
2725     gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2726 }
2727 
2728 DISAS_INSN(tas)
2729 {
2730     int mode = extract32(insn, 3, 3);
2731     int reg0 = REG(insn, 0);
2732 
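    /*
     * TAS tests the byte (setting N and Z) and then sets its top bit.
     * The register form is a plain read-modify-write; the memory form
     * must be atomic, hence the fetch-or below.
     */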
2733     if (mode == 0) {
2734         /* data register direct */
2735         TCGv dest = cpu_dregs[reg0];
2736         gen_logic_cc(s, dest, OS_BYTE);
2737         tcg_gen_ori_tl(dest, dest, 0x80);
2738     } else {
2739         TCGv src1, addr;
2740 
2741         addr = gen_lea_mode(env, s, mode, reg0, OS_BYTE);
2742         if (IS_NULL_QREG(addr)) {
2743             gen_addr_fault(s);
2744             return;
2745         }
2746         src1 = tcg_temp_new();
2747         tcg_gen_atomic_fetch_or_tl(src1, addr, tcg_constant_tl(0x80),
2748                                    IS_USER(s), MO_SB);
2749         gen_logic_cc(s, src1, OS_BYTE);
2750 
2751         switch (mode) {
2752         case 3: /* Indirect postincrement.  */
2753             tcg_gen_addi_i32(AREG(insn, 0), addr, 1);
2754             break;
2755         case 4: /* Indirect predecrement.  */
2756             tcg_gen_mov_i32(AREG(insn, 0), addr);
2757             break;
2758         }
2759     }
2760 }
2761 
2762 DISAS_INSN(mull)
2763 {
2764     uint16_t ext;
2765     TCGv src1;
2766     int sign;
2767 
2768     ext = read_im16(env, s);
2769 
2770     sign = ext & 0x800;
2771 
2772     if (ext & 0x400) {
2773         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
2774             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2775             return;
2776         }
2777 
2778         SRC_EA(env, src1, OS_LONG, 0, NULL);
2779 
2780         if (sign) {
2781             tcg_gen_muls2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2782         } else {
2783             tcg_gen_mulu2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2784         }
2785         /* if Dl == Dh, 68040 returns low word */
2786         tcg_gen_mov_i32(DREG(ext, 0), QREG_CC_N);
2787         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_Z);
2788         tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N);
2789 
2790         tcg_gen_movi_i32(QREG_CC_V, 0);
2791         tcg_gen_movi_i32(QREG_CC_C, 0);
2792 
2793         set_cc_op(s, CC_OP_FLAGS);
2794         return;
2795     }
2796     SRC_EA(env, src1, OS_LONG, 0, NULL);
2797     if (m68k_feature(s->env, M68K_FEATURE_M68K)) {
2798         tcg_gen_movi_i32(QREG_CC_C, 0);
2799         if (sign) {
2800             tcg_gen_muls2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2801             /* QREG_CC_V is -(QREG_CC_V != (QREG_CC_N >> 31)) */
2802             tcg_gen_sari_i32(QREG_CC_Z, QREG_CC_N, 31);
2803             tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_Z);
2804         } else {
2805             tcg_gen_mulu2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2806             /* QREG_CC_V is -(QREG_CC_V != 0), use QREG_CC_C as 0 */
2807             tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_C);
2808         }
2809         tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
2810         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_N);
2811 
2812         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
2813 
2814         set_cc_op(s, CC_OP_FLAGS);
2815     } else {
2816         /*
2817          * The upper 32 bits of the product are discarded, so
2818          * muls.l and mulu.l are functionally equivalent.
2819          */
2820         tcg_gen_mul_i32(DREG(ext, 12), src1, DREG(ext, 12));
2821         gen_logic_cc(s, DREG(ext, 12), OS_LONG);
2822     }
2823 }
2824 
2825 static void gen_link(DisasContext *s, uint16_t insn, int32_t offset)
2826 {
2827     TCGv reg;
2828     TCGv tmp;
2829 
2830     reg = AREG(insn, 0);
2831     tmp = tcg_temp_new();
2832     tcg_gen_subi_i32(tmp, QREG_SP, 4);
2833     gen_store(s, OS_LONG, tmp, reg, IS_USER(s));
2834     if ((insn & 7) != 7) {
2835         tcg_gen_mov_i32(reg, tmp);
2836     }
2837     tcg_gen_addi_i32(QREG_SP, tmp, offset);
2838 }
2839 
2840 DISAS_INSN(link)
2841 {
2842     int16_t offset;
2843 
2844     offset = read_im16(env, s);
2845     gen_link(s, insn, offset);
2846 }
2847 
2848 DISAS_INSN(linkl)
2849 {
2850     int32_t offset;
2851 
2852     offset = read_im32(env, s);
2853     gen_link(s, insn, offset);
2854 }
2855 
2856 DISAS_INSN(unlk)
2857 {
2858     TCGv src;
2859     TCGv reg;
2860     TCGv tmp;
2861 
2862     src = tcg_temp_new();
2863     reg = AREG(insn, 0);
2864     tcg_gen_mov_i32(src, reg);
2865     tmp = gen_load(s, OS_LONG, src, 0, IS_USER(s));
2866     tcg_gen_mov_i32(reg, tmp);
2867     tcg_gen_addi_i32(QREG_SP, src, 4);
2868 }
2869 
2870 #if defined(CONFIG_SOFTMMU)
2871 DISAS_INSN(reset)
2872 {
2873     if (IS_USER(s)) {
2874         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2875         return;
2876     }
2877 
2878     gen_helper_reset(cpu_env);
2879 }
2880 #endif
2881 
2882 DISAS_INSN(nop)
2883 {
2884 }
2885 
2886 DISAS_INSN(rtd)
2887 {
2888     TCGv tmp;
2889     int16_t offset = read_im16(env, s);
2890 
2891     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2892     tcg_gen_addi_i32(QREG_SP, QREG_SP, offset + 4);
2893     gen_jmp(s, tmp);
2894 }
2895 
2896 DISAS_INSN(rtr)
2897 {
2898     TCGv tmp;
2899     TCGv ccr;
2900     TCGv sp;
2901 
2902     sp = tcg_temp_new();
2903     ccr = gen_load(s, OS_WORD, QREG_SP, 0, IS_USER(s));
2904     tcg_gen_addi_i32(sp, QREG_SP, 2);
2905     tmp = gen_load(s, OS_LONG, sp, 0, IS_USER(s));
2906     tcg_gen_addi_i32(QREG_SP, sp, 4);
2907 
2908     gen_set_sr(s, ccr, true);
2909 
2910     gen_jmp(s, tmp);
2911 }
2912 
2913 DISAS_INSN(rts)
2914 {
2915     TCGv tmp;
2916 
2917     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2918     tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
2919     gen_jmp(s, tmp);
2920 }
2921 
2922 DISAS_INSN(jump)
2923 {
2924     TCGv tmp;
2925 
2926     /*
2927      * Load the target address first to ensure correct exception
2928      * behavior.
2929      */
2930     tmp = gen_lea(env, s, insn, OS_LONG);
2931     if (IS_NULL_QREG(tmp)) {
2932         gen_addr_fault(s);
2933         return;
2934     }
2935     if ((insn & 0x40) == 0) {
2936         /* jsr */
2937         gen_push(s, tcg_const_i32(s->pc));
2938     }
2939     gen_jmp(s, tmp);
2940 }
2941 
2942 DISAS_INSN(addsubq)
2943 {
2944     TCGv src;
2945     TCGv dest;
2946     TCGv val;
2947     int imm;
2948     TCGv addr;
2949     int opsize;
2950 
2951     if ((insn & 070) == 010) {
2952         /* Operation on address register is always long.  */
2953         opsize = OS_LONG;
2954     } else {
2955         opsize = insn_opsize(insn);
2956     }
2957     SRC_EA(env, src, opsize, 1, &addr);
2958     imm = (insn >> 9) & 7;
2959     if (imm == 0) {
2960         imm = 8;
2961     }
2962     val = tcg_const_i32(imm);
2963     dest = tcg_temp_new();
2964     tcg_gen_mov_i32(dest, src);
2965     if ((insn & 0x38) == 0x08) {
2966         /*
2967          * Don't update condition codes if the destination is an
2968          * address register.
2969          */
2970         if (insn & 0x0100) {
2971             tcg_gen_sub_i32(dest, dest, val);
2972         } else {
2973             tcg_gen_add_i32(dest, dest, val);
2974         }
2975     } else {
2976         if (insn & 0x0100) {
2977             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
2978             tcg_gen_sub_i32(dest, dest, val);
2979             set_cc_op(s, CC_OP_SUBB + opsize);
2980         } else {
2981             tcg_gen_add_i32(dest, dest, val);
2982             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
2983             set_cc_op(s, CC_OP_ADDB + opsize);
2984         }
2985         gen_update_cc_add(dest, val, opsize);
2986     }
2987     DEST_EA(env, insn, opsize, dest, &addr);
2988 }
2989 
2990 DISAS_INSN(branch)
2991 {
2992     int32_t offset;
2993     uint32_t base;
2994     int op;
2995 
2996     base = s->pc;
2997     op = (insn >> 8) & 0xf;
2998     offset = (int8_t)insn;
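    /* An 8-bit displacement of 0x00 selects a 16-bit one, 0xff a 32-bit one. */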
2999     if (offset == 0) {
3000         offset = (int16_t)read_im16(env, s);
3001     } else if (offset == -1) {
3002         offset = read_im32(env, s);
3003     }
3004     if (op == 1) {
3005         /* bsr */
3006         gen_push(s, tcg_const_i32(s->pc));
3007     }
3008     if (op > 1) {
3009         /* Bcc */
3010         TCGLabel *l1 = gen_new_label();
3011         gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
3012         gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
3013         gen_set_label(l1);
3014         gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
3015     } else {
3016         /* Unconditional branch.  */
3017         update_cc_op(s);
3018         gen_jmp_tb(s, 0, base + offset, s->base.pc_next);
3019     }
3020 }
3021 
3022 DISAS_INSN(moveq)
3023 {
3024     tcg_gen_movi_i32(DREG(insn, 9), (int8_t)insn);
3025     gen_logic_cc(s, DREG(insn, 9), OS_LONG);
3026 }
3027 
3028 DISAS_INSN(mvzs)
3029 {
3030     int opsize;
3031     TCGv src;
3032     TCGv reg;
3033 
3034     if (insn & 0x40)
3035         opsize = OS_WORD;
3036     else
3037         opsize = OS_BYTE;
3038     SRC_EA(env, src, opsize, (insn & 0x80) == 0, NULL);
3039     reg = DREG(insn, 9);
3040     tcg_gen_mov_i32(reg, src);
3041     gen_logic_cc(s, src, opsize);
3042 }
3043 
3044 DISAS_INSN(or)
3045 {
3046     TCGv reg;
3047     TCGv dest;
3048     TCGv src;
3049     TCGv addr;
3050     int opsize;
3051 
3052     opsize = insn_opsize(insn);
3053     reg = gen_extend(s, DREG(insn, 9), opsize, 0);
3054     dest = tcg_temp_new();
3055     if (insn & 0x100) {
3056         SRC_EA(env, src, opsize, 0, &addr);
3057         tcg_gen_or_i32(dest, src, reg);
3058         DEST_EA(env, insn, opsize, dest, &addr);
3059     } else {
3060         SRC_EA(env, src, opsize, 0, NULL);
3061         tcg_gen_or_i32(dest, src, reg);
3062         gen_partset_reg(opsize, DREG(insn, 9), dest);
3063     }
3064     gen_logic_cc(s, dest, opsize);
3065 }
3066 
3067 DISAS_INSN(suba)
3068 {
3069     TCGv src;
3070     TCGv reg;
3071 
3072     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3073     reg = AREG(insn, 9);
3074     tcg_gen_sub_i32(reg, reg, src);
3075 }
3076 
3077 static inline void gen_subx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3078 {
3079     TCGv tmp;
3080 
3081     gen_flush_flags(s); /* compute old Z */
3082 
3083     /*
3084      * Perform subtract with borrow.
3085      * (X, N) = dest - (src + X);
3086      */
3087 
3088     tmp = tcg_const_i32(0);
3089     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, tmp, QREG_CC_X, tmp);
3090     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, dest, tmp, QREG_CC_N, QREG_CC_X);
3091     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3092     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
3093 
3094     /* Compute signed-overflow for subtract.  */
3095 
3096     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, dest);
3097     tcg_gen_xor_i32(tmp, dest, src);
3098     tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, tmp);
3099 
3100     /* Copy the rest of the results into place.  */
3101     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3102     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3103 
3104     set_cc_op(s, CC_OP_FLAGS);
3105 
3106     /* result is in QREG_CC_N */
3107 }
3108 
3109 DISAS_INSN(subx_reg)
3110 {
3111     TCGv dest;
3112     TCGv src;
3113     int opsize;
3114 
3115     opsize = insn_opsize(insn);
3116 
3117     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3118     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3119 
3120     gen_subx(s, src, dest, opsize);
3121 
3122     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3123 }
3124 
3125 DISAS_INSN(subx_mem)
3126 {
3127     TCGv src;
3128     TCGv addr_src;
3129     TCGv dest;
3130     TCGv addr_dest;
3131     int opsize;
3132 
3133     opsize = insn_opsize(insn);
3134 
3135     addr_src = AREG(insn, 0);
3136     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3137     src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3138 
3139     addr_dest = AREG(insn, 9);
3140     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3141     dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3142 
3143     gen_subx(s, src, dest, opsize);
3144 
3145     gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3146 }
3147 
3148 DISAS_INSN(mov3q)
3149 {
3150     TCGv src;
3151     int val;
3152 
3153     val = (insn >> 9) & 7;
3154     if (val == 0)
3155         val = -1;
3156     src = tcg_const_i32(val);
3157     gen_logic_cc(s, src, OS_LONG);
3158     DEST_EA(env, insn, OS_LONG, src, NULL);
3159 }
3160 
3161 DISAS_INSN(cmp)
3162 {
3163     TCGv src;
3164     TCGv reg;
3165     int opsize;
3166 
3167     opsize = insn_opsize(insn);
3168     SRC_EA(env, src, opsize, 1, NULL);
3169     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
3170     gen_update_cc_cmp(s, reg, src, opsize);
3171 }
3172 
3173 DISAS_INSN(cmpa)
3174 {
3175     int opsize;
3176     TCGv src;
3177     TCGv reg;
3178 
3179     if (insn & 0x100) {
3180         opsize = OS_LONG;
3181     } else {
3182         opsize = OS_WORD;
3183     }
3184     SRC_EA(env, src, opsize, 1, NULL);
3185     reg = AREG(insn, 9);
3186     gen_update_cc_cmp(s, reg, src, OS_LONG);
3187 }
3188 
3189 DISAS_INSN(cmpm)
3190 {
3191     int opsize = insn_opsize(insn);
3192     TCGv src, dst;
3193 
3194     /* Post-increment load (mode 3) from Ay.  */
3195     src = gen_ea_mode(env, s, 3, REG(insn, 0), opsize,
3196                       NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3197     /* Post-increment load (mode 3) from Ax.  */
3198     dst = gen_ea_mode(env, s, 3, REG(insn, 9), opsize,
3199                       NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3200 
3201     gen_update_cc_cmp(s, dst, src, opsize);
3202 }
3203 
3204 DISAS_INSN(eor)
3205 {
3206     TCGv src;
3207     TCGv dest;
3208     TCGv addr;
3209     int opsize;
3210 
3211     opsize = insn_opsize(insn);
3212 
3213     SRC_EA(env, src, opsize, 0, &addr);
3214     dest = tcg_temp_new();
3215     tcg_gen_xor_i32(dest, src, DREG(insn, 9));
3216     gen_logic_cc(s, dest, opsize);
3217     DEST_EA(env, insn, opsize, dest, &addr);
3218 }
3219 
3220 static void do_exg(TCGv reg1, TCGv reg2)
3221 {
3222     TCGv temp = tcg_temp_new();
3223     tcg_gen_mov_i32(temp, reg1);
3224     tcg_gen_mov_i32(reg1, reg2);
3225     tcg_gen_mov_i32(reg2, temp);
3226 }
3227 
3228 DISAS_INSN(exg_dd)
3229 {
3230     /* exchange Dx and Dy */
3231     do_exg(DREG(insn, 9), DREG(insn, 0));
3232 }
3233 
3234 DISAS_INSN(exg_aa)
3235 {
3236     /* exchange Ax and Ay */
3237     do_exg(AREG(insn, 9), AREG(insn, 0));
3238 }
3239 
3240 DISAS_INSN(exg_da)
3241 {
3242     /* exchange Dx and Ay */
3243     do_exg(DREG(insn, 9), AREG(insn, 0));
3244 }
3245 
3246 DISAS_INSN(and)
3247 {
3248     TCGv src;
3249     TCGv reg;
3250     TCGv dest;
3251     TCGv addr;
3252     int opsize;
3253 
3254     dest = tcg_temp_new();
3255 
3256     opsize = insn_opsize(insn);
3257     reg = DREG(insn, 9);
3258     if (insn & 0x100) {
3259         SRC_EA(env, src, opsize, 0, &addr);
3260         tcg_gen_and_i32(dest, src, reg);
3261         DEST_EA(env, insn, opsize, dest, &addr);
3262     } else {
3263         SRC_EA(env, src, opsize, 0, NULL);
3264         tcg_gen_and_i32(dest, src, reg);
3265         gen_partset_reg(opsize, reg, dest);
3266     }
3267     gen_logic_cc(s, dest, opsize);
3268 }
3269 
3270 DISAS_INSN(adda)
3271 {
3272     TCGv src;
3273     TCGv reg;
3274 
3275     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3276     reg = AREG(insn, 9);
3277     tcg_gen_add_i32(reg, reg, src);
3278 }
3279 
3280 static inline void gen_addx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3281 {
3282     TCGv tmp;
3283 
3284     gen_flush_flags(s); /* compute old Z */
3285 
3286     /*
3287      * Perform addition with carry.
3288      * (X, N) = src + dest + X;
3289      */
3290 
3291     tmp = tcg_const_i32(0);
3292     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_X, tmp, dest, tmp);
3293     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_N, QREG_CC_X, src, tmp);
3294     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3295 
3296     /* Compute signed-overflow for addition.  */
3297 
3298     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3299     tcg_gen_xor_i32(tmp, dest, src);
3300     tcg_gen_andc_i32(QREG_CC_V, QREG_CC_V, tmp);
3301 
3302     /* Copy the rest of the results into place.  */
3303     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3304     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3305 
3306     set_cc_op(s, CC_OP_FLAGS);
3307 
3308     /* result is in QREG_CC_N */
3309 }
3310 
3311 DISAS_INSN(addx_reg)
3312 {
3313     TCGv dest;
3314     TCGv src;
3315     int opsize;
3316 
3317     opsize = insn_opsize(insn);
3318 
3319     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3320     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3321 
3322     gen_addx(s, src, dest, opsize);
3323 
3324     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3325 }
3326 
3327 DISAS_INSN(addx_mem)
3328 {
3329     TCGv src;
3330     TCGv addr_src;
3331     TCGv dest;
3332     TCGv addr_dest;
3333     int opsize;
3334 
3335     opsize = insn_opsize(insn);
3336 
3337     addr_src = AREG(insn, 0);
3338     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3339     src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3340 
3341     addr_dest = AREG(insn, 9);
3342     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3343     dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3344 
3345     gen_addx(s, src, dest, opsize);
3346 
3347     gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3348 }
3349 
3350 static inline void shift_im(DisasContext *s, uint16_t insn, int opsize)
3351 {
3352     int count = (insn >> 9) & 7;
3353     int logical = insn & 8;
3354     int left = insn & 0x100;
3355     int bits = opsize_bytes(opsize) * 8;
3356     TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
3357 
3358     if (count == 0) {
3359         count = 8;
3360     }
3361 
3362     tcg_gen_movi_i32(QREG_CC_V, 0);
3363     if (left) {
3364         tcg_gen_shri_i32(QREG_CC_C, reg, bits - count);
3365         tcg_gen_shli_i32(QREG_CC_N, reg, count);
3366 
3367         /*
3368          * Note that ColdFire always clears V (done above),
3369          * while M68000 sets V if the most significant bit is changed at
3370          * any time during the shift operation.
3371          */
3372         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
3373             /* if shift count >= bits, V is (reg != 0) */
3374             if (count >= bits) {
3375                 tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, reg, QREG_CC_V);
3376             } else {
3377                 TCGv t0 = tcg_temp_new();
3378                 tcg_gen_sari_i32(QREG_CC_V, reg, bits - 1);
3379                 tcg_gen_sari_i32(t0, reg, bits - count - 1);
3380                 tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, t0);
3381             }
3382             tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3383         }
3384     } else {
3385         tcg_gen_shri_i32(QREG_CC_C, reg, count - 1);
3386         if (logical) {
3387             tcg_gen_shri_i32(QREG_CC_N, reg, count);
3388         } else {
3389             tcg_gen_sari_i32(QREG_CC_N, reg, count);
3390         }
3391     }
3392 
3393     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3394     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3395     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3396     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3397 
3398     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3399     set_cc_op(s, CC_OP_FLAGS);
3400 }
3401 
3402 static inline void shift_reg(DisasContext *s, uint16_t insn, int opsize)
3403 {
3404     int logical = insn & 8;
3405     int left = insn & 0x100;
3406     int bits = opsize_bytes(opsize) * 8;
3407     TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
3408     TCGv s32;
3409     TCGv_i64 t64, s64;
3410 
3411     t64 = tcg_temp_new_i64();
3412     s64 = tcg_temp_new_i64();
3413     s32 = tcg_temp_new();
3414 
3415     /*
3416      * Note that m68k truncates the shift count modulo 64, not 32.
3417      * In addition, a 64-bit shift makes it easy to find "the last
3418      * bit shifted out", for the carry flag.
3419      */
3420     tcg_gen_andi_i32(s32, DREG(insn, 9), 63);
3421     tcg_gen_extu_i32_i64(s64, s32);
3422     tcg_gen_extu_i32_i64(t64, reg);
3423 
3424     /* Optimistically set V=0.  Also used as a zero source below.  */
3425     tcg_gen_movi_i32(QREG_CC_V, 0);
3426     if (left) {
3427         tcg_gen_shl_i64(t64, t64, s64);
3428 
3429         if (opsize == OS_LONG) {
3430             tcg_gen_extr_i64_i32(QREG_CC_N, QREG_CC_C, t64);
3431             /* Note that C=0 if shift count is 0, and we get that for free.  */
3432         } else {
3433             TCGv zero = tcg_const_i32(0);
3434             tcg_gen_extrl_i64_i32(QREG_CC_N, t64);
3435             tcg_gen_shri_i32(QREG_CC_C, QREG_CC_N, bits);
3436             tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3437                                 s32, zero, zero, QREG_CC_C);
3438         }
3439         tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3440 
3441         /* X = C, but only if the shift count was non-zero.  */
3442         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3443                             QREG_CC_C, QREG_CC_X);
3444 
3445         /*
3446          * M68000 sets V if the most significant bit is changed at
3447          * any time during the shift operation.  Do this via creating
3448          * an extension of the sign bit, comparing, and discarding
3449          * the bits below the sign bit.  I.e.
3450          *     int64_t s = (intN_t)reg;
3451          *     int64_t t = (int64_t)(intN_t)reg << count;
3452          *     V = ((s ^ t) & (-1 << (bits - 1))) != 0
3453          */
3454         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
3455             TCGv_i64 tt = tcg_const_i64(32);
3456             /* if shift is greater than 32, use 32 */
3457             tcg_gen_movcond_i64(TCG_COND_GT, s64, s64, tt, tt, s64);
3458             /* Sign extend the input to 64 bits; re-do the shift.  */
3459             tcg_gen_ext_i32_i64(t64, reg);
3460             tcg_gen_shl_i64(s64, t64, s64);
3461             /* Clear all bits that are unchanged.  */
3462             tcg_gen_xor_i64(t64, t64, s64);
3463             /* Ignore the bits below the sign bit.  */
3464             tcg_gen_andi_i64(t64, t64, -1ULL << (bits - 1));
3465             /* If any bits remain set, we have overflow.  */
3466             tcg_gen_setcondi_i64(TCG_COND_NE, t64, t64, 0);
3467             tcg_gen_extrl_i64_i32(QREG_CC_V, t64);
3468             tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3469         }
3470     } else {
3471         tcg_gen_shli_i64(t64, t64, 32);
3472         if (logical) {
3473             tcg_gen_shr_i64(t64, t64, s64);
3474         } else {
3475             tcg_gen_sar_i64(t64, t64, s64);
3476         }
3477         tcg_gen_extr_i64_i32(QREG_CC_C, QREG_CC_N, t64);
3478 
3479         /* Note that C=0 if shift count is 0, and we get that for free.  */
3480         tcg_gen_shri_i32(QREG_CC_C, QREG_CC_C, 31);
3481 
3482         /* X = C, but only if the shift count was non-zero.  */
3483         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3484                             QREG_CC_C, QREG_CC_X);
3485     }
3486     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3487     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3488 
3489     /* Write back the result.  */
3490     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3491     set_cc_op(s, CC_OP_FLAGS);
3492 }
3493 
3494 DISAS_INSN(shift8_im)
3495 {
3496     shift_im(s, insn, OS_BYTE);
3497 }
3498 
3499 DISAS_INSN(shift16_im)
3500 {
3501     shift_im(s, insn, OS_WORD);
3502 }
3503 
3504 DISAS_INSN(shift_im)
3505 {
3506     shift_im(s, insn, OS_LONG);
3507 }
3508 
3509 DISAS_INSN(shift8_reg)
3510 {
3511     shift_reg(s, insn, OS_BYTE);
3512 }
3513 
3514 DISAS_INSN(shift16_reg)
3515 {
3516     shift_reg(s, insn, OS_WORD);
3517 }
3518 
3519 DISAS_INSN(shift_reg)
3520 {
3521     shift_reg(s, insn, OS_LONG);
3522 }
3523 
3524 DISAS_INSN(shift_mem)
3525 {
3526     int logical = insn & 8;
3527     int left = insn & 0x100;
3528     TCGv src;
3529     TCGv addr;
3530 
3531     SRC_EA(env, src, OS_WORD, !logical, &addr);
3532     tcg_gen_movi_i32(QREG_CC_V, 0);
3533     if (left) {
3534         tcg_gen_shri_i32(QREG_CC_C, src, 15);
3535         tcg_gen_shli_i32(QREG_CC_N, src, 1);
3536 
3537         /*
3538          * Note that ColdFire always clears V,
3539          * while M68000 sets V if the most significant bit is changed at
3540          * any time during the shift operation.
3541          */
3542         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
3543             src = gen_extend(s, src, OS_WORD, 1);
3544             tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3545         }
3546     } else {
3547         tcg_gen_mov_i32(QREG_CC_C, src);
3548         if (logical) {
3549             tcg_gen_shri_i32(QREG_CC_N, src, 1);
3550         } else {
3551             tcg_gen_sari_i32(QREG_CC_N, src, 1);
3552         }
3553     }
3554 
3555     gen_ext(QREG_CC_N, QREG_CC_N, OS_WORD, 1);
3556     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3557     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3558     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3559 
3560     DEST_EA(env, insn, OS_WORD, QREG_CC_N, &addr);
3561     set_cc_op(s, CC_OP_FLAGS);
3562 }
3563 
3564 static void rotate(TCGv reg, TCGv shift, int left, int size)
3565 {
3566     switch (size) {
3567     case 8:
3568         /* Replicate the 8-bit input so that a 32-bit rotate works.  */
3569         tcg_gen_ext8u_i32(reg, reg);
3570         tcg_gen_muli_i32(reg, reg, 0x01010101);
3571         goto do_long;
3572     case 16:
3573         /* Replicate the 16-bit input so that a 32-bit rotate works.  */
3574         tcg_gen_deposit_i32(reg, reg, reg, 16, 16);
3575         goto do_long;
3576     do_long:
3577     default:
3578         if (left) {
3579             tcg_gen_rotl_i32(reg, reg, shift);
3580         } else {
3581             tcg_gen_rotr_i32(reg, reg, shift);
3582         }
3583     }
3584 
3585     /* compute flags */
3586 
3587     switch (size) {
3588     case 8:
3589         tcg_gen_ext8s_i32(reg, reg);
3590         break;
3591     case 16:
3592         tcg_gen_ext16s_i32(reg, reg);
3593         break;
3594     default:
3595         break;
3596     }
3597 
3598     /* QREG_CC_X is not affected */
3599 
3600     tcg_gen_mov_i32(QREG_CC_N, reg);
3601     tcg_gen_mov_i32(QREG_CC_Z, reg);
3602 
3603     if (left) {
3604         tcg_gen_andi_i32(QREG_CC_C, reg, 1);
3605     } else {
3606         tcg_gen_shri_i32(QREG_CC_C, reg, 31);
3607     }
3608 
3609     tcg_gen_movi_i32(QREG_CC_V, 0); /* always cleared */
3610 }
3611 
3612 static void rotate_x_flags(TCGv reg, TCGv X, int size)
3613 {
3614     switch (size) {
3615     case 8:
3616         tcg_gen_ext8s_i32(reg, reg);
3617         break;
3618     case 16:
3619         tcg_gen_ext16s_i32(reg, reg);
3620         break;
3621     default:
3622         break;
3623     }
3624     tcg_gen_mov_i32(QREG_CC_N, reg);
3625     tcg_gen_mov_i32(QREG_CC_Z, reg);
3626     tcg_gen_mov_i32(QREG_CC_X, X);
3627     tcg_gen_mov_i32(QREG_CC_C, X);
3628     tcg_gen_movi_i32(QREG_CC_V, 0);
3629 }
3630 
3631 /* Result of rotate_x() is valid if 0 <= shift <= size */
3632 static TCGv rotate_x(TCGv reg, TCGv shift, int left, int size)
3633 {
3634     TCGv X, shl, shr, shx, sz, zero;
3635 
3636     sz = tcg_const_i32(size);
3637 
3638     shr = tcg_temp_new();
3639     shl = tcg_temp_new();
3640     shx = tcg_temp_new();
3641     if (left) {
3642         tcg_gen_mov_i32(shl, shift);      /* shl = shift */
3643         tcg_gen_movi_i32(shr, size + 1);
3644         tcg_gen_sub_i32(shr, shr, shift); /* shr = size + 1 - shift */
3645         tcg_gen_subi_i32(shx, shift, 1);  /* shx = shift - 1 */
3646         /* shx = shx < 0 ? size : shx; */
3647         zero = tcg_const_i32(0);
3648         tcg_gen_movcond_i32(TCG_COND_LT, shx, shx, zero, sz, shx);
3649     } else {
3650         tcg_gen_mov_i32(shr, shift);      /* shr = shift */
3651         tcg_gen_movi_i32(shl, size + 1);
3652         tcg_gen_sub_i32(shl, shl, shift); /* shl = size + 1 - shift */
3653         tcg_gen_sub_i32(shx, sz, shift); /* shx = size - shift */
3654     }
3655 
3656     /* reg = (reg << shl) | (reg >> shr) | (x << shx); */
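    /*
     * e.g. an 8-bit rotate-left by 1 gives shl = 1, shr = 8, shx = 0, so a
     * zero-extended byte becomes (reg << 1) | X, with the new X taken from
     * bit 8 of the result below.
     */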
3657 
3658     tcg_gen_shl_i32(shl, reg, shl);
3659     tcg_gen_shr_i32(shr, reg, shr);
3660     tcg_gen_or_i32(reg, shl, shr);
3661     tcg_gen_shl_i32(shx, QREG_CC_X, shx);
3662     tcg_gen_or_i32(reg, reg, shx);
3663 
3664     /* X = (reg >> size) & 1 */
3665 
3666     X = tcg_temp_new();
3667     tcg_gen_extract_i32(X, reg, size, 1);
3668 
3669     return X;
3670 }
3671 
3672 /* Result of rotate32_x() is valid if 0 <= shift < 33 */
3673 static TCGv rotate32_x(TCGv reg, TCGv shift, int left)
3674 {
3675     TCGv_i64 t0, shift64;
3676     TCGv X, lo, hi, zero;
3677 
3678     shift64 = tcg_temp_new_i64();
3679     tcg_gen_extu_i32_i64(shift64, shift);
3680 
3681     t0 = tcg_temp_new_i64();
3682 
3683     X = tcg_temp_new();
3684     lo = tcg_temp_new();
3685     hi = tcg_temp_new();
3686 
3687     if (left) {
3688         /* create [reg:X:..] */
3689 
3690         tcg_gen_shli_i32(lo, QREG_CC_X, 31);
3691         tcg_gen_concat_i32_i64(t0, lo, reg);
3692 
3693         /* rotate */
3694 
3695         tcg_gen_rotl_i64(t0, t0, shift64);
3696 
3697         /* result is [reg:..:reg:X] */
3698 
3699         tcg_gen_extr_i64_i32(lo, hi, t0);
3700         tcg_gen_andi_i32(X, lo, 1);
3701 
3702         tcg_gen_shri_i32(lo, lo, 1);
3703     } else {
3704         /* create [..:X:reg] */
3705 
3706         tcg_gen_concat_i32_i64(t0, reg, QREG_CC_X);
3707 
3708         tcg_gen_rotr_i64(t0, t0, shift64);
3709 
3710         /* result is value: [X:reg:..:reg] */
3711 
3712         tcg_gen_extr_i64_i32(lo, hi, t0);
3713 
3714         /* extract X */
3715 
3716         tcg_gen_shri_i32(X, hi, 31);
3717 
3718         /* extract result */
3719 
3720         tcg_gen_shli_i32(hi, hi, 1);
3721     }
3722     tcg_gen_or_i32(lo, lo, hi);
3723 
3724     /* if shift == 0, register and X are not affected */
3725 
3726     zero = tcg_const_i32(0);
3727     tcg_gen_movcond_i32(TCG_COND_EQ, X, shift, zero, QREG_CC_X, X);
3728     tcg_gen_movcond_i32(TCG_COND_EQ, reg, shift, zero, reg, lo);
3729 
3730     return X;
3731 }
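/*
 * Minimal reference sketch (an assumption-level model with hypothetical
 * variable names, not code from this file): a 32-bit ROXL/ROXR is a
 * rotate of the 33-bit quantity [X:reg].  In plain C, for a count
 * 1 <= n <= 32:
 *
 *     uint64_t v = ((uint64_t)x << 32) | reg;   (33 significant bits)
 *     v = left ? (v << n) | (v >> (33 - n))
 *              : (v >> n) | (v << (33 - n));
 *     reg = (uint32_t)v;
 *     x   = (v >> 32) & 1;
 *
 * The movcond pair above additionally leaves reg and X unchanged when
 * the count is zero.
 */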
3732 
3733 DISAS_INSN(rotate_im)
3734 {
3735     TCGv shift;
3736     int tmp;
3737     int left = (insn & 0x100);
3738 
3739     tmp = (insn >> 9) & 7;
3740     if (tmp == 0) {
3741         tmp = 8;
3742     }
3743 
3744     shift = tcg_const_i32(tmp);
3745     if (insn & 8) {
3746         rotate(DREG(insn, 0), shift, left, 32);
3747     } else {
3748         TCGv X = rotate32_x(DREG(insn, 0), shift, left);
3749         rotate_x_flags(DREG(insn, 0), X, 32);
3750     }
3751 
3752     set_cc_op(s, CC_OP_FLAGS);
3753 }
3754 
3755 DISAS_INSN(rotate8_im)
3756 {
3757     int left = (insn & 0x100);
3758     TCGv reg;
3759     TCGv shift;
3760     int tmp;
3761 
3762     reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
3763 
3764     tmp = (insn >> 9) & 7;
3765     if (tmp == 0) {
3766         tmp = 8;
3767     }
3768 
3769     shift = tcg_const_i32(tmp);
3770     if (insn & 8) {
3771         rotate(reg, shift, left, 8);
3772     } else {
3773         TCGv X = rotate_x(reg, shift, left, 8);
3774         rotate_x_flags(reg, X, 8);
3775     }
3776     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3777     set_cc_op(s, CC_OP_FLAGS);
3778 }
3779 
3780 DISAS_INSN(rotate16_im)
3781 {
3782     int left = (insn & 0x100);
3783     TCGv reg;
3784     TCGv shift;
3785     int tmp;
3786 
3787     reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
3788     tmp = (insn >> 9) & 7;
3789     if (tmp == 0) {
3790         tmp = 8;
3791     }
3792 
3793     shift = tcg_const_i32(tmp);
3794     if (insn & 8) {
3795         rotate(reg, shift, left, 16);
3796     } else {
3797         TCGv X = rotate_x(reg, shift, left, 16);
3798         rotate_x_flags(reg, X, 16);
3799     }
3800     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3801     set_cc_op(s, CC_OP_FLAGS);
3802 }
3803 
3804 DISAS_INSN(rotate_reg)
3805 {
3806     TCGv reg;
3807     TCGv src;
3808     TCGv t0, t1;
3809     int left = (insn & 0x100);
3810 
3811     reg = DREG(insn, 0);
3812     src = DREG(insn, 9);
3813     /* shift in [0..63] */
3814     t0 = tcg_temp_new();
3815     tcg_gen_andi_i32(t0, src, 63);
3816     t1 = tcg_temp_new_i32();
3817     if (insn & 8) {
3818         tcg_gen_andi_i32(t1, src, 31);
3819         rotate(reg, t1, left, 32);
3820         /* if shift == 0, clear C */
3821         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3822                             t0, QREG_CC_V /* 0 */,
3823                             QREG_CC_V /* 0 */, QREG_CC_C);
3824     } else {
3825         TCGv X;
3826         /* modulo 33 */
3827         tcg_gen_movi_i32(t1, 33);
3828         tcg_gen_remu_i32(t1, t0, t1);
3829         X = rotate32_x(DREG(insn, 0), t1, left);
3830         rotate_x_flags(DREG(insn, 0), X, 32);
3831     }
3832     set_cc_op(s, CC_OP_FLAGS);
3833 }
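/*
 * Note on the count handling above (descriptive only): the register
 * count is first reduced modulo 64.  For the plain rotate only the low
 * five bits affect the data, but a non-zero count that is a multiple
 * of 32 must still set C from the result, so the extra movcond clears
 * C only when the full six-bit count is zero.  For the rotate through
 * X the effective count is reduced modulo 33, since the rotated
 * quantity is the 33-bit [X:reg]; a count of 35, for example, behaves
 * like a count of 2.
 */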
3834 
3835 DISAS_INSN(rotate8_reg)
3836 {
3837     TCGv reg;
3838     TCGv src;
3839     TCGv t0, t1;
3840     int left = (insn & 0x100);
3841 
3842     reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
3843     src = DREG(insn, 9);
3844     /* shift in [0..63] */
3845     t0 = tcg_temp_new_i32();
3846     tcg_gen_andi_i32(t0, src, 63);
3847     t1 = tcg_temp_new_i32();
3848     if (insn & 8) {
3849         tcg_gen_andi_i32(t1, src, 7);
3850         rotate(reg, t1, left, 8);
3851         /* if shift == 0, clear C */
3852         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3853                             t0, QREG_CC_V /* 0 */,
3854                             QREG_CC_V /* 0 */, QREG_CC_C);
3855     } else {
3856         TCGv X;
3857         /* modulo 9 */
3858         tcg_gen_movi_i32(t1, 9);
3859         tcg_gen_remu_i32(t1, t0, t1);
3860         X = rotate_x(reg, t1, left, 8);
3861         rotate_x_flags(reg, X, 8);
3862     }
3863     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3864     set_cc_op(s, CC_OP_FLAGS);
3865 }
3866 
3867 DISAS_INSN(rotate16_reg)
3868 {
3869     TCGv reg;
3870     TCGv src;
3871     TCGv t0, t1;
3872     int left = (insn & 0x100);
3873 
3874     reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
3875     src = DREG(insn, 9);
3876     /* shift in [0..63] */
3877     t0 = tcg_temp_new_i32();
3878     tcg_gen_andi_i32(t0, src, 63);
3879     t1 = tcg_temp_new_i32();
3880     if (insn & 8) {
3881         tcg_gen_andi_i32(t1, src, 15);
3882         rotate(reg, t1, left, 16);
3883         /* if shift == 0, clear C */
3884         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3885                             t0, QREG_CC_V /* 0 */,
3886                             QREG_CC_V /* 0 */, QREG_CC_C);
3887     } else {
3888         TCGv X;
3889         /* modulo 17 */
3890         tcg_gen_movi_i32(t1, 17);
3891         tcg_gen_remu_i32(t1, t0, t1);
3892         X = rotate_x(reg, t1, left, 16);
3893         rotate_x_flags(reg, X, 16);
3894     }
3895     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3896     set_cc_op(s, CC_OP_FLAGS);
3897 }
3898 
3899 DISAS_INSN(rotate_mem)
3900 {
3901     TCGv src;
3902     TCGv addr;
3903     TCGv shift;
3904     int left = (insn & 0x100);
3905 
3906     SRC_EA(env, src, OS_WORD, 0, &addr);
3907 
3908     shift = tcg_const_i32(1);
3909     if (insn & 0x0200) {
3910         rotate(src, shift, left, 16);
3911     } else {
3912         TCGv X = rotate_x(src, shift, left, 16);
3913         rotate_x_flags(src, X, 16);
3914     }
3915     DEST_EA(env, insn, OS_WORD, src, &addr);
3916     set_cc_op(s, CC_OP_FLAGS);
3917 }
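/*
 * Note (descriptive only): the memory forms ROd.W <ea> and ROXd.W <ea>
 * always operate on a word and always rotate by exactly one bit, which
 * is why the count here is the constant 1 and bit 9 of the opcode is
 * all that selects between the plain rotate and the rotate through X.
 */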
3918 
3919 DISAS_INSN(bfext_reg)
3920 {
3921     int ext = read_im16(env, s);
3922     int is_sign = insn & 0x200;
3923     TCGv src = DREG(insn, 0);
3924     TCGv dst = DREG(ext, 12);
3925     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
3926     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
3927     int pos = 32 - ofs - len;        /* little bit-endian */
3928     TCGv tmp = tcg_temp_new();
3929     TCGv shift;
3930 
3931     /*
3932      * In general, we're going to rotate the field so that it's at the
3933      * top of the word and then right-shift by the complement of the
3934      * width to extend the field.
3935      */
3936     if (ext & 0x20) {
3937         /* Variable width.  */
3938         if (ext & 0x800) {
3939             /* Variable offset.  */
3940             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3941             tcg_gen_rotl_i32(tmp, src, tmp);
3942         } else {
3943             tcg_gen_rotli_i32(tmp, src, ofs);
3944         }
3945 
3946         shift = tcg_temp_new();
3947         tcg_gen_neg_i32(shift, DREG(ext, 0));
3948         tcg_gen_andi_i32(shift, shift, 31);
3949         tcg_gen_sar_i32(QREG_CC_N, tmp, shift);
3950         if (is_sign) {
3951             tcg_gen_mov_i32(dst, QREG_CC_N);
3952         } else {
3953             tcg_gen_shr_i32(dst, tmp, shift);
3954         }
3955     } else {
3956         /* Immediate width.  */
3957         if (ext & 0x800) {
3958             /* Variable offset */
3959             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3960             tcg_gen_rotl_i32(tmp, src, tmp);
3961             src = tmp;
3962             pos = 32 - len;
3963         } else {
3964             /*
3965              * Immediate offset.  If the field doesn't wrap around the
3966              * end of the word, rely on (s)extract completely.
3967              */
3968             if (pos < 0) {
3969                 tcg_gen_rotli_i32(tmp, src, ofs);
3970                 src = tmp;
3971                 pos = 32 - len;
3972             }
3973         }
3974 
3975         tcg_gen_sextract_i32(QREG_CC_N, src, pos, len);
3976         if (is_sign) {
3977             tcg_gen_mov_i32(dst, QREG_CC_N);
3978         } else {
3979             tcg_gen_extract_i32(dst, src, pos, len);
3980         }
3981     }
3982 
3983     set_cc_op(s, CC_OP_LOGIC);
3984 }
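/*
 * Illustrative example (not from the original source): bit-field
 * offsets count from the most significant bit, while tcg (s)extract
 * counts from the least significant bit.  BFEXTU Dn{8:4} therefore
 * becomes
 *
 *     len = 4, ofs = 8, pos = 32 - 8 - 4 = 20
 *     tcg_gen_extract_i32(dst, src, 20, 4);
 *
 * Only when pos would be negative (the field wraps past bit 0) is the
 * source first rotated left by ofs so that the field sits at the top
 * of the word.
 */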
3985 
3986 DISAS_INSN(bfext_mem)
3987 {
3988     int ext = read_im16(env, s);
3989     int is_sign = insn & 0x200;
3990     TCGv dest = DREG(ext, 12);
3991     TCGv addr, len, ofs;
3992 
3993     addr = gen_lea(env, s, insn, OS_UNSIZED);
3994     if (IS_NULL_QREG(addr)) {
3995         gen_addr_fault(s);
3996         return;
3997     }
3998 
3999     if (ext & 0x20) {
4000         len = DREG(ext, 0);
4001     } else {
4002         len = tcg_const_i32(extract32(ext, 0, 5));
4003     }
4004     if (ext & 0x800) {
4005         ofs = DREG(ext, 6);
4006     } else {
4007         ofs = tcg_const_i32(extract32(ext, 6, 5));
4008     }
4009 
4010     if (is_sign) {
4011         gen_helper_bfexts_mem(dest, cpu_env, addr, ofs, len);
4012         tcg_gen_mov_i32(QREG_CC_N, dest);
4013     } else {
4014         TCGv_i64 tmp = tcg_temp_new_i64();
4015         gen_helper_bfextu_mem(tmp, cpu_env, addr, ofs, len);
4016         tcg_gen_extr_i64_i32(dest, QREG_CC_N, tmp);
4017     }
4018     set_cc_op(s, CC_OP_LOGIC);
4019 }
4020 
4021 DISAS_INSN(bfop_reg)
4022 {
4023     int ext = read_im16(env, s);
4024     TCGv src = DREG(insn, 0);
4025     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4026     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4027     TCGv mask, tofs, tlen;
4028 
4029     tofs = NULL;
4030     tlen = NULL;
4031     if ((insn & 0x0f00) == 0x0d00) { /* bfffo */
4032         tofs = tcg_temp_new();
4033         tlen = tcg_temp_new();
4034     }
4035 
4036     if ((ext & 0x820) == 0) {
4037         /* Immediate width and offset.  */
4038         uint32_t maski = 0x7fffffffu >> (len - 1);
4039         if (ofs + len <= 32) {
4040             tcg_gen_shli_i32(QREG_CC_N, src, ofs);
4041         } else {
4042             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4043         }
4044         tcg_gen_andi_i32(QREG_CC_N, QREG_CC_N, ~maski);
4045         mask = tcg_const_i32(ror32(maski, ofs));
4046         if (tofs) {
4047             tcg_gen_movi_i32(tofs, ofs);
4048             tcg_gen_movi_i32(tlen, len);
4049         }
4050     } else {
4051         TCGv tmp = tcg_temp_new();
4052         if (ext & 0x20) {
4053             /* Variable width */
4054             tcg_gen_subi_i32(tmp, DREG(ext, 0), 1);
4055             tcg_gen_andi_i32(tmp, tmp, 31);
4056             mask = tcg_const_i32(0x7fffffffu);
4057             tcg_gen_shr_i32(mask, mask, tmp);
4058             if (tlen) {
4059                 tcg_gen_addi_i32(tlen, tmp, 1);
4060             }
4061         } else {
4062             /* Immediate width */
4063             mask = tcg_const_i32(0x7fffffffu >> (len - 1));
4064             if (tlen) {
4065                 tcg_gen_movi_i32(tlen, len);
4066             }
4067         }
4068         if (ext & 0x800) {
4069             /* Variable offset */
4070             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4071             tcg_gen_rotl_i32(QREG_CC_N, src, tmp);
4072             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4073             tcg_gen_rotr_i32(mask, mask, tmp);
4074             if (tofs) {
4075                 tcg_gen_mov_i32(tofs, tmp);
4076             }
4077         } else {
4078             /* Immediate offset (and variable width) */
4079             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4080             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4081             tcg_gen_rotri_i32(mask, mask, ofs);
4082             if (tofs) {
4083                 tcg_gen_movi_i32(tofs, ofs);
4084             }
4085         }
4086     }
4087     set_cc_op(s, CC_OP_LOGIC);
4088 
4089     switch (insn & 0x0f00) {
4090     case 0x0a00: /* bfchg */
4091         tcg_gen_eqv_i32(src, src, mask);
4092         break;
4093     case 0x0c00: /* bfclr */
4094         tcg_gen_and_i32(src, src, mask);
4095         break;
4096     case 0x0d00: /* bfffo */
4097         gen_helper_bfffo_reg(DREG(ext, 12), QREG_CC_N, tofs, tlen);
4098         break;
4099     case 0x0e00: /* bfset */
4100         tcg_gen_orc_i32(src, src, mask);
4101         break;
4102     case 0x0800: /* bftst */
4103         /* flags already set; no other work to do.  */
4104         break;
4105     default:
4106         g_assert_not_reached();
4107     }
4108 }
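/*
 * Worked example of the mask construction above (illustrative only):
 * with the field shifted so that it sits at the top of the word, its
 * complement is 0x7fffffff >> (len - 1).  For ofs = 4, len = 8:
 *
 *     maski           = 0x7fffffff >> 7 = 0x00ffffff
 *     ~maski          = 0xff000000      (field at the top of the word)
 *     ror32(maski, 4) = 0xf00fffff      (complement back in place)
 *
 * so ~ror32(maski, 4) = 0x0ff00000 covers bits 27..20, exactly the
 * 8-bit field that starts 4 bits below the MSB; bfchg/bfclr/bfset then
 * apply eqv/and/orc with the rotated mask, while CC_N holds the field
 * itself left-aligned for the flag computation.
 */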
4109 
4110 DISAS_INSN(bfop_mem)
4111 {
4112     int ext = read_im16(env, s);
4113     TCGv addr, len, ofs;
4114     TCGv_i64 t64;
4115 
4116     addr = gen_lea(env, s, insn, OS_UNSIZED);
4117     if (IS_NULL_QREG(addr)) {
4118         gen_addr_fault(s);
4119         return;
4120     }
4121 
4122     if (ext & 0x20) {
4123         len = DREG(ext, 0);
4124     } else {
4125         len = tcg_const_i32(extract32(ext, 0, 5));
4126     }
4127     if (ext & 0x800) {
4128         ofs = DREG(ext, 6);
4129     } else {
4130         ofs = tcg_const_i32(extract32(ext, 6, 5));
4131     }
4132 
4133     switch (insn & 0x0f00) {
4134     case 0x0a00: /* bfchg */
4135         gen_helper_bfchg_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4136         break;
4137     case 0x0c00: /* bfclr */
4138         gen_helper_bfclr_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4139         break;
4140     case 0x0d00: /* bfffo */
4141         t64 = tcg_temp_new_i64();
4142         gen_helper_bfffo_mem(t64, cpu_env, addr, ofs, len);
4143         tcg_gen_extr_i64_i32(DREG(ext, 12), QREG_CC_N, t64);
4144         break;
4145     case 0x0e00: /* bfset */
4146         gen_helper_bfset_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4147         break;
4148     case 0x0800: /* bftst */
4149         gen_helper_bfexts_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4150         break;
4151     default:
4152         g_assert_not_reached();
4153     }
4154     set_cc_op(s, CC_OP_LOGIC);
4155 }
4156 
4157 DISAS_INSN(bfins_reg)
4158 {
4159     int ext = read_im16(env, s);
4160     TCGv dst = DREG(insn, 0);
4161     TCGv src = DREG(ext, 12);
4162     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4163     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4164     int pos = 32 - ofs - len;        /* little bit-endian */
4165     TCGv tmp;
4166 
4167     tmp = tcg_temp_new();
4168 
4169     if (ext & 0x20) {
4170         /* Variable width */
4171         tcg_gen_neg_i32(tmp, DREG(ext, 0));
4172         tcg_gen_andi_i32(tmp, tmp, 31);
4173         tcg_gen_shl_i32(QREG_CC_N, src, tmp);
4174     } else {
4175         /* Immediate width */
4176         tcg_gen_shli_i32(QREG_CC_N, src, 32 - len);
4177     }
4178     set_cc_op(s, CC_OP_LOGIC);
4179 
4180     /* Immediate width and offset */
4181     if ((ext & 0x820) == 0) {
4182         /* Check for suitability for deposit.  */
4183         if (pos >= 0) {
4184             tcg_gen_deposit_i32(dst, dst, src, pos, len);
4185         } else {
4186             uint32_t maski = -2U << (len - 1);
4187             uint32_t roti = (ofs + len) & 31;
4188             tcg_gen_andi_i32(tmp, src, ~maski);
4189             tcg_gen_rotri_i32(tmp, tmp, roti);
4190             tcg_gen_andi_i32(dst, dst, ror32(maski, roti));
4191             tcg_gen_or_i32(dst, dst, tmp);
4192         }
4193     } else {
4194         TCGv mask = tcg_temp_new();
4195         TCGv rot = tcg_temp_new();
4196 
4197         if (ext & 0x20) {
4198             /* Variable width */
4199             tcg_gen_subi_i32(rot, DREG(ext, 0), 1);
4200             tcg_gen_andi_i32(rot, rot, 31);
4201             tcg_gen_movi_i32(mask, -2);
4202             tcg_gen_shl_i32(mask, mask, rot);
4203             tcg_gen_mov_i32(rot, DREG(ext, 0));
4204             tcg_gen_andc_i32(tmp, src, mask);
4205         } else {
4206             /* Immediate width (variable offset) */
4207             uint32_t maski = -2U << (len - 1);
4208             tcg_gen_andi_i32(tmp, src, ~maski);
4209             tcg_gen_movi_i32(mask, maski);
4210             tcg_gen_movi_i32(rot, len & 31);
4211         }
4212         if (ext & 0x800) {
4213             /* Variable offset */
4214             tcg_gen_add_i32(rot, rot, DREG(ext, 6));
4215         } else {
4216             /* Immediate offset (variable width) */
4217             tcg_gen_addi_i32(rot, rot, ofs);
4218         }
4219         tcg_gen_andi_i32(rot, rot, 31);
4220         tcg_gen_rotr_i32(mask, mask, rot);
4221         tcg_gen_rotr_i32(tmp, tmp, rot);
4222         tcg_gen_and_i32(dst, dst, mask);
4223         tcg_gen_or_i32(dst, dst, tmp);
4224     }
4225 }
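/*
 * Illustrative example for the immediate fast path above (not part of
 * the original code): BFINS Dm,Dn{4:8} has ofs = 4, len = 8, so
 * pos = 32 - 4 - 8 = 20 and the insert is a plain
 *
 *     tcg_gen_deposit_i32(dst, dst, src, 20, 8);
 *
 * A wrapping field such as {28:8} gives pos = -4, so the value is
 * instead masked to 8 bits, rotated right by (ofs + len) & 31 = 4 to
 * line it up with bits 3..0 and 31..28, and merged by hand.
 */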
4226 
4227 DISAS_INSN(bfins_mem)
4228 {
4229     int ext = read_im16(env, s);
4230     TCGv src = DREG(ext, 12);
4231     TCGv addr, len, ofs;
4232 
4233     addr = gen_lea(env, s, insn, OS_UNSIZED);
4234     if (IS_NULL_QREG(addr)) {
4235         gen_addr_fault(s);
4236         return;
4237     }
4238 
4239     if (ext & 0x20) {
4240         len = DREG(ext, 0);
4241     } else {
4242         len = tcg_const_i32(extract32(ext, 0, 5));
4243     }
4244     if (ext & 0x800) {
4245         ofs = DREG(ext, 6);
4246     } else {
4247         ofs = tcg_const_i32(extract32(ext, 6, 5));
4248     }
4249 
4250     gen_helper_bfins_mem(QREG_CC_N, cpu_env, addr, src, ofs, len);
4251     set_cc_op(s, CC_OP_LOGIC);
4252 }
4253 
4254 DISAS_INSN(ff1)
4255 {
4256     TCGv reg;
4257     reg = DREG(insn, 0);
4258     gen_logic_cc(s, reg, OS_LONG);
4259     gen_helper_ff1(reg, reg);
4260 }
4261 
4262 DISAS_INSN(chk)
4263 {
4264     TCGv src, reg;
4265     int opsize;
4266 
4267     switch ((insn >> 7) & 3) {
4268     case 3:
4269         opsize = OS_WORD;
4270         break;
4271     case 2:
4272         if (m68k_feature(env, M68K_FEATURE_CHK2)) {
4273             opsize = OS_LONG;
4274             break;
4275         }
4276         /* fallthru */
4277     default:
4278         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4279         return;
4280     }
4281     SRC_EA(env, src, opsize, 1, NULL);
4282     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
4283 
4284     gen_flush_flags(s);
4285     gen_helper_chk(cpu_env, reg, src);
4286 }
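/*
 * Behavioural note (descriptive, no new code): CHK traps when the
 * sign-extended register value is negative or greater than the bound
 * read from <ea>; gen_flush_flags() runs first so that the condition
 * codes are fully evaluated before the helper can raise the exception.
 */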
4287 
4288 DISAS_INSN(chk2)
4289 {
4290     uint16_t ext;
4291     TCGv addr1, addr2, bound1, bound2, reg;
4292     int opsize;
4293 
4294     switch ((insn >> 9) & 3) {
4295     case 0:
4296         opsize = OS_BYTE;
4297         break;
4298     case 1:
4299         opsize = OS_WORD;
4300         break;
4301     case 2:
4302         opsize = OS_LONG;
4303         break;
4304     default:
4305         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4306         return;
4307     }
4308 
4309     ext = read_im16(env, s);
4310     if ((ext & 0x0800) == 0) {
4311         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4312         return;
4313     }
4314 
4315     addr1 = gen_lea(env, s, insn, OS_UNSIZED);
4316     addr2 = tcg_temp_new();
4317     tcg_gen_addi_i32(addr2, addr1, opsize_bytes(opsize));
4318 
4319     bound1 = gen_load(s, opsize, addr1, 1, IS_USER(s));
4320     bound2 = gen_load(s, opsize, addr2, 1, IS_USER(s));
4321 
4322     reg = tcg_temp_new();
4323     if (ext & 0x8000) {
4324         tcg_gen_mov_i32(reg, AREG(ext, 12));
4325     } else {
4326         gen_ext(reg, DREG(ext, 12), opsize, 1);
4327     }
4328 
4329     gen_flush_flags(s);
4330     gen_helper_chk2(cpu_env, reg, bound1, bound2);
4331 }
4332 
4333 static void m68k_copy_line(TCGv dst, TCGv src, int index)
4334 {
4335     TCGv addr;
4336     TCGv_i64 t0, t1;
4337 
4338     addr = tcg_temp_new();
4339 
4340     t0 = tcg_temp_new_i64();
4341     t1 = tcg_temp_new_i64();
4342 
4343     tcg_gen_andi_i32(addr, src, ~15);
4344     tcg_gen_qemu_ld64(t0, addr, index);
4345     tcg_gen_addi_i32(addr, addr, 8);
4346     tcg_gen_qemu_ld64(t1, addr, index);
4347 
4348     tcg_gen_andi_i32(addr, dst, ~15);
4349     tcg_gen_qemu_st64(t0, addr, index);
4350     tcg_gen_addi_i32(addr, addr, 8);
4351     tcg_gen_qemu_st64(t1, addr, index);
4352 }
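/*
 * Conceptual model of the MOVE16 line copy above (illustrative, not
 * code from this file): both pointers are aligned down to a 16-byte
 * boundary and one cache line is copied, roughly
 *
 *     memcpy(ram + (dst & ~15), ram + (src & ~15), 16);
 *
 * where "ram" stands for guest memory (purely illustrative).  The
 * implementation uses two 64-bit guest loads followed by two 64-bit
 * stores so that any MMU fault is raised on the proper guest address
 * before anything is written.
 */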
4353 
4354 DISAS_INSN(move16_reg)
4355 {
4356     int index = IS_USER(s);
4357     TCGv tmp;
4358     uint16_t ext;
4359 
4360     ext = read_im16(env, s);
4361     if ((ext & (1 << 15)) == 0) {
4362         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4363     }
4364 
4365     m68k_copy_line(AREG(ext, 12), AREG(insn, 0), index);
4366 
4367     /* Ax can be Ay, so save Ay before incrementing Ax */
4368     tmp = tcg_temp_new();
4369     tcg_gen_mov_i32(tmp, AREG(ext, 12));
4370     tcg_gen_addi_i32(AREG(insn, 0), AREG(insn, 0), 16);
4371     tcg_gen_addi_i32(AREG(ext, 12), tmp, 16);
4372 }
4373 
4374 DISAS_INSN(move16_mem)
4375 {
4376     int index = IS_USER(s);
4377     TCGv reg, addr;
4378 
4379     reg = AREG(insn, 0);
4380     addr = tcg_const_i32(read_im32(env, s));
4381 
4382     if ((insn >> 3) & 1) {
4383         /* MOVE16 (xxx).L, (Ay) */
4384         m68k_copy_line(reg, addr, index);
4385     } else {
4386         /* MOVE16 (Ay), (xxx).L */
4387         m68k_copy_line(addr, reg, index);
4388     }
4389 
4390     if (((insn >> 3) & 2) == 0) {
4391         /* (Ay)+ */
4392         tcg_gen_addi_i32(reg, reg, 16);
4393     }
4394 }
4395 
4396 DISAS_INSN(strldsr)
4397 {
4398     uint16_t ext;
4399     uint32_t addr;
4400 
4401     addr = s->pc - 2;
4402     ext = read_im16(env, s);
4403     if (ext != 0x46FC) {
4404         gen_exception(s, addr, EXCP_ILLEGAL);
4405         return;
4406     }
4407     ext = read_im16(env, s);
4408     if (IS_USER(s) || (ext & SR_S) == 0) {
4409         gen_exception(s, addr, EXCP_PRIVILEGE);
4410         return;
4411     }
4412     gen_push(s, gen_get_sr(s));
4413     gen_set_sr_im(s, ext, 0);
4414     gen_exit_tb(s);
4415 }
4416 
4417 DISAS_INSN(move_from_sr)
4418 {
4419     TCGv sr;
4420 
4421     if (IS_USER(s) && m68k_feature(env, M68K_FEATURE_MOVEFROMSR_PRIV)) {
4422         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4423         return;
4424     }
4425     sr = gen_get_sr(s);
4426     DEST_EA(env, insn, OS_WORD, sr, NULL);
4427 }
4428 
4429 #if defined(CONFIG_SOFTMMU)
4430 DISAS_INSN(moves)
4431 {
4432     int opsize;
4433     uint16_t ext;
4434     TCGv reg;
4435     TCGv addr;
4436     int extend;
4437 
4438     if (IS_USER(s)) {
4439         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4440         return;
4441     }
4442 
4443     ext = read_im16(env, s);
4444 
4445     opsize = insn_opsize(insn);
4446 
4447     if (ext & 0x8000) {
4448         /* address register */
4449         reg = AREG(ext, 12);
4450         extend = 1;
4451     } else {
4452         /* data register */
4453         reg = DREG(ext, 12);
4454         extend = 0;
4455     }
4456 
4457     addr = gen_lea(env, s, insn, opsize);
4458     if (IS_NULL_QREG(addr)) {
4459         gen_addr_fault(s);
4460         return;
4461     }
4462 
4463     if (ext & 0x0800) {
4464         /* from reg to ea */
4465         gen_store(s, opsize, addr, reg, DFC_INDEX(s));
4466     } else {
4467         /* from ea to reg */
4468         TCGv tmp = gen_load(s, opsize, addr, 0, SFC_INDEX(s));
4469         if (extend) {
4470             gen_ext(reg, tmp, opsize, 1);
4471         } else {
4472             gen_partset_reg(opsize, reg, tmp);
4473         }
4474     }
4475     switch (extract32(insn, 3, 3)) {
4476     case 3: /* Indirect postincrement.  */
4477         tcg_gen_addi_i32(AREG(insn, 0), addr,
4478                          REG(insn, 0) == 7 && opsize == OS_BYTE
4479                          ? 2
4480                          : opsize_bytes(opsize));
4481         break;
4482     case 4: /* Indirect predecrement.  */
4483         tcg_gen_mov_i32(AREG(insn, 0), addr);
4484         break;
4485     }
4486 }
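/*
 * Note on the postincrement fixup above (descriptive only): a byte
 * access through (A7)+ still advances the stack pointer by 2 so that
 * A7 stays word aligned, hence the special case for REG(insn, 0) == 7
 * with OS_BYTE.
 */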
4487 
4488 DISAS_INSN(move_to_sr)
4489 {
4490     if (IS_USER(s)) {
4491         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4492         return;
4493     }
4494     gen_move_to_sr(env, s, insn, false);
4495     gen_exit_tb(s);
4496 }
4497 
4498 DISAS_INSN(move_from_usp)
4499 {
4500     if (IS_USER(s)) {
4501         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4502         return;
4503     }
4504     tcg_gen_ld_i32(AREG(insn, 0), cpu_env,
4505                    offsetof(CPUM68KState, sp[M68K_USP]));
4506 }
4507 
4508 DISAS_INSN(move_to_usp)
4509 {
4510     if (IS_USER(s)) {
4511         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4512         return;
4513     }
4514     tcg_gen_st_i32(AREG(insn, 0), cpu_env,
4515                    offsetof(CPUM68KState, sp[M68K_USP]));
4516 }
4517 
4518 DISAS_INSN(halt)
4519 {
4520     if (IS_USER(s)) {
4521         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4522         return;
4523     }
4524 
4525     gen_exception(s, s->pc, EXCP_HALT_INSN);
4526 }
4527 
4528 DISAS_INSN(stop)
4529 {
4530     uint16_t ext;
4531 
4532     if (IS_USER(s)) {
4533         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4534         return;
4535     }
4536 
4537     ext = read_im16(env, s);
4538 
4539     gen_set_sr_im(s, ext, 0);
4540     tcg_gen_movi_i32(cpu_halted, 1);
4541     gen_exception(s, s->pc, EXCP_HLT);
4542 }
4543 
4544 DISAS_INSN(rte)
4545 {
4546     if (IS_USER(s)) {
4547         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4548         return;
4549     }
4550     gen_exception(s, s->base.pc_next, EXCP_RTE);
4551 }
4552 
4553 DISAS_INSN(cf_movec)
4554 {
4555     uint16_t ext;
4556     TCGv reg;
4557 
4558     if (IS_USER(s)) {
4559         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4560         return;
4561     }
4562 
4563     ext = read_im16(env, s);
4564 
4565     if (ext & 0x8000) {
4566         reg = AREG(ext, 12);
4567     } else {
4568         reg = DREG(ext, 12);
4569     }
4570     gen_helper_cf_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4571     gen_exit_tb(s);
4572 }
4573 
4574 DISAS_INSN(m68k_movec)
4575 {
4576     uint16_t ext;
4577     TCGv reg;
4578 
4579     if (IS_USER(s)) {
4580         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4581         return;
4582     }
4583 
4584     ext = read_im16(env, s);
4585 
4586     if (ext & 0x8000) {
4587         reg = AREG(ext, 12);
4588     } else {
4589         reg = DREG(ext, 12);
4590     }
4591     if (insn & 1) {
4592         gen_helper_m68k_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4593     } else {
4594         gen_helper_m68k_movec_from(reg, cpu_env, tcg_const_i32(ext & 0xfff));
4595     }
4596     gen_exit_tb(s);
4597 }
4598 
4599 DISAS_INSN(intouch)
4600 {
4601     if (IS_USER(s)) {
4602         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4603         return;
4604     }
4605     /* ICache fetch.  Implement as no-op.  */
4606 }
4607 
4608 DISAS_INSN(cpushl)
4609 {
4610     if (IS_USER(s)) {
4611         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4612         return;
4613     }
4614     /* Cache push/invalidate.  Implement as no-op.  */
4615 }
4616 
4617 DISAS_INSN(cpush)
4618 {
4619     if (IS_USER(s)) {
4620         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4621         return;
4622     }
4623     /* Cache push/invalidate.  Implement as no-op.  */
4624 }
4625 
4626 DISAS_INSN(cinv)
4627 {
4628     if (IS_USER(s)) {
4629         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4630         return;
4631     }
4632     /* Invalidate cache line.  Implement as no-op.  */
4633 }
4634 
4635 #if defined(CONFIG_SOFTMMU)
4636 DISAS_INSN(pflush)
4637 {
4638     TCGv opmode;
4639 
4640     if (IS_USER(s)) {
4641         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4642         return;
4643     }
4644 
4645     opmode = tcg_const_i32((insn >> 3) & 3);
4646     gen_helper_pflush(cpu_env, AREG(insn, 0), opmode);
4647 }
4648 
4649 DISAS_INSN(ptest)
4650 {
4651     TCGv is_read;
4652 
4653     if (IS_USER(s)) {
4654         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4655         return;
4656     }
4657     is_read = tcg_const_i32((insn >> 5) & 1);
4658     gen_helper_ptest(cpu_env, AREG(insn, 0), is_read);
4659 }
4660 #endif
4661 
4662 DISAS_INSN(wddata)
4663 {
4664     gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4665 }
4666 
4667 DISAS_INSN(wdebug)
4668 {
4669     if (IS_USER(s)) {
4670         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4671         return;
4672     }
4673     /* TODO: Implement wdebug.  */
4674     cpu_abort(env_cpu(env), "WDEBUG not implemented");
4675 }
4676 #endif
4677 
4678 DISAS_INSN(trap)
4679 {
4680     gen_exception(s, s->pc, EXCP_TRAP0 + (insn & 0xf));
4681 }
4682 
4683 static void do_trapcc(DisasContext *s, DisasCompare *c)
4684 {
4685     if (c->tcond != TCG_COND_NEVER) {
4686         TCGLabel *over = NULL;
4687 
4688         update_cc_op(s);
4689 
4690         if (c->tcond != TCG_COND_ALWAYS) {
4691             /* Jump over if !c. */
4692             over = gen_new_label();
4693             tcg_gen_brcond_i32(tcg_invert_cond(c->tcond), c->v1, c->v2, over);
4694         }
4695 
4696         tcg_gen_movi_i32(QREG_PC, s->pc);
4697         gen_raise_exception_format2(s, EXCP_TRAPCC, s->base.pc_next);
4698 
4699         if (over != NULL) {
4700             gen_set_label(over);
4701             s->base.is_jmp = DISAS_NEXT;
4702         }
4703     }
4704 }
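/*
 * Illustrative control flow for the helper above (not new code): for a
 * conditional TRAPcc the generated code is effectively
 *
 *     if (!cond) goto over;
 *     PC = address of the next instruction;
 *     raise the format 2 TRAPcc exception;
 *   over:
 *     fall through to the next instruction
 *
 * The branch is omitted when the condition is statically true, and
 * nothing at all is generated when it is statically false.
 */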
4705 
4706 DISAS_INSN(trapcc)
4707 {
4708     DisasCompare c;
4709 
4710     /* Consume and discard the immediate operand. */
4711     switch (extract32(insn, 0, 3)) {
4712     case 2: /* trapcc.w */
4713         (void)read_im16(env, s);
4714         break;
4715     case 3: /* trapcc.l */
4716         (void)read_im32(env, s);
4717         break;
4718     case 4: /* trapcc (no operand) */
4719         break;
4720     default:
4721         /* trapcc is registered only for valid opmodes */
4722         g_assert_not_reached();
4723     }
4724 
4725     gen_cc_cond(&c, s, extract32(insn, 8, 4));
4726     do_trapcc(s, &c);
4727 }
4728 
4729 DISAS_INSN(trapv)
4730 {
4731     DisasCompare c;
4732 
4733     gen_cc_cond(&c, s, 9); /* V set */
4734     do_trapcc(s, &c);
4735 }
4736 
4737 static void gen_load_fcr(DisasContext *s, TCGv res, int reg)
4738 {
4739     switch (reg) {
4740     case M68K_FPIAR:
4741         tcg_gen_movi_i32(res, 0);
4742         break;
4743     case M68K_FPSR:
4744         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpsr));
4745         break;
4746     case M68K_FPCR:
4747         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpcr));
4748         break;
4749     }
4750 }
4751 
4752 static void gen_store_fcr(DisasContext *s, TCGv val, int reg)
4753 {
4754     switch (reg) {
4755     case M68K_FPIAR:
4756         break;
4757     case M68K_FPSR:
4758         tcg_gen_st_i32(val, cpu_env, offsetof(CPUM68KState, fpsr));
4759         break;
4760     case M68K_FPCR:
4761         gen_helper_set_fpcr(cpu_env, val);
4762         break;
4763     }
4764 }
4765 
4766 static void gen_qemu_store_fcr(DisasContext *s, TCGv addr, int reg)
4767 {
4768     int index = IS_USER(s);
4769     TCGv tmp;
4770 
4771     tmp = tcg_temp_new();
4772     gen_load_fcr(s, tmp, reg);
4773     tcg_gen_qemu_st32(tmp, addr, index);
4774 }
4775 
4776 static void gen_qemu_load_fcr(DisasContext *s, TCGv addr, int reg)
4777 {
4778     int index = IS_USER(s);
4779     TCGv tmp;
4780 
4781     tmp = tcg_temp_new();
4782     tcg_gen_qemu_ld32u(tmp, addr, index);
4783     gen_store_fcr(s, tmp, reg);
4784 }
4785 
4786 
4787 static void gen_op_fmove_fcr(CPUM68KState *env, DisasContext *s,
4788                              uint32_t insn, uint32_t ext)
4789 {
4790     int mask = (ext >> 10) & 7;
4791     int is_write = (ext >> 13) & 1;
4792     int mode = extract32(insn, 3, 3);
4793     int i;
4794     TCGv addr, tmp;
4795 
4796     switch (mode) {
4797     case 0: /* Dn */
4798         if (mask != M68K_FPIAR && mask != M68K_FPSR && mask != M68K_FPCR) {
4799             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4800             return;
4801         }
4802         if (is_write) {
4803             gen_load_fcr(s, DREG(insn, 0), mask);
4804         } else {
4805             gen_store_fcr(s, DREG(insn, 0), mask);
4806         }
4807         return;
4808     case 1: /* An, only with FPIAR */
4809         if (mask != M68K_FPIAR) {
4810             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4811             return;
4812         }
4813         if (is_write) {
4814             gen_load_fcr(s, AREG(insn, 0), mask);
4815         } else {
4816             gen_store_fcr(s, AREG(insn, 0), mask);
4817         }
4818         return;
4819     case 7: /* Immediate */
4820         if (REG(insn, 0) == 4) {
4821             if (is_write ||
4822                 (mask != M68K_FPIAR && mask != M68K_FPSR &&
4823                  mask != M68K_FPCR)) {
4824                 gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4825                 return;
4826             }
4827             tmp = tcg_const_i32(read_im32(env, s));
4828             gen_store_fcr(s, tmp, mask);
4829             return;
4830         }
4831         break;
4832     default:
4833         break;
4834     }
4835 
4836     tmp = gen_lea(env, s, insn, OS_LONG);
4837     if (IS_NULL_QREG(tmp)) {
4838         gen_addr_fault(s);
4839         return;
4840     }
4841 
4842     addr = tcg_temp_new();
4843     tcg_gen_mov_i32(addr, tmp);
4844 
4845     /*
4846      * mask:
4847      *
4848      * 0b100 Floating-Point Control Register
4849      * 0b010 Floating-Point Status Register
4850      * 0b001 Floating-Point Instruction Address Register
4851      *
4852      */
4853 
4854     if (is_write && mode == 4) {
4855         for (i = 2; i >= 0; i--, mask >>= 1) {
4856             if (mask & 1) {
4857                 gen_qemu_store_fcr(s, addr, 1 << i);
4858                 if (mask != 1) {
4859                     tcg_gen_subi_i32(addr, addr, opsize_bytes(OS_LONG));
4860                 }
4861             }
4862         }
4863         tcg_gen_mov_i32(AREG(insn, 0), addr);
4864     } else {
4865         for (i = 0; i < 3; i++, mask >>= 1) {
4866             if (mask & 1) {
4867                 if (is_write) {
4868                     gen_qemu_store_fcr(s, addr, 1 << i);
4869                 } else {
4870                     gen_qemu_load_fcr(s, addr, 1 << i);
4871                 }
4872                 if (mask != 1 || mode == 3) {
4873                     tcg_gen_addi_i32(addr, addr, opsize_bytes(OS_LONG));
4874                 }
4875             }
4876         }
4877         if (mode == 3) {
4878             tcg_gen_mov_i32(AREG(insn, 0), addr);
4879         }
4880     }
4881 }
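/*
 * Example of the control-register mask handled above (descriptive
 * only): FMOVEM.L of FPCR/FPSR/FPIAR encodes the register list in ext
 * bits 12..10, so a mask of 0b100 names FPCR alone and 0b110 names
 * FPCR and FPSR.  Each selected register transfers one long word at
 * <ea>; the (An)+ form, and for stores the -(An) form, writes the
 * updated address back to An.
 */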
4882 
4883 static void gen_op_fmovem(CPUM68KState *env, DisasContext *s,
4884                           uint32_t insn, uint32_t ext)
4885 {
4886     int opsize;
4887     TCGv addr, tmp;
4888     int mode = (ext >> 11) & 0x3;
4889     int is_load = ((ext & 0x2000) == 0);
4890 
4891     if (m68k_feature(s->env, M68K_FEATURE_FPU)) {
4892         opsize = OS_EXTENDED;
4893     } else {
4894         opsize = OS_DOUBLE;  /* FIXME */
4895     }
4896 
4897     addr = gen_lea(env, s, insn, opsize);
4898     if (IS_NULL_QREG(addr)) {
4899         gen_addr_fault(s);
4900         return;
4901     }
4902 
4903     tmp = tcg_temp_new();
4904     if (mode & 0x1) {
4905         /* Dynamic register list */
4906         tcg_gen_ext8u_i32(tmp, DREG(ext, 4));
4907     } else {
4908         /* Static register list */
4909         tcg_gen_movi_i32(tmp, ext & 0xff);
4910     }
4911 
4912     if (!is_load && (mode & 2) == 0) {
4913          * Predecrement addressing mode: only available when storing
4914          * registers to memory.
4915          * only available to store register to memory
4916          */
4917         if (opsize == OS_EXTENDED) {
4918             gen_helper_fmovemx_st_predec(tmp, cpu_env, addr, tmp);
4919         } else {
4920             gen_helper_fmovemd_st_predec(tmp, cpu_env, addr, tmp);
4921         }
4922     } else {
4923         /* postincrement addressing mode */
4924         if (opsize == OS_EXTENDED) {
4925             if (is_load) {
4926                 gen_helper_fmovemx_ld_postinc(tmp, cpu_env, addr, tmp);
4927             } else {
4928                 gen_helper_fmovemx_st_postinc(tmp, cpu_env, addr, tmp);
4929             }
4930         } else {
4931             if (is_load) {
4932                 gen_helper_fmovemd_ld_postinc(tmp, cpu_env, addr, tmp);
4933             } else {
4934                 gen_helper_fmovemd_st_postinc(tmp, cpu_env, addr, tmp);
4935             }
4936         }
4937     }
4938     if ((insn & 070) == 030 || (insn & 070) == 040) {
4939         tcg_gen_mov_i32(AREG(insn, 0), tmp);
4940     }
4941 }
4942 
4943 /*
4944  * ??? FP exceptions are not implemented.  Most exceptions are deferred until
4945  * immediately before the next FP instruction is executed.
4946  */
4947 DISAS_INSN(fpu)
4948 {
4949     uint16_t ext;
4950     int opmode;
4951     int opsize;
4952     TCGv_ptr cpu_src, cpu_dest;
4953 
4954     ext = read_im16(env, s);
4955     opmode = ext & 0x7f;
4956     switch ((ext >> 13) & 7) {
4957     case 0:
4958         break;
4959     case 1:
4960         goto undef;
4961     case 2:
4962         if (insn == 0xf200 && (ext & 0xfc00) == 0x5c00) {
4963             /* fmovecr */
4964             TCGv rom_offset = tcg_const_i32(opmode);
4965             cpu_dest = gen_fp_ptr(REG(ext, 7));
4966             gen_helper_fconst(cpu_env, cpu_dest, rom_offset);
4967             return;
4968         }
4969         break;
4970     case 3: /* fmove out */
4971         cpu_src = gen_fp_ptr(REG(ext, 7));
4972         opsize = ext_opsize(ext, 10);
4973         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
4974                       EA_STORE, IS_USER(s)) == -1) {
4975             gen_addr_fault(s);
4976         }
4977         gen_helper_ftst(cpu_env, cpu_src);
4978         return;
4979     case 4: /* fmove to control register.  */
4980     case 5: /* fmove from control register.  */
4981         gen_op_fmove_fcr(env, s, insn, ext);
4982         return;
4983     case 6: /* fmovem */
4984     case 7:
4985         if ((ext & 0x1000) == 0 && !m68k_feature(s->env, M68K_FEATURE_FPU)) {
4986             goto undef;
4987         }
4988         gen_op_fmovem(env, s, insn, ext);
4989         return;
4990     }
4991     if (ext & (1 << 14)) {
4992         /* Source effective address.  */
4993         opsize = ext_opsize(ext, 10);
4994         cpu_src = gen_fp_result_ptr();
4995         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
4996                       EA_LOADS, IS_USER(s)) == -1) {
4997             gen_addr_fault(s);
4998             return;
4999         }
5000     } else {
5001         /* Source register.  */
5002         opsize = OS_EXTENDED;
5003         cpu_src = gen_fp_ptr(REG(ext, 10));
5004     }
5005     cpu_dest = gen_fp_ptr(REG(ext, 7));
5006     switch (opmode) {
5007     case 0: /* fmove */
5008         gen_fp_move(cpu_dest, cpu_src);
5009         break;
5010     case 0x40: /* fsmove */
5011         gen_helper_fsround(cpu_env, cpu_dest, cpu_src);
5012         break;
5013     case 0x44: /* fdmove */
5014         gen_helper_fdround(cpu_env, cpu_dest, cpu_src);
5015         break;
5016     case 1: /* fint */
5017         gen_helper_firound(cpu_env, cpu_dest, cpu_src);
5018         break;
5019     case 2: /* fsinh */
5020         gen_helper_fsinh(cpu_env, cpu_dest, cpu_src);
5021         break;
5022     case 3: /* fintrz */
5023         gen_helper_fitrunc(cpu_env, cpu_dest, cpu_src);
5024         break;
5025     case 4: /* fsqrt */
5026         gen_helper_fsqrt(cpu_env, cpu_dest, cpu_src);
5027         break;
5028     case 0x41: /* fssqrt */
5029         gen_helper_fssqrt(cpu_env, cpu_dest, cpu_src);
5030         break;
5031     case 0x45: /* fdsqrt */
5032         gen_helper_fdsqrt(cpu_env, cpu_dest, cpu_src);
5033         break;
5034     case 0x06: /* flognp1 */
5035         gen_helper_flognp1(cpu_env, cpu_dest, cpu_src);
5036         break;
5037     case 0x08: /* fetoxm1 */
5038         gen_helper_fetoxm1(cpu_env, cpu_dest, cpu_src);
5039         break;
5040     case 0x09: /* ftanh */
5041         gen_helper_ftanh(cpu_env, cpu_dest, cpu_src);
5042         break;
5043     case 0x0a: /* fatan */
5044         gen_helper_fatan(cpu_env, cpu_dest, cpu_src);
5045         break;
5046     case 0x0c: /* fasin */
5047         gen_helper_fasin(cpu_env, cpu_dest, cpu_src);
5048         break;
5049     case 0x0d: /* fatanh */
5050         gen_helper_fatanh(cpu_env, cpu_dest, cpu_src);
5051         break;
5052     case 0x0e: /* fsin */
5053         gen_helper_fsin(cpu_env, cpu_dest, cpu_src);
5054         break;
5055     case 0x0f: /* ftan */
5056         gen_helper_ftan(cpu_env, cpu_dest, cpu_src);
5057         break;
5058     case 0x10: /* fetox */
5059         gen_helper_fetox(cpu_env, cpu_dest, cpu_src);
5060         break;
5061     case 0x11: /* ftwotox */
5062         gen_helper_ftwotox(cpu_env, cpu_dest, cpu_src);
5063         break;
5064     case 0x12: /* ftentox */
5065         gen_helper_ftentox(cpu_env, cpu_dest, cpu_src);
5066         break;
5067     case 0x14: /* flogn */
5068         gen_helper_flogn(cpu_env, cpu_dest, cpu_src);
5069         break;
5070     case 0x15: /* flog10 */
5071         gen_helper_flog10(cpu_env, cpu_dest, cpu_src);
5072         break;
5073     case 0x16: /* flog2 */
5074         gen_helper_flog2(cpu_env, cpu_dest, cpu_src);
5075         break;
5076     case 0x18: /* fabs */
5077         gen_helper_fabs(cpu_env, cpu_dest, cpu_src);
5078         break;
5079     case 0x58: /* fsabs */
5080         gen_helper_fsabs(cpu_env, cpu_dest, cpu_src);
5081         break;
5082     case 0x5c: /* fdabs */
5083         gen_helper_fdabs(cpu_env, cpu_dest, cpu_src);
5084         break;
5085     case 0x19: /* fcosh */
5086         gen_helper_fcosh(cpu_env, cpu_dest, cpu_src);
5087         break;
5088     case 0x1a: /* fneg */
5089         gen_helper_fneg(cpu_env, cpu_dest, cpu_src);
5090         break;
5091     case 0x5a: /* fsneg */
5092         gen_helper_fsneg(cpu_env, cpu_dest, cpu_src);
5093         break;
5094     case 0x5e: /* fdneg */
5095         gen_helper_fdneg(cpu_env, cpu_dest, cpu_src);
5096         break;
5097     case 0x1c: /* facos */
5098         gen_helper_facos(cpu_env, cpu_dest, cpu_src);
5099         break;
5100     case 0x1d: /* fcos */
5101         gen_helper_fcos(cpu_env, cpu_dest, cpu_src);
5102         break;
5103     case 0x1e: /* fgetexp */
5104         gen_helper_fgetexp(cpu_env, cpu_dest, cpu_src);
5105         break;
5106     case 0x1f: /* fgetman */
5107         gen_helper_fgetman(cpu_env, cpu_dest, cpu_src);
5108         break;
5109     case 0x20: /* fdiv */
5110         gen_helper_fdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5111         break;
5112     case 0x60: /* fsdiv */
5113         gen_helper_fsdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5114         break;
5115     case 0x64: /* fddiv */
5116         gen_helper_fddiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5117         break;
5118     case 0x21: /* fmod */
5119         gen_helper_fmod(cpu_env, cpu_dest, cpu_src, cpu_dest);
5120         break;
5121     case 0x22: /* fadd */
5122         gen_helper_fadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5123         break;
5124     case 0x62: /* fsadd */
5125         gen_helper_fsadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5126         break;
5127     case 0x66: /* fdadd */
5128         gen_helper_fdadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5129         break;
5130     case 0x23: /* fmul */
5131         gen_helper_fmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5132         break;
5133     case 0x63: /* fsmul */
5134         gen_helper_fsmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5135         break;
5136     case 0x67: /* fdmul */
5137         gen_helper_fdmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5138         break;
5139     case 0x24: /* fsgldiv */
5140         gen_helper_fsgldiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5141         break;
5142     case 0x25: /* frem */
5143         gen_helper_frem(cpu_env, cpu_dest, cpu_src, cpu_dest);
5144         break;
5145     case 0x26: /* fscale */
5146         gen_helper_fscale(cpu_env, cpu_dest, cpu_src, cpu_dest);
5147         break;
5148     case 0x27: /* fsglmul */
5149         gen_helper_fsglmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5150         break;
5151     case 0x28: /* fsub */
5152         gen_helper_fsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5153         break;
5154     case 0x68: /* fssub */
5155         gen_helper_fssub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5156         break;
5157     case 0x6c: /* fdsub */
5158         gen_helper_fdsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5159         break;
5160     case 0x30: case 0x31: case 0x32:
5161     case 0x33: case 0x34: case 0x35:
5162     case 0x36: case 0x37: {
5163             TCGv_ptr cpu_dest2 = gen_fp_ptr(REG(ext, 0));
5164             gen_helper_fsincos(cpu_env, cpu_dest, cpu_dest2, cpu_src);
5165         }
5166         break;
5167     case 0x38: /* fcmp */
5168         gen_helper_fcmp(cpu_env, cpu_src, cpu_dest);
5169         return;
5170     case 0x3a: /* ftst */
5171         gen_helper_ftst(cpu_env, cpu_src);
5172         return;
5173     default:
5174         goto undef;
5175     }
5176     gen_helper_ftst(cpu_env, cpu_dest);
5177     return;
5178 undef:
5179     /* FIXME: Is this right for offset addressing modes?  */
5180     s->pc -= 2;
5181     disas_undef_fpu(env, s, insn);
5182 }
5183 
5184 static void gen_fcc_cond(DisasCompare *c, DisasContext *s, int cond)
5185 {
5186     TCGv fpsr;
5187 
5188     c->v2 = tcg_const_i32(0);
5189     /* TODO: Raise BSUN exception.  */
5190     fpsr = tcg_temp_new();
5191     gen_load_fcr(s, fpsr, M68K_FPSR);
5192     switch (cond) {
5193     case 0:  /* False */
5194     case 16: /* Signaling False */
5195         c->v1 = c->v2;
5196         c->tcond = TCG_COND_NEVER;
5197         break;
5198     case 1:  /* EQual Z */
5199     case 17: /* Signaling EQual Z */
5200         c->v1 = tcg_temp_new();
5201         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5202         c->tcond = TCG_COND_NE;
5203         break;
5204     case 2:  /* Ordered Greater Than !(A || Z || N) */
5205     case 18: /* Greater Than !(A || Z || N) */
5206         c->v1 = tcg_temp_new();
5207         tcg_gen_andi_i32(c->v1, fpsr,
5208                          FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5209         c->tcond = TCG_COND_EQ;
5210         break;
5211     case 3:  /* Ordered Greater than or Equal Z || !(A || N) */
5212     case 19: /* Greater than or Equal Z || !(A || N) */
5213         c->v1 = tcg_temp_new();
5214         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5215         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5216         tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_Z | FPSR_CC_N);
5217         tcg_gen_or_i32(c->v1, c->v1, fpsr);
5218         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5219         c->tcond = TCG_COND_NE;
5220         break;
5221     case 4:  /* Ordered Less Than !(!N || A || Z) */
5222     case 20: /* Less Than !(!N || A || Z) */
5223         c->v1 = tcg_temp_new();
5224         tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
5225         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_N | FPSR_CC_A | FPSR_CC_Z);
5226         c->tcond = TCG_COND_EQ;
5227         break;
5228     case 5:  /* Ordered Less than or Equal Z || (N && !A) */
5229     case 21: /* Less than or Equal Z || (N && !A) */
5230         c->v1 = tcg_temp_new();
5231         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5232         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5233         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5234         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_Z | FPSR_CC_N);
5235         c->tcond = TCG_COND_NE;
5236         break;
5237     case 6:  /* Ordered Greater or Less than !(A || Z) */
5238     case 22: /* Greater or Less than !(A || Z) */
5239         c->v1 = tcg_temp_new();
5240         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5241         c->tcond = TCG_COND_EQ;
5242         break;
5243     case 7:  /* Ordered !A */
5244     case 23: /* Greater, Less or Equal !A */
5245         c->v1 = tcg_temp_new();
5246         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5247         c->tcond = TCG_COND_EQ;
5248         break;
5249     case 8:  /* Unordered A */
5250     case 24: /* Not Greater, Less or Equal A */
5251         c->v1 = tcg_temp_new();
5252         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5253         c->tcond = TCG_COND_NE;
5254         break;
5255     case 9:  /* Unordered or Equal A || Z */
5256     case 25: /* Not Greater or Less than A || Z */
5257         c->v1 = tcg_temp_new();
5258         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5259         c->tcond = TCG_COND_NE;
5260         break;
5261     case 10: /* Unordered or Greater Than A || !(N || Z) */
5262     case 26: /* Not Less or Equal A || !(N || Z) */
5263         c->v1 = tcg_temp_new();
5264         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5265         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5266         tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_A | FPSR_CC_N);
5267         tcg_gen_or_i32(c->v1, c->v1, fpsr);
5268         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5269         c->tcond = TCG_COND_NE;
5270         break;
5271     case 11: /* Unordered or Greater or Equal A || Z || !N */
5272     case 27: /* Not Less Than A || Z || !N */
5273         c->v1 = tcg_temp_new();
5274         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5275         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5276         c->tcond = TCG_COND_NE;
5277         break;
5278     case 12: /* Unordered or Less Than A || (N && !Z) */
5279     case 28: /* Not Greater than or Equal A || (N && !Z) */
5280         c->v1 = tcg_temp_new();
5281         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5282         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5283         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5284         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_A | FPSR_CC_N);
5285         c->tcond = TCG_COND_NE;
5286         break;
5287     case 13: /* Unordered or Less or Equal A || Z || N */
5288     case 29: /* Not Greater Than A || Z || N */
5289         c->v1 = tcg_temp_new();
5290         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5291         c->tcond = TCG_COND_NE;
5292         break;
5293     case 14: /* Not Equal !Z */
5294     case 30: /* Signaling Not Equal !Z */
5295         c->v1 = tcg_temp_new();
5296         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5297         c->tcond = TCG_COND_EQ;
5298         break;
5299     case 15: /* True */
5300     case 31: /* Signaling True */
5301         c->v1 = c->v2;
5302         c->tcond = TCG_COND_ALWAYS;
5303         break;
5304     }
5305 }
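/*
 * Illustrative reading of one predicate above (no new behaviour):
 * cond 2, "ordered greater than", is defined as !(A || Z || N), so the
 * code masks FPSR with FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N and the
 * condition holds when that result is zero.  Its counterpart cond 18
 * ("greater than") uses the same test because the BSUN exception is
 * not raised here (see the TODO above).
 */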
5306 
5307 static void gen_fjmpcc(DisasContext *s, int cond, TCGLabel *l1)
5308 {
5309     DisasCompare c;
5310 
5311     gen_fcc_cond(&c, s, cond);
5312     update_cc_op(s);
5313     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
5314 }
5315 
5316 DISAS_INSN(fbcc)
5317 {
5318     uint32_t offset;
5319     uint32_t base;
5320     TCGLabel *l1;
5321 
5322     base = s->pc;
5323     offset = (int16_t)read_im16(env, s);
5324     if (insn & (1 << 6)) {
5325         offset = (offset << 16) | read_im16(env, s);
5326     }
5327 
5328     l1 = gen_new_label();
5329     update_cc_op(s);
5330     gen_fjmpcc(s, insn & 0x3f, l1);
5331     gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
5332     gen_set_label(l1);
5333     gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
5334 }
5335 
5336 DISAS_INSN(fscc)
5337 {
5338     DisasCompare c;
5339     int cond;
5340     TCGv tmp;
5341     uint16_t ext;
5342 
5343     ext = read_im16(env, s);
5344     cond = ext & 0x3f;
5345     gen_fcc_cond(&c, s, cond);
5346 
5347     tmp = tcg_temp_new();
5348     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
5349 
5350     tcg_gen_neg_i32(tmp, tmp);
5351     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
5352 }
5353 
5354 DISAS_INSN(ftrapcc)
5355 {
5356     DisasCompare c;
5357     uint16_t ext;
5358     int cond;
5359 
5360     ext = read_im16(env, s);
5361     cond = ext & 0x3f;
5362 
5363     /* Consume and discard the immediate operand. */
5364     switch (extract32(insn, 0, 3)) {
5365     case 2: /* ftrapcc.w */
5366         (void)read_im16(env, s);
5367         break;
5368     case 3: /* ftrapcc.l */
5369         (void)read_im32(env, s);
5370         break;
5371     case 4: /* ftrapcc (no operand) */
5372         break;
5373     default:
5374         /* ftrapcc is registered only for valid opmodes */
5375         g_assert_not_reached();
5376     }
5377 
5378     gen_fcc_cond(&c, s, cond);
5379     do_trapcc(s, &c);
5380 }
5381 
5382 #if defined(CONFIG_SOFTMMU)
5383 DISAS_INSN(frestore)
5384 {
5385     TCGv addr;
5386 
5387     if (IS_USER(s)) {
5388         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5389         return;
5390     }
5391     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5392         SRC_EA(env, addr, OS_LONG, 0, NULL);
5393         /* FIXME: check the state frame */
5394     } else {
5395         disas_undef(env, s, insn);
5396     }
5397 }
5398 
5399 DISAS_INSN(fsave)
5400 {
5401     if (IS_USER(s)) {
5402         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5403         return;
5404     }
5405 
5406     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5407         /* always write IDLE */
5408         TCGv idle = tcg_const_i32(0x41000000);
5409         DEST_EA(env, insn, OS_LONG, idle, NULL);
5410     } else {
5411         disas_undef(env, s, insn);
5412     }
5413 }
5414 #endif
5415 
5416 static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
5417 {
5418     TCGv tmp = tcg_temp_new();
5419     if (s->env->macsr & MACSR_FI) {
5420         if (upper)
5421             tcg_gen_andi_i32(tmp, val, 0xffff0000);
5422         else
5423             tcg_gen_shli_i32(tmp, val, 16);
5424     } else if (s->env->macsr & MACSR_SU) {
5425         if (upper)
5426             tcg_gen_sari_i32(tmp, val, 16);
5427         else
5428             tcg_gen_ext16s_i32(tmp, val);
5429     } else {
5430         if (upper)
5431             tcg_gen_shri_i32(tmp, val, 16);
5432         else
5433             tcg_gen_ext16u_i32(tmp, val);
5434     }
5435     return tmp;
5436 }
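/*
 * Illustrative summary of the extraction above (no new behaviour): for
 * a word-sized MAC operand the interpretation of the 16-bit half
 * depends on the MACSR mode: fractional (MACSR_FI) keeps the half in
 * the high 16 bits, signed integer (MACSR_SU) sign-extends it, and the
 * default unsigned mode zero-extends it.  For example, the lower half
 * 0x8000 becomes 0x80000000, 0xffff8000 or 0x00008000 respectively.
 */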
5437 
5438 static void gen_mac_clear_flags(void)
5439 {
5440     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
5441                      ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
5442 }
5443 
5444 DISAS_INSN(mac)
5445 {
5446     TCGv rx;
5447     TCGv ry;
5448     uint16_t ext;
5449     int acc;
5450     TCGv tmp;
5451     TCGv addr;
5452     TCGv loadval;
5453     int dual;
5454     TCGv saved_flags;
5455 
5456     if (!s->done_mac) {
5457         s->mactmp = tcg_temp_new_i64();
5458         s->done_mac = 1;
5459     }
5460 
5461     ext = read_im16(env, s);
5462 
5463     acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
5464     dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
5465     if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
5466         disas_undef(env, s, insn);
5467         return;
5468     }
5469     if (insn & 0x30) {
5470         /* MAC with load.  */
5471         tmp = gen_lea(env, s, insn, OS_LONG);
5472         addr = tcg_temp_new();
5473         tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
5474         /*
5475          * Load the value now to ensure correct exception behavior.
5476          * Perform writeback after reading the MAC inputs.
5477          */
5478         loadval = gen_load(s, OS_LONG, addr, 0, IS_USER(s));
5479 
5480         acc ^= 1;
5481         rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
5482         ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
5483     } else {
5484         loadval = addr = NULL_QREG;
5485         rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5486         ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5487     }
5488 
5489     gen_mac_clear_flags();
5490 #if 0
5491     l1 = -1;
5492     /* Disabled because conditional branches clobber temporary vars.  */
5493     if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
5494         /* Skip the multiply if we know we will ignore it.  */
5495         l1 = gen_new_label();
5496         tmp = tcg_temp_new();
5497         tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
5498         gen_op_jmp_nz32(tmp, l1);
5499     }
5500 #endif
5501 
5502     if ((ext & 0x0800) == 0) {
5503         /* Word.  */
5504         rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
5505         ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
5506     }
5507     if (s->env->macsr & MACSR_FI) {
5508         gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
5509     } else {
5510         if (s->env->macsr & MACSR_SU)
5511             gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
5512         else
5513             gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
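        /* ext[10:9] scales the product: 01 = << 1, 11 = >> 1. */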
5514         switch ((ext >> 9) & 3) {
5515         case 1:
5516             tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
5517             break;
5518         case 3:
5519             tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
5520             break;
5521         }
5522     }
5523 
5524     if (dual) {
5525         /* Save the overflow flag from the multiply.  */
5526         saved_flags = tcg_temp_new();
5527         tcg_gen_mov_i32(saved_flags, QREG_MACSR);
5528     } else {
5529         saved_flags = NULL_QREG;
5530     }
5531 
5532 #if 0
5533     /* Disabled because conditional branches clobber temporary vars.  */
5534     if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
5535         /* Skip the accumulate if the value is already saturated.  */
5536         l1 = gen_new_label();
5537         tmp = tcg_temp_new();
5538         gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5539         gen_op_jmp_nz32(tmp, l1);
5540     }
5541 #endif
5542 
5543     if (insn & 0x100)
5544         tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5545     else
5546         tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5547 
5548     if (s->env->macsr & MACSR_FI)
5549         gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5550     else if (s->env->macsr & MACSR_SU)
5551         gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5552     else
5553         gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5554 
5555 #if 0
5556     /* Disabled because conditional branches clobber temporary vars.  */
5557     if (l1 != -1)
5558         gen_set_label(l1);
5559 #endif
5560 
5561     if (dual) {
5562         /* Dual accumulate variant.  */
5563         acc = (ext >> 2) & 3;
5564         /* Restore the overflow flag from the multiplier.  */
5565         tcg_gen_mov_i32(QREG_MACSR, saved_flags);
5566 #if 0
5567         /* Disabled because conditional branches clobber temporary vars.  */
5568         if ((s->env->macsr & MACSR_OMC) != 0) {
5569             /* Skip the accumulate if the value is already saturated.  */
5570             l1 = gen_new_label();
5571             tmp = tcg_temp_new();
5572             gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5573             gen_op_jmp_nz32(tmp, l1);
5574         }
5575 #endif
5576         if (ext & 2)
5577             tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5578         else
5579             tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5580         if (s->env->macsr & MACSR_FI)
5581             gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5582         else if (s->env->macsr & MACSR_SU)
5583             gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5584         else
5585             gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5586 #if 0
5587         /* Disabled because conditional branches clobber temporary vars.  */
5588         if (l1 != -1)
5589             gen_set_label(l1);
5590 #endif
5591     }
5592     gen_helper_mac_set_flags(cpu_env, tcg_const_i32(acc));
5593 
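    /*
     * For the MAC-with-load forms, complete the parallel load: write the
     * value read earlier into Rw, then perform the deferred address
     * register update for the post-increment/pre-decrement modes.
     */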
5594     if (insn & 0x30) {
5595         TCGv rw;
5596         rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5597         tcg_gen_mov_i32(rw, loadval);
5598         /*
5599          * FIXME: Should address writeback happen with the masked or
5600          * unmasked value?
5601          */
5602         switch ((insn >> 3) & 7) {
5603         case 3: /* Post-increment.  */
5604             tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
5605             break;
5606         case 4: /* Pre-decrement.  */
5607             tcg_gen_mov_i32(AREG(insn, 0), addr);
5608         }
5609     }
5610 }
5611 
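/*
 * Move an accumulator to a register.  Fractional mode and the saturating
 * (MACSR_OMC) modes go through helpers; otherwise the low 32 bits of the
 * accumulator are copied directly.  The insn bit 6 form also clears the
 * accumulator and its PAVx flag afterwards (the EMAC movclr variant).
 */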
5612 DISAS_INSN(from_mac)
5613 {
5614     TCGv rx;
5615     TCGv_i64 acc;
5616     int accnum;
5617 
5618     rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5619     accnum = (insn >> 9) & 3;
5620     acc = MACREG(accnum);
5621     if (s->env->macsr & MACSR_FI) {
5622         gen_helper_get_macf(rx, cpu_env, acc);
5623     } else if ((s->env->macsr & MACSR_OMC) == 0) {
5624         tcg_gen_extrl_i64_i32(rx, acc);
5625     } else if (s->env->macsr & MACSR_SU) {
5626         gen_helper_get_macs(rx, acc);
5627     } else {
5628         gen_helper_get_macu(rx, acc);
5629     }
5630     if (insn & 0x40) {
5631         tcg_gen_movi_i64(acc, 0);
5632         tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5633     }
5634 }
5635 
5636 DISAS_INSN(move_mac)
5637 {
5638     /* FIXME: This can be done without a helper.  */
5639     int src;
5640     TCGv dest;
5641     src = insn & 3;
5642     dest = tcg_const_i32((insn >> 9) & 3);
5643     gen_helper_mac_move(cpu_env, dest, tcg_const_i32(src));
5644     gen_mac_clear_flags();
5645     gen_helper_mac_set_flags(cpu_env, dest);
5646 }
5647 
5648 DISAS_INSN(from_macsr)
5649 {
5650     TCGv reg;
5651 
5652     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5653     tcg_gen_mov_i32(reg, QREG_MACSR);
5654 }
5655 
5656 DISAS_INSN(from_mask)
5657 {
5658     TCGv reg;
5659     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5660     tcg_gen_mov_i32(reg, QREG_MAC_MASK);
5661 }
5662 
5663 DISAS_INSN(from_mext)
5664 {
5665     TCGv reg;
5666     TCGv acc;
5667     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5668     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5669     if (s->env->macsr & MACSR_FI)
5670         gen_helper_get_mac_extf(reg, cpu_env, acc);
5671     else
5672         gen_helper_get_mac_exti(reg, cpu_env, acc);
5673 }
5674 
5675 DISAS_INSN(macsr_to_ccr)
5676 {
5677     TCGv tmp = tcg_temp_new();
5678 
5679     /* Note that X and C are always cleared. */
5680     tcg_gen_andi_i32(tmp, QREG_MACSR, CCF_N | CCF_Z | CCF_V);
5681     gen_helper_set_ccr(cpu_env, tmp);
5682     set_cc_op(s, CC_OP_FLAGS);
5683 }
5684 
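/*
 * Load an accumulator from an effective address.  Fractional mode places
 * the 32-bit value 8 bits up in the accumulator, matching the internal
 * format used by the MAC helpers; the other modes sign- or zero-extend
 * it.  Loading an accumulator also clears its PAVx flag and recomputes
 * the MACSR condition flags.
 */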
5685 DISAS_INSN(to_mac)
5686 {
5687     TCGv_i64 acc;
5688     TCGv val;
5689     int accnum;
5690     accnum = (insn >> 9) & 3;
5691     acc = MACREG(accnum);
5692     SRC_EA(env, val, OS_LONG, 0, NULL);
5693     if (s->env->macsr & MACSR_FI) {
5694         tcg_gen_ext_i32_i64(acc, val);
5695         tcg_gen_shli_i64(acc, acc, 8);
5696     } else if (s->env->macsr & MACSR_SU) {
5697         tcg_gen_ext_i32_i64(acc, val);
5698     } else {
5699         tcg_gen_extu_i32_i64(acc, val);
5700     }
5701     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5702     gen_mac_clear_flags();
5703     gen_helper_mac_set_flags(cpu_env, tcg_const_i32(accnum));
5704 }
5705 
5706 DISAS_INSN(to_macsr)
5707 {
5708     TCGv val;
5709     SRC_EA(env, val, OS_LONG, 0, NULL);
5710     gen_helper_set_macsr(cpu_env, val);
5711     gen_exit_tb(s);
5712 }
5713 
5714 DISAS_INSN(to_mask)
5715 {
5716     TCGv val;
5717     SRC_EA(env, val, OS_LONG, 0, NULL);
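    /* The upper half of MASK is forced to ones; only the low 16 bits are writable. */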
5718     tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
5719 }
5720 
5721 DISAS_INSN(to_mext)
5722 {
5723     TCGv val;
5724     TCGv acc;
5725     SRC_EA(env, val, OS_LONG, 0, NULL);
5726     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5727     if (s->env->macsr & MACSR_FI)
5728         gen_helper_set_mac_extf(cpu_env, val, acc);
5729     else if (s->env->macsr & MACSR_SU)
5730         gen_helper_set_mac_exts(cpu_env, val, acc);
5731     else
5732         gen_helper_set_mac_extu(cpu_env, val, acc);
5733 }
5734 
5735 static disas_proc opcode_table[65536];
5736 
5737 static void
5738 register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
5739 {
5740   int i;
5741   int from;
5742   int to;
5743 
5744   /* Sanity check.  All set bits must be included in the mask.  */
5745   if (opcode & ~mask) {
5746       fprintf(stderr,
5747               "qemu internal error: bogus opcode definition %04x/%04x\n",
5748               opcode, mask);
5749       abort();
5750   }
5751   /*
5752    * This could probably be cleverer.  For now just optimize the case where
5753    * the top bits are known.
5754    */
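  /*
   * Example: registering dbcc as opcode 0x50c8 with mask 0xf0f8 finds the
   * highest clear mask bit at 0x0800, so the loop below walks entries
   * 0x5000..0x5fff and fills the 128 slots where bits 11-8 (condition)
   * and 2-0 (register) vary while all other bits match.
   */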
5755   /* Find the first zero bit in the mask.  */
5756   i = 0x8000;
5757   while ((i & mask) != 0)
5758       i >>= 1;
5759   /* Iterate over all combinations of this and lower bits.  */
5760   if (i == 0)
5761       i = 1;
5762   else
5763       i <<= 1;
5764   from = opcode & ~(i - 1);
5765   to = from + i;
5766   for (i = from; i < to; i++) {
5767       if ((i & mask) == opcode)
5768           opcode_table[i] = proc;
5769   }
5770 }
5771 
5772 /*
5773  * Register m68k opcode handlers.  Order is important.
5774  * Later insns override earlier ones.
5775  */
5776 void register_m68k_insns (CPUM68KState *env)
5777 {
5778     /*
5779      * Build the opcode table only once to avoid
5780      * multithreading issues.
5781      */
5782     if (opcode_table[0] != NULL) {
5783         return;
5784     }
5785 
5786     /*
5787      * Use BASE() for instructions available on
5788      * both CF_ISA_A and M68000.
5789      */
5790 #define BASE(name, opcode, mask) \
5791     register_opcode(disas_##name, 0x##opcode, 0x##mask)
5792 #define INSN(name, opcode, mask, feature) do { \
5793     if (m68k_feature(env, M68K_FEATURE_##feature)) \
5794         BASE(name, opcode, mask); \
5795     } while (0)
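    /*
     * For example, INSN(dbcc, 50c8, f0f8, M68K) installs disas_dbcc for
     * every pattern 0101 cccc 1100 1rrr, but only when the M68K feature
     * bit is set for the CPU being configured.
     */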
5796     BASE(undef,     0000, 0000);
5797     INSN(arith_im,  0080, fff8, CF_ISA_A);
5798     INSN(arith_im,  0000, ff00, M68K);
5799     INSN(chk2,      00c0, f9c0, CHK2);
5800     INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
5801     BASE(bitop_reg, 0100, f1c0);
5802     BASE(bitop_reg, 0140, f1c0);
5803     BASE(bitop_reg, 0180, f1c0);
5804     BASE(bitop_reg, 01c0, f1c0);
5805     INSN(movep,     0108, f138, MOVEP);
5806     INSN(arith_im,  0280, fff8, CF_ISA_A);
5807     INSN(arith_im,  0200, ff00, M68K);
5808     INSN(undef,     02c0, ffc0, M68K);
5809     INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
5810     INSN(arith_im,  0480, fff8, CF_ISA_A);
5811     INSN(arith_im,  0400, ff00, M68K);
5812     INSN(undef,     04c0, ffc0, M68K);
5813     INSN(arith_im,  0600, ff00, M68K);
5814     INSN(undef,     06c0, ffc0, M68K);
5815     INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
5816     INSN(arith_im,  0680, fff8, CF_ISA_A);
5817     INSN(arith_im,  0c00, ff38, CF_ISA_A);
5818     INSN(arith_im,  0c00, ff00, M68K);
5819     BASE(bitop_im,  0800, ffc0);
5820     BASE(bitop_im,  0840, ffc0);
5821     BASE(bitop_im,  0880, ffc0);
5822     BASE(bitop_im,  08c0, ffc0);
5823     INSN(arith_im,  0a80, fff8, CF_ISA_A);
5824     INSN(arith_im,  0a00, ff00, M68K);
5825 #if defined(CONFIG_SOFTMMU)
5826     INSN(moves,     0e00, ff00, M68K);
5827 #endif
5828     INSN(cas,       0ac0, ffc0, CAS);
5829     INSN(cas,       0cc0, ffc0, CAS);
5830     INSN(cas,       0ec0, ffc0, CAS);
5831     INSN(cas2w,     0cfc, ffff, CAS);
5832     INSN(cas2l,     0efc, ffff, CAS);
5833     BASE(move,      1000, f000);
5834     BASE(move,      2000, f000);
5835     BASE(move,      3000, f000);
5836     INSN(chk,       4000, f040, M68K);
5837     INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
5838     INSN(negx,      4080, fff8, CF_ISA_A);
5839     INSN(negx,      4000, ff00, M68K);
5840     INSN(undef,     40c0, ffc0, M68K);
5841     INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
5842     INSN(move_from_sr, 40c0, ffc0, M68K);
5843     BASE(lea,       41c0, f1c0);
5844     BASE(clr,       4200, ff00);
5845     BASE(undef,     42c0, ffc0);
5846     INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
5847     INSN(move_from_ccr, 42c0, ffc0, M68K);
5848     INSN(neg,       4480, fff8, CF_ISA_A);
5849     INSN(neg,       4400, ff00, M68K);
5850     INSN(undef,     44c0, ffc0, M68K);
5851     BASE(move_to_ccr, 44c0, ffc0);
5852     INSN(not,       4680, fff8, CF_ISA_A);
5853     INSN(not,       4600, ff00, M68K);
5854 #if defined(CONFIG_SOFTMMU)
5855     BASE(move_to_sr, 46c0, ffc0);
5856 #endif
5857     INSN(nbcd,      4800, ffc0, M68K);
5858     INSN(linkl,     4808, fff8, M68K);
5859     BASE(pea,       4840, ffc0);
5860     BASE(swap,      4840, fff8);
5861     INSN(bkpt,      4848, fff8, BKPT);
5862     INSN(movem,     48d0, fbf8, CF_ISA_A);
5863     INSN(movem,     48e8, fbf8, CF_ISA_A);
5864     INSN(movem,     4880, fb80, M68K);
5865     BASE(ext,       4880, fff8);
5866     BASE(ext,       48c0, fff8);
5867     BASE(ext,       49c0, fff8);
5868     BASE(tst,       4a00, ff00);
5869     INSN(tas,       4ac0, ffc0, CF_ISA_B);
5870     INSN(tas,       4ac0, ffc0, M68K);
5871 #if defined(CONFIG_SOFTMMU)
5872     INSN(halt,      4ac8, ffff, CF_ISA_A);
5873     INSN(halt,      4ac8, ffff, M68K);
5874 #endif
5875     INSN(pulse,     4acc, ffff, CF_ISA_A);
5876     BASE(illegal,   4afc, ffff);
5877     INSN(mull,      4c00, ffc0, CF_ISA_A);
5878     INSN(mull,      4c00, ffc0, LONG_MULDIV);
5879     INSN(divl,      4c40, ffc0, CF_ISA_A);
5880     INSN(divl,      4c40, ffc0, LONG_MULDIV);
5881     INSN(sats,      4c80, fff8, CF_ISA_B);
5882     BASE(trap,      4e40, fff0);
5883     BASE(link,      4e50, fff8);
5884     BASE(unlk,      4e58, fff8);
5885 #if defined(CONFIG_SOFTMMU)
5886     INSN(move_to_usp, 4e60, fff8, USP);
5887     INSN(move_from_usp, 4e68, fff8, USP);
5888     INSN(reset,     4e70, ffff, M68K);
5889     BASE(stop,      4e72, ffff);
5890     BASE(rte,       4e73, ffff);
5891     INSN(cf_movec,  4e7b, ffff, CF_ISA_A);
5892     INSN(m68k_movec, 4e7a, fffe, MOVEC);
5893 #endif
5894     BASE(nop,       4e71, ffff);
5895     INSN(rtd,       4e74, ffff, RTD);
5896     BASE(rts,       4e75, ffff);
5897     INSN(trapv,     4e76, ffff, M68K);
5898     INSN(rtr,       4e77, ffff, M68K);
5899     BASE(jump,      4e80, ffc0);
5900     BASE(jump,      4ec0, ffc0);
5901     INSN(addsubq,   5000, f080, M68K);
5902     BASE(addsubq,   5080, f0c0);
5903     INSN(scc,       50c0, f0f8, CF_ISA_A); /* Scc.B Dx   */
5904     INSN(scc,       50c0, f0c0, M68K);     /* Scc.B <EA> */
5905     INSN(dbcc,      50c8, f0f8, M68K);
5906     INSN(trapcc,    50fa, f0fe, TRAPCC);   /* opmode 010, 011 */
5907     INSN(trapcc,    50fc, f0ff, TRAPCC);   /* opmode 100 */
5908     INSN(trapcc,    51fa, fffe, CF_ISA_A); /* TPF (trapf) opmode 010, 011 */
5909     INSN(trapcc,    51fc, ffff, CF_ISA_A); /* TPF (trapf) opmode 100 */
5910 
5911     /* Branch instructions.  */
5912     BASE(branch,    6000, f000);
5913     /* Disable long branch instructions, then add back the ones we want.  */
5914     BASE(undef,     60ff, f0ff); /* All long branches.  */
5915     INSN(branch,    60ff, f0ff, CF_ISA_B);
5916     INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
5917     INSN(branch,    60ff, ffff, BRAL);
5918     INSN(branch,    60ff, f0ff, BCCL);
5919 
5920     BASE(moveq,     7000, f100);
5921     INSN(mvzs,      7100, f100, CF_ISA_B);
5922     BASE(or,        8000, f000);
5923     BASE(divw,      80c0, f0c0);
5924     INSN(sbcd_reg,  8100, f1f8, M68K);
5925     INSN(sbcd_mem,  8108, f1f8, M68K);
5926     BASE(addsub,    9000, f000);
5927     INSN(undef,     90c0, f0c0, CF_ISA_A);
5928     INSN(subx_reg,  9180, f1f8, CF_ISA_A);
5929     INSN(subx_reg,  9100, f138, M68K);
5930     INSN(subx_mem,  9108, f138, M68K);
5931     INSN(suba,      91c0, f1c0, CF_ISA_A);
5932     INSN(suba,      90c0, f0c0, M68K);
5933 
5934     BASE(undef_mac, a000, f000);
5935     INSN(mac,       a000, f100, CF_EMAC);
5936     INSN(from_mac,  a180, f9b0, CF_EMAC);
5937     INSN(move_mac,  a110, f9fc, CF_EMAC);
5938     INSN(from_macsr,a980, f9f0, CF_EMAC);
5939     INSN(from_mask, ad80, fff0, CF_EMAC);
5940     INSN(from_mext, ab80, fbf0, CF_EMAC);
5941     INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
5942     INSN(to_mac,    a100, f9c0, CF_EMAC);
5943     INSN(to_macsr,  a900, ffc0, CF_EMAC);
5944     INSN(to_mext,   ab00, fbc0, CF_EMAC);
5945     INSN(to_mask,   ad00, ffc0, CF_EMAC);
5946 
5947     INSN(mov3q,     a140, f1c0, CF_ISA_B);
5948     INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
5949     INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
5950     INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
5951     INSN(cmp,       b080, f1c0, CF_ISA_A);
5952     INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
5953     INSN(cmp,       b000, f100, M68K);
5954     INSN(eor,       b100, f100, M68K);
5955     INSN(cmpm,      b108, f138, M68K);
5956     INSN(cmpa,      b0c0, f0c0, M68K);
5957     INSN(eor,       b180, f1c0, CF_ISA_A);
5958     BASE(and,       c000, f000);
5959     INSN(exg_dd,    c140, f1f8, M68K);
5960     INSN(exg_aa,    c148, f1f8, M68K);
5961     INSN(exg_da,    c188, f1f8, M68K);
5962     BASE(mulw,      c0c0, f0c0);
5963     INSN(abcd_reg,  c100, f1f8, M68K);
5964     INSN(abcd_mem,  c108, f1f8, M68K);
5965     BASE(addsub,    d000, f000);
5966     INSN(undef,     d0c0, f0c0, CF_ISA_A);
5967     INSN(addx_reg,  d180, f1f8, CF_ISA_A);
5968     INSN(addx_reg,  d100, f138, M68K);
5969     INSN(addx_mem,  d108, f138, M68K);
5970     INSN(adda,      d1c0, f1c0, CF_ISA_A);
5971     INSN(adda,      d0c0, f0c0, M68K);
5972     INSN(shift_im,  e080, f0f0, CF_ISA_A);
5973     INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
5974     INSN(shift8_im, e000, f0f0, M68K);
5975     INSN(shift16_im, e040, f0f0, M68K);
5976     INSN(shift_im,  e080, f0f0, M68K);
5977     INSN(shift8_reg, e020, f0f0, M68K);
5978     INSN(shift16_reg, e060, f0f0, M68K);
5979     INSN(shift_reg, e0a0, f0f0, M68K);
5980     INSN(shift_mem, e0c0, fcc0, M68K);
5981     INSN(rotate_im, e090, f0f0, M68K);
5982     INSN(rotate8_im, e010, f0f0, M68K);
5983     INSN(rotate16_im, e050, f0f0, M68K);
5984     INSN(rotate_reg, e0b0, f0f0, M68K);
5985     INSN(rotate8_reg, e030, f0f0, M68K);
5986     INSN(rotate16_reg, e070, f0f0, M68K);
5987     INSN(rotate_mem, e4c0, fcc0, M68K);
5988     INSN(bfext_mem, e9c0, fdc0, BITFIELD);  /* bfextu & bfexts */
5989     INSN(bfext_reg, e9c0, fdf8, BITFIELD);
5990     INSN(bfins_mem, efc0, ffc0, BITFIELD);
5991     INSN(bfins_reg, efc0, fff8, BITFIELD);
5992     INSN(bfop_mem, eac0, ffc0, BITFIELD);   /* bfchg */
5993     INSN(bfop_reg, eac0, fff8, BITFIELD);   /* bfchg */
5994     INSN(bfop_mem, ecc0, ffc0, BITFIELD);   /* bfclr */
5995     INSN(bfop_reg, ecc0, fff8, BITFIELD);   /* bfclr */
5996     INSN(bfop_mem, edc0, ffc0, BITFIELD);   /* bfffo */
5997     INSN(bfop_reg, edc0, fff8, BITFIELD);   /* bfffo */
5998     INSN(bfop_mem, eec0, ffc0, BITFIELD);   /* bfset */
5999     INSN(bfop_reg, eec0, fff8, BITFIELD);   /* bfset */
6000     INSN(bfop_mem, e8c0, ffc0, BITFIELD);   /* bftst */
6001     INSN(bfop_reg, e8c0, fff8, BITFIELD);   /* bftst */
6002     BASE(undef_fpu, f000, f000);
6003     INSN(fpu,       f200, ffc0, CF_FPU);
6004     INSN(fbcc,      f280, ffc0, CF_FPU);
6005     INSN(fpu,       f200, ffc0, FPU);
6006     INSN(fscc,      f240, ffc0, FPU);
6007     INSN(ftrapcc,   f27a, fffe, FPU);       /* opmode 010, 011 */
6008     INSN(ftrapcc,   f27c, ffff, FPU);       /* opmode 100 */
6009     INSN(fbcc,      f280, ff80, FPU);
6010 #if defined(CONFIG_SOFTMMU)
6011     INSN(frestore,  f340, ffc0, CF_FPU);
6012     INSN(fsave,     f300, ffc0, CF_FPU);
6013     INSN(frestore,  f340, ffc0, FPU);
6014     INSN(fsave,     f300, ffc0, FPU);
6015     INSN(intouch,   f340, ffc0, CF_ISA_A);
6016     INSN(cpushl,    f428, ff38, CF_ISA_A);
6017     INSN(cpush,     f420, ff20, M68040);
6018     INSN(cinv,      f400, ff20, M68040);
6019     INSN(pflush,    f500, ffe0, M68040);
6020     INSN(ptest,     f548, ffd8, M68040);
6021     INSN(wddata,    fb00, ff00, CF_ISA_A);
6022     INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
6023 #endif
6024     INSN(move16_mem, f600, ffe0, M68040);
6025     INSN(move16_reg, f620, fff8, M68040);
6026 #undef INSN
6027 }
6028 
6029 static void m68k_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
6030 {
6031     DisasContext *dc = container_of(dcbase, DisasContext, base);
6032     CPUM68KState *env = cpu->env_ptr;
6033 
6034     dc->env = env;
6035     dc->pc = dc->base.pc_first;
6036     /* This value will always be filled in properly before m68k_tr_tb_stop. */
6037     dc->pc_prev = 0xdeadbeef;
6038     dc->cc_op = CC_OP_DYNAMIC;
6039     dc->cc_op_synced = 1;
6040     dc->done_mac = 0;
6041     dc->writeback_mask = 0;
6042 
6043     dc->ss_active = (M68K_SR_TRACE(env->sr) == M68K_SR_TRACE_ANY_INS);
6044     /* If architectural single-step is active, limit the TB to one insn. */
6045     if (dc->ss_active) {
6046         dc->base.max_insns = 1;
6047     }
6048 }
6049 
6050 static void m68k_tr_tb_start(DisasContextBase *dcbase, CPUState *cpu)
6051 {
6052 }
6053 
6054 static void m68k_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
6055 {
6056     DisasContext *dc = container_of(dcbase, DisasContext, base);
6057     tcg_gen_insn_start(dc->base.pc_next, dc->cc_op);
6058 }
6059 
6060 static void m68k_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
6061 {
6062     DisasContext *dc = container_of(dcbase, DisasContext, base);
6063     CPUM68KState *env = cpu->env_ptr;
6064     uint16_t insn = read_im16(env, dc);
6065 
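    /*
     * register_m68k_insns() seeds the whole table via BASE(undef, 0000, 0000),
     * so every 16-bit pattern dispatches somewhere; unhandled encodings fall
     * through to disas_undef.
     */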
6066     opcode_table[insn](env, dc, insn);
6067     do_writebacks(dc);
6068 
6069     dc->pc_prev = dc->base.pc_next;
6070     dc->base.pc_next = dc->pc;
6071 
6072     if (dc->base.is_jmp == DISAS_NEXT) {
6073         /*
6074          * Stop translation when the next insn might touch a new page.
6075          * This ensures that prefetch aborts at the right place.
6076          *
6077          * We cannot determine the size of the next insn without
6078          * completely decoding it.  However, the maximum insn size
6079          * is 32 bytes, so end if we do not have that much remaining.
6080          * This may produce several small TBs at the end of each page,
6081          * but they will all be linked with goto_tb.
6082          *
6083          * ??? ColdFire maximum is 4 bytes; MC68000's maximum is also
6084          * smaller than MC68020's.
6085          */
6086         target_ulong start_page_offset
6087             = dc->pc - (dc->base.pc_first & TARGET_PAGE_MASK);
6088 
6089         if (start_page_offset >= TARGET_PAGE_SIZE - 32) {
6090             dc->base.is_jmp = DISAS_TOO_MANY;
6091         }
6092     }
6093 }
6094 
6095 static void m68k_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
6096 {
6097     DisasContext *dc = container_of(dcbase, DisasContext, base);
6098 
6099     switch (dc->base.is_jmp) {
6100     case DISAS_NORETURN:
6101         break;
6102     case DISAS_TOO_MANY:
6103         update_cc_op(dc);
6104         gen_jmp_tb(dc, 0, dc->pc, dc->pc_prev);
6105         break;
6106     case DISAS_JUMP:
6107         /* We updated CC_OP and PC in gen_jmp/gen_jmp_im.  */
6108         if (dc->ss_active) {
6109             gen_raise_exception_format2(dc, EXCP_TRACE, dc->pc_prev);
6110         } else {
6111             tcg_gen_lookup_and_goto_ptr();
6112         }
6113         break;
6114     case DISAS_EXIT:
6115         /*
6116          * We updated CC_OP and PC in gen_exit_tb, but also modified
6117          * other state that may require returning to the main loop.
6118          */
6119         if (dc->ss_active) {
6120             gen_raise_exception_format2(dc, EXCP_TRACE, dc->pc_prev);
6121         } else {
6122             tcg_gen_exit_tb(NULL, 0);
6123         }
6124         break;
6125     default:
6126         g_assert_not_reached();
6127     }
6128 }
6129 
6130 static void m68k_tr_disas_log(const DisasContextBase *dcbase,
6131                               CPUState *cpu, FILE *logfile)
6132 {
6133     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
6134     target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
6135 }
6136 
6137 static const TranslatorOps m68k_tr_ops = {
6138     .init_disas_context = m68k_tr_init_disas_context,
6139     .tb_start           = m68k_tr_tb_start,
6140     .insn_start         = m68k_tr_insn_start,
6141     .translate_insn     = m68k_tr_translate_insn,
6142     .tb_stop            = m68k_tr_tb_stop,
6143     .disas_log          = m68k_tr_disas_log,
6144 };
6145 
6146 void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int *max_insns,
6147                            target_ulong pc, void *host_pc)
6148 {
6149     DisasContext dc;
6150     translator_loop(cpu, tb, max_insns, pc, host_pc, &m68k_tr_ops, &dc.base);
6151 }
6152 
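/*
 * Convert an 80-bit FP register image to a host double, used only by the
 * register dump below; the precision loss does not matter for display.
 */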
6153 static double floatx80_to_double(CPUM68KState *env, uint16_t high, uint64_t low)
6154 {
6155     floatx80 a = { .high = high, .low = low };
6156     union {
6157         float64 f64;
6158         double d;
6159     } u;
6160 
6161     u.f64 = floatx80_to_float64(a, &env->fp_status);
6162     return u.d;
6163 }
6164 
6165 void m68k_cpu_dump_state(CPUState *cs, FILE *f, int flags)
6166 {
6167     M68kCPU *cpu = M68K_CPU(cs);
6168     CPUM68KState *env = &cpu->env;
6169     int i;
6170     uint16_t sr;
6171     for (i = 0; i < 8; i++) {
6172         qemu_fprintf(f, "D%d = %08x   A%d = %08x   "
6173                      "F%d = %04x %016"PRIx64"  (%12g)\n",
6174                      i, env->dregs[i], i, env->aregs[i],
6175                      i, env->fregs[i].l.upper, env->fregs[i].l.lower,
6176                      floatx80_to_double(env, env->fregs[i].l.upper,
6177                                         env->fregs[i].l.lower));
6178     }
6179     qemu_fprintf(f, "PC = %08x   ", env->pc);
6180     sr = env->sr | cpu_m68k_get_ccr(env);
6181     qemu_fprintf(f, "SR = %04x T:%x I:%x %c%c %c%c%c%c%c\n",
6182                  sr, (sr & SR_T) >> SR_T_SHIFT, (sr & SR_I) >> SR_I_SHIFT,
6183                  (sr & SR_S) ? 'S' : 'U', (sr & SR_M) ? '%' : 'I',
6184                  (sr & CCF_X) ? 'X' : '-', (sr & CCF_N) ? 'N' : '-',
6185                  (sr & CCF_Z) ? 'Z' : '-', (sr & CCF_V) ? 'V' : '-',
6186                  (sr & CCF_C) ? 'C' : '-');
6187     qemu_fprintf(f, "FPSR = %08x %c%c%c%c ", env->fpsr,
6188                  (env->fpsr & FPSR_CC_A) ? 'A' : '-',
6189                  (env->fpsr & FPSR_CC_I) ? 'I' : '-',
6190                  (env->fpsr & FPSR_CC_Z) ? 'Z' : '-',
6191                  (env->fpsr & FPSR_CC_N) ? 'N' : '-');
6192     qemu_fprintf(f, "\n                                "
6193                  "FPCR =     %04x ", env->fpcr);
6194     switch (env->fpcr & FPCR_PREC_MASK) {
6195     case FPCR_PREC_X:
6196         qemu_fprintf(f, "X ");
6197         break;
6198     case FPCR_PREC_S:
6199         qemu_fprintf(f, "S ");
6200         break;
6201     case FPCR_PREC_D:
6202         qemu_fprintf(f, "D ");
6203         break;
6204     }
6205     switch (env->fpcr & FPCR_RND_MASK) {
6206     case FPCR_RND_N:
6207         qemu_fprintf(f, "RN ");
6208         break;
6209     case FPCR_RND_Z:
6210         qemu_fprintf(f, "RZ ");
6211         break;
6212     case FPCR_RND_M:
6213         qemu_fprintf(f, "RM ");
6214         break;
6215     case FPCR_RND_P:
6216         qemu_fprintf(f, "RP ");
6217         break;
6218     }
6219     qemu_fprintf(f, "\n");
6220 #ifdef CONFIG_SOFTMMU
6221     qemu_fprintf(f, "%sA7(MSP) = %08x %sA7(USP) = %08x %sA7(ISP) = %08x\n",
6222                  env->current_sp == M68K_SSP ? "->" : "  ", env->sp[M68K_SSP],
6223                  env->current_sp == M68K_USP ? "->" : "  ", env->sp[M68K_USP],
6224                  env->current_sp == M68K_ISP ? "->" : "  ", env->sp[M68K_ISP]);
6225     qemu_fprintf(f, "VBR = 0x%08x\n", env->vbr);
6226     qemu_fprintf(f, "SFC = %x DFC %x\n", env->sfc, env->dfc);
6227     qemu_fprintf(f, "SSW %08x TCR %08x URP %08x SRP %08x\n",
6228                  env->mmu.ssw, env->mmu.tcr, env->mmu.urp, env->mmu.srp);
6229     qemu_fprintf(f, "DTTR0/1: %08x/%08x ITTR0/1: %08x/%08x\n",
6230                  env->mmu.ttr[M68K_DTTR0], env->mmu.ttr[M68K_DTTR1],
6231                  env->mmu.ttr[M68K_ITTR0], env->mmu.ttr[M68K_ITTR1]);
6232     qemu_fprintf(f, "MMUSR %08x, fault at %08x\n",
6233                  env->mmu.mmusr, env->mmu.ar);
6234 #endif
6235 }
6236