xref: /openbmc/qemu/target/m68k/translate.c (revision e0091133)
1 /*
2  *  m68k translation
3  *
4  *  Copyright (c) 2005-2007 CodeSourcery
5  *  Written by Paul Brook
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "disas/disas.h"
24 #include "exec/exec-all.h"
25 #include "tcg/tcg-op.h"
26 #include "qemu/log.h"
27 #include "qemu/qemu-print.h"
28 #include "exec/cpu_ldst.h"
29 #include "exec/translator.h"
30 
31 #include "exec/helper-proto.h"
32 #include "exec/helper-gen.h"
33 
34 #include "exec/log.h"
35 #include "fpu/softfloat.h"
36 
37 
//#define DEBUG_DISPATCH 1

/* Declare the QREG_* TCG globals (PC, CC flags, ...) listed in qregs.h.inc. */
#define DEFO32(name, offset) static TCGv QREG_##name;
#define DEFO64(name, offset) static TCGv_i64 QREG_##name;
#include "qregs.h.inc"
#undef DEFO32
#undef DEFO64

static TCGv_i32 cpu_halted;
static TCGv_i32 cpu_exception_index;

/* Backing store for register names: "D0".."D7", "A0".."A7", "ACC0".."ACC3". */
static char cpu_reg_names[2 * 8 * 3 + 5 * 4];
static TCGv cpu_dregs[8];
static TCGv cpu_aregs[8];
static TCGv_i64 cpu_macc[4];

/* Extract the 3-bit register field of INSN at bit position POS. */
#define REG(insn, pos)  (((insn) >> (pos)) & 7)
#define DREG(insn, pos) cpu_dregs[REG(insn, pos)]
#define AREG(insn, pos) get_areg(s, REG(insn, pos))
#define MACREG(acc)     cpu_macc[acc]
#define QREG_SP         get_areg(s, 7)

static TCGv NULL_QREG;
#define IS_NULL_QREG(t) (t == NULL_QREG)
/* Used to distinguish stores from bad addressing modes.  */
static TCGv store_dummy;

#include "exec/gen-icount.h"
66 
void m68k_tcg_init(void)
{
    /*
     * Create the fixed TCG globals used by the translator: the QREG_*
     * CPU fields listed in qregs.h.inc, halted/exception_index (which
     * live in CPUState, hence the negative offset back from env), and
     * the D/A/ACC register files.
     */
    char *p;
    int i;

#define DEFO32(name, offset) \
    QREG_##name = tcg_global_mem_new_i32(cpu_env, \
        offsetof(CPUM68KState, offset), #name);
#define DEFO64(name, offset) \
    QREG_##name = tcg_global_mem_new_i64(cpu_env, \
        offsetof(CPUM68KState, offset), #name);
#include "qregs.h.inc"
#undef DEFO32
#undef DEFO64

    cpu_halted = tcg_global_mem_new_i32(cpu_env,
                                        -offsetof(M68kCPU, env) +
                                        offsetof(CPUState, halted), "HALTED");
    cpu_exception_index = tcg_global_mem_new_i32(cpu_env,
                                                 -offsetof(M68kCPU, env) +
                                                 offsetof(CPUState, exception_index),
                                                 "EXCEPTION");

    p = cpu_reg_names;
    for (i = 0; i < 8; i++) {
        sprintf(p, "D%d", i);
        cpu_dregs[i] = tcg_global_mem_new(cpu_env,
                                          offsetof(CPUM68KState, dregs[i]), p);
        p += 3;     /* "Dn" + NUL */
        sprintf(p, "A%d", i);
        cpu_aregs[i] = tcg_global_mem_new(cpu_env,
                                          offsetof(CPUM68KState, aregs[i]), p);
        p += 3;     /* "An" + NUL */
    }
    for (i = 0; i < 4; i++) {
        sprintf(p, "ACC%d", i);
        cpu_macc[i] = tcg_global_mem_new_i64(cpu_env,
                                         offsetof(CPUM68KState, macc[i]), p);
        p += 5;     /* "ACCn" + NUL */
    }

    /* Sentinel globals at impossible env offsets; never actually accessed. */
    NULL_QREG = tcg_global_mem_new(cpu_env, -4, "NULL");
    store_dummy = tcg_global_mem_new(cpu_env, -8, "NULL");
}
111 
/* internal defines */

/* Per-translation-block state for the m68k front end. */
typedef struct DisasContext {
    DisasContextBase base;
    CPUM68KState *env;
    target_ulong pc;            /* address of next insn word to fetch */
    target_ulong pc_prev;
    CCOp cc_op; /* Current CC operation */
    int cc_op_synced;           /* nonzero if env->cc_op matches cc_op */
    TCGv_i64 mactmp;
    int done_mac;
    int writeback_mask;         /* aregs with a delayed writeback pending */
    TCGv writeback[8];
#define MAX_TO_RELEASE 8
    int release_count;          /* temps queued for freeing in do_release() */
    TCGv release[MAX_TO_RELEASE];
    bool ss_active;
} DisasContext;
129 
/* Reset the per-insn temp release list (poisoned first when debugging TCG). */
static void init_release_array(DisasContext *s)
{
#ifdef CONFIG_DEBUG_TCG
    memset(s->release, 0, sizeof(s->release));
#endif
    s->release_count = 0;
}
137 
138 static void do_release(DisasContext *s)
139 {
140     int i;
141     for (i = 0; i < s->release_count; i++) {
142         tcg_temp_free(s->release[i]);
143     }
144     init_release_array(s);
145 }
146 
147 static TCGv mark_to_release(DisasContext *s, TCGv tmp)
148 {
149     g_assert(s->release_count < MAX_TO_RELEASE);
150     return s->release[s->release_count++] = tmp;
151 }
152 
153 static TCGv get_areg(DisasContext *s, unsigned regno)
154 {
155     if (s->writeback_mask & (1 << regno)) {
156         return s->writeback[regno];
157     } else {
158         return cpu_aregs[regno];
159     }
160 }
161 
162 static void delay_set_areg(DisasContext *s, unsigned regno,
163                            TCGv val, bool give_temp)
164 {
165     if (s->writeback_mask & (1 << regno)) {
166         if (give_temp) {
167             tcg_temp_free(s->writeback[regno]);
168             s->writeback[regno] = val;
169         } else {
170             tcg_gen_mov_i32(s->writeback[regno], val);
171         }
172     } else {
173         s->writeback_mask |= 1 << regno;
174         if (give_temp) {
175             s->writeback[regno] = val;
176         } else {
177             TCGv tmp = tcg_temp_new();
178             s->writeback[regno] = tmp;
179             tcg_gen_mov_i32(tmp, val);
180         }
181     }
182 }
183 
184 static void do_writebacks(DisasContext *s)
185 {
186     unsigned mask = s->writeback_mask;
187     if (mask) {
188         s->writeback_mask = 0;
189         do {
190             unsigned regno = ctz32(mask);
191             tcg_gen_mov_i32(cpu_aregs[regno], s->writeback[regno]);
192             tcg_temp_free(s->writeback[regno]);
193             mask &= mask - 1;
194         } while (mask);
195     }
196 }
197 
/* is_jmp field values */
#define DISAS_JUMP      DISAS_TARGET_0 /* only pc was modified dynamically */
#define DISAS_EXIT      DISAS_TARGET_1 /* cpu state was modified dynamically */

#if defined(CONFIG_USER_ONLY)
#define IS_USER(s) 1
#else
/* True when the TB was translated without the supervisor bit set. */
#define IS_USER(s)   (!(s->base.tb->flags & TB_FLAGS_MSR_S))
/* MMU index selected by the source/destination function-code flags. */
#define SFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_SFC_S) ? \
                      MMU_KERNEL_IDX : MMU_USER_IDX)
#define DFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_DFC_S) ? \
                      MMU_KERNEL_IDX : MMU_USER_IDX)
#endif

/* Signature of the per-instruction disassembly handlers. */
typedef void (*disas_proc)(CPUM68KState *env, DisasContext *s, uint16_t insn);

#ifdef DEBUG_DISPATCH
/* Debug variant: log each dispatch before invoking the real handler. */
#define DISAS_INSN(name)                                                \
    static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
                                  uint16_t insn);                       \
    static void disas_##name(CPUM68KState *env, DisasContext *s,        \
                             uint16_t insn)                             \
    {                                                                   \
        qemu_log("Dispatch " #name "\n");                               \
        real_disas_##name(env, s, insn);                                \
    }                                                                   \
    static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
                                  uint16_t insn)
#else
#define DISAS_INSN(name)                                                \
    static void disas_##name(CPUM68KState *env, DisasContext *s,        \
                             uint16_t insn)
#endif
231 
/*
 * For each CC_OP, the set of CCF_* flags that carry live data and must
 * be preserved; flags outside this set may be discarded when switching
 * ops (see set_cc_op).
 */
static const uint8_t cc_op_live[CC_OP_NB] = {
    [CC_OP_DYNAMIC] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
    [CC_OP_FLAGS] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
    [CC_OP_ADDB ... CC_OP_ADDL] = CCF_X | CCF_N | CCF_V,
    [CC_OP_SUBB ... CC_OP_SUBL] = CCF_X | CCF_N | CCF_V,
    [CC_OP_CMPB ... CC_OP_CMPL] = CCF_X | CCF_N | CCF_V,
    [CC_OP_LOGIC] = CCF_X | CCF_N
};
240 
/*
 * Switch the lazy condition-code computation to OP, discarding flag
 * values the new op no longer needs so TCG can dead-code-eliminate
 * their producers.  The env->cc_op store is deferred to update_cc_op().
 */
static void set_cc_op(DisasContext *s, CCOp op)
{
    CCOp old_op = s->cc_op;
    int dead;

    if (old_op == op) {
        return;
    }
    s->cc_op = op;
    s->cc_op_synced = 0;

    /*
     * Discard CC computation that will no longer be used.
     * Note that X and N are never dead.
     */
    dead = cc_op_live[old_op] & ~cc_op_live[op];
    if (dead & CCF_C) {
        tcg_gen_discard_i32(QREG_CC_C);
    }
    if (dead & CCF_Z) {
        tcg_gen_discard_i32(QREG_CC_Z);
    }
    if (dead & CCF_V) {
        tcg_gen_discard_i32(QREG_CC_V);
    }
}
267 
268 /* Update the CPU env CC_OP state.  */
269 static void update_cc_op(DisasContext *s)
270 {
271     if (!s->cc_op_synced) {
272         s->cc_op_synced = 1;
273         tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
274     }
275 }
276 
/* Generate a jump to an immediate address.  */
static void gen_jmp_im(DisasContext *s, uint32_t dest)
{
    /* Sync cc_op first: translation of this TB ends after the jump. */
    update_cc_op(s);
    tcg_gen_movi_i32(QREG_PC, dest);
    s->base.is_jmp = DISAS_JUMP;
}
284 
/* Generate a jump to the address in qreg DEST.  */
static void gen_jmp(DisasContext *s, TCGv dest)
{
    /* Sync cc_op first: translation of this TB ends after the jump. */
    update_cc_op(s);
    tcg_gen_mov_i32(QREG_PC, dest);
    s->base.is_jmp = DISAS_JUMP;
}
292 
293 static void gen_raise_exception(int nr)
294 {
295     TCGv_i32 tmp;
296 
297     tmp = tcg_const_i32(nr);
298     gen_helper_raise_exception(cpu_env, tmp);
299     tcg_temp_free_i32(tmp);
300 }
301 
/* Raise exception NR, recording THIS_PC for a Format $2 stack frame. */
static void gen_raise_exception_format2(DisasContext *s, int nr,
                                        target_ulong this_pc)
{
    /*
     * Pass the address of the insn to the exception handler,
     * for recording in the Format $2 (6-word) stack frame.
     * Re-use mmu.ar for the purpose, since that's only valid
     * after tlb_fill.
     */
    tcg_gen_st_i32(tcg_constant_i32(this_pc), cpu_env,
                   offsetof(CPUM68KState, mmu.ar));
    gen_raise_exception(nr);
    s->base.is_jmp = DISAS_NORETURN;
}
316 
/* Raise exception NR with the guest PC set to DEST; ends the TB. */
static void gen_exception(DisasContext *s, uint32_t dest, int nr)
{
    update_cc_op(s);
    tcg_gen_movi_i32(QREG_PC, dest);

    gen_raise_exception(nr);

    s->base.is_jmp = DISAS_NORETURN;
}
326 
/* Raise an address-error exception at the current instruction. */
static inline void gen_addr_fault(DisasContext *s)
{
    gen_exception(s, s->base.pc_next, EXCP_ADDRESS);
}
331 
332 /*
333  * Generate a load from the specified address.  Narrow values are
334  *  sign extended to full register width.
335  */
336 static inline TCGv gen_load(DisasContext *s, int opsize, TCGv addr,
337                             int sign, int index)
338 {
339     TCGv tmp;
340     tmp = tcg_temp_new_i32();
341     switch(opsize) {
342     case OS_BYTE:
343         if (sign)
344             tcg_gen_qemu_ld8s(tmp, addr, index);
345         else
346             tcg_gen_qemu_ld8u(tmp, addr, index);
347         break;
348     case OS_WORD:
349         if (sign)
350             tcg_gen_qemu_ld16s(tmp, addr, index);
351         else
352             tcg_gen_qemu_ld16u(tmp, addr, index);
353         break;
354     case OS_LONG:
355         tcg_gen_qemu_ld32u(tmp, addr, index);
356         break;
357     default:
358         g_assert_not_reached();
359     }
360     return tmp;
361 }
362 
363 /* Generate a store.  */
364 static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val,
365                              int index)
366 {
367     switch(opsize) {
368     case OS_BYTE:
369         tcg_gen_qemu_st8(val, addr, index);
370         break;
371     case OS_WORD:
372         tcg_gen_qemu_st16(val, addr, index);
373         break;
374     case OS_LONG:
375         tcg_gen_qemu_st32(val, addr, index);
376         break;
377     default:
378         g_assert_not_reached();
379     }
380 }
381 
/* Direction/extension selector for gen_ldst and gen_ea_mode. */
typedef enum {
    EA_STORE,   /* write VAL to the effective address */
    EA_LOADU,   /* load, zero-extended */
    EA_LOADS    /* load, sign-extended */
} ea_what;
387 
388 /*
389  * Generate an unsigned load if VAL is 0 a signed load if val is -1,
390  * otherwise generate a store.
391  */
392 static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
393                      ea_what what, int index)
394 {
395     if (what == EA_STORE) {
396         gen_store(s, opsize, addr, val, index);
397         return store_dummy;
398     } else {
399         return mark_to_release(s, gen_load(s, opsize, addr,
400                                            what == EA_LOADS, index));
401     }
402 }
403 
/* Read a 16-bit immediate constant */
static inline uint16_t read_im16(CPUM68KState *env, DisasContext *s)
{
    uint16_t im;
    im = translator_lduw(env, &s->base, s->pc);
    s->pc += 2;     /* every m68k extension word is 2 bytes */
    return im;
}
412 
/* Read an 8-bit immediate constant */
static inline uint8_t read_im8(CPUM68KState *env, DisasContext *s)
{
    /* Byte immediates occupy a full extension word; keep the low byte. */
    return read_im16(env, s);
}
418 
419 /* Read a 32-bit immediate constant.  */
420 static inline uint32_t read_im32(CPUM68KState *env, DisasContext *s)
421 {
422     uint32_t im;
423     im = read_im16(env, s) << 16;
424     im |= 0xffff & read_im16(env, s);
425     return im;
426 }
427 
428 /* Read a 64-bit immediate constant.  */
429 static inline uint64_t read_im64(CPUM68KState *env, DisasContext *s)
430 {
431     uint64_t im;
432     im = (uint64_t)read_im32(env, s) << 32;
433     im |= (uint64_t)read_im32(env, s);
434     return im;
435 }
436 
/* Calculate an address index from extension word EXT.  TMP is scratch;
   the returned value may alias TMP or the raw index register.  */
static TCGv gen_addr_index(DisasContext *s, uint16_t ext, TCGv tmp)
{
    TCGv add;
    int scale;

    /* Bit 15 selects an address vs data register (number in bits 14:12). */
    add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
    if ((ext & 0x800) == 0) {
        /* Word-sized index: sign-extend the register's low 16 bits. */
        tcg_gen_ext16s_i32(tmp, add);
        add = tmp;
    }
    scale = (ext >> 9) & 3;
    if (scale != 0) {
        tcg_gen_shli_i32(tmp, add, scale);
        add = tmp;
    }
    return add;
}
455 
456 /*
457  * Handle a base + index + displacement effective address.
458  * A NULL_QREG base means pc-relative.
459  */
460 static TCGv gen_lea_indexed(CPUM68KState *env, DisasContext *s, TCGv base)
461 {
462     uint32_t offset;
463     uint16_t ext;
464     TCGv add;
465     TCGv tmp;
466     uint32_t bd, od;
467 
468     offset = s->pc;
469     ext = read_im16(env, s);
470 
471     if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
472         return NULL_QREG;
473 
474     if (m68k_feature(s->env, M68K_FEATURE_M68K) &&
475         !m68k_feature(s->env, M68K_FEATURE_SCALED_INDEX)) {
476         ext &= ~(3 << 9);
477     }
478 
479     if (ext & 0x100) {
480         /* full extension word format */
481         if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
482             return NULL_QREG;
483 
484         if ((ext & 0x30) > 0x10) {
485             /* base displacement */
486             if ((ext & 0x30) == 0x20) {
487                 bd = (int16_t)read_im16(env, s);
488             } else {
489                 bd = read_im32(env, s);
490             }
491         } else {
492             bd = 0;
493         }
494         tmp = mark_to_release(s, tcg_temp_new());
495         if ((ext & 0x44) == 0) {
496             /* pre-index */
497             add = gen_addr_index(s, ext, tmp);
498         } else {
499             add = NULL_QREG;
500         }
501         if ((ext & 0x80) == 0) {
502             /* base not suppressed */
503             if (IS_NULL_QREG(base)) {
504                 base = mark_to_release(s, tcg_const_i32(offset + bd));
505                 bd = 0;
506             }
507             if (!IS_NULL_QREG(add)) {
508                 tcg_gen_add_i32(tmp, add, base);
509                 add = tmp;
510             } else {
511                 add = base;
512             }
513         }
514         if (!IS_NULL_QREG(add)) {
515             if (bd != 0) {
516                 tcg_gen_addi_i32(tmp, add, bd);
517                 add = tmp;
518             }
519         } else {
520             add = mark_to_release(s, tcg_const_i32(bd));
521         }
522         if ((ext & 3) != 0) {
523             /* memory indirect */
524             base = mark_to_release(s, gen_load(s, OS_LONG, add, 0, IS_USER(s)));
525             if ((ext & 0x44) == 4) {
526                 add = gen_addr_index(s, ext, tmp);
527                 tcg_gen_add_i32(tmp, add, base);
528                 add = tmp;
529             } else {
530                 add = base;
531             }
532             if ((ext & 3) > 1) {
533                 /* outer displacement */
534                 if ((ext & 3) == 2) {
535                     od = (int16_t)read_im16(env, s);
536                 } else {
537                     od = read_im32(env, s);
538                 }
539             } else {
540                 od = 0;
541             }
542             if (od != 0) {
543                 tcg_gen_addi_i32(tmp, add, od);
544                 add = tmp;
545             }
546         }
547     } else {
548         /* brief extension word format */
549         tmp = mark_to_release(s, tcg_temp_new());
550         add = gen_addr_index(s, ext, tmp);
551         if (!IS_NULL_QREG(base)) {
552             tcg_gen_add_i32(tmp, add, base);
553             if ((int8_t)ext)
554                 tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
555         } else {
556             tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
557         }
558         add = tmp;
559     }
560     return add;
561 }
562 
/* Sign or zero extend a value.  */

/* Extend the low OPSIZE portion of VAL into RES; OS_LONG is a plain move. */
static inline void gen_ext(TCGv res, TCGv val, int opsize, int sign)
{
    switch (opsize) {
    case OS_BYTE:
        if (sign) {
            tcg_gen_ext8s_i32(res, val);
        } else {
            tcg_gen_ext8u_i32(res, val);
        }
        break;
    case OS_WORD:
        if (sign) {
            tcg_gen_ext16s_i32(res, val);
        } else {
            tcg_gen_ext16u_i32(res, val);
        }
        break;
    case OS_LONG:
        tcg_gen_mov_i32(res, val);
        break;
    default:
        g_assert_not_reached();
    }
}
589 
/* Evaluate all the CC flags.  */

/*
 * Materialise the lazily-computed condition codes into the QREG_CC_*
 * globals and switch to CC_OP_FLAGS.  For the ADD/SUB/CMP ops,
 * QREG_CC_N holds the (size-extended) result and QREG_CC_V the source
 * operand on entry (see gen_update_cc_add / gen_update_cc_cmp).
 */
static void gen_flush_flags(DisasContext *s)
{
    TCGv t0, t1;

    switch (s->cc_op) {
    case CC_OP_FLAGS:
        /* Flags are already in canonical form.  */
        return;

    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
        /* C mirrors X; Z is derived from the result held in N.  */
        tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
        /* Compute signed overflow for addition.  */
        t0 = tcg_temp_new();
        t1 = tcg_temp_new();
        /* Recover the first operand as result - src.  */
        tcg_gen_sub_i32(t0, QREG_CC_N, QREG_CC_V);
        gen_ext(t0, t0, s->cc_op - CC_OP_ADDB, 1);
        tcg_gen_xor_i32(t1, QREG_CC_N, QREG_CC_V);
        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
        tcg_temp_free(t0);
        tcg_gen_andc_i32(QREG_CC_V, t1, QREG_CC_V);
        tcg_temp_free(t1);
        break;

    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
        tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
        /* Compute signed overflow for subtraction.  */
        t0 = tcg_temp_new();
        t1 = tcg_temp_new();
        /* Recover the first operand as result + src.  */
        tcg_gen_add_i32(t0, QREG_CC_N, QREG_CC_V);
        gen_ext(t0, t0, s->cc_op - CC_OP_SUBB, 1);
        tcg_gen_xor_i32(t1, QREG_CC_N, t0);
        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
        tcg_temp_free(t0);
        tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t1);
        tcg_temp_free(t1);
        break;

    case CC_OP_CMPB:
    case CC_OP_CMPW:
    case CC_OP_CMPL:
        /* For CMP, N holds the first operand and V the second.  */
        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_C, QREG_CC_N, QREG_CC_V);
        tcg_gen_sub_i32(QREG_CC_Z, QREG_CC_N, QREG_CC_V);
        gen_ext(QREG_CC_Z, QREG_CC_Z, s->cc_op - CC_OP_CMPB, 1);
        /* Compute signed overflow for subtraction.  */
        t0 = tcg_temp_new();
        tcg_gen_xor_i32(t0, QREG_CC_Z, QREG_CC_N);
        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, QREG_CC_N);
        tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t0);
        tcg_temp_free(t0);
        tcg_gen_mov_i32(QREG_CC_N, QREG_CC_Z);
        break;

    case CC_OP_LOGIC:
        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
        tcg_gen_movi_i32(QREG_CC_C, 0);
        tcg_gen_movi_i32(QREG_CC_V, 0);
        break;

    case CC_OP_DYNAMIC:
        /* env->cc_op is live; let the helper dispatch on it.  */
        gen_helper_flush_flags(cpu_env, QREG_CC_OP);
        s->cc_op_synced = 1;
        break;

    default:
        t0 = tcg_const_i32(s->cc_op);
        gen_helper_flush_flags(cpu_env, t0);
        tcg_temp_free(t0);
        s->cc_op_synced = 1;
        break;
    }

    /* Note that flush_flags also assigned to env->cc_op.  */
    s->cc_op = CC_OP_FLAGS;
}
671 
672 static inline TCGv gen_extend(DisasContext *s, TCGv val, int opsize, int sign)
673 {
674     TCGv tmp;
675 
676     if (opsize == OS_LONG) {
677         tmp = val;
678     } else {
679         tmp = mark_to_release(s, tcg_temp_new());
680         gen_ext(tmp, val, opsize, sign);
681     }
682 
683     return tmp;
684 }
685 
/*
 * Set CC for a logical op: QREG_CC_N gets the sign-extended result;
 * gen_flush_flags(CC_OP_LOGIC) later derives Z from it and clears C/V.
 */
static void gen_logic_cc(DisasContext *s, TCGv val, int opsize)
{
    gen_ext(QREG_CC_N, val, opsize, 1);
    set_cc_op(s, CC_OP_LOGIC);
}
691 
/* Record CMP operands lazily: N = dest, V = src; flags computed later. */
static void gen_update_cc_cmp(DisasContext *s, TCGv dest, TCGv src, int opsize)
{
    tcg_gen_mov_i32(QREG_CC_N, dest);
    tcg_gen_mov_i32(QREG_CC_V, src);
    set_cc_op(s, CC_OP_CMPB + opsize);
}
698 
/* Record add/sub operands lazily: N = extended result, V = src operand. */
static void gen_update_cc_add(TCGv dest, TCGv src, int opsize)
{
    gen_ext(QREG_CC_N, dest, opsize, 1);
    tcg_gen_mov_i32(QREG_CC_V, src);
}
704 
705 static inline int opsize_bytes(int opsize)
706 {
707     switch (opsize) {
708     case OS_BYTE: return 1;
709     case OS_WORD: return 2;
710     case OS_LONG: return 4;
711     case OS_SINGLE: return 4;
712     case OS_DOUBLE: return 8;
713     case OS_EXTENDED: return 12;
714     case OS_PACKED: return 12;
715     default:
716         g_assert_not_reached();
717     }
718 }
719 
720 static inline int insn_opsize(int insn)
721 {
722     switch ((insn >> 6) & 3) {
723     case 0: return OS_BYTE;
724     case 1: return OS_WORD;
725     case 2: return OS_LONG;
726     default:
727         g_assert_not_reached();
728     }
729 }
730 
/* Decode the 3-bit operand-size/format field at bit POS of extension
   word EXT (includes the FP formats single/double/extended/packed). */
static inline int ext_opsize(int ext, int pos)
{
    switch ((ext >> pos) & 7) {
    case 0: return OS_LONG;
    case 1: return OS_SINGLE;
    case 2: return OS_EXTENDED;
    case 3: return OS_PACKED;
    case 4: return OS_WORD;
    case 5: return OS_DOUBLE;
    case 6: return OS_BYTE;
    default:
        g_assert_not_reached();
    }
}
745 
746 /*
747  * Assign value to a register.  If the width is less than the register width
748  * only the low part of the register is set.
749  */
750 static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
751 {
752     TCGv tmp;
753     switch (opsize) {
754     case OS_BYTE:
755         tcg_gen_andi_i32(reg, reg, 0xffffff00);
756         tmp = tcg_temp_new();
757         tcg_gen_ext8u_i32(tmp, val);
758         tcg_gen_or_i32(reg, reg, tmp);
759         tcg_temp_free(tmp);
760         break;
761     case OS_WORD:
762         tcg_gen_andi_i32(reg, reg, 0xffff0000);
763         tmp = tcg_temp_new();
764         tcg_gen_ext16u_i32(tmp, val);
765         tcg_gen_or_i32(reg, reg, tmp);
766         tcg_temp_free(tmp);
767         break;
768     case OS_LONG:
769     case OS_SINGLE:
770         tcg_gen_mov_i32(reg, val);
771         break;
772     default:
773         g_assert_not_reached();
774     }
775 }
776 
777 /*
778  * Generate code for an "effective address".  Does not adjust the base
779  * register for autoincrement addressing modes.
780  */
781 static TCGv gen_lea_mode(CPUM68KState *env, DisasContext *s,
782                          int mode, int reg0, int opsize)
783 {
784     TCGv reg;
785     TCGv tmp;
786     uint16_t ext;
787     uint32_t offset;
788 
789     switch (mode) {
790     case 0: /* Data register direct.  */
791     case 1: /* Address register direct.  */
792         return NULL_QREG;
793     case 3: /* Indirect postincrement.  */
794         if (opsize == OS_UNSIZED) {
795             return NULL_QREG;
796         }
797         /* fallthru */
798     case 2: /* Indirect register */
799         return get_areg(s, reg0);
800     case 4: /* Indirect predecrememnt.  */
801         if (opsize == OS_UNSIZED) {
802             return NULL_QREG;
803         }
804         reg = get_areg(s, reg0);
805         tmp = mark_to_release(s, tcg_temp_new());
806         if (reg0 == 7 && opsize == OS_BYTE &&
807             m68k_feature(s->env, M68K_FEATURE_M68K)) {
808             tcg_gen_subi_i32(tmp, reg, 2);
809         } else {
810             tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
811         }
812         return tmp;
813     case 5: /* Indirect displacement.  */
814         reg = get_areg(s, reg0);
815         tmp = mark_to_release(s, tcg_temp_new());
816         ext = read_im16(env, s);
817         tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
818         return tmp;
819     case 6: /* Indirect index + displacement.  */
820         reg = get_areg(s, reg0);
821         return gen_lea_indexed(env, s, reg);
822     case 7: /* Other */
823         switch (reg0) {
824         case 0: /* Absolute short.  */
825             offset = (int16_t)read_im16(env, s);
826             return mark_to_release(s, tcg_const_i32(offset));
827         case 1: /* Absolute long.  */
828             offset = read_im32(env, s);
829             return mark_to_release(s, tcg_const_i32(offset));
830         case 2: /* pc displacement  */
831             offset = s->pc;
832             offset += (int16_t)read_im16(env, s);
833             return mark_to_release(s, tcg_const_i32(offset));
834         case 3: /* pc index+displacement.  */
835             return gen_lea_indexed(env, s, NULL_QREG);
836         case 4: /* Immediate.  */
837         default:
838             return NULL_QREG;
839         }
840     }
841     /* Should never happen.  */
842     return NULL_QREG;
843 }
844 
845 static TCGv gen_lea(CPUM68KState *env, DisasContext *s, uint16_t insn,
846                     int opsize)
847 {
848     int mode = extract32(insn, 3, 3);
849     int reg0 = REG(insn, 0);
850     return gen_lea_mode(env, s, mode, reg0, opsize);
851 }
852 
853 /*
854  * Generate code to load/store a value from/into an EA.  If WHAT > 0 this is
855  * a write otherwise it is a read (0 == sign extend, -1 == zero extend).
856  * ADDRP is non-null for readwrite operands.
857  */
858 static TCGv gen_ea_mode(CPUM68KState *env, DisasContext *s, int mode, int reg0,
859                         int opsize, TCGv val, TCGv *addrp, ea_what what,
860                         int index)
861 {
862     TCGv reg, tmp, result;
863     int32_t offset;
864 
865     switch (mode) {
866     case 0: /* Data register direct.  */
867         reg = cpu_dregs[reg0];
868         if (what == EA_STORE) {
869             gen_partset_reg(opsize, reg, val);
870             return store_dummy;
871         } else {
872             return gen_extend(s, reg, opsize, what == EA_LOADS);
873         }
874     case 1: /* Address register direct.  */
875         reg = get_areg(s, reg0);
876         if (what == EA_STORE) {
877             tcg_gen_mov_i32(reg, val);
878             return store_dummy;
879         } else {
880             return gen_extend(s, reg, opsize, what == EA_LOADS);
881         }
882     case 2: /* Indirect register */
883         reg = get_areg(s, reg0);
884         return gen_ldst(s, opsize, reg, val, what, index);
885     case 3: /* Indirect postincrement.  */
886         reg = get_areg(s, reg0);
887         result = gen_ldst(s, opsize, reg, val, what, index);
888         if (what == EA_STORE || !addrp) {
889             TCGv tmp = tcg_temp_new();
890             if (reg0 == 7 && opsize == OS_BYTE &&
891                 m68k_feature(s->env, M68K_FEATURE_M68K)) {
892                 tcg_gen_addi_i32(tmp, reg, 2);
893             } else {
894                 tcg_gen_addi_i32(tmp, reg, opsize_bytes(opsize));
895             }
896             delay_set_areg(s, reg0, tmp, true);
897         }
898         return result;
899     case 4: /* Indirect predecrememnt.  */
900         if (addrp && what == EA_STORE) {
901             tmp = *addrp;
902         } else {
903             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
904             if (IS_NULL_QREG(tmp)) {
905                 return tmp;
906             }
907             if (addrp) {
908                 *addrp = tmp;
909             }
910         }
911         result = gen_ldst(s, opsize, tmp, val, what, index);
912         if (what == EA_STORE || !addrp) {
913             delay_set_areg(s, reg0, tmp, false);
914         }
915         return result;
916     case 5: /* Indirect displacement.  */
917     case 6: /* Indirect index + displacement.  */
918     do_indirect:
919         if (addrp && what == EA_STORE) {
920             tmp = *addrp;
921         } else {
922             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
923             if (IS_NULL_QREG(tmp)) {
924                 return tmp;
925             }
926             if (addrp) {
927                 *addrp = tmp;
928             }
929         }
930         return gen_ldst(s, opsize, tmp, val, what, index);
931     case 7: /* Other */
932         switch (reg0) {
933         case 0: /* Absolute short.  */
934         case 1: /* Absolute long.  */
935         case 2: /* pc displacement  */
936         case 3: /* pc index+displacement.  */
937             goto do_indirect;
938         case 4: /* Immediate.  */
939             /* Sign extend values for consistency.  */
940             switch (opsize) {
941             case OS_BYTE:
942                 if (what == EA_LOADS) {
943                     offset = (int8_t)read_im8(env, s);
944                 } else {
945                     offset = read_im8(env, s);
946                 }
947                 break;
948             case OS_WORD:
949                 if (what == EA_LOADS) {
950                     offset = (int16_t)read_im16(env, s);
951                 } else {
952                     offset = read_im16(env, s);
953                 }
954                 break;
955             case OS_LONG:
956                 offset = read_im32(env, s);
957                 break;
958             default:
959                 g_assert_not_reached();
960             }
961             return mark_to_release(s, tcg_const_i32(offset));
962         default:
963             return NULL_QREG;
964         }
965     }
966     /* Should never happen.  */
967     return NULL_QREG;
968 }
969 
970 static TCGv gen_ea(CPUM68KState *env, DisasContext *s, uint16_t insn,
971                    int opsize, TCGv val, TCGv *addrp, ea_what what, int index)
972 {
973     int mode = extract32(insn, 3, 3);
974     int reg0 = REG(insn, 0);
975     return gen_ea_mode(env, s, mode, reg0, opsize, val, addrp, what, index);
976 }
977 
/* Return a fresh pointer temp addressing FP register FREG in env. */
static TCGv_ptr gen_fp_ptr(int freg)
{
    TCGv_ptr fp = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fregs[freg]));
    return fp;
}
984 
985 static TCGv_ptr gen_fp_result_ptr(void)
986 {
987     TCGv_ptr fp = tcg_temp_new_ptr();
988     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fp_result));
989     return fp;
990 }
991 
992 static void gen_fp_move(TCGv_ptr dest, TCGv_ptr src)
993 {
994     TCGv t32;
995     TCGv_i64 t64;
996 
997     t32 = tcg_temp_new();
998     tcg_gen_ld16u_i32(t32, src, offsetof(FPReg, l.upper));
999     tcg_gen_st16_i32(t32, dest, offsetof(FPReg, l.upper));
1000     tcg_temp_free(t32);
1001 
1002     t64 = tcg_temp_new_i64();
1003     tcg_gen_ld_i64(t64, src, offsetof(FPReg, l.lower));
1004     tcg_gen_st_i64(t64, dest, offsetof(FPReg, l.lower));
1005     tcg_temp_free_i64(t64);
1006 }
1007 
/*
 * Load a value of size OPSIZE from memory at ADDR into the FP register
 * pointed to by FP, converting it to the internal extended format via
 * the exts32/extf32/extf64 helpers.  INDEX is the MMU index for the
 * access.  OS_EXTENDED is rejected on ColdFire FPUs and OS_PACKED is
 * not implemented; both raise EXCP_FP_UNIMP.
 */
static void gen_load_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
                        int index)
{
    TCGv tmp;
    TCGv_i64 t64;

    t64 = tcg_temp_new_i64();
    tmp = tcg_temp_new();
    switch (opsize) {
    case OS_BYTE:
        tcg_gen_qemu_ld8s(tmp, addr, index);
        gen_helper_exts32(cpu_env, fp, tmp);
        break;
    case OS_WORD:
        tcg_gen_qemu_ld16s(tmp, addr, index);
        gen_helper_exts32(cpu_env, fp, tmp);
        break;
    case OS_LONG:
        tcg_gen_qemu_ld32u(tmp, addr, index);
        gen_helper_exts32(cpu_env, fp, tmp);
        break;
    case OS_SINGLE:
        tcg_gen_qemu_ld32u(tmp, addr, index);
        gen_helper_extf32(cpu_env, fp, tmp);
        break;
    case OS_DOUBLE:
        tcg_gen_qemu_ld64(t64, addr, index);
        gen_helper_extf64(cpu_env, fp, t64);
        break;
    case OS_EXTENDED:
        if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
            gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
            break;
        }
        /*
         * In-memory extended format: sign/exponent in the high half of
         * the first longword, then the 64-bit mantissa.
         */
        tcg_gen_qemu_ld32u(tmp, addr, index);
        tcg_gen_shri_i32(tmp, tmp, 16);
        tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
        tcg_gen_addi_i32(tmp, addr, 4);
        tcg_gen_qemu_ld64(t64, tmp, index);
        tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
        break;
    case OS_PACKED:
        /*
         * unimplemented data type on 68040/ColdFire
         * FIXME if needed for another FPU
         */
        gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
        break;
    default:
        g_assert_not_reached();
    }
    tcg_temp_free(tmp);
    tcg_temp_free_i64(t64);
}
1062 
/*
 * Store the FP register pointed to by FP to memory at ADDR as a value
 * of size OPSIZE, converting from the internal extended format via the
 * reds32/redf32/redf64 helpers.  INDEX is the MMU index for the
 * access.  OS_EXTENDED is rejected on ColdFire FPUs and OS_PACKED is
 * not implemented; both raise EXCP_FP_UNIMP.
 */
static void gen_store_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
                         int index)
{
    TCGv tmp;
    TCGv_i64 t64;

    t64 = tcg_temp_new_i64();
    tmp = tcg_temp_new();
    switch (opsize) {
    case OS_BYTE:
        gen_helper_reds32(tmp, cpu_env, fp);
        tcg_gen_qemu_st8(tmp, addr, index);
        break;
    case OS_WORD:
        gen_helper_reds32(tmp, cpu_env, fp);
        tcg_gen_qemu_st16(tmp, addr, index);
        break;
    case OS_LONG:
        gen_helper_reds32(tmp, cpu_env, fp);
        tcg_gen_qemu_st32(tmp, addr, index);
        break;
    case OS_SINGLE:
        gen_helper_redf32(tmp, cpu_env, fp);
        tcg_gen_qemu_st32(tmp, addr, index);
        break;
    case OS_DOUBLE:
        gen_helper_redf64(t64, cpu_env, fp);
        tcg_gen_qemu_st64(t64, addr, index);
        break;
    case OS_EXTENDED:
        if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
            gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
            break;
        }
        /* Sign/exponent word goes in the high half of the first long.  */
        tcg_gen_ld16u_i32(tmp, fp, offsetof(FPReg, l.upper));
        tcg_gen_shli_i32(tmp, tmp, 16);
        tcg_gen_qemu_st32(tmp, addr, index);
        tcg_gen_addi_i32(tmp, addr, 4);
        tcg_gen_ld_i64(t64, fp, offsetof(FPReg, l.lower));
        tcg_gen_qemu_st64(t64, tmp, index);
        break;
    case OS_PACKED:
        /*
         * unimplemented data type on 68040/ColdFire
         * FIXME if needed for another FPU
         */
        gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
        break;
    default:
        g_assert_not_reached();
    }
    tcg_temp_free(tmp);
    tcg_temp_free_i64(t64);
}
1117 
1118 static void gen_ldst_fp(DisasContext *s, int opsize, TCGv addr,
1119                         TCGv_ptr fp, ea_what what, int index)
1120 {
1121     if (what == EA_STORE) {
1122         gen_store_fp(s, opsize, addr, fp, index);
1123     } else {
1124         gen_load_fp(s, opsize, addr, fp, index);
1125     }
1126 }
1127 
/*
 * Evaluate the effective address given by MODE/REG0 for an FP operand
 * of size OPSIZE, and load into or store from the FP register FP
 * according to WHAT.  INDEX is the MMU index for memory accesses.
 * Returns 0 on success, -1 if the addressing mode is invalid for an
 * FP operand (address register direct, store-to-immediate, etc).
 */
static int gen_ea_mode_fp(CPUM68KState *env, DisasContext *s, int mode,
                          int reg0, int opsize, TCGv_ptr fp, ea_what what,
                          int index)
{
    TCGv reg, addr, tmp;
    TCGv_i64 t64;

    switch (mode) {
    case 0: /* Data register direct.  */
        reg = cpu_dregs[reg0];
        if (what == EA_STORE) {
            switch (opsize) {
            case OS_BYTE:
            case OS_WORD:
            case OS_LONG:
                gen_helper_reds32(reg, cpu_env, fp);
                break;
            case OS_SINGLE:
                gen_helper_redf32(reg, cpu_env, fp);
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            tmp = tcg_temp_new();
            switch (opsize) {
            case OS_BYTE:
                tcg_gen_ext8s_i32(tmp, reg);
                gen_helper_exts32(cpu_env, fp, tmp);
                break;
            case OS_WORD:
                tcg_gen_ext16s_i32(tmp, reg);
                gen_helper_exts32(cpu_env, fp, tmp);
                break;
            case OS_LONG:
                gen_helper_exts32(cpu_env, fp, reg);
                break;
            case OS_SINGLE:
                gen_helper_extf32(cpu_env, fp, reg);
                break;
            default:
                g_assert_not_reached();
            }
            tcg_temp_free(tmp);
        }
        return 0;
    case 1: /* Address register direct.  */
        return -1;
    case 2: /* Indirect register */
        addr = get_areg(s, reg0);
        gen_ldst_fp(s, opsize, addr, fp, what, index);
        return 0;
    case 3: /* Indirect postincrement.  */
        addr = cpu_aregs[reg0];
        gen_ldst_fp(s, opsize, addr, fp, what, index);
        tcg_gen_addi_i32(addr, addr, opsize_bytes(opsize));
        return 0;
    case 4: /* Indirect predecrement.  */
        addr = gen_lea_mode(env, s, mode, reg0, opsize);
        if (IS_NULL_QREG(addr)) {
            return -1;
        }
        gen_ldst_fp(s, opsize, addr, fp, what, index);
        /* Write back the decremented address only after the access.  */
        tcg_gen_mov_i32(cpu_aregs[reg0], addr);
        return 0;
    case 5: /* Indirect displacement.  */
    case 6: /* Indirect index + displacement.  */
    do_indirect:
        addr = gen_lea_mode(env, s, mode, reg0, opsize);
        if (IS_NULL_QREG(addr)) {
            return -1;
        }
        gen_ldst_fp(s, opsize, addr, fp, what, index);
        return 0;
    case 7: /* Other */
        switch (reg0) {
        case 0: /* Absolute short.  */
        case 1: /* Absolute long.  */
        case 2: /* pc displacement  */
        case 3: /* pc index+displacement.  */
            goto do_indirect;
        case 4: /* Immediate.  */
            if (what == EA_STORE) {
                return -1;
            }
            switch (opsize) {
            case OS_BYTE:
                tmp = tcg_const_i32((int8_t)read_im8(env, s));
                gen_helper_exts32(cpu_env, fp, tmp);
                tcg_temp_free(tmp);
                break;
            case OS_WORD:
                tmp = tcg_const_i32((int16_t)read_im16(env, s));
                gen_helper_exts32(cpu_env, fp, tmp);
                tcg_temp_free(tmp);
                break;
            case OS_LONG:
                tmp = tcg_const_i32(read_im32(env, s));
                gen_helper_exts32(cpu_env, fp, tmp);
                tcg_temp_free(tmp);
                break;
            case OS_SINGLE:
                tmp = tcg_const_i32(read_im32(env, s));
                gen_helper_extf32(cpu_env, fp, tmp);
                tcg_temp_free(tmp);
                break;
            case OS_DOUBLE:
                t64 = tcg_const_i64(read_im64(env, s));
                gen_helper_extf64(cpu_env, fp, t64);
                tcg_temp_free_i64(t64);
                break;
            case OS_EXTENDED:
                if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
                    gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
                    break;
                }
                /* 96-bit immediate: exponent word, pad, then mantissa.  */
                tmp = tcg_const_i32(read_im32(env, s) >> 16);
                tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
                tcg_temp_free(tmp);
                t64 = tcg_const_i64(read_im64(env, s));
                tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
                tcg_temp_free_i64(t64);
                break;
            case OS_PACKED:
                /*
                 * unimplemented data type on 68040/ColdFire
                 * FIXME if needed for another FPU
                 */
                gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
                break;
            default:
                g_assert_not_reached();
            }
            return 0;
        default:
            return -1;
        }
    }
    return -1;
}
1268 
1269 static int gen_ea_fp(CPUM68KState *env, DisasContext *s, uint16_t insn,
1270                        int opsize, TCGv_ptr fp, ea_what what, int index)
1271 {
1272     int mode = extract32(insn, 3, 3);
1273     int reg0 = REG(insn, 0);
1274     return gen_ea_mode_fp(env, s, mode, reg0, opsize, fp, what, index);
1275 }
1276 
/*
 * A condition reduced to a TCG comparison of two operands, as built
 * by gen_cc_cond() and released with free_cond().
 */
typedef struct {
    TCGCond tcond; /* comparison applied to v1 vs v2 */
    bool g1; /* v1 is a global; free_cond() must not free it */
    bool g2; /* v2 is a global; free_cond() must not free it */
    TCGv v1;
    TCGv v2;
} DisasCompare;
1284 
/*
 * Fill in C with a TCG comparison that is true when the m68k condition
 * code COND (0..15) holds for the current flag state.  Where possible
 * the comparison is derived directly from the deferred cc_op without
 * materializing all flags; otherwise the flags are flushed to
 * CC_OP_FLAGS first.  Each odd condition code is the negation of the
 * preceding even one, handled by inverting tcond at "done".
 */
static void gen_cc_cond(DisasCompare *c, DisasContext *s, int cond)
{
    TCGv tmp, tmp2;
    TCGCond tcond;
    CCOp op = s->cc_op;

    /* The CC_OP_CMP form can handle most normal comparisons directly.  */
    if (op == CC_OP_CMPB || op == CC_OP_CMPW || op == CC_OP_CMPL) {
        c->g1 = c->g2 = 1;
        c->v1 = QREG_CC_N;
        c->v2 = QREG_CC_V;
        switch (cond) {
        case 2: /* HI */
        case 3: /* LS */
            tcond = TCG_COND_LEU;
            goto done;
        case 4: /* CC */
        case 5: /* CS */
            tcond = TCG_COND_LTU;
            goto done;
        case 6: /* NE */
        case 7: /* EQ */
            tcond = TCG_COND_EQ;
            goto done;
        case 10: /* PL */
        case 11: /* MI */
            /* Reconstruct the sign of the comparison result (N - V).  */
            c->g1 = c->g2 = 0;
            c->v2 = tcg_const_i32(0);
            c->v1 = tmp = tcg_temp_new();
            tcg_gen_sub_i32(tmp, QREG_CC_N, QREG_CC_V);
            gen_ext(tmp, tmp, op - CC_OP_CMPB, 1);
            /* fallthru */
        case 12: /* GE */
        case 13: /* LT */
            tcond = TCG_COND_LT;
            goto done;
        case 14: /* GT */
        case 15: /* LE */
            tcond = TCG_COND_LE;
            goto done;
        }
    }

    c->g1 = 1;
    c->g2 = 0;
    c->v2 = tcg_const_i32(0);

    switch (cond) {
    case 0: /* T */
    case 1: /* F */
        c->v1 = c->v2;
        tcond = TCG_COND_NEVER;
        goto done;
    case 14: /* GT (!(Z || (N ^ V))) */
    case 15: /* LE (Z || (N ^ V)) */
        /*
         * Logic operations clear V, which simplifies LE to (Z || N),
         * and since Z and N are co-located, this becomes a normal
         * comparison vs N.
         */
        if (op == CC_OP_LOGIC) {
            c->v1 = QREG_CC_N;
            tcond = TCG_COND_LE;
            goto done;
        }
        break;
    case 12: /* GE (!(N ^ V)) */
    case 13: /* LT (N ^ V) */
        /* Logic operations clear V, which simplifies this to N.  */
        if (op != CC_OP_LOGIC) {
            break;
        }
        /* fallthru */
    case 10: /* PL (!N) */
    case 11: /* MI (N) */
        /* Several cases represent N normally.  */
        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
            op == CC_OP_LOGIC) {
            c->v1 = QREG_CC_N;
            tcond = TCG_COND_LT;
            goto done;
        }
        break;
    case 6: /* NE (!Z) */
    case 7: /* EQ (Z) */
        /* Some cases fold Z into N.  */
        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
            op == CC_OP_LOGIC) {
            tcond = TCG_COND_EQ;
            c->v1 = QREG_CC_N;
            goto done;
        }
        break;
    case 4: /* CC (!C) */
    case 5: /* CS (C) */
        /* Some cases fold C into X.  */
        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL) {
            tcond = TCG_COND_NE;
            c->v1 = QREG_CC_X;
            goto done;
        }
        /* fallthru */
    case 8: /* VC (!V) */
    case 9: /* VS (V) */
        /* Logic operations clear V and C.  */
        if (op == CC_OP_LOGIC) {
            tcond = TCG_COND_NEVER;
            c->v1 = c->v2;
            goto done;
        }
        break;
    }

    /* Otherwise, flush flag state to CC_OP_FLAGS.  */
    gen_flush_flags(s);

    switch (cond) {
    case 0: /* T */
    case 1: /* F */
    default:
        /* Invalid, or handled above.  */
        abort();
    case 2: /* HI (!C && !Z) -> !(C || Z)*/
    case 3: /* LS (C || Z) */
        c->v1 = tmp = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
        tcg_gen_or_i32(tmp, tmp, QREG_CC_C);
        tcond = TCG_COND_NE;
        break;
    case 4: /* CC (!C) */
    case 5: /* CS (C) */
        c->v1 = QREG_CC_C;
        tcond = TCG_COND_NE;
        break;
    case 6: /* NE (!Z) */
    case 7: /* EQ (Z) */
        c->v1 = QREG_CC_Z;
        tcond = TCG_COND_EQ;
        break;
    case 8: /* VC (!V) */
    case 9: /* VS (V) */
        c->v1 = QREG_CC_V;
        tcond = TCG_COND_LT;
        break;
    case 10: /* PL (!N) */
    case 11: /* MI (N) */
        c->v1 = QREG_CC_N;
        tcond = TCG_COND_LT;
        break;
    case 12: /* GE (!(N ^ V)) */
    case 13: /* LT (N ^ V) */
        c->v1 = tmp = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_xor_i32(tmp, QREG_CC_N, QREG_CC_V);
        tcond = TCG_COND_LT;
        break;
    case 14: /* GT (!(Z || (N ^ V))) */
    case 15: /* LE (Z || (N ^ V)) */
        c->v1 = tmp = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
        tcg_gen_neg_i32(tmp, tmp);
        tmp2 = tcg_temp_new();
        tcg_gen_xor_i32(tmp2, QREG_CC_N, QREG_CC_V);
        tcg_gen_or_i32(tmp, tmp, tmp2);
        tcg_temp_free(tmp2);
        tcond = TCG_COND_LT;
        break;
    }

 done:
    if ((cond & 1) == 0) {
        tcond = tcg_invert_cond(tcond);
    }
    c->tcond = tcond;
}
1465 
1466 static void free_cond(DisasCompare *c)
1467 {
1468     if (!c->g1) {
1469         tcg_temp_free(c->v1);
1470     }
1471     if (!c->g2) {
1472         tcg_temp_free(c->v2);
1473     }
1474 }
1475 
1476 static void gen_jmpcc(DisasContext *s, int cond, TCGLabel *l1)
1477 {
1478   DisasCompare c;
1479 
1480   gen_cc_cond(&c, s, cond);
1481   update_cc_op(s);
1482   tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
1483   free_cond(&c);
1484 }
1485 
/* Force a TB lookup after an instruction that changes the CPU state.  */
static void gen_exit_tb(DisasContext *s)
{
    /* Flush the deferred cc state before leaving the TB.  */
    update_cc_op(s);
    tcg_gen_movi_i32(QREG_PC, s->pc);
    s->base.is_jmp = DISAS_EXIT;
}
1493 
/*
 * Evaluate the source EA of the in-scope "insn" and load it into
 * RESULT, sign-extending when OP_SIGN is set.  On an invalid mode,
 * raise an address fault and return from the enclosing DISAS_INSN.
 * Relies on "s" and "insn" being in scope at the expansion site.
 */
#define SRC_EA(env, result, opsize, op_sign, addrp) do {                \
        result = gen_ea(env, s, insn, opsize, NULL_QREG, addrp,         \
                        op_sign ? EA_LOADS : EA_LOADU, IS_USER(s));     \
        if (IS_NULL_QREG(result)) {                                     \
            gen_addr_fault(s);                                          \
            return;                                                     \
        }                                                               \
    } while (0)
1502 
/*
 * Store VAL to the destination EA encoded in INSN.  On an invalid
 * destination mode, raise an address fault and return from the
 * enclosing DISAS_INSN.  Relies on "s" being in scope.
 */
#define DEST_EA(env, insn, opsize, val, addrp) do {                     \
        TCGv ea_result = gen_ea(env, s, insn, opsize, val, addrp,       \
                                EA_STORE, IS_USER(s));                  \
        if (IS_NULL_QREG(ea_result)) {                                  \
            gen_addr_fault(s);                                          \
            return;                                                     \
        }                                                               \
    } while (0)
1511 
/*
 * Generate a jump to an immediate address.
 * DEST is the target PC and SRC the address recorded for a trace
 * exception when single-stepping is active.  Uses goto_tb chaining in
 * slot N when translator_use_goto_tb() allows it, otherwise a full
 * TB exit.
 */
static void gen_jmp_tb(DisasContext *s, int n, target_ulong dest,
                       target_ulong src)
{
    if (unlikely(s->ss_active)) {
        update_cc_op(s);
        tcg_gen_movi_i32(QREG_PC, dest);
        gen_raise_exception_format2(s, EXCP_TRACE, src);
    } else if (translator_use_goto_tb(&s->base, dest)) {
        tcg_gen_goto_tb(n);
        tcg_gen_movi_i32(QREG_PC, dest);
        tcg_gen_exit_tb(s->base.tb, n);
    } else {
        gen_jmp_im(s, dest);
        tcg_gen_exit_tb(NULL, 0);
    }
    s->base.is_jmp = DISAS_NORETURN;
}
1530 
/*
 * Scc <ea>: set the destination byte to all ones if the condition in
 * bits 11:8 holds, otherwise to all zeros (setcond gives 0/1, the
 * negate turns that into 0/-1).
 */
DISAS_INSN(scc)
{
    DisasCompare c;
    int cond;
    TCGv tmp;

    cond = (insn >> 8) & 0xf;
    gen_cc_cond(&c, s, cond);

    tmp = tcg_temp_new();
    tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
    free_cond(&c);

    tcg_gen_neg_i32(tmp, tmp);
    DEST_EA(env, insn, OS_BYTE, tmp, NULL);
    tcg_temp_free(tmp);
}
1548 
/*
 * DBcc Dn,<disp>: if the condition holds, fall through to the next
 * instruction; otherwise decrement the low word of Dn and branch to
 * base + disp unless the counter has reached -1.
 */
DISAS_INSN(dbcc)
{
    TCGLabel *l1;
    TCGv reg;
    TCGv tmp;
    int16_t offset;
    uint32_t base;

    reg = DREG(insn, 0);
    base = s->pc; /* displacement is relative to the extension word */
    offset = (int16_t)read_im16(env, s);
    l1 = gen_new_label();
    gen_jmpcc(s, (insn >> 8) & 0xf, l1);

    tmp = tcg_temp_new();
    tcg_gen_ext16s_i32(tmp, reg);
    tcg_gen_addi_i32(tmp, tmp, -1);
    gen_partset_reg(OS_WORD, reg, tmp); /* only the low word is written */
    tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, -1, l1);
    gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
    gen_set_label(l1);
    gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
}
1572 
/* Unimplemented MAC instruction: raise a line-A exception.  */
DISAS_INSN(undef_mac)
{
    gen_exception(s, s->base.pc_next, EXCP_LINEA);
}
1577 
/* Unimplemented FPU instruction: raise a line-F exception.  */
DISAS_INSN(undef_fpu)
{
    gen_exception(s, s->base.pc_next, EXCP_LINEF);
}
1582 
/* Catch-all handler: log and raise an illegal-instruction exception.  */
DISAS_INSN(undef)
{
    /*
     * ??? This is both instructions that are as yet unimplemented
     * for the 680x0 series, as well as those that are implemented
     * but actually illegal for CPU32 or pre-68020.
     */
    qemu_log_mask(LOG_UNIMP, "Illegal instruction: %04x @ %08x\n",
                  insn, s->base.pc_next);
    gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
}
1594 
1595 DISAS_INSN(mulw)
1596 {
1597     TCGv reg;
1598     TCGv tmp;
1599     TCGv src;
1600     int sign;
1601 
1602     sign = (insn & 0x100) != 0;
1603     reg = DREG(insn, 9);
1604     tmp = tcg_temp_new();
1605     if (sign)
1606         tcg_gen_ext16s_i32(tmp, reg);
1607     else
1608         tcg_gen_ext16u_i32(tmp, reg);
1609     SRC_EA(env, src, OS_WORD, sign, NULL);
1610     tcg_gen_mul_i32(tmp, tmp, src);
1611     tcg_gen_mov_i32(reg, tmp);
1612     gen_logic_cc(s, tmp, OS_LONG);
1613     tcg_temp_free(tmp);
1614 }
1615 
/*
 * DIVS.W/DIVU.W <ea>,Dn: 32/16 -> 16r:16q.  The division is done in a
 * helper so it can raise exceptions (e.g. divide by zero); ILEN is the
 * instruction length, passed to the helper for exception reporting.
 */
DISAS_INSN(divw)
{
    int sign;
    TCGv src;
    TCGv destr;
    TCGv ilen;

    /* divX.w <EA>,Dn    32/16 -> 16r:16q */

    sign = (insn & 0x100) != 0;

    /* dest.l / src.w */

    SRC_EA(env, src, OS_WORD, sign, NULL);
    destr = tcg_constant_i32(REG(insn, 9));
    ilen = tcg_constant_i32(s->pc - s->base.pc_next);
    if (sign) {
        gen_helper_divsw(cpu_env, destr, src, ilen);
    } else {
        gen_helper_divuw(cpu_env, destr, src, ilen);
    }

    set_cc_op(s, CC_OP_FLAGS);
}
1640 
/*
 * DIVS.L/DIVU.L and DIVSL.L/DIVUL.L: long division.  The extension
 * word selects sign (bit 11) and, where the feature exists, a 64-bit
 * dividend (bit 10).  The work happens in helpers so exceptions can
 * be raised; ILEN is the instruction length for exception reporting.
 */
DISAS_INSN(divl)
{
    TCGv num, reg, den, ilen;
    int sign;
    uint16_t ext;

    ext = read_im16(env, s);

    sign = (ext & 0x0800) != 0;

    if (ext & 0x400) {
        if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
            return;
        }

        /* divX.l <EA>, Dr:Dq    64/32 -> 32r:32q */

        SRC_EA(env, den, OS_LONG, 0, NULL);
        num = tcg_constant_i32(REG(ext, 12));
        reg = tcg_constant_i32(REG(ext, 0));
        ilen = tcg_constant_i32(s->pc - s->base.pc_next);
        if (sign) {
            gen_helper_divsll(cpu_env, num, reg, den, ilen);
        } else {
            gen_helper_divull(cpu_env, num, reg, den, ilen);
        }
        set_cc_op(s, CC_OP_FLAGS);
        return;
    }

    /* divX.l <EA>, Dq        32/32 -> 32q     */
    /* divXl.l <EA>, Dr:Dq    32/32 -> 32r:32q */

    SRC_EA(env, den, OS_LONG, 0, NULL);
    num = tcg_constant_i32(REG(ext, 12));
    reg = tcg_constant_i32(REG(ext, 0));
    ilen = tcg_constant_i32(s->pc - s->base.pc_next);
    if (sign) {
        gen_helper_divsl(cpu_env, num, reg, den, ilen);
    } else {
        gen_helper_divul(cpu_env, num, reg, den, ilen);
    }

    set_cc_op(s, CC_OP_FLAGS);
}
1687 
/*
 * Branch-free BCD addition: dest10 = dest10 + src10 + X.
 * The trick is to add 6 to every digit up front, then subtract it
 * back out of the digits that did not generate a decimal carry.
 */
static void bcd_add(TCGv dest, TCGv src)
{
    TCGv t0, t1;

    /*
     * dest10 = dest10 + src10 + X
     *
     *        t1 = src
     *        t2 = t1 + 0x066
     *        t3 = t2 + dest + X
     *        t4 = t2 ^ dest
     *        t5 = t3 ^ t4
     *        t6 = ~t5 & 0x110
     *        t7 = (t6 >> 2) | (t6 >> 3)
     *        return t3 - t7
     */

    /*
     * t1 = (src + 0x066) + dest + X
     *    = result with some possible exceeding 0x6
     */

    t0 = tcg_const_i32(0x066);
    tcg_gen_add_i32(t0, t0, src);

    t1 = tcg_temp_new();
    tcg_gen_add_i32(t1, t0, dest);
    tcg_gen_add_i32(t1, t1, QREG_CC_X);

    /* we will remove exceeding 0x6 where there is no carry */

    /*
     * t0 = (src + 0x0066) ^ dest
     *    = t1 without carries
     */

    tcg_gen_xor_i32(t0, t0, dest);

    /*
     * extract the carries
     * t0 = t0 ^ t1
     *    = only the carries
     */

    tcg_gen_xor_i32(t0, t0, t1);

    /*
     * generate 0x1 where there is no carry
     * and for each 0x10, generate a 0x6
     */

    tcg_gen_shri_i32(t0, t0, 3);
    tcg_gen_not_i32(t0, t0);
    tcg_gen_andi_i32(t0, t0, 0x22);
    tcg_gen_add_i32(dest, t0, t0);
    tcg_gen_add_i32(dest, dest, t0);
    tcg_temp_free(t0);

    /*
     * remove the exceeding 0x6
     * for digits that have not generated a carry
     */

    tcg_gen_sub_i32(dest, t1, dest);
    tcg_temp_free(t1);
}
1754 
/*
 * Branch-free BCD subtraction: dest10 = dest10 - src10 - X,
 * expressed as a BCD addition of the ten's complement of src
 * (see the identity in the first comment below).
 */
static void bcd_sub(TCGv dest, TCGv src)
{
    TCGv t0, t1, t2;

    /*
     *  dest10 = dest10 - src10 - X
     *         = bcd_add(dest + 1 - X, 0x199 - src)
     */

    /* t0 = 0x066 + (0x199 - src) */

    t0 = tcg_temp_new();
    tcg_gen_subfi_i32(t0, 0x1ff, src);

    /* t1 = t0 + dest + 1 - X*/

    t1 = tcg_temp_new();
    tcg_gen_add_i32(t1, t0, dest);
    tcg_gen_addi_i32(t1, t1, 1);
    tcg_gen_sub_i32(t1, t1, QREG_CC_X);

    /* t2 = t0 ^ dest */

    t2 = tcg_temp_new();
    tcg_gen_xor_i32(t2, t0, dest);

    /* t0 = t1 ^ t2 */

    tcg_gen_xor_i32(t0, t1, t2);

    /*
     * t2 = ~t0 & 0x110
     * t0 = (t2 >> 2) | (t2 >> 3)
     *
     * to fit on 8bit operands, changed in:
     *
     * t2 = ~(t0 >> 3) & 0x22
     * t0 = t2 + t2
     * t0 = t0 + t2
     */

    tcg_gen_shri_i32(t2, t0, 3);
    tcg_gen_not_i32(t2, t2);
    tcg_gen_andi_i32(t2, t2, 0x22);
    tcg_gen_add_i32(t0, t2, t2);
    tcg_gen_add_i32(t0, t0, t2);
    tcg_temp_free(t2);

    /* return t1 - t0 */

    tcg_gen_sub_i32(dest, t1, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
1809 
/*
 * Set flags after a BCD operation whose 9-bit result is in VAL:
 * Z is sticky (non-zero result bits are OR-ed in, so Z can only be
 * cleared, never set), and C/X are taken from bit 8 of the result.
 */
static void bcd_flags(TCGv val)
{
    tcg_gen_andi_i32(QREG_CC_C, val, 0x0ff);
    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_C);

    tcg_gen_extract_i32(QREG_CC_C, val, 8, 1);

    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
}
1819 
/* ABCD Dy,Dx: BCD add of the low bytes of two data registers.  */
DISAS_INSN(abcd_reg)
{
    TCGv src;
    TCGv dest;

    gen_flush_flags(s); /* !Z is sticky */

    src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
    dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
    bcd_add(dest, src);
    gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);

    bcd_flags(dest);
}
1834 
/* ABCD -(Ay),-(Ax): BCD add with predecremented memory operands.  */
DISAS_INSN(abcd_mem)
{
    TCGv src, dest, addr;

    gen_flush_flags(s); /* !Z is sticky */

    /* Indirect pre-decrement load (mode 4) */

    src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
                      NULL_QREG, NULL, EA_LOADU, IS_USER(s));
    dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
                       NULL_QREG, &addr, EA_LOADU, IS_USER(s));

    bcd_add(dest, src);

    /* Store back to the destination address computed above.  */
    gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
                EA_STORE, IS_USER(s));

    bcd_flags(dest);
}
1855 
/* SBCD Dy,Dx: BCD subtract of the low bytes of two data registers.  */
DISAS_INSN(sbcd_reg)
{
    TCGv src, dest;

    gen_flush_flags(s); /* !Z is sticky */

    src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
    dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);

    bcd_sub(dest, src);

    gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);

    bcd_flags(dest);
}
1871 
/* SBCD -(Ay),-(Ax): BCD subtract with predecremented memory operands.  */
DISAS_INSN(sbcd_mem)
{
    TCGv src, dest, addr;

    gen_flush_flags(s); /* !Z is sticky */

    /* Indirect pre-decrement load (mode 4) */

    src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
                      NULL_QREG, NULL, EA_LOADU, IS_USER(s));
    dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
                       NULL_QREG, &addr, EA_LOADU, IS_USER(s));

    bcd_sub(dest, src);

    /* Store back to the destination address computed above.  */
    gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
                EA_STORE, IS_USER(s));

    bcd_flags(dest);
}
1892 
/* NBCD <ea>: BCD negate, computed as 0 - src - X via bcd_sub().  */
DISAS_INSN(nbcd)
{
    TCGv src, dest;
    TCGv addr;

    gen_flush_flags(s); /* !Z is sticky */

    SRC_EA(env, src, OS_BYTE, 0, &addr);

    dest = tcg_const_i32(0);
    bcd_sub(dest, src);

    DEST_EA(env, insn, OS_BYTE, dest, &addr);

    bcd_flags(dest);

    tcg_temp_free(dest);
}
1911 
/*
 * ADD/SUB in both <ea>,Dn and Dn,<ea> forms.  Bit 14 of INSN selects
 * ADD, bit 8 selects the Dn,<ea> (memory destination) direction.
 * X is computed from the unsigned carry/borrow; N/Z/V are deferred
 * via CC_OP_ADDx/CC_OP_SUBx.
 */
DISAS_INSN(addsub)
{
    TCGv reg;
    TCGv dest;
    TCGv src;
    TCGv tmp;
    TCGv addr;
    int add;
    int opsize;

    add = (insn & 0x4000) != 0;
    opsize = insn_opsize(insn);
    reg = gen_extend(s, DREG(insn, 9), opsize, 1);
    dest = tcg_temp_new();
    if (insn & 0x100) {
        SRC_EA(env, tmp, opsize, 1, &addr);
        src = reg;
    } else {
        tmp = reg;
        SRC_EA(env, src, opsize, 1, NULL);
    }
    if (add) {
        tcg_gen_add_i32(dest, tmp, src);
        /* Carry out: unsigned result wrapped below the addend.  */
        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, src);
        set_cc_op(s, CC_OP_ADDB + opsize);
    } else {
        /* Borrow: unsigned minuend smaller than subtrahend.  */
        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, tmp, src);
        tcg_gen_sub_i32(dest, tmp, src);
        set_cc_op(s, CC_OP_SUBB + opsize);
    }
    gen_update_cc_add(dest, src, opsize);
    if (insn & 0x100) {
        DEST_EA(env, insn, opsize, dest, &addr);
    } else {
        gen_partset_reg(opsize, DREG(insn, 9), dest);
    }
    tcg_temp_free(dest);
}
1950 
/* Reverse the order of the bits in REG (ColdFire BITREV, via helper).  */
DISAS_INSN(bitrev)
{
    TCGv reg;
    reg = DREG(insn, 0);
    gen_helper_bitrev(reg, reg);
}
1958 
/*
 * BTST/BCHG/BCLR/BSET where the bit number comes from a data register.
 * Memory operands are byte-sized; data-register operands are long.
 */
DISAS_INSN(bitop_reg)
{
    int opsize;
    int op;       /* 0 = btst, 1 = bchg, 2 = bclr, 3 = bset */
    TCGv src1;
    TCGv src2;
    TCGv tmp;
    TCGv addr;
    TCGv dest;

    if ((insn & 0x38) != 0)
        opsize = OS_BYTE;
    else
        opsize = OS_LONG;
    op = (insn >> 6) & 3;
    /* btst does not write back, so only request the address for the others. */
    SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);

    gen_flush_flags(s);
    src2 = tcg_temp_new();
    /* The bit number is taken modulo 8 (byte) or 32 (long). */
    if (opsize == OS_BYTE)
        tcg_gen_andi_i32(src2, DREG(insn, 9), 7);
    else
        tcg_gen_andi_i32(src2, DREG(insn, 9), 31);

    /* Build the single-bit mask 1 << (bit number). */
    tmp = tcg_const_i32(1);
    tcg_gen_shl_i32(tmp, tmp, src2);
    tcg_temp_free(src2);

    /* Z reflects the tested bit as it was before any modification. */
    tcg_gen_and_i32(QREG_CC_Z, src1, tmp);

    dest = tcg_temp_new();
    switch (op) {
    case 1: /* bchg */
        tcg_gen_xor_i32(dest, src1, tmp);
        break;
    case 2: /* bclr */
        tcg_gen_andc_i32(dest, src1, tmp);
        break;
    case 3: /* bset */
        tcg_gen_or_i32(dest, src1, tmp);
        break;
    default: /* btst */
        break;
    }
    tcg_temp_free(tmp);
    if (op) {
        DEST_EA(env, insn, opsize, dest, &addr);
    }
    tcg_temp_free(dest);
}
2009 
2010 DISAS_INSN(sats)
2011 {
2012     TCGv reg;
2013     reg = DREG(insn, 0);
2014     gen_flush_flags(s);
2015     gen_helper_sats(reg, reg, QREG_CC_V);
2016     gen_logic_cc(s, reg, OS_LONG);
2017 }
2018 
2019 static void gen_push(DisasContext *s, TCGv val)
2020 {
2021     TCGv tmp;
2022 
2023     tmp = tcg_temp_new();
2024     tcg_gen_subi_i32(tmp, QREG_SP, 4);
2025     gen_store(s, OS_LONG, tmp, val, IS_USER(s));
2026     tcg_gen_mov_i32(QREG_SP, tmp);
2027     tcg_temp_free(tmp);
2028 }
2029 
2030 static TCGv mreg(int reg)
2031 {
2032     if (reg < 8) {
2033         /* Dx */
2034         return cpu_dregs[reg];
2035     }
2036     /* Ax */
2037     return cpu_aregs[reg & 7];
2038 }
2039 
/*
 * MOVEM: move multiple registers to/from memory.  The register set is a
 * 16-bit mask in the extension word; bit 10 of the opcode selects the
 * direction and bit 6 the operand size (word or long).
 */
DISAS_INSN(movem)
{
    TCGv addr, incr, tmp, r[16];
    int is_load = (insn & 0x0400) != 0;
    int opsize = (insn & 0x40) != 0 ? OS_LONG : OS_WORD;
    uint16_t mask = read_im16(env, s);
    int mode = extract32(insn, 3, 3);
    int reg0 = REG(insn, 0);
    int i;

    tmp = cpu_aregs[reg0];

    switch (mode) {
    case 0: /* data register direct */
    case 1: /* addr register direct */
    do_addr_fault:
        gen_addr_fault(s);
        return;

    case 2: /* indirect */
        break;

    case 3: /* indirect post-increment */
        if (!is_load) {
            /* post-increment is not allowed */
            goto do_addr_fault;
        }
        break;

    case 4: /* indirect pre-decrement */
        if (is_load) {
            /* pre-decrement is not allowed */
            goto do_addr_fault;
        }
        /*
         * We want a bare copy of the address reg, without any pre-decrement
         * adjustment, as gen_lea would provide.
         */
        break;

    default:
        tmp = gen_lea_mode(env, s, mode, reg0, opsize);
        if (IS_NULL_QREG(tmp)) {
            goto do_addr_fault;
        }
        break;
    }

    /* Work on a private copy so the base register is only updated at the end. */
    addr = tcg_temp_new();
    tcg_gen_mov_i32(addr, tmp);
    incr = tcg_const_i32(opsize_bytes(opsize));

    if (is_load) {
        /*
         * memory to register: perform all loads before any register is
         * written, so a fault mid-sequence leaves the registers untouched.
         */
        for (i = 0; i < 16; i++) {
            if (mask & (1 << i)) {
                r[i] = gen_load(s, opsize, addr, 1, IS_USER(s));
                tcg_gen_add_i32(addr, addr, incr);
            }
        }
        for (i = 0; i < 16; i++) {
            if (mask & (1 << i)) {
                tcg_gen_mov_i32(mreg(i), r[i]);
                tcg_temp_free(r[i]);
            }
        }
        if (mode == 3) {
            /* post-increment: movem (An)+,X */
            tcg_gen_mov_i32(cpu_aregs[reg0], addr);
        }
    } else {
        /* register to memory */
        if (mode == 4) {
            /* pre-decrement: movem X,-(An); registers are stored A7..D0. */
            for (i = 15; i >= 0; i--) {
                if ((mask << i) & 0x8000) {
                    tcg_gen_sub_i32(addr, addr, incr);
                    if (reg0 + 8 == i &&
                        m68k_feature(s->env, M68K_FEATURE_EXT_FULL)) {
                        /*
                         * M68020+: if the addressing register is the
                         * register moved to memory, the value written
                         * is the initial value decremented by the size of
                         * the operation, regardless of how many actual
                         * stores have been performed until this point.
                         * M68000/M68010: the value is the initial value.
                         */
                        tmp = tcg_temp_new();
                        tcg_gen_sub_i32(tmp, cpu_aregs[reg0], incr);
                        gen_store(s, opsize, addr, tmp, IS_USER(s));
                        tcg_temp_free(tmp);
                    } else {
                        gen_store(s, opsize, addr, mreg(i), IS_USER(s));
                    }
                }
            }
            tcg_gen_mov_i32(cpu_aregs[reg0], addr);
        } else {
            for (i = 0; i < 16; i++) {
                if (mask & (1 << i)) {
                    gen_store(s, opsize, addr, mreg(i), IS_USER(s));
                    tcg_gen_add_i32(addr, addr, incr);
                }
            }
        }
    }

    tcg_temp_free(incr);
    tcg_temp_free(addr);
}
2150 
/*
 * MOVEP: transfer 2 or 4 bytes between a data register and alternate
 * memory bytes at d16(An).  Bit 6 selects long vs word; bit 7 selects
 * register-to-memory vs memory-to-register.  Bytes are moved most
 * significant first, at even address strides.
 */
DISAS_INSN(movep)
{
    uint8_t i;          /* number of bytes still to transfer */
    int16_t displ;
    TCGv reg;
    TCGv addr;
    TCGv abuf;          /* running memory address */
    TCGv dbuf;          /* one byte in transit */

    displ = read_im16(env, s);

    addr = AREG(insn, 0);
    reg = DREG(insn, 9);

    abuf = tcg_temp_new();
    tcg_gen_addi_i32(abuf, addr, displ);
    dbuf = tcg_temp_new();

    if (insn & 0x40) {
        i = 4;
    } else {
        i = 2;
    }

    if (insn & 0x80) {
        /* register to memory: store byte (i-1) of the register each pass. */
        for ( ; i > 0 ; i--) {
            tcg_gen_shri_i32(dbuf, reg, (i - 1) * 8);
            tcg_gen_qemu_st8(dbuf, abuf, IS_USER(s));
            if (i > 1) {
                tcg_gen_addi_i32(abuf, abuf, 2);
            }
        }
    } else {
        /* memory to register: deposit each loaded byte into its lane. */
        for ( ; i > 0 ; i--) {
            tcg_gen_qemu_ld8u(dbuf, abuf, IS_USER(s));
            tcg_gen_deposit_i32(reg, reg, dbuf, (i - 1) * 8, 8);
            if (i > 1) {
                tcg_gen_addi_i32(abuf, abuf, 2);
            }
        }
    }
    tcg_temp_free(abuf);
    tcg_temp_free(dbuf);
}
2195 
2196 DISAS_INSN(bitop_im)
2197 {
2198     int opsize;
2199     int op;
2200     TCGv src1;
2201     uint32_t mask;
2202     int bitnum;
2203     TCGv tmp;
2204     TCGv addr;
2205 
2206     if ((insn & 0x38) != 0)
2207         opsize = OS_BYTE;
2208     else
2209         opsize = OS_LONG;
2210     op = (insn >> 6) & 3;
2211 
2212     bitnum = read_im16(env, s);
2213     if (m68k_feature(s->env, M68K_FEATURE_M68K)) {
2214         if (bitnum & 0xfe00) {
2215             disas_undef(env, s, insn);
2216             return;
2217         }
2218     } else {
2219         if (bitnum & 0xff00) {
2220             disas_undef(env, s, insn);
2221             return;
2222         }
2223     }
2224 
2225     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
2226 
2227     gen_flush_flags(s);
2228     if (opsize == OS_BYTE)
2229         bitnum &= 7;
2230     else
2231         bitnum &= 31;
2232     mask = 1 << bitnum;
2233 
2234    tcg_gen_andi_i32(QREG_CC_Z, src1, mask);
2235 
2236     if (op) {
2237         tmp = tcg_temp_new();
2238         switch (op) {
2239         case 1: /* bchg */
2240             tcg_gen_xori_i32(tmp, src1, mask);
2241             break;
2242         case 2: /* bclr */
2243             tcg_gen_andi_i32(tmp, src1, ~mask);
2244             break;
2245         case 3: /* bset */
2246             tcg_gen_ori_i32(tmp, src1, mask);
2247             break;
2248         default: /* btst */
2249             break;
2250         }
2251         DEST_EA(env, insn, opsize, tmp, &addr);
2252         tcg_temp_free(tmp);
2253     }
2254 }
2255 
/* Return a fresh temporary holding the current CCR value. */
static TCGv gen_get_ccr(DisasContext *s)
{
    TCGv dest;

    /* Sync s->cc_op to env before the helper consults it. */
    update_cc_op(s);
    dest = tcg_temp_new();
    gen_helper_get_ccr(dest, cpu_env);
    return dest;
}
2265 
2266 static TCGv gen_get_sr(DisasContext *s)
2267 {
2268     TCGv ccr;
2269     TCGv sr;
2270 
2271     ccr = gen_get_ccr(s);
2272     sr = tcg_temp_new();
2273     tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
2274     tcg_gen_or_i32(sr, sr, ccr);
2275     tcg_temp_free(ccr);
2276     return sr;
2277 }
2278 
/* Load the immediate VAL into SR, or into CCR only when CCR_ONLY is set. */
static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
{
    if (ccr_only) {
        /*
         * Expand the packed CCR bits into the internal flag representation:
         * C and X as 0/1, V and N as 0/-1, and Z inverted (0 means Z set).
         */
        tcg_gen_movi_i32(QREG_CC_C, val & CCF_C ? 1 : 0);
        tcg_gen_movi_i32(QREG_CC_V, val & CCF_V ? -1 : 0);
        tcg_gen_movi_i32(QREG_CC_Z, val & CCF_Z ? 0 : 1);
        tcg_gen_movi_i32(QREG_CC_N, val & CCF_N ? -1 : 0);
        tcg_gen_movi_i32(QREG_CC_X, val & CCF_X ? 1 : 0);
    } else {
        /* Must writeback before changing security state. */
        do_writebacks(s);
        gen_helper_set_sr(cpu_env, tcg_constant_i32(val));
    }
    set_cc_op(s, CC_OP_FLAGS);
}
2294 
/* Load VAL into SR, or into CCR only when CCR_ONLY is set. */
static void gen_set_sr(DisasContext *s, TCGv val, int ccr_only)
{
    if (ccr_only) {
        gen_helper_set_ccr(cpu_env, val);
    } else {
        /* Must writeback before changing security state. */
        do_writebacks(s);
        gen_helper_set_sr(cpu_env, val);
    }
    set_cc_op(s, CC_OP_FLAGS);
}
2306 
/*
 * Common body for MOVE to SR / MOVE to CCR.  EA spec 0x3c is the
 * immediate form; anything else is a general word-sized source.
 */
static void gen_move_to_sr(CPUM68KState *env, DisasContext *s, uint16_t insn,
                           bool ccr_only)
{
    if ((insn & 0x3f) == 0x3c) {
        uint16_t val;
        val = read_im16(env, s);
        gen_set_sr_im(s, val, ccr_only);
    } else {
        TCGv src;
        SRC_EA(env, src, OS_WORD, 0, NULL);
        gen_set_sr(s, src, ccr_only);
    }
}
2320 
2321 DISAS_INSN(arith_im)
2322 {
2323     int op;
2324     TCGv im;
2325     TCGv src1;
2326     TCGv dest;
2327     TCGv addr;
2328     int opsize;
2329     bool with_SR = ((insn & 0x3f) == 0x3c);
2330 
2331     op = (insn >> 9) & 7;
2332     opsize = insn_opsize(insn);
2333     switch (opsize) {
2334     case OS_BYTE:
2335         im = tcg_const_i32((int8_t)read_im8(env, s));
2336         break;
2337     case OS_WORD:
2338         im = tcg_const_i32((int16_t)read_im16(env, s));
2339         break;
2340     case OS_LONG:
2341         im = tcg_const_i32(read_im32(env, s));
2342         break;
2343     default:
2344         g_assert_not_reached();
2345     }
2346 
2347     if (with_SR) {
2348         /* SR/CCR can only be used with andi/eori/ori */
2349         if (op == 2 || op == 3 || op == 6) {
2350             disas_undef(env, s, insn);
2351             return;
2352         }
2353         switch (opsize) {
2354         case OS_BYTE:
2355             src1 = gen_get_ccr(s);
2356             break;
2357         case OS_WORD:
2358             if (IS_USER(s)) {
2359                 gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2360                 return;
2361             }
2362             src1 = gen_get_sr(s);
2363             break;
2364         default:
2365             /* OS_LONG; others already g_assert_not_reached.  */
2366             disas_undef(env, s, insn);
2367             return;
2368         }
2369     } else {
2370         SRC_EA(env, src1, opsize, 1, (op == 6) ? NULL : &addr);
2371     }
2372     dest = tcg_temp_new();
2373     switch (op) {
2374     case 0: /* ori */
2375         tcg_gen_or_i32(dest, src1, im);
2376         if (with_SR) {
2377             gen_set_sr(s, dest, opsize == OS_BYTE);
2378             gen_exit_tb(s);
2379         } else {
2380             DEST_EA(env, insn, opsize, dest, &addr);
2381             gen_logic_cc(s, dest, opsize);
2382         }
2383         break;
2384     case 1: /* andi */
2385         tcg_gen_and_i32(dest, src1, im);
2386         if (with_SR) {
2387             gen_set_sr(s, dest, opsize == OS_BYTE);
2388             gen_exit_tb(s);
2389         } else {
2390             DEST_EA(env, insn, opsize, dest, &addr);
2391             gen_logic_cc(s, dest, opsize);
2392         }
2393         break;
2394     case 2: /* subi */
2395         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, src1, im);
2396         tcg_gen_sub_i32(dest, src1, im);
2397         gen_update_cc_add(dest, im, opsize);
2398         set_cc_op(s, CC_OP_SUBB + opsize);
2399         DEST_EA(env, insn, opsize, dest, &addr);
2400         break;
2401     case 3: /* addi */
2402         tcg_gen_add_i32(dest, src1, im);
2403         gen_update_cc_add(dest, im, opsize);
2404         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, im);
2405         set_cc_op(s, CC_OP_ADDB + opsize);
2406         DEST_EA(env, insn, opsize, dest, &addr);
2407         break;
2408     case 5: /* eori */
2409         tcg_gen_xor_i32(dest, src1, im);
2410         if (with_SR) {
2411             gen_set_sr(s, dest, opsize == OS_BYTE);
2412             gen_exit_tb(s);
2413         } else {
2414             DEST_EA(env, insn, opsize, dest, &addr);
2415             gen_logic_cc(s, dest, opsize);
2416         }
2417         break;
2418     case 6: /* cmpi */
2419         gen_update_cc_cmp(s, src1, im, opsize);
2420         break;
2421     default:
2422         abort();
2423     }
2424     tcg_temp_free(im);
2425     tcg_temp_free(dest);
2426 }
2427 
/*
 * CAS Dc,Du,<ea>: atomic compare-and-swap of a memory operand against
 * data register Dc, storing Du on success.  Dc always receives the
 * value read from memory (its own old value when the compare matched).
 */
DISAS_INSN(cas)
{
    int opsize;
    TCGv addr;
    uint16_t ext;
    TCGv load;
    TCGv cmp;
    MemOp opc;

    switch ((insn >> 9) & 3) {
    case 1:
        opsize = OS_BYTE;
        opc = MO_SB;
        break;
    case 2:
        opsize = OS_WORD;
        opc = MO_TESW;
        break;
    case 3:
        opsize = OS_LONG;
        opc = MO_TESL;
        break;
    default:
        g_assert_not_reached();
    }

    ext = read_im16(env, s);

    /* cas Dc,Du,<EA> */

    addr = gen_lea(env, s, insn, opsize);
    if (IS_NULL_QREG(addr)) {
        gen_addr_fault(s);
        return;
    }

    cmp = gen_extend(s, DREG(ext, 0), opsize, 1);

    /*
     * if  <EA> == Dc then
     *     <EA> = Du
     *     Dc = <EA> (because <EA> == Dc)
     * else
     *     Dc = <EA>
     */

    load = tcg_temp_new();
    tcg_gen_atomic_cmpxchg_i32(load, addr, cmp, DREG(ext, 6),
                               IS_USER(s), opc);
    /* update flags before setting cmp to load */
    gen_update_cc_cmp(s, load, cmp, opsize);
    gen_partset_reg(opsize, DREG(ext, 0), load);

    tcg_temp_free(load);

    /* Complete any postinc/predec writeback on the address register. */
    switch (extract32(insn, 3, 3)) {
    case 3: /* Indirect postincrement.  */
        tcg_gen_addi_i32(AREG(insn, 0), addr, opsize_bytes(opsize));
        break;
    case 4: /* Indirect predecrememnt.  */
        tcg_gen_mov_i32(AREG(insn, 0), addr);
        break;
    }
}
2492 
2493 DISAS_INSN(cas2w)
2494 {
2495     uint16_t ext1, ext2;
2496     TCGv addr1, addr2;
2497     TCGv regs;
2498 
2499     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2500 
2501     ext1 = read_im16(env, s);
2502 
2503     if (ext1 & 0x8000) {
2504         /* Address Register */
2505         addr1 = AREG(ext1, 12);
2506     } else {
2507         /* Data Register */
2508         addr1 = DREG(ext1, 12);
2509     }
2510 
2511     ext2 = read_im16(env, s);
2512     if (ext2 & 0x8000) {
2513         /* Address Register */
2514         addr2 = AREG(ext2, 12);
2515     } else {
2516         /* Data Register */
2517         addr2 = DREG(ext2, 12);
2518     }
2519 
2520     /*
2521      * if (R1) == Dc1 && (R2) == Dc2 then
2522      *     (R1) = Du1
2523      *     (R2) = Du2
2524      * else
2525      *     Dc1 = (R1)
2526      *     Dc2 = (R2)
2527      */
2528 
2529     regs = tcg_const_i32(REG(ext2, 6) |
2530                          (REG(ext1, 6) << 3) |
2531                          (REG(ext2, 0) << 6) |
2532                          (REG(ext1, 0) << 9));
2533     if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2534         gen_helper_exit_atomic(cpu_env);
2535     } else {
2536         gen_helper_cas2w(cpu_env, regs, addr1, addr2);
2537     }
2538     tcg_temp_free(regs);
2539 
2540     /* Note that cas2w also assigned to env->cc_op.  */
2541     s->cc_op = CC_OP_CMPW;
2542     s->cc_op_synced = 1;
2543 }
2544 
2545 DISAS_INSN(cas2l)
2546 {
2547     uint16_t ext1, ext2;
2548     TCGv addr1, addr2, regs;
2549 
2550     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2551 
2552     ext1 = read_im16(env, s);
2553 
2554     if (ext1 & 0x8000) {
2555         /* Address Register */
2556         addr1 = AREG(ext1, 12);
2557     } else {
2558         /* Data Register */
2559         addr1 = DREG(ext1, 12);
2560     }
2561 
2562     ext2 = read_im16(env, s);
2563     if (ext2 & 0x8000) {
2564         /* Address Register */
2565         addr2 = AREG(ext2, 12);
2566     } else {
2567         /* Data Register */
2568         addr2 = DREG(ext2, 12);
2569     }
2570 
2571     /*
2572      * if (R1) == Dc1 && (R2) == Dc2 then
2573      *     (R1) = Du1
2574      *     (R2) = Du2
2575      * else
2576      *     Dc1 = (R1)
2577      *     Dc2 = (R2)
2578      */
2579 
2580     regs = tcg_const_i32(REG(ext2, 6) |
2581                          (REG(ext1, 6) << 3) |
2582                          (REG(ext2, 0) << 6) |
2583                          (REG(ext1, 0) << 9));
2584     if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2585         gen_helper_cas2l_parallel(cpu_env, regs, addr1, addr2);
2586     } else {
2587         gen_helper_cas2l(cpu_env, regs, addr1, addr2);
2588     }
2589     tcg_temp_free(regs);
2590 
2591     /* Note that cas2l also assigned to env->cc_op.  */
2592     s->cc_op = CC_OP_CMPL;
2593     s->cc_op_synced = 1;
2594 }
2595 
2596 DISAS_INSN(byterev)
2597 {
2598     TCGv reg;
2599 
2600     reg = DREG(insn, 0);
2601     tcg_gen_bswap32_i32(reg, reg);
2602 }
2603 
2604 DISAS_INSN(move)
2605 {
2606     TCGv src;
2607     TCGv dest;
2608     int op;
2609     int opsize;
2610 
2611     switch (insn >> 12) {
2612     case 1: /* move.b */
2613         opsize = OS_BYTE;
2614         break;
2615     case 2: /* move.l */
2616         opsize = OS_LONG;
2617         break;
2618     case 3: /* move.w */
2619         opsize = OS_WORD;
2620         break;
2621     default:
2622         abort();
2623     }
2624     SRC_EA(env, src, opsize, 1, NULL);
2625     op = (insn >> 6) & 7;
2626     if (op == 1) {
2627         /* movea */
2628         /* The value will already have been sign extended.  */
2629         dest = AREG(insn, 9);
2630         tcg_gen_mov_i32(dest, src);
2631     } else {
2632         /* normal move */
2633         uint16_t dest_ea;
2634         dest_ea = ((insn >> 9) & 7) | (op << 3);
2635         DEST_EA(env, dest_ea, opsize, src, NULL);
2636         /* This will be correct because loads sign extend.  */
2637         gen_logic_cc(s, src, opsize);
2638     }
2639 }
2640 
2641 DISAS_INSN(negx)
2642 {
2643     TCGv z;
2644     TCGv src;
2645     TCGv addr;
2646     int opsize;
2647 
2648     opsize = insn_opsize(insn);
2649     SRC_EA(env, src, opsize, 1, &addr);
2650 
2651     gen_flush_flags(s); /* compute old Z */
2652 
2653     /*
2654      * Perform subtract with borrow.
2655      * (X, N) =  -(src + X);
2656      */
2657 
2658     z = tcg_const_i32(0);
2659     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, z, QREG_CC_X, z);
2660     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, z, z, QREG_CC_N, QREG_CC_X);
2661     tcg_temp_free(z);
2662     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
2663 
2664     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
2665 
2666     /*
2667      * Compute signed-overflow for negation.  The normal formula for
2668      * subtraction is (res ^ src) & (src ^ dest), but with dest==0
2669      * this simplifies to res & src.
2670      */
2671 
2672     tcg_gen_and_i32(QREG_CC_V, QREG_CC_N, src);
2673 
2674     /* Copy the rest of the results into place.  */
2675     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
2676     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
2677 
2678     set_cc_op(s, CC_OP_FLAGS);
2679 
2680     /* result is in QREG_CC_N */
2681 
2682     DEST_EA(env, insn, opsize, QREG_CC_N, &addr);
2683 }
2684 
2685 DISAS_INSN(lea)
2686 {
2687     TCGv reg;
2688     TCGv tmp;
2689 
2690     reg = AREG(insn, 9);
2691     tmp = gen_lea(env, s, insn, OS_LONG);
2692     if (IS_NULL_QREG(tmp)) {
2693         gen_addr_fault(s);
2694         return;
2695     }
2696     tcg_gen_mov_i32(reg, tmp);
2697 }
2698 
2699 DISAS_INSN(clr)
2700 {
2701     int opsize;
2702     TCGv zero;
2703 
2704     zero = tcg_const_i32(0);
2705 
2706     opsize = insn_opsize(insn);
2707     DEST_EA(env, insn, opsize, zero, NULL);
2708     gen_logic_cc(s, zero, opsize);
2709     tcg_temp_free(zero);
2710 }
2711 
2712 DISAS_INSN(move_from_ccr)
2713 {
2714     TCGv ccr;
2715 
2716     ccr = gen_get_ccr(s);
2717     DEST_EA(env, insn, OS_WORD, ccr, NULL);
2718 }
2719 
/* NEG: dest = 0 - src, updating all condition codes including X. */
DISAS_INSN(neg)
{
    TCGv src1;
    TCGv dest;
    TCGv addr;
    int opsize;

    opsize = insn_opsize(insn);
    SRC_EA(env, src1, opsize, 1, &addr);
    dest = tcg_temp_new();
    tcg_gen_neg_i32(dest, src1);
    set_cc_op(s, CC_OP_SUBB + opsize);
    gen_update_cc_add(dest, src1, opsize);
    /* 0 - src borrows iff src != 0, i.e. iff the result is non-zero. */
    tcg_gen_setcondi_i32(TCG_COND_NE, QREG_CC_X, dest, 0);
    DEST_EA(env, insn, opsize, dest, &addr);
    tcg_temp_free(dest);
}
2737 
/* MOVE to CCR: shared SR/CCR move helper, restricted to the CCR byte. */
DISAS_INSN(move_to_ccr)
{
    gen_move_to_sr(env, s, insn, true);
}
2742 
2743 DISAS_INSN(not)
2744 {
2745     TCGv src1;
2746     TCGv dest;
2747     TCGv addr;
2748     int opsize;
2749 
2750     opsize = insn_opsize(insn);
2751     SRC_EA(env, src1, opsize, 1, &addr);
2752     dest = tcg_temp_new();
2753     tcg_gen_not_i32(dest, src1);
2754     DEST_EA(env, insn, opsize, dest, &addr);
2755     gen_logic_cc(s, dest, opsize);
2756 }
2757 
2758 DISAS_INSN(swap)
2759 {
2760     TCGv src1;
2761     TCGv src2;
2762     TCGv reg;
2763 
2764     src1 = tcg_temp_new();
2765     src2 = tcg_temp_new();
2766     reg = DREG(insn, 0);
2767     tcg_gen_shli_i32(src1, reg, 16);
2768     tcg_gen_shri_i32(src2, reg, 16);
2769     tcg_gen_or_i32(reg, src1, src2);
2770     tcg_temp_free(src2);
2771     tcg_temp_free(src1);
2772     gen_logic_cc(s, reg, OS_LONG);
2773 }
2774 
/* BKPT: raise a debug exception at the current instruction. */
DISAS_INSN(bkpt)
{
    gen_exception(s, s->base.pc_next, EXCP_DEBUG);
}
2779 
2780 DISAS_INSN(pea)
2781 {
2782     TCGv tmp;
2783 
2784     tmp = gen_lea(env, s, insn, OS_LONG);
2785     if (IS_NULL_QREG(tmp)) {
2786         gen_addr_fault(s);
2787         return;
2788     }
2789     gen_push(s, tmp);
2790 }
2791 
2792 DISAS_INSN(ext)
2793 {
2794     int op;
2795     TCGv reg;
2796     TCGv tmp;
2797 
2798     reg = DREG(insn, 0);
2799     op = (insn >> 6) & 7;
2800     tmp = tcg_temp_new();
2801     if (op == 3)
2802         tcg_gen_ext16s_i32(tmp, reg);
2803     else
2804         tcg_gen_ext8s_i32(tmp, reg);
2805     if (op == 2)
2806         gen_partset_reg(OS_WORD, reg, tmp);
2807     else
2808         tcg_gen_mov_i32(reg, tmp);
2809     gen_logic_cc(s, tmp, OS_LONG);
2810     tcg_temp_free(tmp);
2811 }
2812 
2813 DISAS_INSN(tst)
2814 {
2815     int opsize;
2816     TCGv tmp;
2817 
2818     opsize = insn_opsize(insn);
2819     SRC_EA(env, tmp, opsize, 1, NULL);
2820     gen_logic_cc(s, tmp, opsize);
2821 }
2822 
DISAS_INSN(pulse)
{
    /* PULSE has no effect on architectural state; implemented as a NOP.  */
}
2827 
/* ILLEGAL: raise an illegal-instruction exception. */
DISAS_INSN(illegal)
{
    gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
}
2832 
/*
 * TAS: test a byte operand (set N/Z) and atomically set its top bit.
 */
DISAS_INSN(tas)
{
    int mode = extract32(insn, 3, 3);
    int reg0 = REG(insn, 0);

    if (mode == 0) {
        /* data register direct: no atomicity needed. */
        TCGv dest = cpu_dregs[reg0];
        gen_logic_cc(s, dest, OS_BYTE);
        tcg_gen_ori_tl(dest, dest, 0x80);
    } else {
        TCGv src1, addr;

        addr = gen_lea_mode(env, s, mode, reg0, OS_BYTE);
        if (IS_NULL_QREG(addr)) {
            gen_addr_fault(s);
            return;
        }
        /* Atomic fetch-or returns the old value; flags come from it. */
        src1 = tcg_temp_new();
        tcg_gen_atomic_fetch_or_tl(src1, addr, tcg_constant_tl(0x80),
                                   IS_USER(s), MO_SB);
        gen_logic_cc(s, src1, OS_BYTE);
        tcg_temp_free(src1);

        /* Complete any postinc/predec writeback on the address register. */
        switch (mode) {
        case 3: /* Indirect postincrement.  */
            tcg_gen_addi_i32(AREG(insn, 0), addr, 1);
            break;
        case 4: /* Indirect predecrememnt.  */
            tcg_gen_mov_i32(AREG(insn, 0), addr);
            break;
        }
    }
}
2867 
/*
 * MULS.L/MULU.L: 32x32 multiply.  Extension word bit 11 selects signed,
 * bit 10 selects the 64-bit (Dh:Dl) result form.
 */
DISAS_INSN(mull)
{
    uint16_t ext;
    TCGv src1;
    int sign;

    ext = read_im16(env, s);

    sign = ext & 0x800;

    if (ext & 0x400) {
        /* 64-bit form: only available with the QUAD_MULDIV feature. */
        if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
            return;
        }

        SRC_EA(env, src1, OS_LONG, 0, NULL);

        /* Low half lands in CC_Z, high half in CC_N. */
        if (sign) {
            tcg_gen_muls2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
        } else {
            tcg_gen_mulu2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
        }
        /* if Dl == Dh, 68040 returns low word */
        tcg_gen_mov_i32(DREG(ext, 0), QREG_CC_N);
        tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_Z);
        /* Z is set only when the full 64-bit product is zero. */
        tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N);

        tcg_gen_movi_i32(QREG_CC_V, 0);
        tcg_gen_movi_i32(QREG_CC_C, 0);

        set_cc_op(s, CC_OP_FLAGS);
        return;
    }
    SRC_EA(env, src1, OS_LONG, 0, NULL);
    if (m68k_feature(s->env, M68K_FEATURE_M68K)) {
        /* 680x0: V is set when the product does not fit in 32 bits. */
        tcg_gen_movi_i32(QREG_CC_C, 0);
        if (sign) {
            tcg_gen_muls2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
            /* QREG_CC_V is -(QREG_CC_V != (QREG_CC_N >> 31)) */
            tcg_gen_sari_i32(QREG_CC_Z, QREG_CC_N, 31);
            tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_Z);
        } else {
            tcg_gen_mulu2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
            /* QREG_CC_V is -(QREG_CC_V != 0), use QREG_CC_C as 0 */
            tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_C);
        }
        tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
        tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_N);

        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);

        set_cc_op(s, CC_OP_FLAGS);
    } else {
        /*
         * The upper 32 bits of the product are discarded, so
         * muls.l and mulu.l are functionally equivalent.
         */
        tcg_gen_mul_i32(DREG(ext, 12), src1, DREG(ext, 12));
        gen_logic_cc(s, DREG(ext, 12), OS_LONG);
    }
}
2930 
/*
 * Common body for LINK.W/LINK.L: push An, load An with the new stack
 * pointer, then add OFFSET to SP.
 */
static void gen_link(DisasContext *s, uint16_t insn, int32_t offset)
{
    TCGv reg;
    TCGv tmp;

    reg = AREG(insn, 0);
    tmp = tcg_temp_new();
    tcg_gen_subi_i32(tmp, QREG_SP, 4);
    /* Push the old An value before An is overwritten. */
    gen_store(s, OS_LONG, tmp, reg, IS_USER(s));
    /* When An is A7 itself, skip the mov: SP is assigned just below. */
    if ((insn & 7) != 7) {
        tcg_gen_mov_i32(reg, tmp);
    }
    tcg_gen_addi_i32(QREG_SP, tmp, offset);
    tcg_temp_free(tmp);
}
2946 
2947 DISAS_INSN(link)
2948 {
2949     int16_t offset;
2950 
2951     offset = read_im16(env, s);
2952     gen_link(s, insn, offset);
2953 }
2954 
2955 DISAS_INSN(linkl)
2956 {
2957     int32_t offset;
2958 
2959     offset = read_im32(env, s);
2960     gen_link(s, insn, offset);
2961 }
2962 
/* UNLK: SP = An + 4 and An = (An), undoing a LINK. */
DISAS_INSN(unlk)
{
    TCGv src;
    TCGv reg;
    TCGv tmp;

    /* Copy An first: it may be A7, which is overwritten below. */
    src = tcg_temp_new();
    reg = AREG(insn, 0);
    tcg_gen_mov_i32(src, reg);
    tmp = gen_load(s, OS_LONG, src, 0, IS_USER(s));
    tcg_gen_mov_i32(reg, tmp);
    tcg_gen_addi_i32(QREG_SP, src, 4);
    tcg_temp_free(src);
    tcg_temp_free(tmp);
}
2978 
#if defined(CONFIG_SOFTMMU)
/* RESET: privileged; delegates the external reset to a helper. */
DISAS_INSN(reset)
{
    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }

    gen_helper_reset(cpu_env);
}
#endif
2990 
/* NOP: no operation. */
DISAS_INSN(nop)
{
}
2994 
2995 DISAS_INSN(rtd)
2996 {
2997     TCGv tmp;
2998     int16_t offset = read_im16(env, s);
2999 
3000     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
3001     tcg_gen_addi_i32(QREG_SP, QREG_SP, offset + 4);
3002     gen_jmp(s, tmp);
3003 }
3004 
/* RTR: pop CCR (one word) and then the return address from the stack. */
DISAS_INSN(rtr)
{
    TCGv tmp;
    TCGv ccr;
    TCGv sp;

    sp = tcg_temp_new();
    ccr = gen_load(s, OS_WORD, QREG_SP, 0, IS_USER(s));
    tcg_gen_addi_i32(sp, QREG_SP, 2);
    tmp = gen_load(s, OS_LONG, sp, 0, IS_USER(s));
    tcg_gen_addi_i32(QREG_SP, sp, 4);
    tcg_temp_free(sp);

    /* Only the condition-code byte is restored, never the system byte. */
    gen_set_sr(s, ccr, true);
    tcg_temp_free(ccr);

    gen_jmp(s, tmp);
}
3023 
3024 DISAS_INSN(rts)
3025 {
3026     TCGv tmp;
3027 
3028     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
3029     tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
3030     gen_jmp(s, tmp);
3031 }
3032 
3033 DISAS_INSN(jump)
3034 {
3035     TCGv tmp;
3036 
3037     /*
3038      * Load the target address first to ensure correct exception
3039      * behavior.
3040      */
3041     tmp = gen_lea(env, s, insn, OS_LONG);
3042     if (IS_NULL_QREG(tmp)) {
3043         gen_addr_fault(s);
3044         return;
3045     }
3046     if ((insn & 0x40) == 0) {
3047         /* jsr */
3048         gen_push(s, tcg_const_i32(s->pc));
3049     }
3050     gen_jmp(s, tmp);
3051 }
3052 
3053 DISAS_INSN(addsubq)
3054 {
3055     TCGv src;
3056     TCGv dest;
3057     TCGv val;
3058     int imm;
3059     TCGv addr;
3060     int opsize;
3061 
3062     if ((insn & 070) == 010) {
3063         /* Operation on address register is always long.  */
3064         opsize = OS_LONG;
3065     } else {
3066         opsize = insn_opsize(insn);
3067     }
3068     SRC_EA(env, src, opsize, 1, &addr);
3069     imm = (insn >> 9) & 7;
3070     if (imm == 0) {
3071         imm = 8;
3072     }
3073     val = tcg_const_i32(imm);
3074     dest = tcg_temp_new();
3075     tcg_gen_mov_i32(dest, src);
3076     if ((insn & 0x38) == 0x08) {
3077         /*
3078          * Don't update condition codes if the destination is an
3079          * address register.
3080          */
3081         if (insn & 0x0100) {
3082             tcg_gen_sub_i32(dest, dest, val);
3083         } else {
3084             tcg_gen_add_i32(dest, dest, val);
3085         }
3086     } else {
3087         if (insn & 0x0100) {
3088             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
3089             tcg_gen_sub_i32(dest, dest, val);
3090             set_cc_op(s, CC_OP_SUBB + opsize);
3091         } else {
3092             tcg_gen_add_i32(dest, dest, val);
3093             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
3094             set_cc_op(s, CC_OP_ADDB + opsize);
3095         }
3096         gen_update_cc_add(dest, val, opsize);
3097     }
3098     tcg_temp_free(val);
3099     DEST_EA(env, insn, opsize, dest, &addr);
3100     tcg_temp_free(dest);
3101 }
3102 
/*
 * BRA/BSR/Bcc: PC-relative branch.  An 8-bit displacement of 0 selects
 * a following 16-bit displacement word; a displacement of -1 (0xff)
 * selects a 32-bit displacement.
 */
DISAS_INSN(branch)
{
    int32_t offset;
    uint32_t base;
    int op;

    /* Displacements are relative to the end of the first insn word.  */
    base = s->pc;
    op = (insn >> 8) & 0xf;
    offset = (int8_t)insn;
    if (offset == 0) {
        offset = (int16_t)read_im16(env, s);
    } else if (offset == -1) {
        offset = read_im32(env, s);
    }
    if (op == 1) {
        /* bsr */
        gen_push(s, tcg_const_i32(s->pc));
    }
    if (op > 1) {
        /* Bcc */
        TCGLabel *l1 = gen_new_label();
        /* Skip the taken path when the inverted condition holds.  */
        gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
        gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
        gen_set_label(l1);
        gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
    } else {
        /* Unconditional branch.  */
        update_cc_op(s);
        gen_jmp_tb(s, 0, base + offset, s->base.pc_next);
    }
}
3134 
3135 DISAS_INSN(moveq)
3136 {
3137     tcg_gen_movi_i32(DREG(insn, 9), (int8_t)insn);
3138     gen_logic_cc(s, DREG(insn, 9), OS_LONG);
3139 }
3140 
3141 DISAS_INSN(mvzs)
3142 {
3143     int opsize;
3144     TCGv src;
3145     TCGv reg;
3146 
3147     if (insn & 0x40)
3148         opsize = OS_WORD;
3149     else
3150         opsize = OS_BYTE;
3151     SRC_EA(env, src, opsize, (insn & 0x80) == 0, NULL);
3152     reg = DREG(insn, 9);
3153     tcg_gen_mov_i32(reg, src);
3154     gen_logic_cc(s, src, opsize);
3155 }
3156 
3157 DISAS_INSN(or)
3158 {
3159     TCGv reg;
3160     TCGv dest;
3161     TCGv src;
3162     TCGv addr;
3163     int opsize;
3164 
3165     opsize = insn_opsize(insn);
3166     reg = gen_extend(s, DREG(insn, 9), opsize, 0);
3167     dest = tcg_temp_new();
3168     if (insn & 0x100) {
3169         SRC_EA(env, src, opsize, 0, &addr);
3170         tcg_gen_or_i32(dest, src, reg);
3171         DEST_EA(env, insn, opsize, dest, &addr);
3172     } else {
3173         SRC_EA(env, src, opsize, 0, NULL);
3174         tcg_gen_or_i32(dest, src, reg);
3175         gen_partset_reg(opsize, DREG(insn, 9), dest);
3176     }
3177     gen_logic_cc(s, dest, opsize);
3178     tcg_temp_free(dest);
3179 }
3180 
3181 DISAS_INSN(suba)
3182 {
3183     TCGv src;
3184     TCGv reg;
3185 
3186     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3187     reg = AREG(insn, 9);
3188     tcg_gen_sub_i32(reg, reg, src);
3189 }
3190 
/*
 * Generate code for SUBX: dest - src - X, leaving the result in
 * QREG_CC_N and updating N, Z, V, C and X.  Z is sticky: it can only
 * be cleared, never set, so a multi-precision sequence yields Z for
 * the whole result.
 */
static inline void gen_subx(DisasContext *s, TCGv src, TCGv dest, int opsize)
{
    TCGv tmp;

    gen_flush_flags(s); /* compute old Z */

    /*
     * Perform subtract with borrow.
     * (X, N) = dest - (src + X);
     */

    tmp = tcg_const_i32(0);
    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, tmp, QREG_CC_X, tmp);
    tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, dest, tmp, QREG_CC_N, QREG_CC_X);
    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
    /* Keep only the borrow bit of the 2-word subtraction.  */
    tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);

    /* Compute signed-overflow for subtract.  */

    tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, dest);
    tcg_gen_xor_i32(tmp, dest, src);
    tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, tmp);
    tcg_temp_free(tmp);

    /* Copy the rest of the results into place.  */
    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
    tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);

    set_cc_op(s, CC_OP_FLAGS);

    /* result is in QREG_CC_N */
}
3223 
3224 DISAS_INSN(subx_reg)
3225 {
3226     TCGv dest;
3227     TCGv src;
3228     int opsize;
3229 
3230     opsize = insn_opsize(insn);
3231 
3232     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3233     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3234 
3235     gen_subx(s, src, dest, opsize);
3236 
3237     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3238 }
3239 
/*
 * SUBX -(Ay),-(Ax): memory form with predecrement addressing.  Each
 * address register is decremented by the operand size before its
 * operand is loaded; the result is stored back through Ax.
 */
DISAS_INSN(subx_mem)
{
    TCGv src;
    TCGv addr_src;
    TCGv dest;
    TCGv addr_dest;
    int opsize;

    opsize = insn_opsize(insn);

    addr_src = AREG(insn, 0);
    tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
    src = gen_load(s, opsize, addr_src, 1, IS_USER(s));

    addr_dest = AREG(insn, 9);
    tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
    dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));

    gen_subx(s, src, dest, opsize);

    /* gen_subx leaves the result in QREG_CC_N.  */
    gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));

    tcg_temp_free(dest);
    tcg_temp_free(src);
}
3265 
3266 DISAS_INSN(mov3q)
3267 {
3268     TCGv src;
3269     int val;
3270 
3271     val = (insn >> 9) & 7;
3272     if (val == 0)
3273         val = -1;
3274     src = tcg_const_i32(val);
3275     gen_logic_cc(s, src, OS_LONG);
3276     DEST_EA(env, insn, OS_LONG, src, NULL);
3277     tcg_temp_free(src);
3278 }
3279 
3280 DISAS_INSN(cmp)
3281 {
3282     TCGv src;
3283     TCGv reg;
3284     int opsize;
3285 
3286     opsize = insn_opsize(insn);
3287     SRC_EA(env, src, opsize, 1, NULL);
3288     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
3289     gen_update_cc_cmp(s, reg, src, opsize);
3290 }
3291 
3292 DISAS_INSN(cmpa)
3293 {
3294     int opsize;
3295     TCGv src;
3296     TCGv reg;
3297 
3298     if (insn & 0x100) {
3299         opsize = OS_LONG;
3300     } else {
3301         opsize = OS_WORD;
3302     }
3303     SRC_EA(env, src, opsize, 1, NULL);
3304     reg = AREG(insn, 9);
3305     gen_update_cc_cmp(s, reg, src, OS_LONG);
3306 }
3307 
/*
 * CMPM (Ay)+,(Ax)+: compare memory to memory with post-increment
 * addressing on both operands; only the flags change.
 */
DISAS_INSN(cmpm)
{
    int opsize = insn_opsize(insn);
    TCGv src, dst;

    /* Post-increment load (mode 3) from Ay.  */
    src = gen_ea_mode(env, s, 3, REG(insn, 0), opsize,
                      NULL_QREG, NULL, EA_LOADS, IS_USER(s));
    /* Post-increment load (mode 3) from Ax.  */
    dst = gen_ea_mode(env, s, 3, REG(insn, 9), opsize,
                      NULL_QREG, NULL, EA_LOADS, IS_USER(s));

    gen_update_cc_cmp(s, dst, src, opsize);
}
3322 
3323 DISAS_INSN(eor)
3324 {
3325     TCGv src;
3326     TCGv dest;
3327     TCGv addr;
3328     int opsize;
3329 
3330     opsize = insn_opsize(insn);
3331 
3332     SRC_EA(env, src, opsize, 0, &addr);
3333     dest = tcg_temp_new();
3334     tcg_gen_xor_i32(dest, src, DREG(insn, 9));
3335     gen_logic_cc(s, dest, opsize);
3336     DEST_EA(env, insn, opsize, dest, &addr);
3337     tcg_temp_free(dest);
3338 }
3339 
3340 static void do_exg(TCGv reg1, TCGv reg2)
3341 {
3342     TCGv temp = tcg_temp_new();
3343     tcg_gen_mov_i32(temp, reg1);
3344     tcg_gen_mov_i32(reg1, reg2);
3345     tcg_gen_mov_i32(reg2, temp);
3346     tcg_temp_free(temp);
3347 }
3348 
3349 DISAS_INSN(exg_dd)
3350 {
3351     /* exchange Dx and Dy */
3352     do_exg(DREG(insn, 9), DREG(insn, 0));
3353 }
3354 
3355 DISAS_INSN(exg_aa)
3356 {
3357     /* exchange Ax and Ay */
3358     do_exg(AREG(insn, 9), AREG(insn, 0));
3359 }
3360 
3361 DISAS_INSN(exg_da)
3362 {
3363     /* exchange Dx and Ay */
3364     do_exg(DREG(insn, 9), AREG(insn, 0));
3365 }
3366 
3367 DISAS_INSN(and)
3368 {
3369     TCGv src;
3370     TCGv reg;
3371     TCGv dest;
3372     TCGv addr;
3373     int opsize;
3374 
3375     dest = tcg_temp_new();
3376 
3377     opsize = insn_opsize(insn);
3378     reg = DREG(insn, 9);
3379     if (insn & 0x100) {
3380         SRC_EA(env, src, opsize, 0, &addr);
3381         tcg_gen_and_i32(dest, src, reg);
3382         DEST_EA(env, insn, opsize, dest, &addr);
3383     } else {
3384         SRC_EA(env, src, opsize, 0, NULL);
3385         tcg_gen_and_i32(dest, src, reg);
3386         gen_partset_reg(opsize, reg, dest);
3387     }
3388     gen_logic_cc(s, dest, opsize);
3389     tcg_temp_free(dest);
3390 }
3391 
3392 DISAS_INSN(adda)
3393 {
3394     TCGv src;
3395     TCGv reg;
3396 
3397     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3398     reg = AREG(insn, 9);
3399     tcg_gen_add_i32(reg, reg, src);
3400 }
3401 
/*
 * Generate code for ADDX: src + dest + X, leaving the result in
 * QREG_CC_N and updating N, Z, V, C and X.  Z is sticky: it can only
 * be cleared, never set, so a multi-precision sequence yields Z for
 * the whole result.
 */
static inline void gen_addx(DisasContext *s, TCGv src, TCGv dest, int opsize)
{
    TCGv tmp;

    gen_flush_flags(s); /* compute old Z */

    /*
     * Perform addition with carry.
     * (X, N) = src + dest + X;
     */

    tmp = tcg_const_i32(0);
    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_X, tmp, dest, tmp);
    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_N, QREG_CC_X, src, tmp);
    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);

    /* Compute signed-overflow for addition.  */

    tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
    tcg_gen_xor_i32(tmp, dest, src);
    tcg_gen_andc_i32(QREG_CC_V, QREG_CC_V, tmp);
    tcg_temp_free(tmp);

    /* Copy the rest of the results into place.  */
    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
    tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);

    set_cc_op(s, CC_OP_FLAGS);

    /* result is in QREG_CC_N */
}
3433 
3434 DISAS_INSN(addx_reg)
3435 {
3436     TCGv dest;
3437     TCGv src;
3438     int opsize;
3439 
3440     opsize = insn_opsize(insn);
3441 
3442     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3443     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3444 
3445     gen_addx(s, src, dest, opsize);
3446 
3447     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3448 }
3449 
/*
 * ADDX -(Ay),-(Ax): memory form with predecrement addressing.  Each
 * address register is decremented by the operand size before its
 * operand is loaded; the result is stored back through Ax.
 */
DISAS_INSN(addx_mem)
{
    TCGv src;
    TCGv addr_src;
    TCGv dest;
    TCGv addr_dest;
    int opsize;

    opsize = insn_opsize(insn);

    addr_src = AREG(insn, 0);
    tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
    src = gen_load(s, opsize, addr_src, 1, IS_USER(s));

    addr_dest = AREG(insn, 9);
    tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
    dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));

    gen_addx(s, src, dest, opsize);

    /* gen_addx leaves the result in QREG_CC_N.  */
    gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));

    tcg_temp_free(dest);
    tcg_temp_free(src);
}
3475 
/*
 * Immediate-count shift (ASL/ASR/LSL/LSR) on a data register.  The
 * 3-bit count field encodes 1..8, with 0 meaning 8.  Bit 3 of the
 * insn selects logical (LSx) vs arithmetic (ASx); bit 8 selects left.
 */
static inline void shift_im(DisasContext *s, uint16_t insn, int opsize)
{
    int count = (insn >> 9) & 7;
    int logical = insn & 8;
    int left = insn & 0x100;
    int bits = opsize_bytes(opsize) * 8;
    TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);

    if (count == 0) {
        count = 8;
    }

    tcg_gen_movi_i32(QREG_CC_V, 0);
    if (left) {
        /* C (bit 0 after masking below) is the last bit shifted out.  */
        tcg_gen_shri_i32(QREG_CC_C, reg, bits - count);
        tcg_gen_shli_i32(QREG_CC_N, reg, count);

        /*
         * Note that ColdFire always clears V (done above),
         * while M68000 sets if the most significant bit is changed at
         * any time during the shift operation.
         */
        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
            /* if shift count >= bits, V is (reg != 0) */
            if (count >= bits) {
                tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, reg, QREG_CC_V);
            } else {
                TCGv t0 = tcg_temp_new();
                /* Compare the sign bit against every bit shifted through.  */
                tcg_gen_sari_i32(QREG_CC_V, reg, bits - 1);
                tcg_gen_sari_i32(t0, reg, bits - count - 1);
                tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, t0);
                tcg_temp_free(t0);
            }
            /* Turn the 0/1 setcond result into the 0/-1 flag form.  */
            tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
        }
    } else {
        /* C (bit 0 after masking below) is the last bit shifted out.  */
        tcg_gen_shri_i32(QREG_CC_C, reg, count - 1);
        if (logical) {
            tcg_gen_shri_i32(QREG_CC_N, reg, count);
        } else {
            tcg_gen_sari_i32(QREG_CC_N, reg, count);
        }
    }

    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
    tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);

    gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
    set_cc_op(s, CC_OP_FLAGS);
}
3528 
/*
 * Register-count shift (ASL/ASR/LSL/LSR) on a data register.  The
 * count comes from DREG(insn, 9), truncated modulo 64.  The shift is
 * performed in 64 bits so the last bit shifted out can be recovered
 * for the C flag.
 */
static inline void shift_reg(DisasContext *s, uint16_t insn, int opsize)
{
    int logical = insn & 8;
    int left = insn & 0x100;
    int bits = opsize_bytes(opsize) * 8;
    TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
    TCGv s32;
    TCGv_i64 t64, s64;

    t64 = tcg_temp_new_i64();
    s64 = tcg_temp_new_i64();
    s32 = tcg_temp_new();

    /*
     * Note that m68k truncates the shift count modulo 64, not 32.
     * In addition, a 64-bit shift makes it easy to find "the last
     * bit shifted out", for the carry flag.
     */
    tcg_gen_andi_i32(s32, DREG(insn, 9), 63);
    tcg_gen_extu_i32_i64(s64, s32);
    tcg_gen_extu_i32_i64(t64, reg);

    /* Optimistically set V=0.  Also used as a zero source below.  */
    tcg_gen_movi_i32(QREG_CC_V, 0);
    if (left) {
        tcg_gen_shl_i64(t64, t64, s64);

        if (opsize == OS_LONG) {
            /* The high half of the 64-bit result holds the bits
               shifted out; its lsb is the carry.  */
            tcg_gen_extr_i64_i32(QREG_CC_N, QREG_CC_C, t64);
            /* Note that C=0 if shift count is 0, and we get that for free.  */
        } else {
            TCGv zero = tcg_const_i32(0);
            tcg_gen_extrl_i64_i32(QREG_CC_N, t64);
            tcg_gen_shri_i32(QREG_CC_C, QREG_CC_N, bits);
            /* Force C=0 when the shift count is 0.  */
            tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
                                s32, zero, zero, QREG_CC_C);
            tcg_temp_free(zero);
        }
        tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);

        /* X = C, but only if the shift count was non-zero.  */
        tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
                            QREG_CC_C, QREG_CC_X);

        /*
         * M68000 sets V if the most significant bit is changed at
         * any time during the shift operation.  Do this via creating
         * an extension of the sign bit, comparing, and discarding
         * the bits below the sign bit.  I.e.
         *     int64_t s = (intN_t)reg;
         *     int64_t t = (int64_t)(intN_t)reg << count;
         *     V = ((s ^ t) & (-1 << (bits - 1))) != 0
         */
        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
            TCGv_i64 tt = tcg_const_i64(32);
            /* if shift is greater than 32, use 32 */
            tcg_gen_movcond_i64(TCG_COND_GT, s64, s64, tt, tt, s64);
            tcg_temp_free_i64(tt);
            /* Sign extend the input to 64 bits; re-do the shift.  */
            tcg_gen_ext_i32_i64(t64, reg);
            tcg_gen_shl_i64(s64, t64, s64);
            /* Clear all bits that are unchanged.  */
            tcg_gen_xor_i64(t64, t64, s64);
            /* Ignore the bits below the sign bit.  */
            tcg_gen_andi_i64(t64, t64, -1ULL << (bits - 1));
            /* If any bits remain set, we have overflow.  */
            tcg_gen_setcondi_i64(TCG_COND_NE, t64, t64, 0);
            tcg_gen_extrl_i64_i32(QREG_CC_V, t64);
            /* Turn the 0/1 setcond result into the 0/-1 flag form.  */
            tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
        }
    } else {
        /* Shift the value into the high half first, so the bit shifted
           out lands in the top of the low half.  */
        tcg_gen_shli_i64(t64, t64, 32);
        if (logical) {
            tcg_gen_shr_i64(t64, t64, s64);
        } else {
            tcg_gen_sar_i64(t64, t64, s64);
        }
        tcg_gen_extr_i64_i32(QREG_CC_C, QREG_CC_N, t64);

        /* Note that C=0 if shift count is 0, and we get that for free.  */
        tcg_gen_shri_i32(QREG_CC_C, QREG_CC_C, 31);

        /* X = C, but only if the shift count was non-zero.  */
        tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
                            QREG_CC_C, QREG_CC_X);
    }
    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);

    tcg_temp_free(s32);
    tcg_temp_free_i64(s64);
    tcg_temp_free_i64(t64);

    /* Write back the result.  */
    gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
    set_cc_op(s, CC_OP_FLAGS);
}
3626 
/* Byte-sized shift with immediate count.  */
DISAS_INSN(shift8_im)
{
    shift_im(s, insn, OS_BYTE);
}
3631 
/* Word-sized shift with immediate count.  */
DISAS_INSN(shift16_im)
{
    shift_im(s, insn, OS_WORD);
}
3636 
/* Long-sized shift with immediate count.  */
DISAS_INSN(shift_im)
{
    shift_im(s, insn, OS_LONG);
}
3641 
/* Byte-sized shift with register count.  */
DISAS_INSN(shift8_reg)
{
    shift_reg(s, insn, OS_BYTE);
}
3646 
/* Word-sized shift with register count.  */
DISAS_INSN(shift16_reg)
{
    shift_reg(s, insn, OS_WORD);
}
3651 
/* Long-sized shift with register count.  */
DISAS_INSN(shift_reg)
{
    shift_reg(s, insn, OS_LONG);
}
3656 
/* Memory shift: always word-sized and always by a single bit.  */
DISAS_INSN(shift_mem)
{
    int logical = insn & 8;
    int left = insn & 0x100;
    TCGv src;
    TCGv addr;

    SRC_EA(env, src, OS_WORD, !logical, &addr);
    tcg_gen_movi_i32(QREG_CC_V, 0);
    if (left) {
        /* C is the bit shifted out (bit 15 of the word).  */
        tcg_gen_shri_i32(QREG_CC_C, src, 15);
        tcg_gen_shli_i32(QREG_CC_N, src, 1);

        /*
         * Note that ColdFire always clears V,
         * while M68000 sets if the most significant bit is changed at
         * any time during the shift operation
         */
        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
            /* With a 1-bit shift, V is set iff the sign bit changed.  */
            src = gen_extend(s, src, OS_WORD, 1);
            tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
        }
    } else {
        /* C is the bit shifted out (bit 0).  */
        tcg_gen_mov_i32(QREG_CC_C, src);
        if (logical) {
            tcg_gen_shri_i32(QREG_CC_N, src, 1);
        } else {
            tcg_gen_sari_i32(QREG_CC_N, src, 1);
        }
    }

    gen_ext(QREG_CC_N, QREG_CC_N, OS_WORD, 1);
    tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);

    DEST_EA(env, insn, OS_WORD, QREG_CC_N, &addr);
    set_cc_op(s, CC_OP_FLAGS);
}
3696 
/*
 * Rotate reg left or right by shift, operating on the low size bits
 * (8, 16 or 32), and compute N, Z, C and V.  X is not affected.  For
 * 8/16-bit sizes the value is replicated to fill 32 bits so that the
 * 32-bit rotate produces the correct result in the low bits; it is
 * sign-extended afterwards for the flag computation.
 */
static void rotate(TCGv reg, TCGv shift, int left, int size)
{
    switch (size) {
    case 8:
        /* Replicate the 8-bit input so that a 32-bit rotate works.  */
        tcg_gen_ext8u_i32(reg, reg);
        tcg_gen_muli_i32(reg, reg, 0x01010101);
        goto do_long;
    case 16:
        /* Replicate the 16-bit input so that a 32-bit rotate works.  */
        tcg_gen_deposit_i32(reg, reg, reg, 16, 16);
        goto do_long;
    do_long:
    default:
        if (left) {
            tcg_gen_rotl_i32(reg, reg, shift);
        } else {
            tcg_gen_rotr_i32(reg, reg, shift);
        }
    }

    /* compute flags */

    switch (size) {
    case 8:
        tcg_gen_ext8s_i32(reg, reg);
        break;
    case 16:
        tcg_gen_ext16s_i32(reg, reg);
        break;
    default:
        break;
    }

    /* QREG_CC_X is not affected */

    tcg_gen_mov_i32(QREG_CC_N, reg);
    tcg_gen_mov_i32(QREG_CC_Z, reg);

    /* C is the bit rotated into the opposite end.  */
    if (left) {
        tcg_gen_andi_i32(QREG_CC_C, reg, 1);
    } else {
        tcg_gen_shri_i32(QREG_CC_C, reg, 31);
    }

    tcg_gen_movi_i32(QREG_CC_V, 0); /* always cleared */
}
3744 
3745 static void rotate_x_flags(TCGv reg, TCGv X, int size)
3746 {
3747     switch (size) {
3748     case 8:
3749         tcg_gen_ext8s_i32(reg, reg);
3750         break;
3751     case 16:
3752         tcg_gen_ext16s_i32(reg, reg);
3753         break;
3754     default:
3755         break;
3756     }
3757     tcg_gen_mov_i32(QREG_CC_N, reg);
3758     tcg_gen_mov_i32(QREG_CC_Z, reg);
3759     tcg_gen_mov_i32(QREG_CC_X, X);
3760     tcg_gen_mov_i32(QREG_CC_C, X);
3761     tcg_gen_movi_i32(QREG_CC_V, 0);
3762 }
3763 
/*
 * Rotate-through-X (ROXL/ROXR) for 8/16-bit sizes, implemented as a
 * pair of opposing shifts plus the X bit inserted in between.
 * Result of rotate_x() is valid if 0 <= shift <= size.
 * Returns a new temporary holding the new X bit.
 */
static TCGv rotate_x(TCGv reg, TCGv shift, int left, int size)
{
    TCGv X, shl, shr, shx, sz, zero;

    sz = tcg_const_i32(size);

    shr = tcg_temp_new();
    shl = tcg_temp_new();
    shx = tcg_temp_new();
    if (left) {
        tcg_gen_mov_i32(shl, shift);      /* shl = shift */
        tcg_gen_movi_i32(shr, size + 1);
        tcg_gen_sub_i32(shr, shr, shift); /* shr = size + 1 - shift */
        tcg_gen_subi_i32(shx, shift, 1);  /* shx = shift - 1 */
        /* shx = shx < 0 ? size : shx; */
        zero = tcg_const_i32(0);
        tcg_gen_movcond_i32(TCG_COND_LT, shx, shx, zero, sz, shx);
        tcg_temp_free(zero);
    } else {
        tcg_gen_mov_i32(shr, shift);      /* shr = shift */
        tcg_gen_movi_i32(shl, size + 1);
        tcg_gen_sub_i32(shl, shl, shift); /* shl = size + 1 - shift */
        tcg_gen_sub_i32(shx, sz, shift); /* shx = size - shift */
    }
    tcg_temp_free_i32(sz);

    /* reg = (reg << shl) | (reg >> shr) | (x << shx); */

    tcg_gen_shl_i32(shl, reg, shl);
    tcg_gen_shr_i32(shr, reg, shr);
    tcg_gen_or_i32(reg, shl, shr);
    tcg_temp_free(shl);
    tcg_temp_free(shr);
    tcg_gen_shl_i32(shx, QREG_CC_X, shx);
    tcg_gen_or_i32(reg, reg, shx);
    tcg_temp_free(shx);

    /* X = (reg >> size) & 1 */

    X = tcg_temp_new();
    tcg_gen_extract_i32(X, reg, size, 1);

    return X;
}
3809 
/*
 * 32-bit rotate-through-X (ROXL/ROXR), done as a 64-bit rotate of the
 * value concatenated with the X bit.
 * Result of rotate32_x() is valid if 0 <= shift < 33.
 * Returns a new temporary holding the new X bit; reg and X are left
 * unchanged when shift == 0.
 */
static TCGv rotate32_x(TCGv reg, TCGv shift, int left)
{
    TCGv_i64 t0, shift64;
    TCGv X, lo, hi, zero;

    shift64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(shift64, shift);

    t0 = tcg_temp_new_i64();

    X = tcg_temp_new();
    lo = tcg_temp_new();
    hi = tcg_temp_new();

    if (left) {
        /* create [reg:X:..] */

        tcg_gen_shli_i32(lo, QREG_CC_X, 31);
        tcg_gen_concat_i32_i64(t0, lo, reg);

        /* rotate */

        tcg_gen_rotl_i64(t0, t0, shift64);
        tcg_temp_free_i64(shift64);

        /* result is [reg:..:reg:X] */

        tcg_gen_extr_i64_i32(lo, hi, t0);
        tcg_gen_andi_i32(X, lo, 1);

        tcg_gen_shri_i32(lo, lo, 1);
    } else {
        /* create [..:X:reg] */

        tcg_gen_concat_i32_i64(t0, reg, QREG_CC_X);

        tcg_gen_rotr_i64(t0, t0, shift64);
        tcg_temp_free_i64(shift64);

        /* result is value: [X:reg:..:reg] */

        tcg_gen_extr_i64_i32(lo, hi, t0);

        /* extract X */

        tcg_gen_shri_i32(X, hi, 31);

        /* extract result */

        tcg_gen_shli_i32(hi, hi, 1);
    }
    tcg_temp_free_i64(t0);
    /* Combine the two halves of the rotated value.  */
    tcg_gen_or_i32(lo, lo, hi);
    tcg_temp_free(hi);

    /* if shift == 0, register and X are not affected */

    zero = tcg_const_i32(0);
    tcg_gen_movcond_i32(TCG_COND_EQ, X, shift, zero, QREG_CC_X, X);
    tcg_gen_movcond_i32(TCG_COND_EQ, reg, shift, zero, reg, lo);
    tcg_temp_free(zero);
    tcg_temp_free(lo);

    return X;
}
3876 
3877 DISAS_INSN(rotate_im)
3878 {
3879     TCGv shift;
3880     int tmp;
3881     int left = (insn & 0x100);
3882 
3883     tmp = (insn >> 9) & 7;
3884     if (tmp == 0) {
3885         tmp = 8;
3886     }
3887 
3888     shift = tcg_const_i32(tmp);
3889     if (insn & 8) {
3890         rotate(DREG(insn, 0), shift, left, 32);
3891     } else {
3892         TCGv X = rotate32_x(DREG(insn, 0), shift, left);
3893         rotate_x_flags(DREG(insn, 0), X, 32);
3894         tcg_temp_free(X);
3895     }
3896     tcg_temp_free(shift);
3897 
3898     set_cc_op(s, CC_OP_FLAGS);
3899 }
3900 
3901 DISAS_INSN(rotate8_im)
3902 {
3903     int left = (insn & 0x100);
3904     TCGv reg;
3905     TCGv shift;
3906     int tmp;
3907 
3908     reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
3909 
3910     tmp = (insn >> 9) & 7;
3911     if (tmp == 0) {
3912         tmp = 8;
3913     }
3914 
3915     shift = tcg_const_i32(tmp);
3916     if (insn & 8) {
3917         rotate(reg, shift, left, 8);
3918     } else {
3919         TCGv X = rotate_x(reg, shift, left, 8);
3920         rotate_x_flags(reg, X, 8);
3921         tcg_temp_free(X);
3922     }
3923     tcg_temp_free(shift);
3924     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3925     set_cc_op(s, CC_OP_FLAGS);
3926 }
3927 
3928 DISAS_INSN(rotate16_im)
3929 {
3930     int left = (insn & 0x100);
3931     TCGv reg;
3932     TCGv shift;
3933     int tmp;
3934 
3935     reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
3936     tmp = (insn >> 9) & 7;
3937     if (tmp == 0) {
3938         tmp = 8;
3939     }
3940 
3941     shift = tcg_const_i32(tmp);
3942     if (insn & 8) {
3943         rotate(reg, shift, left, 16);
3944     } else {
3945         TCGv X = rotate_x(reg, shift, left, 16);
3946         rotate_x_flags(reg, X, 16);
3947         tcg_temp_free(X);
3948     }
3949     tcg_temp_free(shift);
3950     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3951     set_cc_op(s, CC_OP_FLAGS);
3952 }
3953 
/*
 * Long-sized rotate with register count.  Plain rotates take the
 * count modulo 32 but must clear C when the raw count (mod 64) is 0;
 * rotate-through-X takes the count modulo 33.
 */
DISAS_INSN(rotate_reg)
{
    TCGv reg;
    TCGv src;
    TCGv t0, t1;
    int left = (insn & 0x100);

    reg = DREG(insn, 0);
    src = DREG(insn, 9);
    /* shift in [0..63] */
    t0 = tcg_temp_new();
    tcg_gen_andi_i32(t0, src, 63);
    t1 = tcg_temp_new_i32();
    if (insn & 8) {
        tcg_gen_andi_i32(t1, src, 31);
        rotate(reg, t1, left, 32);
        /* if shift == 0, clear C */
        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
                            t0, QREG_CC_V /* 0 */,
                            QREG_CC_V /* 0 */, QREG_CC_C);
    } else {
        TCGv X;
        /* modulo 33 */
        tcg_gen_movi_i32(t1, 33);
        tcg_gen_remu_i32(t1, t0, t1);
        X = rotate32_x(DREG(insn, 0), t1, left);
        rotate_x_flags(DREG(insn, 0), X, 32);
        tcg_temp_free(X);
    }
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    set_cc_op(s, CC_OP_FLAGS);
}
3987 
/*
 * Byte-sized rotate with register count.  Plain rotates take the
 * count modulo 8 but must clear C when the raw count (mod 64) is 0;
 * rotate-through-X takes the count modulo 9.
 */
DISAS_INSN(rotate8_reg)
{
    TCGv reg;
    TCGv src;
    TCGv t0, t1;
    int left = (insn & 0x100);

    reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
    src = DREG(insn, 9);
    /* shift in [0..63] */
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, src, 63);
    t1 = tcg_temp_new_i32();
    if (insn & 8) {
        tcg_gen_andi_i32(t1, src, 7);
        rotate(reg, t1, left, 8);
        /* if shift == 0, clear C */
        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
                            t0, QREG_CC_V /* 0 */,
                            QREG_CC_V /* 0 */, QREG_CC_C);
    } else {
        TCGv X;
        /* modulo 9 */
        tcg_gen_movi_i32(t1, 9);
        tcg_gen_remu_i32(t1, t0, t1);
        X = rotate_x(reg, t1, left, 8);
        rotate_x_flags(reg, X, 8);
        tcg_temp_free(X);
    }
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
    set_cc_op(s, CC_OP_FLAGS);
}
4022 
/*
 * Word-sized rotate with register count.  Plain rotates take the
 * count modulo 16 but must clear C when the raw count (mod 64) is 0;
 * rotate-through-X takes the count modulo 17.
 */
DISAS_INSN(rotate16_reg)
{
    TCGv reg;
    TCGv src;
    TCGv t0, t1;
    int left = (insn & 0x100);

    reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
    src = DREG(insn, 9);
    /* shift in [0..63] */
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, src, 63);
    t1 = tcg_temp_new_i32();
    if (insn & 8) {
        tcg_gen_andi_i32(t1, src, 15);
        rotate(reg, t1, left, 16);
        /* if shift == 0, clear C */
        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
                            t0, QREG_CC_V /* 0 */,
                            QREG_CC_V /* 0 */, QREG_CC_C);
    } else {
        TCGv X;
        /* modulo 17 */
        tcg_gen_movi_i32(t1, 17);
        tcg_gen_remu_i32(t1, t0, t1);
        X = rotate_x(reg, t1, left, 16);
        rotate_x_flags(reg, X, 16);
        tcg_temp_free(X);
    }
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
    set_cc_op(s, CC_OP_FLAGS);
}
4057 
4058 DISAS_INSN(rotate_mem)
4059 {
4060     TCGv src;
4061     TCGv addr;
4062     TCGv shift;
4063     int left = (insn & 0x100);
4064 
4065     SRC_EA(env, src, OS_WORD, 0, &addr);
4066 
4067     shift = tcg_const_i32(1);
4068     if (insn & 0x0200) {
4069         rotate(src, shift, left, 16);
4070     } else {
4071         TCGv X = rotate_x(src, shift, left, 16);
4072         rotate_x_flags(src, X, 16);
4073         tcg_temp_free(X);
4074     }
4075     tcg_temp_free(shift);
4076     DEST_EA(env, insn, OS_WORD, src, &addr);
4077     set_cc_op(s, CC_OP_FLAGS);
4078 }
4079 
/*
 * BFEXTU/BFEXTS with a data-register source: extract an unsigned
 * (bit 9 clear) or signed (bit 9 set) bitfield into DREG(ext, 12).
 * Width and offset each come from either an immediate field or a
 * data register, selected by bits 5 and 11 of the extension word.
 */
DISAS_INSN(bfext_reg)
{
    int ext = read_im16(env, s);
    int is_sign = insn & 0x200;
    TCGv src = DREG(insn, 0);
    TCGv dst = DREG(ext, 12);
    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
    int pos = 32 - ofs - len;        /* little bit-endian */
    TCGv tmp = tcg_temp_new();
    TCGv shift;

    /*
     * In general, we're going to rotate the field so that it's at the
     * top of the word and then right-shift by the complement of the
     * width to extend the field.
     */
    if (ext & 0x20) {
        /* Variable width.  */
        if (ext & 0x800) {
            /* Variable offset.  */
            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
            tcg_gen_rotl_i32(tmp, src, tmp);
        } else {
            tcg_gen_rotli_i32(tmp, src, ofs);
        }

        /* shift = 32 - width, computed as (-width) mod 32.  */
        shift = tcg_temp_new();
        tcg_gen_neg_i32(shift, DREG(ext, 0));
        tcg_gen_andi_i32(shift, shift, 31);
        /* N holds the sign-extended field for the flag computation.  */
        tcg_gen_sar_i32(QREG_CC_N, tmp, shift);
        if (is_sign) {
            tcg_gen_mov_i32(dst, QREG_CC_N);
        } else {
            tcg_gen_shr_i32(dst, tmp, shift);
        }
        tcg_temp_free(shift);
    } else {
        /* Immediate width.  */
        if (ext & 0x800) {
            /* Variable offset */
            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
            tcg_gen_rotl_i32(tmp, src, tmp);
            src = tmp;
            pos = 32 - len;
        } else {
            /*
             * Immediate offset.  If the field doesn't wrap around the
             * end of the word, rely on (s)extract completely.
             */
            if (pos < 0) {
                tcg_gen_rotli_i32(tmp, src, ofs);
                src = tmp;
                pos = 32 - len;
            }
        }

        /* N holds the sign-extended field for the flag computation.  */
        tcg_gen_sextract_i32(QREG_CC_N, src, pos, len);
        if (is_sign) {
            tcg_gen_mov_i32(dst, QREG_CC_N);
        } else {
            tcg_gen_extract_i32(dst, src, pos, len);
        }
    }

    tcg_temp_free(tmp);
    set_cc_op(s, CC_OP_LOGIC);
}
4148 
/*
 * BFEXTU/BFEXTS with a memory source, implemented via helpers.  Width
 * and offset each come from either an immediate field or a data
 * register, selected by bits 5 and 11 of the extension word.
 */
DISAS_INSN(bfext_mem)
{
    int ext = read_im16(env, s);
    int is_sign = insn & 0x200;
    TCGv dest = DREG(ext, 12);
    TCGv addr, len, ofs;

    addr = gen_lea(env, s, insn, OS_UNSIZED);
    if (IS_NULL_QREG(addr)) {
        gen_addr_fault(s);
        return;
    }

    if (ext & 0x20) {
        len = DREG(ext, 0);
    } else {
        len = tcg_const_i32(extract32(ext, 0, 5));
    }
    if (ext & 0x800) {
        ofs = DREG(ext, 6);
    } else {
        ofs = tcg_const_i32(extract32(ext, 6, 5));
    }

    if (is_sign) {
        gen_helper_bfexts_mem(dest, cpu_env, addr, ofs, len);
        tcg_gen_mov_i32(QREG_CC_N, dest);
    } else {
        /* The unsigned helper packs result and flag value in one i64.  */
        TCGv_i64 tmp = tcg_temp_new_i64();
        gen_helper_bfextu_mem(tmp, cpu_env, addr, ofs, len);
        tcg_gen_extr_i64_i32(dest, QREG_CC_N, tmp);
        tcg_temp_free_i64(tmp);
    }
    set_cc_op(s, CC_OP_LOGIC);

    /* Only free the temps we allocated; registers stay live.  */
    if (!(ext & 0x20)) {
        tcg_temp_free(len);
    }
    if (!(ext & 0x800)) {
        tcg_temp_free(ofs);
    }
}
4191 
/*
 * Bitfield ops (bfchg/bfclr/bfset/bftst/bfffo) on a data register.
 *
 * Both paths below establish two invariants before the final switch:
 *   - QREG_CC_N holds the field value rotated to the most-significant
 *     'len' bits with the bits below cleared (used for N/Z flags);
 *   - 'mask' holds ones everywhere EXCEPT the field's bit positions.
 */
DISAS_INSN(bfop_reg)
{
    int ext = read_im16(env, s);
    TCGv src = DREG(insn, 0);
    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;   /* width 0 encodes 32 */
    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
    TCGv mask, tofs, tlen;

    /* bfffo additionally needs the offset and width as TCG values. */
    tofs = NULL;
    tlen = NULL;
    if ((insn & 0x0f00) == 0x0d00) { /* bfffo */
        tofs = tcg_temp_new();
        tlen = tcg_temp_new();
    }

    if ((ext & 0x820) == 0) {
        /* Immediate width and offset.  */
        uint32_t maski = 0x7fffffffu >> (len - 1);   /* zeros in top len bits */
        if (ofs + len <= 32) {
            /* Field does not wrap: a plain shift left-aligns it. */
            tcg_gen_shli_i32(QREG_CC_N, src, ofs);
        } else {
            tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
        }
        tcg_gen_andi_i32(QREG_CC_N, QREG_CC_N, ~maski);
        mask = tcg_const_i32(ror32(maski, ofs));
        if (tofs) {
            tcg_gen_movi_i32(tofs, ofs);
            tcg_gen_movi_i32(tlen, len);
        }
    } else {
        TCGv tmp = tcg_temp_new();
        if (ext & 0x20) {
            /* Variable width */
            tcg_gen_subi_i32(tmp, DREG(ext, 0), 1);
            tcg_gen_andi_i32(tmp, tmp, 31);          /* (width - 1) mod 32 */
            mask = tcg_const_i32(0x7fffffffu);
            tcg_gen_shr_i32(mask, mask, tmp);        /* zeros in top 'width' bits */
            if (tlen) {
                tcg_gen_addi_i32(tlen, tmp, 1);
            }
        } else {
            /* Immediate width */
            mask = tcg_const_i32(0x7fffffffu >> (len - 1));
        if (tlen) {
                tcg_gen_movi_i32(tlen, len);
            }
        }
        if (ext & 0x800) {
            /* Variable offset */
            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
            tcg_gen_rotl_i32(QREG_CC_N, src, tmp);
            /* Clear below the field while mask is still top-aligned... */
            tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
            /* ...then rotate the mask's zeros onto the field position. */
            tcg_gen_rotr_i32(mask, mask, tmp);
            if (tofs) {
                tcg_gen_mov_i32(tofs, tmp);
            }
        } else {
            /* Immediate offset (and variable width) */
            tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
            tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
            tcg_gen_rotri_i32(mask, mask, ofs);
            if (tofs) {
                tcg_gen_movi_i32(tofs, ofs);
            }
        }
        tcg_temp_free(tmp);
    }
    set_cc_op(s, CC_OP_LOGIC);

    switch (insn & 0x0f00) {
    case 0x0a00: /* bfchg: flip the field bits (xor with ~mask). */
        tcg_gen_eqv_i32(src, src, mask);
        break;
    case 0x0c00: /* bfclr: clear the field bits. */
        tcg_gen_and_i32(src, src, mask);
        break;
    case 0x0d00: /* bfffo */
        gen_helper_bfffo_reg(DREG(ext, 12), QREG_CC_N, tofs, tlen);
        tcg_temp_free(tlen);
        tcg_temp_free(tofs);
        break;
    case 0x0e00: /* bfset: set the field bits (or with ~mask). */
        tcg_gen_orc_i32(src, src, mask);
        break;
    case 0x0800: /* bftst */
        /* flags already set; no other work to do.  */
        break;
    default:
        g_assert_not_reached();
    }
    tcg_temp_free(mask);
}
4284 
/* Bitfield ops (bfchg/bfclr/bfset/bftst/bfffo) on a memory operand. */
DISAS_INSN(bfop_mem)
{
    int ext = read_im16(env, s);
    TCGv addr, len, ofs;
    TCGv_i64 t64;

    addr = gen_lea(env, s, insn, OS_UNSIZED);
    if (IS_NULL_QREG(addr)) {
        gen_addr_fault(s);
        return;
    }

    /* Width: data register if ext bit 5 set, else 5-bit immediate. */
    if (ext & 0x20) {
        len = DREG(ext, 0);
    } else {
        len = tcg_const_i32(extract32(ext, 0, 5));
    }
    /* Offset: data register if ext bit 11 set, else 5-bit immediate. */
    if (ext & 0x800) {
        ofs = DREG(ext, 6);
    } else {
        ofs = tcg_const_i32(extract32(ext, 6, 5));
    }

    /* Each helper returns the value used to compute the N/Z flags. */
    switch (insn & 0x0f00) {
    case 0x0a00: /* bfchg */
        gen_helper_bfchg_mem(QREG_CC_N, cpu_env, addr, ofs, len);
        break;
    case 0x0c00: /* bfclr */
        gen_helper_bfclr_mem(QREG_CC_N, cpu_env, addr, ofs, len);
        break;
    case 0x0d00: /* bfffo */
        /*
         * The helper packs two results into one i64: the bfffo result
         * (low half) and the N flag value (high half).
         */
        t64 = tcg_temp_new_i64();
        gen_helper_bfffo_mem(t64, cpu_env, addr, ofs, len);
        tcg_gen_extr_i64_i32(DREG(ext, 12), QREG_CC_N, t64);
        tcg_temp_free_i64(t64);
        break;
    case 0x0e00: /* bfset */
        gen_helper_bfset_mem(QREG_CC_N, cpu_env, addr, ofs, len);
        break;
    case 0x0800: /* bftst */
        /* bftst only needs the flags; reuse the sign-extract helper. */
        gen_helper_bfexts_mem(QREG_CC_N, cpu_env, addr, ofs, len);
        break;
    default:
        g_assert_not_reached();
    }
    set_cc_op(s, CC_OP_LOGIC);

    /* Free only the temps created for immediate operands above. */
    if (!(ext & 0x20)) {
        tcg_temp_free(len);
    }
    if (!(ext & 0x800)) {
        tcg_temp_free(ofs);
    }
}
4339 
/* BFINS into a data register: insert the low bits of src into a field of dst. */
DISAS_INSN(bfins_reg)
{
    int ext = read_im16(env, s);
    TCGv dst = DREG(insn, 0);
    TCGv src = DREG(ext, 12);
    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;   /* width 0 encodes 32 */
    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
    int pos = 32 - ofs - len;        /* little bit-endian */
    TCGv tmp;

    tmp = tcg_temp_new();

    /* N/Z flags are computed on the inserted value, left-aligned in CC_N. */
    if (ext & 0x20) {
        /* Variable width */
        tcg_gen_neg_i32(tmp, DREG(ext, 0));
        tcg_gen_andi_i32(tmp, tmp, 31);              /* (32 - width) mod 32 */
        tcg_gen_shl_i32(QREG_CC_N, src, tmp);
    } else {
        /* Immediate width */
        tcg_gen_shli_i32(QREG_CC_N, src, 32 - len);
    }
    set_cc_op(s, CC_OP_LOGIC);

    /* Immediate width and offset */
    if ((ext & 0x820) == 0) {
        /* Check for suitability for deposit.  */
        if (pos >= 0) {
            /* Field does not wrap past bit 0: a single deposit suffices. */
            tcg_gen_deposit_i32(dst, dst, src, pos, len);
        } else {
            /* Field wraps around: rotate the masked value into place. */
            uint32_t maski = -2U << (len - 1);       /* ones above the field width */
            uint32_t roti = (ofs + len) & 31;
            tcg_gen_andi_i32(tmp, src, ~maski);      /* low 'len' bits of src */
            tcg_gen_rotri_i32(tmp, tmp, roti);
            tcg_gen_andi_i32(dst, dst, ror32(maski, roti));
            tcg_gen_or_i32(dst, dst, tmp);
        }
    } else {
        TCGv mask = tcg_temp_new();
        TCGv rot = tcg_temp_new();

        if (ext & 0x20) {
            /* Variable width */
            tcg_gen_subi_i32(rot, DREG(ext, 0), 1);
            tcg_gen_andi_i32(rot, rot, 31);
            tcg_gen_movi_i32(mask, -2);
            tcg_gen_shl_i32(mask, mask, rot);        /* mask = -2 << (width - 1) */
            tcg_gen_mov_i32(rot, DREG(ext, 0));      /* rot = width, for below */
            tcg_gen_andc_i32(tmp, src, mask);        /* low 'width' bits of src */
        } else {
            /* Immediate width (variable offset) */
            uint32_t maski = -2U << (len - 1);
            tcg_gen_andi_i32(tmp, src, ~maski);
            tcg_gen_movi_i32(mask, maski);
            tcg_gen_movi_i32(rot, len & 31);
        }
        if (ext & 0x800) {
            /* Variable offset */
            tcg_gen_add_i32(rot, rot, DREG(ext, 6));
        } else {
            /* Immediate offset (variable width) */
            tcg_gen_addi_i32(rot, rot, ofs);
        }
        /* rot = (offset + width) mod 32; rotate value and mask into place. */
        tcg_gen_andi_i32(rot, rot, 31);
        tcg_gen_rotr_i32(mask, mask, rot);
        tcg_gen_rotr_i32(tmp, tmp, rot);
        tcg_gen_and_i32(dst, dst, mask);
        tcg_gen_or_i32(dst, dst, tmp);

        tcg_temp_free(rot);
        tcg_temp_free(mask);
    }
    tcg_temp_free(tmp);
}
4413 
4414 DISAS_INSN(bfins_mem)
4415 {
4416     int ext = read_im16(env, s);
4417     TCGv src = DREG(ext, 12);
4418     TCGv addr, len, ofs;
4419 
4420     addr = gen_lea(env, s, insn, OS_UNSIZED);
4421     if (IS_NULL_QREG(addr)) {
4422         gen_addr_fault(s);
4423         return;
4424     }
4425 
4426     if (ext & 0x20) {
4427         len = DREG(ext, 0);
4428     } else {
4429         len = tcg_const_i32(extract32(ext, 0, 5));
4430     }
4431     if (ext & 0x800) {
4432         ofs = DREG(ext, 6);
4433     } else {
4434         ofs = tcg_const_i32(extract32(ext, 6, 5));
4435     }
4436 
4437     gen_helper_bfins_mem(QREG_CC_N, cpu_env, addr, src, ofs, len);
4438     set_cc_op(s, CC_OP_LOGIC);
4439 
4440     if (!(ext & 0x20)) {
4441         tcg_temp_free(len);
4442     }
4443     if (!(ext & 0x800)) {
4444         tcg_temp_free(ofs);
4445     }
4446 }
4447 
4448 DISAS_INSN(ff1)
4449 {
4450     TCGv reg;
4451     reg = DREG(insn, 0);
4452     gen_logic_cc(s, reg, OS_LONG);
4453     gen_helper_ff1(reg, reg);
4454 }
4455 
4456 DISAS_INSN(chk)
4457 {
4458     TCGv src, reg;
4459     int opsize;
4460 
4461     switch ((insn >> 7) & 3) {
4462     case 3:
4463         opsize = OS_WORD;
4464         break;
4465     case 2:
4466         if (m68k_feature(env, M68K_FEATURE_CHK2)) {
4467             opsize = OS_LONG;
4468             break;
4469         }
4470         /* fallthru */
4471     default:
4472         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4473         return;
4474     }
4475     SRC_EA(env, src, opsize, 1, NULL);
4476     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
4477 
4478     gen_flush_flags(s);
4479     gen_helper_chk(cpu_env, reg, src);
4480 }
4481 
/* CHK2: trap (via helper) when a register lies outside two memory bounds. */
DISAS_INSN(chk2)
{
    uint16_t ext;
    TCGv addr1, addr2, bound1, bound2, reg;
    int opsize;

    /* Operand size from opcode bits 9-10. */
    switch ((insn >> 9) & 3) {
    case 0:
        opsize = OS_BYTE;
        break;
    case 1:
        opsize = OS_WORD;
        break;
    case 2:
        opsize = OS_LONG;
        break;
    default:
        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
        return;
    }

    /* Extension word bit 11 must be set for the trapping (CHK2) form. */
    ext = read_im16(env, s);
    if ((ext & 0x0800) == 0) {
        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
        return;
    }

    /*
     * NOTE(review): the gen_lea result is not checked against NULL_QREG
     * here -- presumably the decode table restricts chk2 to control
     * addressing modes; confirm against the insn registration.
     */
    addr1 = gen_lea(env, s, insn, OS_UNSIZED);
    addr2 = tcg_temp_new();
    tcg_gen_addi_i32(addr2, addr1, opsize_bytes(opsize));

    /* The two bounds are loaded from consecutive memory operands. */
    bound1 = gen_load(s, opsize, addr1, 1, IS_USER(s));
    tcg_temp_free(addr1);
    bound2 = gen_load(s, opsize, addr2, 1, IS_USER(s));
    tcg_temp_free(addr2);

    reg = tcg_temp_new();
    /* Ext bit 15: full address register, else sign-extended data register. */
    if (ext & 0x8000) {
        tcg_gen_mov_i32(reg, AREG(ext, 12));
    } else {
        gen_ext(reg, DREG(ext, 12), opsize, 1);
    }

    gen_flush_flags(s);
    gen_helper_chk2(cpu_env, reg, bound1, bound2);
    tcg_temp_free(reg);
    tcg_temp_free(bound1);
    tcg_temp_free(bound2);
}
4531 
4532 static void m68k_copy_line(TCGv dst, TCGv src, int index)
4533 {
4534     TCGv addr;
4535     TCGv_i64 t0, t1;
4536 
4537     addr = tcg_temp_new();
4538 
4539     t0 = tcg_temp_new_i64();
4540     t1 = tcg_temp_new_i64();
4541 
4542     tcg_gen_andi_i32(addr, src, ~15);
4543     tcg_gen_qemu_ld64(t0, addr, index);
4544     tcg_gen_addi_i32(addr, addr, 8);
4545     tcg_gen_qemu_ld64(t1, addr, index);
4546 
4547     tcg_gen_andi_i32(addr, dst, ~15);
4548     tcg_gen_qemu_st64(t0, addr, index);
4549     tcg_gen_addi_i32(addr, addr, 8);
4550     tcg_gen_qemu_st64(t1, addr, index);
4551 
4552     tcg_temp_free_i64(t0);
4553     tcg_temp_free_i64(t1);
4554     tcg_temp_free(addr);
4555 }
4556 
4557 DISAS_INSN(move16_reg)
4558 {
4559     int index = IS_USER(s);
4560     TCGv tmp;
4561     uint16_t ext;
4562 
4563     ext = read_im16(env, s);
4564     if ((ext & (1 << 15)) == 0) {
4565         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4566     }
4567 
4568     m68k_copy_line(AREG(ext, 12), AREG(insn, 0), index);
4569 
4570     /* Ax can be Ay, so save Ay before incrementing Ax */
4571     tmp = tcg_temp_new();
4572     tcg_gen_mov_i32(tmp, AREG(ext, 12));
4573     tcg_gen_addi_i32(AREG(insn, 0), AREG(insn, 0), 16);
4574     tcg_gen_addi_i32(AREG(ext, 12), tmp, 16);
4575     tcg_temp_free(tmp);
4576 }
4577 
4578 DISAS_INSN(move16_mem)
4579 {
4580     int index = IS_USER(s);
4581     TCGv reg, addr;
4582 
4583     reg = AREG(insn, 0);
4584     addr = tcg_const_i32(read_im32(env, s));
4585 
4586     if ((insn >> 3) & 1) {
4587         /* MOVE16 (xxx).L, (Ay) */
4588         m68k_copy_line(reg, addr, index);
4589     } else {
4590         /* MOVE16 (Ay), (xxx).L */
4591         m68k_copy_line(addr, reg, index);
4592     }
4593 
4594     tcg_temp_free(addr);
4595 
4596     if (((insn >> 3) & 2) == 0) {
4597         /* (Ay)+ */
4598         tcg_gen_addi_i32(reg, reg, 16);
4599     }
4600 }
4601 
4602 DISAS_INSN(strldsr)
4603 {
4604     uint16_t ext;
4605     uint32_t addr;
4606 
4607     addr = s->pc - 2;
4608     ext = read_im16(env, s);
4609     if (ext != 0x46FC) {
4610         gen_exception(s, addr, EXCP_ILLEGAL);
4611         return;
4612     }
4613     ext = read_im16(env, s);
4614     if (IS_USER(s) || (ext & SR_S) == 0) {
4615         gen_exception(s, addr, EXCP_PRIVILEGE);
4616         return;
4617     }
4618     gen_push(s, gen_get_sr(s));
4619     gen_set_sr_im(s, ext, 0);
4620     gen_exit_tb(s);
4621 }
4622 
4623 DISAS_INSN(move_from_sr)
4624 {
4625     TCGv sr;
4626 
4627     if (IS_USER(s) && m68k_feature(env, M68K_FEATURE_MOVEFROMSR_PRIV)) {
4628         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4629         return;
4630     }
4631     sr = gen_get_sr(s);
4632     DEST_EA(env, insn, OS_WORD, sr, NULL);
4633 }
4634 
4635 #if defined(CONFIG_SOFTMMU)
/*
 * MOVES: privileged move to/from the alternate address spaces selected
 * by the SFC (source) and DFC (destination) control registers.
 */
DISAS_INSN(moves)
{
    int opsize;
    uint16_t ext;
    TCGv reg;
    TCGv addr;
    int extend;

    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }

    ext = read_im16(env, s);

    opsize = insn_opsize(insn);

    if (ext & 0x8000) {
        /* address register */
        reg = AREG(ext, 12);
        extend = 1;
    } else {
        /* data register */
        reg = DREG(ext, 12);
        extend = 0;
    }

    addr = gen_lea(env, s, insn, opsize);
    if (IS_NULL_QREG(addr)) {
        gen_addr_fault(s);
        return;
    }

    if (ext & 0x0800) {
        /* from reg to ea */
        gen_store(s, opsize, addr, reg, DFC_INDEX(s));
    } else {
        /* from ea to reg */
        TCGv tmp = gen_load(s, opsize, addr, 0, SFC_INDEX(s));
        if (extend) {
            /* Address registers take the full sign-extended value. */
            gen_ext(reg, tmp, opsize, 1);
        } else {
            /* Data registers only have their low 'opsize' bits replaced. */
            gen_partset_reg(opsize, reg, tmp);
        }
        tcg_temp_free(tmp);
    }
    /* EA modes with side effects must write the address register back. */
    switch (extract32(insn, 3, 3)) {
    case 3: /* Indirect postincrement.  */
        /* A byte-sized (A7)+ bumps by 2, keeping the stack pointer even. */
        tcg_gen_addi_i32(AREG(insn, 0), addr,
                         REG(insn, 0) == 7 && opsize == OS_BYTE
                         ? 2
                         : opsize_bytes(opsize));
        break;
    case 4: /* Indirect predecrement.  */
        tcg_gen_mov_i32(AREG(insn, 0), addr);
        break;
    }
}
4694 
4695 DISAS_INSN(move_to_sr)
4696 {
4697     if (IS_USER(s)) {
4698         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4699         return;
4700     }
4701     gen_move_to_sr(env, s, insn, false);
4702     gen_exit_tb(s);
4703 }
4704 
4705 DISAS_INSN(move_from_usp)
4706 {
4707     if (IS_USER(s)) {
4708         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4709         return;
4710     }
4711     tcg_gen_ld_i32(AREG(insn, 0), cpu_env,
4712                    offsetof(CPUM68KState, sp[M68K_USP]));
4713 }
4714 
4715 DISAS_INSN(move_to_usp)
4716 {
4717     if (IS_USER(s)) {
4718         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4719         return;
4720     }
4721     tcg_gen_st_i32(AREG(insn, 0), cpu_env,
4722                    offsetof(CPUM68KState, sp[M68K_USP]));
4723 }
4724 
4725 DISAS_INSN(halt)
4726 {
4727     if (IS_USER(s)) {
4728         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4729         return;
4730     }
4731 
4732     gen_exception(s, s->pc, EXCP_HALT_INSN);
4733 }
4734 
4735 DISAS_INSN(stop)
4736 {
4737     uint16_t ext;
4738 
4739     if (IS_USER(s)) {
4740         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4741         return;
4742     }
4743 
4744     ext = read_im16(env, s);
4745 
4746     gen_set_sr_im(s, ext, 0);
4747     tcg_gen_movi_i32(cpu_halted, 1);
4748     gen_exception(s, s->pc, EXCP_HLT);
4749 }
4750 
4751 DISAS_INSN(rte)
4752 {
4753     if (IS_USER(s)) {
4754         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4755         return;
4756     }
4757     gen_exception(s, s->base.pc_next, EXCP_RTE);
4758 }
4759 
4760 DISAS_INSN(cf_movec)
4761 {
4762     uint16_t ext;
4763     TCGv reg;
4764 
4765     if (IS_USER(s)) {
4766         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4767         return;
4768     }
4769 
4770     ext = read_im16(env, s);
4771 
4772     if (ext & 0x8000) {
4773         reg = AREG(ext, 12);
4774     } else {
4775         reg = DREG(ext, 12);
4776     }
4777     gen_helper_cf_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4778     gen_exit_tb(s);
4779 }
4780 
4781 DISAS_INSN(m68k_movec)
4782 {
4783     uint16_t ext;
4784     TCGv reg;
4785 
4786     if (IS_USER(s)) {
4787         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4788         return;
4789     }
4790 
4791     ext = read_im16(env, s);
4792 
4793     if (ext & 0x8000) {
4794         reg = AREG(ext, 12);
4795     } else {
4796         reg = DREG(ext, 12);
4797     }
4798     if (insn & 1) {
4799         gen_helper_m68k_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4800     } else {
4801         gen_helper_m68k_movec_from(reg, cpu_env, tcg_const_i32(ext & 0xfff));
4802     }
4803     gen_exit_tb(s);
4804 }
4805 
4806 DISAS_INSN(intouch)
4807 {
4808     if (IS_USER(s)) {
4809         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4810         return;
4811     }
4812     /* ICache fetch.  Implement as no-op.  */
4813 }
4814 
4815 DISAS_INSN(cpushl)
4816 {
4817     if (IS_USER(s)) {
4818         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4819         return;
4820     }
4821     /* Cache push/invalidate.  Implement as no-op.  */
4822 }
4823 
4824 DISAS_INSN(cpush)
4825 {
4826     if (IS_USER(s)) {
4827         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4828         return;
4829     }
4830     /* Cache push/invalidate.  Implement as no-op.  */
4831 }
4832 
4833 DISAS_INSN(cinv)
4834 {
4835     if (IS_USER(s)) {
4836         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4837         return;
4838     }
4839     /* Invalidate cache line.  Implement as no-op.  */
4840 }
4841 
4842 #if defined(CONFIG_SOFTMMU)
4843 DISAS_INSN(pflush)
4844 {
4845     TCGv opmode;
4846 
4847     if (IS_USER(s)) {
4848         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4849         return;
4850     }
4851 
4852     opmode = tcg_const_i32((insn >> 3) & 3);
4853     gen_helper_pflush(cpu_env, AREG(insn, 0), opmode);
4854     tcg_temp_free(opmode);
4855 }
4856 
4857 DISAS_INSN(ptest)
4858 {
4859     TCGv is_read;
4860 
4861     if (IS_USER(s)) {
4862         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4863         return;
4864     }
4865     is_read = tcg_const_i32((insn >> 5) & 1);
4866     gen_helper_ptest(cpu_env, AREG(insn, 0), is_read);
4867     tcg_temp_free(is_read);
4868 }
4869 #endif
4870 
DISAS_INSN(wddata)
{
    /*
     * Always raise a privilege violation.  NOTE(review): presumably
     * WDDATA is privileged and not otherwise modeled -- confirm.
     */
    gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
}
4875 
/* WDEBUG: privileged and unimplemented -- aborts emulation if reached. */
DISAS_INSN(wdebug)
{
    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }
    /* TODO: Implement wdebug.  */
    cpu_abort(env_cpu(env), "WDEBUG not implemented");
}
4885 #endif
4886 
4887 DISAS_INSN(trap)
4888 {
4889     gen_exception(s, s->pc, EXCP_TRAP0 + (insn & 0xf));
4890 }
4891 
/*
 * Emit a conditional trap: raise EXCP_TRAPCC when condition c holds.
 * For TCG_COND_NEVER nothing is emitted; the condition is always freed.
 */
static void do_trapcc(DisasContext *s, DisasCompare *c)
{
    if (c->tcond != TCG_COND_NEVER) {
        TCGLabel *over = NULL;

        update_cc_op(s);

        if (c->tcond != TCG_COND_ALWAYS) {
            /* Jump over if !c. */
            over = gen_new_label();
            tcg_gen_brcond_i32(tcg_invert_cond(c->tcond), c->v1, c->v2, over);
        }

        /* PC is set past the insn; pc_next is passed for the format-2 frame. */
        tcg_gen_movi_i32(QREG_PC, s->pc);
        gen_raise_exception_format2(s, EXCP_TRAPCC, s->base.pc_next);

        if (over != NULL) {
            /* Condition false: fall through and keep translating. */
            gen_set_label(over);
            s->base.is_jmp = DISAS_NEXT;
        }
    }
    free_cond(c);
}
4915 
4916 DISAS_INSN(trapcc)
4917 {
4918     DisasCompare c;
4919 
4920     /* Consume and discard the immediate operand. */
4921     switch (extract32(insn, 0, 3)) {
4922     case 2: /* trapcc.w */
4923         (void)read_im16(env, s);
4924         break;
4925     case 3: /* trapcc.l */
4926         (void)read_im32(env, s);
4927         break;
4928     case 4: /* trapcc (no operand) */
4929         break;
4930     default:
4931         /* trapcc registered with only valid opmodes */
4932         g_assert_not_reached();
4933     }
4934 
4935     gen_cc_cond(&c, s, extract32(insn, 8, 4));
4936     do_trapcc(s, &c);
4937 }
4938 
/* TRAPV: trapcc with a fixed condition -- trap when V is set. */
DISAS_INSN(trapv)
{
    DisasCompare c;

    gen_cc_cond(&c, s, 9); /* V set */
    do_trapcc(s, &c);
}
4946 
4947 static void gen_load_fcr(DisasContext *s, TCGv res, int reg)
4948 {
4949     switch (reg) {
4950     case M68K_FPIAR:
4951         tcg_gen_movi_i32(res, 0);
4952         break;
4953     case M68K_FPSR:
4954         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpsr));
4955         break;
4956     case M68K_FPCR:
4957         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpcr));
4958         break;
4959     }
4960 }
4961 
4962 static void gen_store_fcr(DisasContext *s, TCGv val, int reg)
4963 {
4964     switch (reg) {
4965     case M68K_FPIAR:
4966         break;
4967     case M68K_FPSR:
4968         tcg_gen_st_i32(val, cpu_env, offsetof(CPUM68KState, fpsr));
4969         break;
4970     case M68K_FPCR:
4971         gen_helper_set_fpcr(cpu_env, val);
4972         break;
4973     }
4974 }
4975 
4976 static void gen_qemu_store_fcr(DisasContext *s, TCGv addr, int reg)
4977 {
4978     int index = IS_USER(s);
4979     TCGv tmp;
4980 
4981     tmp = tcg_temp_new();
4982     gen_load_fcr(s, tmp, reg);
4983     tcg_gen_qemu_st32(tmp, addr, index);
4984     tcg_temp_free(tmp);
4985 }
4986 
4987 static void gen_qemu_load_fcr(DisasContext *s, TCGv addr, int reg)
4988 {
4989     int index = IS_USER(s);
4990     TCGv tmp;
4991 
4992     tmp = tcg_temp_new();
4993     tcg_gen_qemu_ld32u(tmp, addr, index);
4994     gen_store_fcr(s, tmp, reg);
4995     tcg_temp_free(tmp);
4996 }
4997 
4998 
/*
 * FMOVE(M) between FP control registers (FPCR/FPSR/FPIAR) and an EA.
 * ext bits 10-12 select the register(s) -- see the mask comment below.
 * ext bit 13 (is_write) set means transfer FROM control register(s)
 * out to the EA.
 */
static void gen_op_fmove_fcr(CPUM68KState *env, DisasContext *s,
                             uint32_t insn, uint32_t ext)
{
    int mask = (ext >> 10) & 7;
    int is_write = (ext >> 13) & 1;
    int mode = extract32(insn, 3, 3);
    int i;
    TCGv addr, tmp;

    switch (mode) {
    case 0: /* Dn */
        /* A data register can transfer exactly one control register. */
        if (mask != M68K_FPIAR && mask != M68K_FPSR && mask != M68K_FPCR) {
            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
            return;
        }
        if (is_write) {
            gen_load_fcr(s, DREG(insn, 0), mask);
        } else {
            gen_store_fcr(s, DREG(insn, 0), mask);
        }
        return;
    case 1: /* An, only with FPIAR */
        if (mask != M68K_FPIAR) {
            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
            return;
        }
        if (is_write) {
            gen_load_fcr(s, AREG(insn, 0), mask);
        } else {
            gen_store_fcr(s, AREG(insn, 0), mask);
        }
        return;
    case 7: /* Immediate */
        if (REG(insn, 0) == 4) {
            /* An immediate can only be the source of a single register. */
            if (is_write ||
                (mask != M68K_FPIAR && mask != M68K_FPSR &&
                 mask != M68K_FPCR)) {
                gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
                return;
            }
            tmp = tcg_const_i32(read_im32(env, s));
            gen_store_fcr(s, tmp, mask);
            tcg_temp_free(tmp);
            return;
        }
        break;
    default:
        break;
    }

    /* Memory EA: possibly several registers, transferred one long at a time. */
    tmp = gen_lea(env, s, insn, OS_LONG);
    if (IS_NULL_QREG(tmp)) {
        gen_addr_fault(s);
        return;
    }

    /* Step through a copy so the base register is not clobbered early. */
    addr = tcg_temp_new();
    tcg_gen_mov_i32(addr, tmp);

    /*
     * mask:
     *
     * 0b100 Floating-Point Control Register
     * 0b010 Floating-Point Status Register
     * 0b001 Floating-Point Instruction Address Register
     *
     */

    if (is_write && mode == 4) {
        /* -(An): store in descending order, decrementing between stores. */
        for (i = 2; i >= 0; i--, mask >>= 1) {
            if (mask & 1) {
                gen_qemu_store_fcr(s, addr, 1 << i);
                if (mask != 1) {
                    tcg_gen_subi_i32(addr, addr, opsize_bytes(OS_LONG));
                }
            }
       }
       tcg_gen_mov_i32(AREG(insn, 0), addr);
    } else {
        /* Ascending order; (An)+ (mode 3) always advances the address. */
        for (i = 0; i < 3; i++, mask >>= 1) {
            if (mask & 1) {
                if (is_write) {
                    gen_qemu_store_fcr(s, addr, 1 << i);
                } else {
                    gen_qemu_load_fcr(s, addr, 1 << i);
                }
                if (mask != 1 || mode == 3) {
                    tcg_gen_addi_i32(addr, addr, opsize_bytes(OS_LONG));
                }
            }
        }
        if (mode == 3) {
            /* (An)+ writes the final address back. */
            tcg_gen_mov_i32(AREG(insn, 0), addr);
        }
    }
    tcg_temp_free_i32(addr);
}
5096 
/*
 * FMOVEM: move multiple FP data registers to/from memory.
 * ext bit 13 clear means load (memory to registers); mode bit 0 selects
 * a dynamic register list taken from a data register.
 */
static void gen_op_fmovem(CPUM68KState *env, DisasContext *s,
                          uint32_t insn, uint32_t ext)
{
    int opsize;
    TCGv addr, tmp;
    int mode = (ext >> 11) & 0x3;
    int is_load = ((ext & 0x2000) == 0);

    if (m68k_feature(s->env, M68K_FEATURE_FPU)) {
        opsize = OS_EXTENDED;
    } else {
        opsize = OS_DOUBLE;  /* FIXME */
    }

    addr = gen_lea(env, s, insn, opsize);
    if (IS_NULL_QREG(addr)) {
        gen_addr_fault(s);
        return;
    }

    /*
     * tmp holds the 8-bit register-list mask on entry to the helper;
     * the helper returns the final address through the same temp.
     */
    tmp = tcg_temp_new();
    if (mode & 0x1) {
        /* Dynamic register list */
        tcg_gen_ext8u_i32(tmp, DREG(ext, 4));
    } else {
        /* Static register list */
        tcg_gen_movi_i32(tmp, ext & 0xff);
    }

    if (!is_load && (mode & 2) == 0) {
        /*
         * predecrement addressing mode
         * only available to store register to memory
         */
        if (opsize == OS_EXTENDED) {
            gen_helper_fmovemx_st_predec(tmp, cpu_env, addr, tmp);
        } else {
            gen_helper_fmovemd_st_predec(tmp, cpu_env, addr, tmp);
        }
    } else {
        /* postincrement addressing mode */
        if (opsize == OS_EXTENDED) {
            if (is_load) {
                gen_helper_fmovemx_ld_postinc(tmp, cpu_env, addr, tmp);
            } else {
                gen_helper_fmovemx_st_postinc(tmp, cpu_env, addr, tmp);
            }
        } else {
            if (is_load) {
                gen_helper_fmovemd_ld_postinc(tmp, cpu_env, addr, tmp);
            } else {
                gen_helper_fmovemd_st_postinc(tmp, cpu_env, addr, tmp);
            }
        }
    }
    /* For (An)+ and -(An) EAs (octal modes 3/4), write the address back. */
    if ((insn & 070) == 030 || (insn & 070) == 040) {
        tcg_gen_mov_i32(AREG(insn, 0), tmp);
    }
    tcg_temp_free(tmp);
}
5157 
5158 /*
5159  * ??? FP exceptions are not implemented.  Most exceptions are deferred until
5160  * immediately before the next FP instruction is executed.
5161  */
5162 DISAS_INSN(fpu)
5163 {
5164     uint16_t ext;
5165     int opmode;
5166     int opsize;
5167     TCGv_ptr cpu_src, cpu_dest;
5168 
5169     ext = read_im16(env, s);
5170     opmode = ext & 0x7f;
5171     switch ((ext >> 13) & 7) {
5172     case 0:
5173         break;
5174     case 1:
5175         goto undef;
5176     case 2:
5177         if (insn == 0xf200 && (ext & 0xfc00) == 0x5c00) {
5178             /* fmovecr */
5179             TCGv rom_offset = tcg_const_i32(opmode);
5180             cpu_dest = gen_fp_ptr(REG(ext, 7));
5181             gen_helper_fconst(cpu_env, cpu_dest, rom_offset);
5182             tcg_temp_free_ptr(cpu_dest);
5183             tcg_temp_free(rom_offset);
5184             return;
5185         }
5186         break;
5187     case 3: /* fmove out */
5188         cpu_src = gen_fp_ptr(REG(ext, 7));
5189         opsize = ext_opsize(ext, 10);
5190         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
5191                       EA_STORE, IS_USER(s)) == -1) {
5192             gen_addr_fault(s);
5193         }
5194         gen_helper_ftst(cpu_env, cpu_src);
5195         tcg_temp_free_ptr(cpu_src);
5196         return;
5197     case 4: /* fmove to control register.  */
5198     case 5: /* fmove from control register.  */
5199         gen_op_fmove_fcr(env, s, insn, ext);
5200         return;
5201     case 6: /* fmovem */
5202     case 7:
5203         if ((ext & 0x1000) == 0 && !m68k_feature(s->env, M68K_FEATURE_FPU)) {
5204             goto undef;
5205         }
5206         gen_op_fmovem(env, s, insn, ext);
5207         return;
5208     }
5209     if (ext & (1 << 14)) {
5210         /* Source effective address.  */
5211         opsize = ext_opsize(ext, 10);
5212         cpu_src = gen_fp_result_ptr();
5213         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
5214                       EA_LOADS, IS_USER(s)) == -1) {
5215             gen_addr_fault(s);
5216             return;
5217         }
5218     } else {
5219         /* Source register.  */
5220         opsize = OS_EXTENDED;
5221         cpu_src = gen_fp_ptr(REG(ext, 10));
5222     }
5223     cpu_dest = gen_fp_ptr(REG(ext, 7));
5224     switch (opmode) {
5225     case 0: /* fmove */
5226         gen_fp_move(cpu_dest, cpu_src);
5227         break;
5228     case 0x40: /* fsmove */
5229         gen_helper_fsround(cpu_env, cpu_dest, cpu_src);
5230         break;
5231     case 0x44: /* fdmove */
5232         gen_helper_fdround(cpu_env, cpu_dest, cpu_src);
5233         break;
5234     case 1: /* fint */
5235         gen_helper_firound(cpu_env, cpu_dest, cpu_src);
5236         break;
5237     case 2: /* fsinh */
5238         gen_helper_fsinh(cpu_env, cpu_dest, cpu_src);
5239         break;
5240     case 3: /* fintrz */
5241         gen_helper_fitrunc(cpu_env, cpu_dest, cpu_src);
5242         break;
5243     case 4: /* fsqrt */
5244         gen_helper_fsqrt(cpu_env, cpu_dest, cpu_src);
5245         break;
5246     case 0x41: /* fssqrt */
5247         gen_helper_fssqrt(cpu_env, cpu_dest, cpu_src);
5248         break;
5249     case 0x45: /* fdsqrt */
5250         gen_helper_fdsqrt(cpu_env, cpu_dest, cpu_src);
5251         break;
5252     case 0x06: /* flognp1 */
5253         gen_helper_flognp1(cpu_env, cpu_dest, cpu_src);
5254         break;
5255     case 0x08: /* fetoxm1 */
5256         gen_helper_fetoxm1(cpu_env, cpu_dest, cpu_src);
5257         break;
5258     case 0x09: /* ftanh */
5259         gen_helper_ftanh(cpu_env, cpu_dest, cpu_src);
5260         break;
5261     case 0x0a: /* fatan */
5262         gen_helper_fatan(cpu_env, cpu_dest, cpu_src);
5263         break;
5264     case 0x0c: /* fasin */
5265         gen_helper_fasin(cpu_env, cpu_dest, cpu_src);
5266         break;
5267     case 0x0d: /* fatanh */
5268         gen_helper_fatanh(cpu_env, cpu_dest, cpu_src);
5269         break;
5270     case 0x0e: /* fsin */
5271         gen_helper_fsin(cpu_env, cpu_dest, cpu_src);
5272         break;
5273     case 0x0f: /* ftan */
5274         gen_helper_ftan(cpu_env, cpu_dest, cpu_src);
5275         break;
5276     case 0x10: /* fetox */
5277         gen_helper_fetox(cpu_env, cpu_dest, cpu_src);
5278         break;
5279     case 0x11: /* ftwotox */
5280         gen_helper_ftwotox(cpu_env, cpu_dest, cpu_src);
5281         break;
5282     case 0x12: /* ftentox */
5283         gen_helper_ftentox(cpu_env, cpu_dest, cpu_src);
5284         break;
5285     case 0x14: /* flogn */
5286         gen_helper_flogn(cpu_env, cpu_dest, cpu_src);
5287         break;
5288     case 0x15: /* flog10 */
5289         gen_helper_flog10(cpu_env, cpu_dest, cpu_src);
5290         break;
5291     case 0x16: /* flog2 */
5292         gen_helper_flog2(cpu_env, cpu_dest, cpu_src);
5293         break;
5294     case 0x18: /* fabs */
5295         gen_helper_fabs(cpu_env, cpu_dest, cpu_src);
5296         break;
5297     case 0x58: /* fsabs */
5298         gen_helper_fsabs(cpu_env, cpu_dest, cpu_src);
5299         break;
5300     case 0x5c: /* fdabs */
5301         gen_helper_fdabs(cpu_env, cpu_dest, cpu_src);
5302         break;
5303     case 0x19: /* fcosh */
5304         gen_helper_fcosh(cpu_env, cpu_dest, cpu_src);
5305         break;
5306     case 0x1a: /* fneg */
5307         gen_helper_fneg(cpu_env, cpu_dest, cpu_src);
5308         break;
5309     case 0x5a: /* fsneg */
5310         gen_helper_fsneg(cpu_env, cpu_dest, cpu_src);
5311         break;
5312     case 0x5e: /* fdneg */
5313         gen_helper_fdneg(cpu_env, cpu_dest, cpu_src);
5314         break;
5315     case 0x1c: /* facos */
5316         gen_helper_facos(cpu_env, cpu_dest, cpu_src);
5317         break;
5318     case 0x1d: /* fcos */
5319         gen_helper_fcos(cpu_env, cpu_dest, cpu_src);
5320         break;
5321     case 0x1e: /* fgetexp */
5322         gen_helper_fgetexp(cpu_env, cpu_dest, cpu_src);
5323         break;
5324     case 0x1f: /* fgetman */
5325         gen_helper_fgetman(cpu_env, cpu_dest, cpu_src);
5326         break;
5327     case 0x20: /* fdiv */
5328         gen_helper_fdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5329         break;
5330     case 0x60: /* fsdiv */
5331         gen_helper_fsdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5332         break;
5333     case 0x64: /* fddiv */
5334         gen_helper_fddiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5335         break;
5336     case 0x21: /* fmod */
5337         gen_helper_fmod(cpu_env, cpu_dest, cpu_src, cpu_dest);
5338         break;
5339     case 0x22: /* fadd */
5340         gen_helper_fadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5341         break;
5342     case 0x62: /* fsadd */
5343         gen_helper_fsadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5344         break;
5345     case 0x66: /* fdadd */
5346         gen_helper_fdadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5347         break;
5348     case 0x23: /* fmul */
5349         gen_helper_fmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5350         break;
5351     case 0x63: /* fsmul */
5352         gen_helper_fsmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5353         break;
5354     case 0x67: /* fdmul */
5355         gen_helper_fdmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5356         break;
5357     case 0x24: /* fsgldiv */
5358         gen_helper_fsgldiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5359         break;
5360     case 0x25: /* frem */
5361         gen_helper_frem(cpu_env, cpu_dest, cpu_src, cpu_dest);
5362         break;
5363     case 0x26: /* fscale */
5364         gen_helper_fscale(cpu_env, cpu_dest, cpu_src, cpu_dest);
5365         break;
5366     case 0x27: /* fsglmul */
5367         gen_helper_fsglmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5368         break;
5369     case 0x28: /* fsub */
5370         gen_helper_fsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5371         break;
5372     case 0x68: /* fssub */
5373         gen_helper_fssub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5374         break;
5375     case 0x6c: /* fdsub */
5376         gen_helper_fdsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5377         break;
5378     case 0x30: case 0x31: case 0x32:
5379     case 0x33: case 0x34: case 0x35:
5380     case 0x36: case 0x37: {
5381             TCGv_ptr cpu_dest2 = gen_fp_ptr(REG(ext, 0));
5382             gen_helper_fsincos(cpu_env, cpu_dest, cpu_dest2, cpu_src);
5383             tcg_temp_free_ptr(cpu_dest2);
5384         }
5385         break;
5386     case 0x38: /* fcmp */
5387         gen_helper_fcmp(cpu_env, cpu_src, cpu_dest);
5388         return;
5389     case 0x3a: /* ftst */
5390         gen_helper_ftst(cpu_env, cpu_src);
5391         return;
5392     default:
5393         goto undef;
5394     }
5395     tcg_temp_free_ptr(cpu_src);
5396     gen_helper_ftst(cpu_env, cpu_dest);
5397     tcg_temp_free_ptr(cpu_dest);
5398     return;
5399 undef:
5400     /* FIXME: Is this right for offset addressing modes?  */
5401     s->pc -= 2;
5402     disas_undef_fpu(env, s, insn);
5403 }
5404 
/*
 * Translate an FPU condition predicate (low 6 bits) into a DisasCompare:
 * applying c->tcond to c->v1 vs c->v2 yields true when the condition
 * holds.  Conditions are computed from the FPSR condition-code bits
 * (A, Z, N).  Predicates 0-15 and 16-31 are handled identically here;
 * the difference (BSUN signaling) is not implemented — see TODO below.
 */
static void gen_fcc_cond(DisasCompare *c, DisasContext *s, int cond)
{
    TCGv fpsr;

    c->g1 = 1;
    c->v2 = tcg_const_i32(0);
    c->g2 = 0;
    /* TODO: Raise BSUN exception.  */
    fpsr = tcg_temp_new();
    gen_load_fcr(s, fpsr, M68K_FPSR);
    switch (cond) {
    case 0:  /* False */
    case 16: /* Signaling False */
        c->v1 = c->v2;
        c->tcond = TCG_COND_NEVER;
        break;
    case 1:  /* EQual Z */
    case 17: /* Signaling EQual Z */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
        c->tcond = TCG_COND_NE;
        break;
    case 2:  /* Ordered Greater Than !(A || Z || N) */
    case 18: /* Greater Than !(A || Z || N) */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr,
                         FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
        c->tcond = TCG_COND_EQ;
        break;
    case 3:  /* Ordered Greater than or Equal Z || !(A || N) */
    case 19: /* Greater than or Equal Z || !(A || N) */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        /* Shift A into the N bit position so it can be combined with N.  */
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
        tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_Z | FPSR_CC_N);
        tcg_gen_or_i32(c->v1, c->v1, fpsr);
        /* Invert N: result is nonzero iff Z || !(A || N).  */
        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 4:  /* Ordered Less Than !(!N || A || Z); */
    case 20: /* Less Than !(!N || A || Z); */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        /* Invert N first, then test that all of N, A, Z are clear.  */
        tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_N | FPSR_CC_A | FPSR_CC_Z);
        c->tcond = TCG_COND_EQ;
        break;
    case 5:  /* Ordered Less than or Equal Z || (N && !A) */
    case 21: /* Less than or Equal Z || (N && !A) */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        /* Shift A into the N bit position so andc can compute N && !A.  */
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
        tcg_gen_andc_i32(c->v1, fpsr, c->v1)
;
        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_Z | FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 6:  /* Ordered Greater or Less than !(A || Z) */
    case 22: /* Greater or Less than !(A || Z) */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
        c->tcond = TCG_COND_EQ;
        break;
    case 7:  /* Ordered !A */
    case 23: /* Greater, Less or Equal !A */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
        c->tcond = TCG_COND_EQ;
        break;
    case 8:  /* Unordered A */
    case 24: /* Not Greater, Less or Equal A */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
        c->tcond = TCG_COND_NE;
        break;
    case 9:  /* Unordered or Equal A || Z */
    case 25: /* Not Greater or Less then A || Z */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
        c->tcond = TCG_COND_NE;
        break;
    case 10: /* Unordered or Greater Than A || !(N || Z)) */
    case 26: /* Not Less or Equal A || !(N || Z)) */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        /* Shift Z into the N bit position so it can be combined with N.  */
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
        tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_A | FPSR_CC_N);
        tcg_gen_or_i32(c->v1, c->v1, fpsr);
        /* Invert N: result is nonzero iff A || !(N || Z).  */
        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 11: /* Unordered or Greater or Equal A || Z || !N */
    case 27: /* Not Less Than A || Z || !N */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 12: /* Unordered or Less Than A || (N && !Z) */
    case 28: /* Not Greater than or Equal A || (N && !Z) */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        /* Shift Z into the N bit position so andc can compute N && !Z.  */
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
        tcg_gen_andc_i32(c->v1, fpsr, c->v1);
        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_A | FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 13: /* Unordered or Less or Equal A || Z || N */
    case 29: /* Not Greater Than A || Z || N */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 14: /* Not Equal !Z */
    case 30: /* Signaling Not Equal !Z */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
        c->tcond = TCG_COND_EQ;
        break;
    case 15: /* True */
    case 31: /* Signaling True */
        c->v1 = c->v2;
        c->tcond = TCG_COND_ALWAYS;
        break;
    }
    tcg_temp_free(fpsr);
}
5544 
5545 static void gen_fjmpcc(DisasContext *s, int cond, TCGLabel *l1)
5546 {
5547     DisasCompare c;
5548 
5549     gen_fcc_cond(&c, s, cond);
5550     update_cc_op(s);
5551     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
5552     free_cond(&c);
5553 }
5554 
5555 DISAS_INSN(fbcc)
5556 {
5557     uint32_t offset;
5558     uint32_t base;
5559     TCGLabel *l1;
5560 
5561     base = s->pc;
5562     offset = (int16_t)read_im16(env, s);
5563     if (insn & (1 << 6)) {
5564         offset = (offset << 16) | read_im16(env, s);
5565     }
5566 
5567     l1 = gen_new_label();
5568     update_cc_op(s);
5569     gen_fjmpcc(s, insn & 0x3f, l1);
5570     gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
5571     gen_set_label(l1);
5572     gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
5573 }
5574 
5575 DISAS_INSN(fscc)
5576 {
5577     DisasCompare c;
5578     int cond;
5579     TCGv tmp;
5580     uint16_t ext;
5581 
5582     ext = read_im16(env, s);
5583     cond = ext & 0x3f;
5584     gen_fcc_cond(&c, s, cond);
5585 
5586     tmp = tcg_temp_new();
5587     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
5588     free_cond(&c);
5589 
5590     tcg_gen_neg_i32(tmp, tmp);
5591     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
5592     tcg_temp_free(tmp);
5593 }
5594 
5595 DISAS_INSN(ftrapcc)
5596 {
5597     DisasCompare c;
5598     uint16_t ext;
5599     int cond;
5600 
5601     ext = read_im16(env, s);
5602     cond = ext & 0x3f;
5603 
5604     /* Consume and discard the immediate operand. */
5605     switch (extract32(insn, 0, 3)) {
5606     case 2: /* ftrapcc.w */
5607         (void)read_im16(env, s);
5608         break;
5609     case 3: /* ftrapcc.l */
5610         (void)read_im32(env, s);
5611         break;
5612     case 4: /* ftrapcc (no operand) */
5613         break;
5614     default:
5615         /* ftrapcc registered with only valid opmodes */
5616         g_assert_not_reached();
5617     }
5618 
5619     gen_fcc_cond(&c, s, cond);
5620     do_trapcc(s, &c);
5621 }
5622 
5623 #if defined(CONFIG_SOFTMMU)
5624 DISAS_INSN(frestore)
5625 {
5626     TCGv addr;
5627 
5628     if (IS_USER(s)) {
5629         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5630         return;
5631     }
5632     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5633         SRC_EA(env, addr, OS_LONG, 0, NULL);
5634         /* FIXME: check the state frame */
5635     } else {
5636         disas_undef(env, s, insn);
5637     }
5638 }
5639 
5640 DISAS_INSN(fsave)
5641 {
5642     if (IS_USER(s)) {
5643         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5644         return;
5645     }
5646 
5647     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5648         /* always write IDLE */
5649         TCGv idle = tcg_const_i32(0x41000000);
5650         DEST_EA(env, insn, OS_LONG, idle, NULL);
5651         tcg_temp_free(idle);
5652     } else {
5653         disas_undef(env, s, insn);
5654     }
5655 }
5656 #endif
5657 
5658 static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
5659 {
5660     TCGv tmp = tcg_temp_new();
5661     if (s->env->macsr & MACSR_FI) {
5662         if (upper)
5663             tcg_gen_andi_i32(tmp, val, 0xffff0000);
5664         else
5665             tcg_gen_shli_i32(tmp, val, 16);
5666     } else if (s->env->macsr & MACSR_SU) {
5667         if (upper)
5668             tcg_gen_sari_i32(tmp, val, 16);
5669         else
5670             tcg_gen_ext16s_i32(tmp, val);
5671     } else {
5672         if (upper)
5673             tcg_gen_shri_i32(tmp, val, 16);
5674         else
5675             tcg_gen_ext16u_i32(tmp, val);
5676     }
5677     return tmp;
5678 }
5679 
/* Clear the V, Z, N and EV flags in MACSR prior to a MAC operation. */
static void gen_mac_clear_flags(void)
{
    tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
                     ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
}
5685 
/*
 * ColdFire MAC/EMAC multiply-accumulate, optionally combined with an
 * operand load ("MAC with load" form) and, with CF_EMAC_B, a second
 * (dual) accumulate.  The multiply mode (fractional / signed / unsigned)
 * is taken from MACSR at translation time.
 */
DISAS_INSN(mac)
{
    TCGv rx;
    TCGv ry;
    uint16_t ext;
    int acc;
    TCGv tmp;
    TCGv addr;
    TCGv loadval;
    int dual;
    TCGv saved_flags;

    /* Allocate the 64-bit multiply temporary once per translation block.  */
    if (!s->done_mac) {
        s->mactmp = tcg_temp_new_i64();
        s->done_mac = 1;
    }

    ext = read_im16(env, s);

    /* Accumulator index: bit 0 from insn bit 7, bit 1 from ext bit 4.  */
    acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
    /* Dual accumulate: load form with a second accumulate in ext bits 0-1. */
    dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
    if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
        disas_undef(env, s, insn);
        return;
    }
    if (insn & 0x30) {
        /* MAC with load.  */
        tmp = gen_lea(env, s, insn, OS_LONG);
        addr = tcg_temp_new();
        tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
        /*
         * Load the value now to ensure correct exception behavior.
         * Perform writeback after reading the MAC inputs.
         */
        loadval = gen_load(s, OS_LONG, addr, 0, IS_USER(s));

        /* The load form encodes the accumulator with bit 0 inverted.  */
        acc ^= 1;
        rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
        ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
    } else {
        loadval = addr = NULL_QREG;
        rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
        ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    }

    gen_mac_clear_flags();
#if 0
    l1 = -1;
    /* Disabled because conditional branches clobber temporary vars.  */
    if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
        /* Skip the multiply if we know we will ignore it.  */
        l1 = gen_new_label();
        tmp = tcg_temp_new();
        tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
        gen_op_jmp_nz32(tmp, l1);
    }
#endif

    if ((ext & 0x0800) == 0) {
        /* Word.  */
        rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
        ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
    }
    if (s->env->macsr & MACSR_FI) {
        gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
    } else {
        if (s->env->macsr & MACSR_SU)
            gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
        else
            gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
        /* Scale factor from ext bits 9-10: 1 = shift left, 3 = shift right. */
        switch ((ext >> 9) & 3) {
        case 1:
            tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
            break;
        case 3:
            tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
            break;
        }
    }

    if (dual) {
        /* Save the overflow flag from the multiply.  */
        saved_flags = tcg_temp_new();
        tcg_gen_mov_i32(saved_flags, QREG_MACSR);
    } else {
        saved_flags = NULL_QREG;
    }

#if 0
    /* Disabled because conditional branches clobber temporary vars.  */
    if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
        /* Skip the accumulate if the value is already saturated.  */
        l1 = gen_new_label();
        tmp = tcg_temp_new();
        gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
        gen_op_jmp_nz32(tmp, l1);
    }
#endif

    /* insn bit 8: 1 = subtract the product, 0 = add it.  */
    if (insn & 0x100)
        tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
    else
        tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);

    /* Saturate according to the current MACSR mode.  */
    if (s->env->macsr & MACSR_FI)
        gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
    else if (s->env->macsr & MACSR_SU)
        gen_helper_macsats(cpu_env, tcg_const_i32(acc));
    else
        gen_helper_macsatu(cpu_env, tcg_const_i32(acc));

#if 0
    /* Disabled because conditional branches clobber temporary vars.  */
    if (l1 != -1)
        gen_set_label(l1);
#endif

    if (dual) {
        /* Dual accumulate variant.  */
        acc = (ext >> 2) & 3;
        /* Restore the overflow flag from the multiplier.  */
        tcg_gen_mov_i32(QREG_MACSR, saved_flags);
#if 0
        /* Disabled because conditional branches clobber temporary vars.  */
        if ((s->env->macsr & MACSR_OMC) != 0) {
            /* Skip the accumulate if the value is already saturated.  */
            l1 = gen_new_label();
            tmp = tcg_temp_new();
            gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
            gen_op_jmp_nz32(tmp, l1);
        }
#endif
        /* ext bit 1: 1 = subtract into the second accumulator, 0 = add.  */
        if (ext & 2)
            tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
        else
            tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
        if (s->env->macsr & MACSR_FI)
            gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
        else if (s->env->macsr & MACSR_SU)
            gen_helper_macsats(cpu_env, tcg_const_i32(acc));
        else
            gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
#if 0
        /* Disabled because conditional branches clobber temporary vars.  */
        if (l1 != -1)
            gen_set_label(l1);
#endif
    }
    gen_helper_mac_set_flags(cpu_env, tcg_const_i32(acc));

    if (insn & 0x30) {
        /* Deferred writeback for the MAC-with-load form.  */
        TCGv rw;
        rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
        tcg_gen_mov_i32(rw, loadval);
        /*
         * FIXME: Should address writeback happen with the masked or
         * unmasked value?
         */
        switch ((insn >> 3) & 7) {
        case 3: /* Post-increment.  */
            tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
            break;
        case 4: /* Pre-decrement.  */
            tcg_gen_mov_i32(AREG(insn, 0), addr);
        }
        tcg_temp_free(loadval);
    }
}
5854 
/*
 * Read an accumulator (insn bits 9-10) into a data/address register,
 * converted according to the current MACSR mode.  insn bit 6 also
 * zeroes the accumulator and clears its MACSR PAV bit.
 */
DISAS_INSN(from_mac)
{
    TCGv rx;
    TCGv_i64 acc;
    int accnum;

    rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    accnum = (insn >> 9) & 3;
    acc = MACREG(accnum);
    if (s->env->macsr & MACSR_FI) {
        gen_helper_get_macf(rx, cpu_env, acc);
    } else if ((s->env->macsr & MACSR_OMC) == 0) {
        /* No overflow/saturation handling: just take the low 32 bits.  */
        tcg_gen_extrl_i64_i32(rx, acc);
    } else if (s->env->macsr & MACSR_SU) {
        gen_helper_get_macs(rx, acc);
    } else {
        gen_helper_get_macu(rx, acc);
    }
    if (insn & 0x40) {
        /* Clear form: zero the accumulator and its PAV bit.  */
        tcg_gen_movi_i64(acc, 0);
        tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
    }
}
5878 
5879 DISAS_INSN(move_mac)
5880 {
5881     /* FIXME: This can be done without a helper.  */
5882     int src;
5883     TCGv dest;
5884     src = insn & 3;
5885     dest = tcg_const_i32((insn >> 9) & 3);
5886     gen_helper_mac_move(cpu_env, dest, tcg_const_i32(src));
5887     gen_mac_clear_flags();
5888     gen_helper_mac_set_flags(cpu_env, dest);
5889 }
5890 
5891 DISAS_INSN(from_macsr)
5892 {
5893     TCGv reg;
5894 
5895     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5896     tcg_gen_mov_i32(reg, QREG_MACSR);
5897 }
5898 
5899 DISAS_INSN(from_mask)
5900 {
5901     TCGv reg;
5902     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5903     tcg_gen_mov_i32(reg, QREG_MAC_MASK);
5904 }
5905 
5906 DISAS_INSN(from_mext)
5907 {
5908     TCGv reg;
5909     TCGv acc;
5910     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5911     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5912     if (s->env->macsr & MACSR_FI)
5913         gen_helper_get_mac_extf(reg, cpu_env, acc);
5914     else
5915         gen_helper_get_mac_exti(reg, cpu_env, acc);
5916 }
5917 
5918 DISAS_INSN(macsr_to_ccr)
5919 {
5920     TCGv tmp = tcg_temp_new();
5921 
5922     /* Note that X and C are always cleared. */
5923     tcg_gen_andi_i32(tmp, QREG_MACSR, CCF_N | CCF_Z | CCF_V);
5924     gen_helper_set_ccr(cpu_env, tmp);
5925     tcg_temp_free(tmp);
5926     set_cc_op(s, CC_OP_FLAGS);
5927 }
5928 
/*
 * Write a 32-bit EA operand into an accumulator (insn bits 9-10),
 * extended according to the current MACSR mode, and refresh the
 * MAC flags for that accumulator.
 */
DISAS_INSN(to_mac)
{
    TCGv_i64 acc;
    TCGv val;
    int accnum;
    accnum = (insn >> 9) & 3;
    acc = MACREG(accnum);
    SRC_EA(env, val, OS_LONG, 0, NULL);
    if (s->env->macsr & MACSR_FI) {
        /* Fractional mode: sign-extend, then shift left by 8.  */
        tcg_gen_ext_i32_i64(acc, val);
        tcg_gen_shli_i64(acc, acc, 8);
    } else if (s->env->macsr & MACSR_SU) {
        tcg_gen_ext_i32_i64(acc, val);
    } else {
        tcg_gen_extu_i32_i64(acc, val);
    }
    /* Writing the accumulator clears its PAV bit.  */
    tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
    gen_mac_clear_flags();
    gen_helper_mac_set_flags(cpu_env, tcg_const_i32(accnum));
}
5949 
5950 DISAS_INSN(to_macsr)
5951 {
5952     TCGv val;
5953     SRC_EA(env, val, OS_LONG, 0, NULL);
5954     gen_helper_set_macsr(cpu_env, val);
5955     gen_exit_tb(s);
5956 }
5957 
5958 DISAS_INSN(to_mask)
5959 {
5960     TCGv val;
5961     SRC_EA(env, val, OS_LONG, 0, NULL);
5962     tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
5963 }
5964 
5965 DISAS_INSN(to_mext)
5966 {
5967     TCGv val;
5968     TCGv acc;
5969     SRC_EA(env, val, OS_LONG, 0, NULL);
5970     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5971     if (s->env->macsr & MACSR_FI)
5972         gen_helper_set_mac_extf(cpu_env, val, acc);
5973     else if (s->env->macsr & MACSR_SU)
5974         gen_helper_set_mac_exts(cpu_env, val, acc);
5975     else
5976         gen_helper_set_mac_extu(cpu_env, val, acc);
5977 }
5978 
/* Dispatch table mapping each 16-bit opcode to its translation handler.  */
static disas_proc opcode_table[65536];
5980 
/*
 * Install 'proc' as the handler for every opcode i in the table such
 * that (i & mask) == opcode.  To keep registration fast, only the range
 * of opcodes that agree with 'opcode' above the highest clear bit of
 * 'mask' is scanned; each entry in that range is still filtered
 * against the full mask.
 */
static void
register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
{
  int i;
  int from;
  int to;

  /* Sanity check.  All set bits must be included in the mask.  */
  if (opcode & ~mask) {
      fprintf(stderr,
              "qemu internal error: bogus opcode definition %04x/%04x\n",
              opcode, mask);
      abort();
  }
  /*
   * This could probably be cleverer.  For now just optimize the case where
   * the top bits are known.
   */
  /* Find the first zero bit in the mask.  */
  i = 0x8000;
  while ((i & mask) != 0)
      i >>= 1;
  /* Iterate over all combinations of this and lower bits.  */
  if (i == 0)
      i = 1;
  else
      i <<= 1;
  /* [from, to) covers every value of the bits at or below that zero bit.  */
  from = opcode & ~(i - 1);
  to = from + i;
  for (i = from; i < to; i++) {
      if ((i & mask) == opcode)
          opcode_table[i] = proc;
  }
}
6015 
6016 /*
6017  * Register m68k opcode handlers.  Order is important.
6018  * Later insn override earlier ones.
6019  */
6020 void register_m68k_insns (CPUM68KState *env)
6021 {
6022     /*
6023      * Build the opcode table only once to avoid
6024      * multithreading issues.
6025      */
6026     if (opcode_table[0] != NULL) {
6027         return;
6028     }
6029 
6030     /*
6031      * use BASE() for instruction available
6032      * for CF_ISA_A and M68000.
6033      */
6034 #define BASE(name, opcode, mask) \
6035     register_opcode(disas_##name, 0x##opcode, 0x##mask)
6036 #define INSN(name, opcode, mask, feature) do { \
6037     if (m68k_feature(env, M68K_FEATURE_##feature)) \
6038         BASE(name, opcode, mask); \
6039     } while(0)
6040     BASE(undef,     0000, 0000);
6041     INSN(arith_im,  0080, fff8, CF_ISA_A);
6042     INSN(arith_im,  0000, ff00, M68K);
6043     INSN(chk2,      00c0, f9c0, CHK2);
6044     INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
6045     BASE(bitop_reg, 0100, f1c0);
6046     BASE(bitop_reg, 0140, f1c0);
6047     BASE(bitop_reg, 0180, f1c0);
6048     BASE(bitop_reg, 01c0, f1c0);
6049     INSN(movep,     0108, f138, MOVEP);
6050     INSN(arith_im,  0280, fff8, CF_ISA_A);
6051     INSN(arith_im,  0200, ff00, M68K);
6052     INSN(undef,     02c0, ffc0, M68K);
6053     INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
6054     INSN(arith_im,  0480, fff8, CF_ISA_A);
6055     INSN(arith_im,  0400, ff00, M68K);
6056     INSN(undef,     04c0, ffc0, M68K);
6057     INSN(arith_im,  0600, ff00, M68K);
6058     INSN(undef,     06c0, ffc0, M68K);
6059     INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
6060     INSN(arith_im,  0680, fff8, CF_ISA_A);
6061     INSN(arith_im,  0c00, ff38, CF_ISA_A);
6062     INSN(arith_im,  0c00, ff00, M68K);
6063     BASE(bitop_im,  0800, ffc0);
6064     BASE(bitop_im,  0840, ffc0);
6065     BASE(bitop_im,  0880, ffc0);
6066     BASE(bitop_im,  08c0, ffc0);
6067     INSN(arith_im,  0a80, fff8, CF_ISA_A);
6068     INSN(arith_im,  0a00, ff00, M68K);
6069 #if defined(CONFIG_SOFTMMU)
6070     INSN(moves,     0e00, ff00, M68K);
6071 #endif
6072     INSN(cas,       0ac0, ffc0, CAS);
6073     INSN(cas,       0cc0, ffc0, CAS);
6074     INSN(cas,       0ec0, ffc0, CAS);
6075     INSN(cas2w,     0cfc, ffff, CAS);
6076     INSN(cas2l,     0efc, ffff, CAS);
6077     BASE(move,      1000, f000);
6078     BASE(move,      2000, f000);
6079     BASE(move,      3000, f000);
6080     INSN(chk,       4000, f040, M68K);
6081     INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
6082     INSN(negx,      4080, fff8, CF_ISA_A);
6083     INSN(negx,      4000, ff00, M68K);
6084     INSN(undef,     40c0, ffc0, M68K);
6085     INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
6086     INSN(move_from_sr, 40c0, ffc0, M68K);
6087     BASE(lea,       41c0, f1c0);
6088     BASE(clr,       4200, ff00);
6089     BASE(undef,     42c0, ffc0);
6090     INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
6091     INSN(move_from_ccr, 42c0, ffc0, M68K);
6092     INSN(neg,       4480, fff8, CF_ISA_A);
6093     INSN(neg,       4400, ff00, M68K);
6094     INSN(undef,     44c0, ffc0, M68K);
6095     BASE(move_to_ccr, 44c0, ffc0);
6096     INSN(not,       4680, fff8, CF_ISA_A);
6097     INSN(not,       4600, ff00, M68K);
6098 #if defined(CONFIG_SOFTMMU)
6099     BASE(move_to_sr, 46c0, ffc0);
6100 #endif
6101     INSN(nbcd,      4800, ffc0, M68K);
6102     INSN(linkl,     4808, fff8, M68K);
6103     BASE(pea,       4840, ffc0);
6104     BASE(swap,      4840, fff8);
6105     INSN(bkpt,      4848, fff8, BKPT);
6106     INSN(movem,     48d0, fbf8, CF_ISA_A);
6107     INSN(movem,     48e8, fbf8, CF_ISA_A);
6108     INSN(movem,     4880, fb80, M68K);
6109     BASE(ext,       4880, fff8);
6110     BASE(ext,       48c0, fff8);
6111     BASE(ext,       49c0, fff8);
6112     BASE(tst,       4a00, ff00);
6113     INSN(tas,       4ac0, ffc0, CF_ISA_B);
6114     INSN(tas,       4ac0, ffc0, M68K);
6115 #if defined(CONFIG_SOFTMMU)
6116     INSN(halt,      4ac8, ffff, CF_ISA_A);
6117     INSN(halt,      4ac8, ffff, M68K);
6118 #endif
6119     INSN(pulse,     4acc, ffff, CF_ISA_A);
6120     BASE(illegal,   4afc, ffff);
6121     INSN(mull,      4c00, ffc0, CF_ISA_A);
6122     INSN(mull,      4c00, ffc0, LONG_MULDIV);
6123     INSN(divl,      4c40, ffc0, CF_ISA_A);
6124     INSN(divl,      4c40, ffc0, LONG_MULDIV);
6125     INSN(sats,      4c80, fff8, CF_ISA_B);
6126     BASE(trap,      4e40, fff0);
6127     BASE(link,      4e50, fff8);
6128     BASE(unlk,      4e58, fff8);
6129 #if defined(CONFIG_SOFTMMU)
6130     INSN(move_to_usp, 4e60, fff8, USP);
6131     INSN(move_from_usp, 4e68, fff8, USP);
6132     INSN(reset,     4e70, ffff, M68K);
6133     BASE(stop,      4e72, ffff);
6134     BASE(rte,       4e73, ffff);
6135     INSN(cf_movec,  4e7b, ffff, CF_ISA_A);
6136     INSN(m68k_movec, 4e7a, fffe, MOVEC);
6137 #endif
6138     BASE(nop,       4e71, ffff);
6139     INSN(rtd,       4e74, ffff, RTD);
6140     BASE(rts,       4e75, ffff);
6141     INSN(trapv,     4e76, ffff, M68K);
6142     INSN(rtr,       4e77, ffff, M68K);
6143     BASE(jump,      4e80, ffc0);
6144     BASE(jump,      4ec0, ffc0);
6145     INSN(addsubq,   5000, f080, M68K);
6146     BASE(addsubq,   5080, f0c0);
6147     INSN(scc,       50c0, f0f8, CF_ISA_A); /* Scc.B Dx   */
6148     INSN(scc,       50c0, f0c0, M68K);     /* Scc.B <EA> */
6149     INSN(dbcc,      50c8, f0f8, M68K);
6150     INSN(trapcc,    50fa, f0fe, TRAPCC);   /* opmode 010, 011 */
6151     INSN(trapcc,    50fc, f0ff, TRAPCC);   /* opmode 100 */
6152     INSN(trapcc,    51fa, fffe, CF_ISA_A); /* TPF (trapf) opmode 010, 011 */
6153     INSN(trapcc,    51fc, ffff, CF_ISA_A); /* TPF (trapf) opmode 100 */
6154 
6155     /* Branch instructions.  */
6156     BASE(branch,    6000, f000);
6157     /* Disable long branch instructions, then add back the ones we want.  */
6158     BASE(undef,     60ff, f0ff); /* All long branches.  */
6159     INSN(branch,    60ff, f0ff, CF_ISA_B);
6160     INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
6161     INSN(branch,    60ff, ffff, BRAL);
6162     INSN(branch,    60ff, f0ff, BCCL);
6163 
6164     BASE(moveq,     7000, f100);
6165     INSN(mvzs,      7100, f100, CF_ISA_B);
6166     BASE(or,        8000, f000);
6167     BASE(divw,      80c0, f0c0);
6168     INSN(sbcd_reg,  8100, f1f8, M68K);
6169     INSN(sbcd_mem,  8108, f1f8, M68K);
6170     BASE(addsub,    9000, f000);
6171     INSN(undef,     90c0, f0c0, CF_ISA_A);
6172     INSN(subx_reg,  9180, f1f8, CF_ISA_A);
6173     INSN(subx_reg,  9100, f138, M68K);
6174     INSN(subx_mem,  9108, f138, M68K);
6175     INSN(suba,      91c0, f1c0, CF_ISA_A);
6176     INSN(suba,      90c0, f0c0, M68K);
6177 
6178     BASE(undef_mac, a000, f000);
6179     INSN(mac,       a000, f100, CF_EMAC);
6180     INSN(from_mac,  a180, f9b0, CF_EMAC);
6181     INSN(move_mac,  a110, f9fc, CF_EMAC);
6182     INSN(from_macsr,a980, f9f0, CF_EMAC);
6183     INSN(from_mask, ad80, fff0, CF_EMAC);
6184     INSN(from_mext, ab80, fbf0, CF_EMAC);
6185     INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
6186     INSN(to_mac,    a100, f9c0, CF_EMAC);
6187     INSN(to_macsr,  a900, ffc0, CF_EMAC);
6188     INSN(to_mext,   ab00, fbc0, CF_EMAC);
6189     INSN(to_mask,   ad00, ffc0, CF_EMAC);
6190 
6191     INSN(mov3q,     a140, f1c0, CF_ISA_B);
6192     INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
6193     INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
6194     INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
6195     INSN(cmp,       b080, f1c0, CF_ISA_A);
6196     INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
6197     INSN(cmp,       b000, f100, M68K);
6198     INSN(eor,       b100, f100, M68K);
6199     INSN(cmpm,      b108, f138, M68K);
6200     INSN(cmpa,      b0c0, f0c0, M68K);
6201     INSN(eor,       b180, f1c0, CF_ISA_A);
6202     BASE(and,       c000, f000);
6203     INSN(exg_dd,    c140, f1f8, M68K);
6204     INSN(exg_aa,    c148, f1f8, M68K);
6205     INSN(exg_da,    c188, f1f8, M68K);
6206     BASE(mulw,      c0c0, f0c0);
6207     INSN(abcd_reg,  c100, f1f8, M68K);
6208     INSN(abcd_mem,  c108, f1f8, M68K);
6209     BASE(addsub,    d000, f000);
6210     INSN(undef,     d0c0, f0c0, CF_ISA_A);
6211     INSN(addx_reg,      d180, f1f8, CF_ISA_A);
6212     INSN(addx_reg,  d100, f138, M68K);
6213     INSN(addx_mem,  d108, f138, M68K);
6214     INSN(adda,      d1c0, f1c0, CF_ISA_A);
6215     INSN(adda,      d0c0, f0c0, M68K);
6216     INSN(shift_im,  e080, f0f0, CF_ISA_A);
6217     INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
6218     INSN(shift8_im, e000, f0f0, M68K);
6219     INSN(shift16_im, e040, f0f0, M68K);
6220     INSN(shift_im,  e080, f0f0, M68K);
6221     INSN(shift8_reg, e020, f0f0, M68K);
6222     INSN(shift16_reg, e060, f0f0, M68K);
6223     INSN(shift_reg, e0a0, f0f0, M68K);
6224     INSN(shift_mem, e0c0, fcc0, M68K);
6225     INSN(rotate_im, e090, f0f0, M68K);
6226     INSN(rotate8_im, e010, f0f0, M68K);
6227     INSN(rotate16_im, e050, f0f0, M68K);
6228     INSN(rotate_reg, e0b0, f0f0, M68K);
6229     INSN(rotate8_reg, e030, f0f0, M68K);
6230     INSN(rotate16_reg, e070, f0f0, M68K);
6231     INSN(rotate_mem, e4c0, fcc0, M68K);
6232     INSN(bfext_mem, e9c0, fdc0, BITFIELD);  /* bfextu & bfexts */
6233     INSN(bfext_reg, e9c0, fdf8, BITFIELD);
6234     INSN(bfins_mem, efc0, ffc0, BITFIELD);
6235     INSN(bfins_reg, efc0, fff8, BITFIELD);
6236     INSN(bfop_mem, eac0, ffc0, BITFIELD);   /* bfchg */
6237     INSN(bfop_reg, eac0, fff8, BITFIELD);   /* bfchg */
6238     INSN(bfop_mem, ecc0, ffc0, BITFIELD);   /* bfclr */
6239     INSN(bfop_reg, ecc0, fff8, BITFIELD);   /* bfclr */
6240     INSN(bfop_mem, edc0, ffc0, BITFIELD);   /* bfffo */
6241     INSN(bfop_reg, edc0, fff8, BITFIELD);   /* bfffo */
6242     INSN(bfop_mem, eec0, ffc0, BITFIELD);   /* bfset */
6243     INSN(bfop_reg, eec0, fff8, BITFIELD);   /* bfset */
6244     INSN(bfop_mem, e8c0, ffc0, BITFIELD);   /* bftst */
6245     INSN(bfop_reg, e8c0, fff8, BITFIELD);   /* bftst */
6246     BASE(undef_fpu, f000, f000);
6247     INSN(fpu,       f200, ffc0, CF_FPU);
6248     INSN(fbcc,      f280, ffc0, CF_FPU);
6249     INSN(fpu,       f200, ffc0, FPU);
6250     INSN(fscc,      f240, ffc0, FPU);
6251     INSN(ftrapcc,   f27a, fffe, FPU);       /* opmode 010, 011 */
6252     INSN(ftrapcc,   f27c, ffff, FPU);       /* opmode 100 */
6253     INSN(fbcc,      f280, ff80, FPU);
6254 #if defined(CONFIG_SOFTMMU)
6255     INSN(frestore,  f340, ffc0, CF_FPU);
6256     INSN(fsave,     f300, ffc0, CF_FPU);
6257     INSN(frestore,  f340, ffc0, FPU);
6258     INSN(fsave,     f300, ffc0, FPU);
6259     INSN(intouch,   f340, ffc0, CF_ISA_A);
6260     INSN(cpushl,    f428, ff38, CF_ISA_A);
6261     INSN(cpush,     f420, ff20, M68040);
6262     INSN(cinv,      f400, ff20, M68040);
6263     INSN(pflush,    f500, ffe0, M68040);
6264     INSN(ptest,     f548, ffd8, M68040);
6265     INSN(wddata,    fb00, ff00, CF_ISA_A);
6266     INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
6267 #endif
6268     INSN(move16_mem, f600, ffe0, M68040);
6269     INSN(move16_reg, f620, fff8, M68040);
6270 #undef INSN
6271 }
6272 
6273 static void m68k_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
6274 {
6275     DisasContext *dc = container_of(dcbase, DisasContext, base);
6276     CPUM68KState *env = cpu->env_ptr;
6277 
6278     dc->env = env;
6279     dc->pc = dc->base.pc_first;
6280     /* This value will always be filled in properly before m68k_tr_tb_stop. */
6281     dc->pc_prev = 0xdeadbeef;
6282     dc->cc_op = CC_OP_DYNAMIC;
6283     dc->cc_op_synced = 1;
6284     dc->done_mac = 0;
6285     dc->writeback_mask = 0;
6286     init_release_array(dc);
6287 
6288     dc->ss_active = (M68K_SR_TRACE(env->sr) == M68K_SR_TRACE_ANY_INS);
6289     /* If architectural single step active, limit to 1 */
6290     if (dc->ss_active) {
6291         dc->base.max_insns = 1;
6292     }
6293 }
6294 
/* No per-TB setup is required for m68k; hook present to satisfy TranslatorOps. */
static void m68k_tr_tb_start(DisasContextBase *dcbase, CPUState *cpu)
{
}
6298 
6299 static void m68k_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
6300 {
6301     DisasContext *dc = container_of(dcbase, DisasContext, base);
6302     tcg_gen_insn_start(dc->base.pc_next, dc->cc_op);
6303 }
6304 
6305 static void m68k_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
6306 {
6307     DisasContext *dc = container_of(dcbase, DisasContext, base);
6308     CPUM68KState *env = cpu->env_ptr;
6309     uint16_t insn = read_im16(env, dc);
6310 
6311     opcode_table[insn](env, dc, insn);
6312     do_writebacks(dc);
6313     do_release(dc);
6314 
6315     dc->pc_prev = dc->base.pc_next;
6316     dc->base.pc_next = dc->pc;
6317 
6318     if (dc->base.is_jmp == DISAS_NEXT) {
6319         /*
6320          * Stop translation when the next insn might touch a new page.
6321          * This ensures that prefetch aborts at the right place.
6322          *
6323          * We cannot determine the size of the next insn without
6324          * completely decoding it.  However, the maximum insn size
6325          * is 32 bytes, so end if we do not have that much remaining.
6326          * This may produce several small TBs at the end of each page,
6327          * but they will all be linked with goto_tb.
6328          *
6329          * ??? ColdFire maximum is 4 bytes; MC68000's maximum is also
6330          * smaller than MC68020's.
6331          */
6332         target_ulong start_page_offset
6333             = dc->pc - (dc->base.pc_first & TARGET_PAGE_MASK);
6334 
6335         if (start_page_offset >= TARGET_PAGE_SIZE - 32) {
6336             dc->base.is_jmp = DISAS_TOO_MANY;
6337         }
6338     }
6339 }
6340 
6341 static void m68k_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
6342 {
6343     DisasContext *dc = container_of(dcbase, DisasContext, base);
6344 
6345     switch (dc->base.is_jmp) {
6346     case DISAS_NORETURN:
6347         break;
6348     case DISAS_TOO_MANY:
6349         update_cc_op(dc);
6350         gen_jmp_tb(dc, 0, dc->pc, dc->pc_prev);
6351         break;
6352     case DISAS_JUMP:
6353         /* We updated CC_OP and PC in gen_jmp/gen_jmp_im.  */
6354         if (dc->ss_active) {
6355             gen_raise_exception_format2(dc, EXCP_TRACE, dc->pc_prev);
6356         } else {
6357             tcg_gen_lookup_and_goto_ptr();
6358         }
6359         break;
6360     case DISAS_EXIT:
6361         /*
6362          * We updated CC_OP and PC in gen_exit_tb, but also modified
6363          * other state that may require returning to the main loop.
6364          */
6365         if (dc->ss_active) {
6366             gen_raise_exception_format2(dc, EXCP_TRACE, dc->pc_prev);
6367         } else {
6368             tcg_gen_exit_tb(NULL, 0);
6369         }
6370         break;
6371     default:
6372         g_assert_not_reached();
6373     }
6374 }
6375 
6376 static void m68k_tr_disas_log(const DisasContextBase *dcbase,
6377                               CPUState *cpu, FILE *logfile)
6378 {
6379     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
6380     target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
6381 }
6382 
/* Hooks invoked by the generic translator_loop for the m68k target. */
static const TranslatorOps m68k_tr_ops = {
    .init_disas_context = m68k_tr_init_disas_context,
    .tb_start           = m68k_tr_tb_start,
    .insn_start         = m68k_tr_insn_start,
    .translate_insn     = m68k_tr_translate_insn,
    .tb_stop            = m68k_tr_tb_stop,
    .disas_log          = m68k_tr_disas_log,
};
6391 
6392 void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int max_insns,
6393                            target_ulong pc, void *host_pc)
6394 {
6395     DisasContext dc;
6396     translator_loop(cpu, tb, max_insns, pc, host_pc, &m68k_tr_ops, &dc.base);
6397 }
6398 
6399 static double floatx80_to_double(CPUM68KState *env, uint16_t high, uint64_t low)
6400 {
6401     floatx80 a = { .high = high, .low = low };
6402     union {
6403         float64 f64;
6404         double d;
6405     } u;
6406 
6407     u.f64 = floatx80_to_float64(a, &env->fp_status);
6408     return u.d;
6409 }
6410 
/*
 * Dump CPU register state to @f for 'info registers' / debug logging.
 * Prints data/address/FP registers, PC, SR with decoded condition codes,
 * FPSR/FPCR, and (softmmu only) the alternate stack pointers and MMU state.
 */
void m68k_cpu_dump_state(CPUState *cs, FILE *f, int flags)
{
    M68kCPU *cpu = M68K_CPU(cs);
    CPUM68KState *env = &cpu->env;
    int i;
    uint16_t sr;
    /* One line per register number: Dn, An, and FPn (raw bits + decimal). */
    for (i = 0; i < 8; i++) {
        qemu_fprintf(f, "D%d = %08x   A%d = %08x   "
                     "F%d = %04x %016"PRIx64"  (%12g)\n",
                     i, env->dregs[i], i, env->aregs[i],
                     i, env->fregs[i].l.upper, env->fregs[i].l.lower,
                     floatx80_to_double(env, env->fregs[i].l.upper,
                                        env->fregs[i].l.lower));
    }
    qemu_fprintf(f, "PC = %08x   ", env->pc);
    /* Fold the lazily-computed CCR flags into the cached SR value. */
    sr = env->sr | cpu_m68k_get_ccr(env);
    qemu_fprintf(f, "SR = %04x T:%x I:%x %c%c %c%c%c%c%c\n",
                 sr, (sr & SR_T) >> SR_T_SHIFT, (sr & SR_I) >> SR_I_SHIFT,
                 (sr & SR_S) ? 'S' : 'U', (sr & SR_M) ? '%' : 'I',
                 (sr & CCF_X) ? 'X' : '-', (sr & CCF_N) ? 'N' : '-',
                 (sr & CCF_Z) ? 'Z' : '-', (sr & CCF_V) ? 'V' : '-',
                 (sr & CCF_C) ? 'C' : '-');
    qemu_fprintf(f, "FPSR = %08x %c%c%c%c ", env->fpsr,
                 (env->fpsr & FPSR_CC_A) ? 'A' : '-',
                 (env->fpsr & FPSR_CC_I) ? 'I' : '-',
                 (env->fpsr & FPSR_CC_Z) ? 'Z' : '-',
                 (env->fpsr & FPSR_CC_N) ? 'N' : '-');
    qemu_fprintf(f, "\n                                "
                 "FPCR =     %04x ", env->fpcr);
    /* Decode the FPCR rounding-precision field. */
    switch (env->fpcr & FPCR_PREC_MASK) {
    case FPCR_PREC_X:
        qemu_fprintf(f, "X ");
        break;
    case FPCR_PREC_S:
        qemu_fprintf(f, "S ");
        break;
    case FPCR_PREC_D:
        qemu_fprintf(f, "D ");
        break;
    }
    /* Decode the FPCR rounding-mode field. */
    switch (env->fpcr & FPCR_RND_MASK) {
    case FPCR_RND_N:
        qemu_fprintf(f, "RN ");
        break;
    case FPCR_RND_Z:
        qemu_fprintf(f, "RZ ");
        break;
    case FPCR_RND_M:
        qemu_fprintf(f, "RM ");
        break;
    case FPCR_RND_P:
        qemu_fprintf(f, "RP ");
        break;
    }
    qemu_fprintf(f, "\n");
#ifdef CONFIG_SOFTMMU
    /* Show all three A7 banks; "->" marks the currently active one. */
    qemu_fprintf(f, "%sA7(MSP) = %08x %sA7(USP) = %08x %sA7(ISP) = %08x\n",
                 env->current_sp == M68K_SSP ? "->" : "  ", env->sp[M68K_SSP],
                 env->current_sp == M68K_USP ? "->" : "  ", env->sp[M68K_USP],
                 env->current_sp == M68K_ISP ? "->" : "  ", env->sp[M68K_ISP]);
    qemu_fprintf(f, "VBR = 0x%08x\n", env->vbr);
    qemu_fprintf(f, "SFC = %x DFC %x\n", env->sfc, env->dfc);
    qemu_fprintf(f, "SSW %08x TCR %08x URP %08x SRP %08x\n",
                 env->mmu.ssw, env->mmu.tcr, env->mmu.urp, env->mmu.srp);
    qemu_fprintf(f, "DTTR0/1: %08x/%08x ITTR0/1: %08x/%08x\n",
                 env->mmu.ttr[M68K_DTTR0], env->mmu.ttr[M68K_DTTR1],
                 env->mmu.ttr[M68K_ITTR0], env->mmu.ttr[M68K_ITTR1]);
    qemu_fprintf(f, "MMUSR %08x, fault at %08x\n",
                 env->mmu.mmusr, env->mmu.ar);
#endif
}
6482