/* xref: /openbmc/qemu/target/m68k/translate.c (revision ea06a006) */
1 /*
2  *  m68k translation
3  *
4  *  Copyright (c) 2005-2007 CodeSourcery
5  *  Written by Paul Brook
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "disas/disas.h"
24 #include "exec/exec-all.h"
25 #include "tcg/tcg-op.h"
26 #include "qemu/log.h"
27 #include "qemu/qemu-print.h"
28 #include "exec/cpu_ldst.h"
29 #include "exec/translator.h"
30 
31 #include "exec/helper-proto.h"
32 #include "exec/helper-gen.h"
33 
34 #include "exec/log.h"
35 #include "fpu/softfloat.h"
36 
37 
//#define DEBUG_DISPATCH 1

/* Declare one static TCG global (32- or 64-bit) per entry in qregs.def. */
#define DEFO32(name, offset) static TCGv QREG_##name;
#define DEFO64(name, offset) static TCGv_i64 QREG_##name;
#include "qregs.def"
#undef DEFO32
#undef DEFO64

static TCGv_i32 cpu_halted;
static TCGv_i32 cpu_exception_index;

/* Backing storage for register names: "Dn"/"An" are 3 bytes, "ACCn" 5. */
static char cpu_reg_names[2 * 8 * 3 + 5 * 4];
static TCGv cpu_dregs[8];
static TCGv cpu_aregs[8];
static TCGv_i64 cpu_macc[4];

/* Extract a 3-bit register number from INSN at bit position POS. */
#define REG(insn, pos)  (((insn) >> (pos)) & 7)
#define DREG(insn, pos) cpu_dregs[REG(insn, pos)]
/* Address registers go through get_areg() so pending writebacks are seen. */
#define AREG(insn, pos) get_areg(s, REG(insn, pos))
#define MACREG(acc)     cpu_macc[acc]
#define QREG_SP         get_areg(s, 7)

/* Sentinel TCGv used to signal an invalid effective address. */
static TCGv NULL_QREG;
#define IS_NULL_QREG(t) (t == NULL_QREG)
/* Used to distinguish stores from bad addressing modes.  */
static TCGv store_dummy;

#include "exec/gen-icount.h"
66 
/*
 * One-time creation of the TCG globals mirroring CPUM68KState fields.
 * Called once at target initialization.
 */
void m68k_tcg_init(void)
{
    char *p;
    int i;

/* Instantiate each QREG_* global declared via qregs.def above. */
#define DEFO32(name, offset) \
    QREG_##name = tcg_global_mem_new_i32(cpu_env, \
        offsetof(CPUM68KState, offset), #name);
#define DEFO64(name, offset) \
    QREG_##name = tcg_global_mem_new_i64(cpu_env, \
        offsetof(CPUM68KState, offset), #name);
#include "qregs.def"
#undef DEFO32
#undef DEFO64

    /*
     * halted/exception_index live in CPUState, which precedes env inside
     * M68kCPU; hence the negative offset relative to cpu_env.
     */
    cpu_halted = tcg_global_mem_new_i32(cpu_env,
                                        -offsetof(M68kCPU, env) +
                                        offsetof(CPUState, halted), "HALTED");
    cpu_exception_index = tcg_global_mem_new_i32(cpu_env,
                                                 -offsetof(M68kCPU, env) +
                                                 offsetof(CPUState, exception_index),
                                                 "EXCEPTION");

    /* Register names are packed into cpu_reg_names: "Dn\0"/"An\0" = 3 bytes. */
    p = cpu_reg_names;
    for (i = 0; i < 8; i++) {
        sprintf(p, "D%d", i);
        cpu_dregs[i] = tcg_global_mem_new(cpu_env,
                                          offsetof(CPUM68KState, dregs[i]), p);
        p += 3;
        sprintf(p, "A%d", i);
        cpu_aregs[i] = tcg_global_mem_new(cpu_env,
                                          offsetof(CPUM68KState, aregs[i]), p);
        p += 3;
    }
    /* "ACCn\0" = 5 bytes each. */
    for (i = 0; i < 4; i++) {
        sprintf(p, "ACC%d", i);
        cpu_macc[i] = tcg_global_mem_new_i64(cpu_env,
                                         offsetof(CPUM68KState, macc[i]), p);
        p += 5;
    }

    /* Sentinels only compared by identity; the offsets are never accessed. */
    NULL_QREG = tcg_global_mem_new(cpu_env, -4, "NULL");
    store_dummy = tcg_global_mem_new(cpu_env, -8, "NULL");
}
111 
/* internal defines */

/* Per-translation-block disassembly state. */
typedef struct DisasContext {
    DisasContextBase base;
    CPUM68KState *env;
    target_ulong pc;        /* address of the next instruction word to fetch */
    CCOp cc_op; /* Current CC operation */
    int cc_op_synced;       /* nonzero when env CC_OP matches cc_op */
    TCGv_i64 mactmp;        /* scratch temp for MAC instructions */
    int done_mac;
    int writeback_mask;     /* aregs with a pending delayed writeback */
    TCGv writeback[8];      /* delayed new values, one per address register */
#define MAX_TO_RELEASE 8
    int release_count;      /* temps queued for freeing at insn end */
    TCGv release[MAX_TO_RELEASE];
    bool ss_active;
} DisasContext;
128 
129 static void init_release_array(DisasContext *s)
130 {
131 #ifdef CONFIG_DEBUG_TCG
132     memset(s->release, 0, sizeof(s->release));
133 #endif
134     s->release_count = 0;
135 }
136 
137 static void do_release(DisasContext *s)
138 {
139     int i;
140     for (i = 0; i < s->release_count; i++) {
141         tcg_temp_free(s->release[i]);
142     }
143     init_release_array(s);
144 }
145 
146 static TCGv mark_to_release(DisasContext *s, TCGv tmp)
147 {
148     g_assert(s->release_count < MAX_TO_RELEASE);
149     return s->release[s->release_count++] = tmp;
150 }
151 
152 static TCGv get_areg(DisasContext *s, unsigned regno)
153 {
154     if (s->writeback_mask & (1 << regno)) {
155         return s->writeback[regno];
156     } else {
157         return cpu_aregs[regno];
158     }
159 }
160 
161 static void delay_set_areg(DisasContext *s, unsigned regno,
162                            TCGv val, bool give_temp)
163 {
164     if (s->writeback_mask & (1 << regno)) {
165         if (give_temp) {
166             tcg_temp_free(s->writeback[regno]);
167             s->writeback[regno] = val;
168         } else {
169             tcg_gen_mov_i32(s->writeback[regno], val);
170         }
171     } else {
172         s->writeback_mask |= 1 << regno;
173         if (give_temp) {
174             s->writeback[regno] = val;
175         } else {
176             TCGv tmp = tcg_temp_new();
177             s->writeback[regno] = tmp;
178             tcg_gen_mov_i32(tmp, val);
179         }
180     }
181 }
182 
183 static void do_writebacks(DisasContext *s)
184 {
185     unsigned mask = s->writeback_mask;
186     if (mask) {
187         s->writeback_mask = 0;
188         do {
189             unsigned regno = ctz32(mask);
190             tcg_gen_mov_i32(cpu_aregs[regno], s->writeback[regno]);
191             tcg_temp_free(s->writeback[regno]);
192             mask &= mask - 1;
193         } while (mask);
194     }
195 }
196 
/* is_jmp field values */
#define DISAS_JUMP      DISAS_TARGET_0 /* only pc was modified dynamically */
#define DISAS_EXIT      DISAS_TARGET_1 /* cpu state was modified dynamically */

#if defined(CONFIG_USER_ONLY)
#define IS_USER(s) 1
#else
/* True when the TB was translated for user (non-supervisor) mode. */
#define IS_USER(s)   (!(s->base.tb->flags & TB_FLAGS_MSR_S))
/* MMU indices selected by the source/destination function-code state. */
#define SFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_SFC_S) ? \
                      MMU_KERNEL_IDX : MMU_USER_IDX)
#define DFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_DFC_S) ? \
                      MMU_KERNEL_IDX : MMU_USER_IDX)
#endif

/* Signature shared by all per-instruction disassembly handlers. */
typedef void (*disas_proc)(CPUM68KState *env, DisasContext *s, uint16_t insn);

#ifdef DEBUG_DISPATCH
/* Debug build: wrap each handler so dispatch logs the handler name. */
#define DISAS_INSN(name)                                                \
    static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
                                  uint16_t insn);                       \
    static void disas_##name(CPUM68KState *env, DisasContext *s,        \
                             uint16_t insn)                             \
    {                                                                   \
        qemu_log("Dispatch " #name "\n");                               \
        real_disas_##name(env, s, insn);                                \
    }                                                                   \
    static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
                                  uint16_t insn)
#else
#define DISAS_INSN(name)                                                \
    static void disas_##name(CPUM68KState *env, DisasContext *s,        \
                             uint16_t insn)
#endif
230 
/*
 * For each CC_OP, the set of CCF_* flags whose computation inputs are
 * kept live in the QREG_CC_* globals.  set_cc_op() uses this to discard
 * values that the new op no longer needs.
 */
static const uint8_t cc_op_live[CC_OP_NB] = {
    [CC_OP_DYNAMIC] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
    [CC_OP_FLAGS] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
    [CC_OP_ADDB ... CC_OP_ADDL] = CCF_X | CCF_N | CCF_V,
    [CC_OP_SUBB ... CC_OP_SUBL] = CCF_X | CCF_N | CCF_V,
    [CC_OP_CMPB ... CC_OP_CMPL] = CCF_X | CCF_N | CCF_V,
    [CC_OP_LOGIC] = CCF_X | CCF_N
};
239 
/*
 * Switch the lazy condition-code tracking to OP, discarding any
 * QREG_CC_* values that OP will not consume.
 */
static void set_cc_op(DisasContext *s, CCOp op)
{
    CCOp old_op = s->cc_op;
    int dead;

    if (old_op == op) {
        return;
    }
    /* Tracked locally until update_cc_op() writes it to env. */
    s->cc_op = op;
    s->cc_op_synced = 0;

    /*
     * Discard CC computation that will no longer be used.
     * Note that X and N are never dead.
     */
    dead = cc_op_live[old_op] & ~cc_op_live[op];
    if (dead & CCF_C) {
        tcg_gen_discard_i32(QREG_CC_C);
    }
    if (dead & CCF_Z) {
        tcg_gen_discard_i32(QREG_CC_Z);
    }
    if (dead & CCF_V) {
        tcg_gen_discard_i32(QREG_CC_V);
    }
}
266 
267 /* Update the CPU env CC_OP state.  */
268 static void update_cc_op(DisasContext *s)
269 {
270     if (!s->cc_op_synced) {
271         s->cc_op_synced = 1;
272         tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
273     }
274 }
275 
/* Generate a jump to an immediate address.  */
static void gen_jmp_im(DisasContext *s, uint32_t dest)
{
    /* Flags must be in sync before leaving the TB. */
    update_cc_op(s);
    tcg_gen_movi_i32(QREG_PC, dest);
    s->base.is_jmp = DISAS_JUMP;
}
283 
/* Generate a jump to the address in qreg DEST.  */
static void gen_jmp(DisasContext *s, TCGv dest)
{
    /* Flags must be in sync before leaving the TB. */
    update_cc_op(s);
    tcg_gen_mov_i32(QREG_PC, dest);
    s->base.is_jmp = DISAS_JUMP;
}
291 
/* Emit a call to the raise_exception helper with exception number NR. */
static void gen_raise_exception(int nr)
{
    TCGv_i32 tmp;

    tmp = tcg_const_i32(nr);
    gen_helper_raise_exception(cpu_env, tmp);
    tcg_temp_free_i32(tmp);
}
300 
/* Raise exception NR with the PC set to DEST; ends the TB. */
static void gen_exception(DisasContext *s, uint32_t dest, int nr)
{
    update_cc_op(s);
    /* Make the faulting PC visible before raising. */
    tcg_gen_movi_i32(QREG_PC, dest);

    gen_raise_exception(nr);

    s->base.is_jmp = DISAS_NORETURN;
}
310 
/* Raise an address-error exception at the current instruction. */
static inline void gen_addr_fault(DisasContext *s)
{
    gen_exception(s, s->base.pc_next, EXCP_ADDRESS);
}
315 
316 /*
317  * Generate a load from the specified address.  Narrow values are
318  *  sign extended to full register width.
319  */
320 static inline TCGv gen_load(DisasContext *s, int opsize, TCGv addr,
321                             int sign, int index)
322 {
323     TCGv tmp;
324     tmp = tcg_temp_new_i32();
325     switch(opsize) {
326     case OS_BYTE:
327         if (sign)
328             tcg_gen_qemu_ld8s(tmp, addr, index);
329         else
330             tcg_gen_qemu_ld8u(tmp, addr, index);
331         break;
332     case OS_WORD:
333         if (sign)
334             tcg_gen_qemu_ld16s(tmp, addr, index);
335         else
336             tcg_gen_qemu_ld16u(tmp, addr, index);
337         break;
338     case OS_LONG:
339         tcg_gen_qemu_ld32u(tmp, addr, index);
340         break;
341     default:
342         g_assert_not_reached();
343     }
344     return tmp;
345 }
346 
347 /* Generate a store.  */
348 static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val,
349                              int index)
350 {
351     switch(opsize) {
352     case OS_BYTE:
353         tcg_gen_qemu_st8(val, addr, index);
354         break;
355     case OS_WORD:
356         tcg_gen_qemu_st16(val, addr, index);
357         break;
358     case OS_LONG:
359         tcg_gen_qemu_st32(val, addr, index);
360         break;
361     default:
362         g_assert_not_reached();
363     }
364 }
365 
/* Direction/extension selector used by gen_ldst() and gen_ea_mode(). */
typedef enum {
    EA_STORE,   /* write the value to the effective address */
    EA_LOADU,   /* read, zero-extended */
    EA_LOADS    /* read, sign-extended */
} ea_what;
371 
372 /*
373  * Generate an unsigned load if VAL is 0 a signed load if val is -1,
374  * otherwise generate a store.
375  */
376 static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
377                      ea_what what, int index)
378 {
379     if (what == EA_STORE) {
380         gen_store(s, opsize, addr, val, index);
381         return store_dummy;
382     } else {
383         return mark_to_release(s, gen_load(s, opsize, addr,
384                                            what == EA_LOADS, index));
385     }
386 }
387 
/* Read a 16-bit immediate constant */
static inline uint16_t read_im16(CPUM68KState *env, DisasContext *s)
{
    uint16_t im;
    im = translator_lduw(env, &s->base, s->pc);
    s->pc += 2;    /* consume one extension word */
    return im;
}
396 
/* Read an 8-bit immediate constant */
static inline uint8_t read_im8(CPUM68KState *env, DisasContext *s)
{
    /*
     * Byte immediates occupy a full 16-bit extension word; the implicit
     * truncation to uint8_t keeps only the low byte.
     */
    return read_im16(env, s);
}
402 
/* Read a 32-bit immediate constant.  */
static inline uint32_t read_im32(CPUM68KState *env, DisasContext *s)
{
    uint32_t im;
    /* m68k instruction stream is big-endian: high word first. */
    im = read_im16(env, s) << 16;
    im |= 0xffff & read_im16(env, s);
    return im;
}
411 
/* Read a 64-bit immediate constant.  */
static inline uint64_t read_im64(CPUM68KState *env, DisasContext *s)
{
    uint64_t im;
    /* High longword first, matching the big-endian instruction stream. */
    im = (uint64_t)read_im32(env, s) << 32;
    im |= (uint64_t)read_im32(env, s);
    return im;
}
420 
/*
 * Compute the (possibly extended and scaled) index value encoded in
 * extension word EXT.  TMP is caller-provided scratch; the result may
 * alias either TMP or the index register itself.
 */
static TCGv gen_addr_index(DisasContext *s, uint16_t ext, TCGv tmp)
{
    TCGv add;
    int scale;

    /* Bit 15 selects an address register, else a data register. */
    add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
    if ((ext & 0x800) == 0) {
        /* Word-sized index: sign-extend into the scratch temp. */
        tcg_gen_ext16s_i32(tmp, add);
        add = tmp;
    }
    scale = (ext >> 9) & 3;
    if (scale != 0) {
        tcg_gen_shli_i32(tmp, add, scale);
        add = tmp;
    }
    return add;
}
439 
/*
 * Handle a base + index + displacement effective address.
 * A NULL_QREG base means pc-relative.
 */
static TCGv gen_lea_indexed(CPUM68KState *env, DisasContext *s, TCGv base)
{
    uint32_t offset;
    uint16_t ext;
    TCGv add;
    TCGv tmp;
    uint32_t bd, od;

    /* OFFSET is the address of the extension word, used for pc-relative. */
    offset = s->pc;
    ext = read_im16(env, s);

    if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
        return NULL_QREG;

    if (m68k_feature(s->env, M68K_FEATURE_M68000) &&
        !m68k_feature(s->env, M68K_FEATURE_SCALED_INDEX)) {
        /* Without scaled indexing, the scale bits are forced to zero. */
        ext &= ~(3 << 9);
    }

    if (ext & 0x100) {
        /* full extension word format */
        if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
            return NULL_QREG;

        if ((ext & 0x30) > 0x10) {
            /* base displacement */
            if ((ext & 0x30) == 0x20) {
                bd = (int16_t)read_im16(env, s);
            } else {
                bd = read_im32(env, s);
            }
        } else {
            bd = 0;
        }
        tmp = mark_to_release(s, tcg_temp_new());
        if ((ext & 0x44) == 0) {
            /* pre-index */
            add = gen_addr_index(s, ext, tmp);
        } else {
            add = NULL_QREG;
        }
        if ((ext & 0x80) == 0) {
            /* base not suppressed */
            if (IS_NULL_QREG(base)) {
                /* pc-relative: fold pc and displacement into a constant. */
                base = mark_to_release(s, tcg_const_i32(offset + bd));
                bd = 0;
            }
            if (!IS_NULL_QREG(add)) {
                tcg_gen_add_i32(tmp, add, base);
                add = tmp;
            } else {
                add = base;
            }
        }
        if (!IS_NULL_QREG(add)) {
            if (bd != 0) {
                tcg_gen_addi_i32(tmp, add, bd);
                add = tmp;
            }
        } else {
            add = mark_to_release(s, tcg_const_i32(bd));
        }
        if ((ext & 3) != 0) {
            /* memory indirect */
            base = mark_to_release(s, gen_load(s, OS_LONG, add, 0, IS_USER(s)));
            if ((ext & 0x44) == 4) {
                /* post-index */
                add = gen_addr_index(s, ext, tmp);
                tcg_gen_add_i32(tmp, add, base);
                add = tmp;
            } else {
                add = base;
            }
            if ((ext & 3) > 1) {
                /* outer displacement */
                if ((ext & 3) == 2) {
                    od = (int16_t)read_im16(env, s);
                } else {
                    od = read_im32(env, s);
                }
            } else {
                od = 0;
            }
            if (od != 0) {
                tcg_gen_addi_i32(tmp, add, od);
                add = tmp;
            }
        }
    } else {
        /* brief extension word format */
        tmp = mark_to_release(s, tcg_temp_new());
        add = gen_addr_index(s, ext, tmp);
        if (!IS_NULL_QREG(base)) {
            tcg_gen_add_i32(tmp, add, base);
            /* 8-bit displacement lives in the low byte of EXT. */
            if ((int8_t)ext)
                tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
        } else {
            tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
        }
        add = tmp;
    }
    return add;
}
546 
547 /* Sign or zero extend a value.  */
548 
549 static inline void gen_ext(TCGv res, TCGv val, int opsize, int sign)
550 {
551     switch (opsize) {
552     case OS_BYTE:
553         if (sign) {
554             tcg_gen_ext8s_i32(res, val);
555         } else {
556             tcg_gen_ext8u_i32(res, val);
557         }
558         break;
559     case OS_WORD:
560         if (sign) {
561             tcg_gen_ext16s_i32(res, val);
562         } else {
563             tcg_gen_ext16u_i32(res, val);
564         }
565         break;
566     case OS_LONG:
567         tcg_gen_mov_i32(res, val);
568         break;
569     default:
570         g_assert_not_reached();
571     }
572 }
573 
/* Evaluate all the CC flags.  */

static void gen_flush_flags(DisasContext *s)
{
    TCGv t0, t1;

    switch (s->cc_op) {
    case CC_OP_FLAGS:
        /* Already in canonical form. */
        return;

    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
        tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
        /* Compute signed overflow for addition.  */
        /* CC_N holds the result and CC_V the src operand (gen_update_cc_add),
           so N - V reconstructs the first operand. */
        t0 = tcg_temp_new();
        t1 = tcg_temp_new();
        tcg_gen_sub_i32(t0, QREG_CC_N, QREG_CC_V);
        gen_ext(t0, t0, s->cc_op - CC_OP_ADDB, 1);
        tcg_gen_xor_i32(t1, QREG_CC_N, QREG_CC_V);
        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
        tcg_temp_free(t0);
        tcg_gen_andc_i32(QREG_CC_V, t1, QREG_CC_V);
        tcg_temp_free(t1);
        break;

    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
        tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
        /* Compute signed overflow for subtraction.  */
        /* N + V reconstructs the minuend. */
        t0 = tcg_temp_new();
        t1 = tcg_temp_new();
        tcg_gen_add_i32(t0, QREG_CC_N, QREG_CC_V);
        gen_ext(t0, t0, s->cc_op - CC_OP_SUBB, 1);
        tcg_gen_xor_i32(t1, QREG_CC_N, t0);
        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
        tcg_temp_free(t0);
        tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t1);
        tcg_temp_free(t1);
        break;

    case CC_OP_CMPB:
    case CC_OP_CMPW:
    case CC_OP_CMPL:
        /* CC_N holds the first operand, CC_V the second (gen_update_cc_cmp). */
        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_C, QREG_CC_N, QREG_CC_V);
        tcg_gen_sub_i32(QREG_CC_Z, QREG_CC_N, QREG_CC_V);
        gen_ext(QREG_CC_Z, QREG_CC_Z, s->cc_op - CC_OP_CMPB, 1);
        /* Compute signed overflow for subtraction.  */
        t0 = tcg_temp_new();
        tcg_gen_xor_i32(t0, QREG_CC_Z, QREG_CC_N);
        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, QREG_CC_N);
        tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t0);
        tcg_temp_free(t0);
        tcg_gen_mov_i32(QREG_CC_N, QREG_CC_Z);
        break;

    case CC_OP_LOGIC:
        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
        tcg_gen_movi_i32(QREG_CC_C, 0);
        tcg_gen_movi_i32(QREG_CC_V, 0);
        break;

    case CC_OP_DYNAMIC:
        gen_helper_flush_flags(cpu_env, QREG_CC_OP);
        s->cc_op_synced = 1;
        break;

    default:
        t0 = tcg_const_i32(s->cc_op);
        gen_helper_flush_flags(cpu_env, t0);
        tcg_temp_free(t0);
        s->cc_op_synced = 1;
        break;
    }

    /* Note that flush_flags also assigned to env->cc_op.  */
    s->cc_op = CC_OP_FLAGS;
}
655 
656 static inline TCGv gen_extend(DisasContext *s, TCGv val, int opsize, int sign)
657 {
658     TCGv tmp;
659 
660     if (opsize == OS_LONG) {
661         tmp = val;
662     } else {
663         tmp = mark_to_release(s, tcg_temp_new());
664         gen_ext(tmp, val, opsize, sign);
665     }
666 
667     return tmp;
668 }
669 
/* Set CC_N to the sign-extended result and switch to logical CC mode. */
static void gen_logic_cc(DisasContext *s, TCGv val, int opsize)
{
    gen_ext(QREG_CC_N, val, opsize, 1);
    set_cc_op(s, CC_OP_LOGIC);
}
675 
/*
 * Record the compare operands for lazy flag evaluation.
 * Relies on CC_OP_CMPB/W/L being consecutive so OPSIZE can be added.
 */
static void gen_update_cc_cmp(DisasContext *s, TCGv dest, TCGv src, int opsize)
{
    tcg_gen_mov_i32(QREG_CC_N, dest);
    tcg_gen_mov_i32(QREG_CC_V, src);
    set_cc_op(s, CC_OP_CMPB + opsize);
}
682 
/*
 * Stash the result (sign-extended into CC_N) and source operand (CC_V)
 * for lazy add/sub flag evaluation; the caller sets the cc_op.
 */
static void gen_update_cc_add(TCGv dest, TCGv src, int opsize)
{
    gen_ext(QREG_CC_N, dest, opsize, 1);
    tcg_gen_mov_i32(QREG_CC_V, src);
}
688 
689 static inline int opsize_bytes(int opsize)
690 {
691     switch (opsize) {
692     case OS_BYTE: return 1;
693     case OS_WORD: return 2;
694     case OS_LONG: return 4;
695     case OS_SINGLE: return 4;
696     case OS_DOUBLE: return 8;
697     case OS_EXTENDED: return 12;
698     case OS_PACKED: return 12;
699     default:
700         g_assert_not_reached();
701     }
702 }
703 
704 static inline int insn_opsize(int insn)
705 {
706     switch ((insn >> 6) & 3) {
707     case 0: return OS_BYTE;
708     case 1: return OS_WORD;
709     case 2: return OS_LONG;
710     default:
711         g_assert_not_reached();
712     }
713 }
714 
715 static inline int ext_opsize(int ext, int pos)
716 {
717     switch ((ext >> pos) & 7) {
718     case 0: return OS_LONG;
719     case 1: return OS_SINGLE;
720     case 2: return OS_EXTENDED;
721     case 3: return OS_PACKED;
722     case 4: return OS_WORD;
723     case 5: return OS_DOUBLE;
724     case 6: return OS_BYTE;
725     default:
726         g_assert_not_reached();
727     }
728 }
729 
730 /*
731  * Assign value to a register.  If the width is less than the register width
732  * only the low part of the register is set.
733  */
734 static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
735 {
736     TCGv tmp;
737     switch (opsize) {
738     case OS_BYTE:
739         tcg_gen_andi_i32(reg, reg, 0xffffff00);
740         tmp = tcg_temp_new();
741         tcg_gen_ext8u_i32(tmp, val);
742         tcg_gen_or_i32(reg, reg, tmp);
743         tcg_temp_free(tmp);
744         break;
745     case OS_WORD:
746         tcg_gen_andi_i32(reg, reg, 0xffff0000);
747         tmp = tcg_temp_new();
748         tcg_gen_ext16u_i32(tmp, val);
749         tcg_gen_or_i32(reg, reg, tmp);
750         tcg_temp_free(tmp);
751         break;
752     case OS_LONG:
753     case OS_SINGLE:
754         tcg_gen_mov_i32(reg, val);
755         break;
756     default:
757         g_assert_not_reached();
758     }
759 }
760 
/*
 * Generate code for an "effective address".  Does not adjust the base
 * register for autoincrement addressing modes.  Returns NULL_QREG for
 * modes that have no memory address (register direct, immediate) or
 * that are invalid for the given operand size.
 */
static TCGv gen_lea_mode(CPUM68KState *env, DisasContext *s,
                         int mode, int reg0, int opsize)
{
    TCGv reg;
    TCGv tmp;
    uint16_t ext;
    uint32_t offset;

    switch (mode) {
    case 0: /* Data register direct.  */
    case 1: /* Address register direct.  */
        /* Register direct modes have no effective address. */
        return NULL_QREG;
    case 3: /* Indirect postincrement.  */
        if (opsize == OS_UNSIZED) {
            return NULL_QREG;
        }
        /* fallthru */
    case 2: /* Indirect register */
        return get_areg(s, reg0);
    case 4: /* Indirect predecrement.  */
        if (opsize == OS_UNSIZED) {
            return NULL_QREG;
        }
        reg = get_areg(s, reg0);
        tmp = mark_to_release(s, tcg_temp_new());
        if (reg0 == 7 && opsize == OS_BYTE &&
            m68k_feature(s->env, M68K_FEATURE_M68000)) {
            /* Byte pushes through SP keep the stack word-aligned on 68000. */
            tcg_gen_subi_i32(tmp, reg, 2);
        } else {
            tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
        }
        return tmp;
    case 5: /* Indirect displacement.  */
        reg = get_areg(s, reg0);
        tmp = mark_to_release(s, tcg_temp_new());
        ext = read_im16(env, s);
        tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
        return tmp;
    case 6: /* Indirect index + displacement.  */
        reg = get_areg(s, reg0);
        return gen_lea_indexed(env, s, reg);
    case 7: /* Other */
        switch (reg0) {
        case 0: /* Absolute short.  */
            offset = (int16_t)read_im16(env, s);
            return mark_to_release(s, tcg_const_i32(offset));
        case 1: /* Absolute long.  */
            offset = read_im32(env, s);
            return mark_to_release(s, tcg_const_i32(offset));
        case 2: /* pc displacement  */
            offset = s->pc;
            offset += (int16_t)read_im16(env, s);
            return mark_to_release(s, tcg_const_i32(offset));
        case 3: /* pc index+displacement.  */
            return gen_lea_indexed(env, s, NULL_QREG);
        case 4: /* Immediate.  */
        default:
            return NULL_QREG;
        }
    }
    /* Should never happen.  */
    return NULL_QREG;
}
828 
829 static TCGv gen_lea(CPUM68KState *env, DisasContext *s, uint16_t insn,
830                     int opsize)
831 {
832     int mode = extract32(insn, 3, 3);
833     int reg0 = REG(insn, 0);
834     return gen_lea_mode(env, s, mode, reg0, opsize);
835 }
836 
/*
 * Generate code to load/store a value from/into an EA.  WHAT selects a
 * store (EA_STORE) or a sign-/zero-extending load (EA_LOADS/EA_LOADU).
 * ADDRP is non-null for readwrite operands: the address computed by the
 * read phase is saved there so the write phase can reuse it.
 */
static TCGv gen_ea_mode(CPUM68KState *env, DisasContext *s, int mode, int reg0,
                        int opsize, TCGv val, TCGv *addrp, ea_what what,
                        int index)
{
    TCGv reg, tmp, result;
    int32_t offset;

    switch (mode) {
    case 0: /* Data register direct.  */
        reg = cpu_dregs[reg0];
        if (what == EA_STORE) {
            gen_partset_reg(opsize, reg, val);
            return store_dummy;
        } else {
            return gen_extend(s, reg, opsize, what == EA_LOADS);
        }
    case 1: /* Address register direct.  */
        reg = get_areg(s, reg0);
        if (what == EA_STORE) {
            tcg_gen_mov_i32(reg, val);
            return store_dummy;
        } else {
            return gen_extend(s, reg, opsize, what == EA_LOADS);
        }
    case 2: /* Indirect register */
        reg = get_areg(s, reg0);
        return gen_ldst(s, opsize, reg, val, what, index);
    case 3: /* Indirect postincrement.  */
        reg = get_areg(s, reg0);
        result = gen_ldst(s, opsize, reg, val, what, index);
        /* Only increment on the final access of a readwrite operand. */
        if (what == EA_STORE || !addrp) {
            /* NOTE(review): this 'tmp' shadows the outer declaration. */
            TCGv tmp = tcg_temp_new();
            if (reg0 == 7 && opsize == OS_BYTE &&
                m68k_feature(s->env, M68K_FEATURE_M68000)) {
                /* Byte ops on SP move by 2 to keep it word-aligned. */
                tcg_gen_addi_i32(tmp, reg, 2);
            } else {
                tcg_gen_addi_i32(tmp, reg, opsize_bytes(opsize));
            }
            delay_set_areg(s, reg0, tmp, true);
        }
        return result;
    case 4: /* Indirect predecrement.  */
        if (addrp && what == EA_STORE) {
            /* Reuse the address computed by the earlier read phase. */
            tmp = *addrp;
        } else {
            tmp = gen_lea_mode(env, s, mode, reg0, opsize);
            if (IS_NULL_QREG(tmp)) {
                return tmp;
            }
            if (addrp) {
                *addrp = tmp;
            }
        }
        result = gen_ldst(s, opsize, tmp, val, what, index);
        if (what == EA_STORE || !addrp) {
            delay_set_areg(s, reg0, tmp, false);
        }
        return result;
    case 5: /* Indirect displacement.  */
    case 6: /* Indirect index + displacement.  */
    do_indirect:
        if (addrp && what == EA_STORE) {
            tmp = *addrp;
        } else {
            tmp = gen_lea_mode(env, s, mode, reg0, opsize);
            if (IS_NULL_QREG(tmp)) {
                return tmp;
            }
            if (addrp) {
                *addrp = tmp;
            }
        }
        return gen_ldst(s, opsize, tmp, val, what, index);
    case 7: /* Other */
        switch (reg0) {
        case 0: /* Absolute short.  */
        case 1: /* Absolute long.  */
        case 2: /* pc displacement  */
        case 3: /* pc index+displacement.  */
            goto do_indirect;
        case 4: /* Immediate.  */
            /* Sign extend values for consistency.  */
            switch (opsize) {
            case OS_BYTE:
                if (what == EA_LOADS) {
                    offset = (int8_t)read_im8(env, s);
                } else {
                    offset = read_im8(env, s);
                }
                break;
            case OS_WORD:
                if (what == EA_LOADS) {
                    offset = (int16_t)read_im16(env, s);
                } else {
                    offset = read_im16(env, s);
                }
                break;
            case OS_LONG:
                offset = read_im32(env, s);
                break;
            default:
                g_assert_not_reached();
            }
            return mark_to_release(s, tcg_const_i32(offset));
        default:
            return NULL_QREG;
        }
    }
    /* Should never happen.  */
    return NULL_QREG;
}
953 
954 static TCGv gen_ea(CPUM68KState *env, DisasContext *s, uint16_t insn,
955                    int opsize, TCGv val, TCGv *addrp, ea_what what, int index)
956 {
957     int mode = extract32(insn, 3, 3);
958     int reg0 = REG(insn, 0);
959     return gen_ea_mode(env, s, mode, reg0, opsize, val, addrp, what, index);
960 }
961 
962 static TCGv_ptr gen_fp_ptr(int freg)
963 {
964     TCGv_ptr fp = tcg_temp_new_ptr();
965     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fregs[freg]));
966     return fp;
967 }
968 
969 static TCGv_ptr gen_fp_result_ptr(void)
970 {
971     TCGv_ptr fp = tcg_temp_new_ptr();
972     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fp_result));
973     return fp;
974 }
975 
976 static void gen_fp_move(TCGv_ptr dest, TCGv_ptr src)
977 {
978     TCGv t32;
979     TCGv_i64 t64;
980 
981     t32 = tcg_temp_new();
982     tcg_gen_ld16u_i32(t32, src, offsetof(FPReg, l.upper));
983     tcg_gen_st16_i32(t32, dest, offsetof(FPReg, l.upper));
984     tcg_temp_free(t32);
985 
986     t64 = tcg_temp_new_i64();
987     tcg_gen_ld_i64(t64, src, offsetof(FPReg, l.lower));
988     tcg_gen_st_i64(t64, dest, offsetof(FPReg, l.lower));
989     tcg_temp_free_i64(t64);
990 }
991 
/*
 * Load a value of size OPSIZE from memory at ADDR and convert it into
 * the FPReg pointed to by FP via the exts32/extf32/extf64 helpers.
 * INDEX is the memory-access mmu index.
 */
static void gen_load_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
                        int index)
{
    TCGv tmp;
    TCGv_i64 t64;

    t64 = tcg_temp_new_i64();
    tmp = tcg_temp_new();
    switch (opsize) {
    case OS_BYTE:
        tcg_gen_qemu_ld8s(tmp, addr, index);
        gen_helper_exts32(cpu_env, fp, tmp);
        break;
    case OS_WORD:
        tcg_gen_qemu_ld16s(tmp, addr, index);
        gen_helper_exts32(cpu_env, fp, tmp);
        break;
    case OS_LONG:
        tcg_gen_qemu_ld32u(tmp, addr, index);
        gen_helper_exts32(cpu_env, fp, tmp);
        break;
    case OS_SINGLE:
        tcg_gen_qemu_ld32u(tmp, addr, index);
        gen_helper_extf32(cpu_env, fp, tmp);
        break;
    case OS_DOUBLE:
        tcg_gen_qemu_ld64(t64, addr, index);
        gen_helper_extf64(cpu_env, fp, t64);
        break;
    case OS_EXTENDED:
        /* ColdFire FPUs do not implement the extended format.  */
        if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
            gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
            break;
        }
        /* Upper word (sign/exponent) is the high half of the first long.  */
        tcg_gen_qemu_ld32u(tmp, addr, index);
        tcg_gen_shri_i32(tmp, tmp, 16);
        tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
        /* The 64-bit lower part follows at offset 4.  */
        tcg_gen_addi_i32(tmp, addr, 4);
        tcg_gen_qemu_ld64(t64, tmp, index);
        tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
        break;
    case OS_PACKED:
        /*
         * unimplemented data type on 68040/ColdFire
         * FIXME if needed for another FPU
         */
        gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
        break;
    default:
        g_assert_not_reached();
    }
    tcg_temp_free(tmp);
    tcg_temp_free_i64(t64);
}
1046 
/*
 * Convert the FPReg pointed to by FP to a value of size OPSIZE (via the
 * reds32/redf32/redf64 helpers) and store it to memory at ADDR.
 * INDEX is the memory-access mmu index.
 */
static void gen_store_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
                         int index)
{
    TCGv tmp;
    TCGv_i64 t64;

    t64 = tcg_temp_new_i64();
    tmp = tcg_temp_new();
    switch (opsize) {
    case OS_BYTE:
        gen_helper_reds32(tmp, cpu_env, fp);
        tcg_gen_qemu_st8(tmp, addr, index);
        break;
    case OS_WORD:
        gen_helper_reds32(tmp, cpu_env, fp);
        tcg_gen_qemu_st16(tmp, addr, index);
        break;
    case OS_LONG:
        gen_helper_reds32(tmp, cpu_env, fp);
        tcg_gen_qemu_st32(tmp, addr, index);
        break;
    case OS_SINGLE:
        gen_helper_redf32(tmp, cpu_env, fp);
        tcg_gen_qemu_st32(tmp, addr, index);
        break;
    case OS_DOUBLE:
        gen_helper_redf64(t64, cpu_env, fp);
        tcg_gen_qemu_st64(t64, addr, index);
        break;
    case OS_EXTENDED:
        /* ColdFire FPUs do not implement the extended format.  */
        if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
            gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
            break;
        }
        /* Upper word goes into the high half of the first long.  */
        tcg_gen_ld16u_i32(tmp, fp, offsetof(FPReg, l.upper));
        tcg_gen_shli_i32(tmp, tmp, 16);
        tcg_gen_qemu_st32(tmp, addr, index);
        /* The 64-bit lower part follows at offset 4.  */
        tcg_gen_addi_i32(tmp, addr, 4);
        tcg_gen_ld_i64(t64, fp, offsetof(FPReg, l.lower));
        tcg_gen_qemu_st64(t64, tmp, index);
        break;
    case OS_PACKED:
        /*
         * unimplemented data type on 68040/ColdFire
         * FIXME if needed for another FPU
         */
        gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
        break;
    default:
        g_assert_not_reached();
    }
    tcg_temp_free(tmp);
    tcg_temp_free_i64(t64);
}
1101 
1102 static void gen_ldst_fp(DisasContext *s, int opsize, TCGv addr,
1103                         TCGv_ptr fp, ea_what what, int index)
1104 {
1105     if (what == EA_STORE) {
1106         gen_store_fp(s, opsize, addr, fp, index);
1107     } else {
1108         gen_load_fp(s, opsize, addr, fp, index);
1109     }
1110 }
1111 
/*
 * Evaluate the effective address MODE/REG0 for a floating point operand
 * and either load it into or store it from the FPReg pointed to by FP.
 * Returns 0 on success, -1 for an invalid addressing mode.
 */
static int gen_ea_mode_fp(CPUM68KState *env, DisasContext *s, int mode,
                          int reg0, int opsize, TCGv_ptr fp, ea_what what,
                          int index)
{
    TCGv reg, addr, tmp;
    TCGv_i64 t64;

    switch (mode) {
    case 0: /* Data register direct.  */
        reg = cpu_dregs[reg0];
        if (what == EA_STORE) {
            /* Reduce the FP value to the integer/single in the register.  */
            switch (opsize) {
            case OS_BYTE:
            case OS_WORD:
            case OS_LONG:
                gen_helper_reds32(reg, cpu_env, fp);
                break;
            case OS_SINGLE:
                gen_helper_redf32(reg, cpu_env, fp);
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            /* Sign-extend the register value, then convert into FP.  */
            tmp = tcg_temp_new();
            switch (opsize) {
            case OS_BYTE:
                tcg_gen_ext8s_i32(tmp, reg);
                gen_helper_exts32(cpu_env, fp, tmp);
                break;
            case OS_WORD:
                tcg_gen_ext16s_i32(tmp, reg);
                gen_helper_exts32(cpu_env, fp, tmp);
                break;
            case OS_LONG:
                gen_helper_exts32(cpu_env, fp, reg);
                break;
            case OS_SINGLE:
                gen_helper_extf32(cpu_env, fp, reg);
                break;
            default:
                g_assert_not_reached();
            }
            tcg_temp_free(tmp);
        }
        return 0;
    case 1: /* Address register direct.  */
        return -1;
    case 2: /* Indirect register */
        addr = get_areg(s, reg0);
        gen_ldst_fp(s, opsize, addr, fp, what, index);
        return 0;
    case 3: /* Indirect postincrement.  */
        addr = cpu_aregs[reg0];
        gen_ldst_fp(s, opsize, addr, fp, what, index);
        /* Bump the address register past the operand.  */
        tcg_gen_addi_i32(addr, addr, opsize_bytes(opsize));
        return 0;
    case 4: /* Indirect predecrememnt.  */
        addr = gen_lea_mode(env, s, mode, reg0, opsize);
        if (IS_NULL_QREG(addr)) {
            return -1;
        }
        gen_ldst_fp(s, opsize, addr, fp, what, index);
        /* Commit the decremented address back to the register.  */
        tcg_gen_mov_i32(cpu_aregs[reg0], addr);
        return 0;
    case 5: /* Indirect displacement.  */
    case 6: /* Indirect index + displacement.  */
    do_indirect:
        addr = gen_lea_mode(env, s, mode, reg0, opsize);
        if (IS_NULL_QREG(addr)) {
            return -1;
        }
        gen_ldst_fp(s, opsize, addr, fp, what, index);
        return 0;
    case 7: /* Other */
        switch (reg0) {
        case 0: /* Absolute short.  */
        case 1: /* Absolute long.  */
        case 2: /* pc displacement  */
        case 3: /* pc index+displacement.  */
            goto do_indirect;
        case 4: /* Immediate.  */
            /* Immediates cannot be a store destination.  */
            if (what == EA_STORE) {
                return -1;
            }
            switch (opsize) {
            case OS_BYTE:
                tmp = tcg_const_i32((int8_t)read_im8(env, s));
                gen_helper_exts32(cpu_env, fp, tmp);
                tcg_temp_free(tmp);
                break;
            case OS_WORD:
                tmp = tcg_const_i32((int16_t)read_im16(env, s));
                gen_helper_exts32(cpu_env, fp, tmp);
                tcg_temp_free(tmp);
                break;
            case OS_LONG:
                tmp = tcg_const_i32(read_im32(env, s));
                gen_helper_exts32(cpu_env, fp, tmp);
                tcg_temp_free(tmp);
                break;
            case OS_SINGLE:
                tmp = tcg_const_i32(read_im32(env, s));
                gen_helper_extf32(cpu_env, fp, tmp);
                tcg_temp_free(tmp);
                break;
            case OS_DOUBLE:
                t64 = tcg_const_i64(read_im64(env, s));
                gen_helper_extf64(cpu_env, fp, t64);
                tcg_temp_free_i64(t64);
                break;
            case OS_EXTENDED:
                /* ColdFire FPUs do not implement the extended format.  */
                if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
                    gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
                    break;
                }
                /* First long holds the sign/exponent word in its high half.  */
                tmp = tcg_const_i32(read_im32(env, s) >> 16);
                tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
                tcg_temp_free(tmp);
                /* Followed by the 64-bit lower part.  */
                t64 = tcg_const_i64(read_im64(env, s));
                tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
                tcg_temp_free_i64(t64);
                break;
            case OS_PACKED:
                /*
                 * unimplemented data type on 68040/ColdFire
                 * FIXME if needed for another FPU
                 */
                gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
                break;
            default:
                g_assert_not_reached();
            }
            return 0;
        default:
            return -1;
        }
    }
    return -1;
}
1252 
1253 static int gen_ea_fp(CPUM68KState *env, DisasContext *s, uint16_t insn,
1254                        int opsize, TCGv_ptr fp, ea_what what, int index)
1255 {
1256     int mode = extract32(insn, 3, 3);
1257     int reg0 = REG(insn, 0);
1258     return gen_ea_mode_fp(env, s, mode, reg0, opsize, fp, what, index);
1259 }
1260 
/* A comparison (v1 tcond v2) prepared by gen_cc_cond.  */
typedef struct {
    TCGCond tcond;  /* condition to apply to v1, v2 */
    bool g1;        /* v1 is a global; free_cond must not free it */
    bool g2;        /* likewise for v2 */
    TCGv v1;
    TCGv v2;
} DisasCompare;
1268 
/*
 * Fill in C with a comparison equivalent to the m68k condition COND
 * (0..15), exploiting the current lazy cc_op state where possible so
 * the flags need not be fully materialised.
 */
static void gen_cc_cond(DisasCompare *c, DisasContext *s, int cond)
{
    TCGv tmp, tmp2;
    TCGCond tcond;
    CCOp op = s->cc_op;

    /* The CC_OP_CMP form can handle most normal comparisons directly.  */
    if (op == CC_OP_CMPB || op == CC_OP_CMPW || op == CC_OP_CMPL) {
        /* After a compare, N holds the first operand and V the second.  */
        c->g1 = c->g2 = 1;
        c->v1 = QREG_CC_N;
        c->v2 = QREG_CC_V;
        switch (cond) {
        case 2: /* HI */
        case 3: /* LS */
            tcond = TCG_COND_LEU;
            goto done;
        case 4: /* CC */
        case 5: /* CS */
            tcond = TCG_COND_LTU;
            goto done;
        case 6: /* NE */
        case 7: /* EQ */
            tcond = TCG_COND_EQ;
            goto done;
        case 10: /* PL */
        case 11: /* MI */
            /* Sign of the (size-extended) difference vs zero.  */
            c->g1 = c->g2 = 0;
            c->v2 = tcg_const_i32(0);
            c->v1 = tmp = tcg_temp_new();
            tcg_gen_sub_i32(tmp, QREG_CC_N, QREG_CC_V);
            gen_ext(tmp, tmp, op - CC_OP_CMPB, 1);
            /* fallthru */
        case 12: /* GE */
        case 13: /* LT */
            tcond = TCG_COND_LT;
            goto done;
        case 14: /* GT */
        case 15: /* LE */
            tcond = TCG_COND_LE;
            goto done;
        }
    }

    /* From here on v2 defaults to the constant zero.  */
    c->g1 = 1;
    c->g2 = 0;
    c->v2 = tcg_const_i32(0);

    switch (cond) {
    case 0: /* T */
    case 1: /* F */
        c->v1 = c->v2;
        tcond = TCG_COND_NEVER;
        goto done;
    case 14: /* GT (!(Z || (N ^ V))) */
    case 15: /* LE (Z || (N ^ V)) */
        /*
         * Logic operations clear V, which simplifies LE to (Z || N),
         * and since Z and N are co-located, this becomes a normal
         * comparison vs N.
         */
        if (op == CC_OP_LOGIC) {
            c->v1 = QREG_CC_N;
            tcond = TCG_COND_LE;
            goto done;
        }
        break;
    case 12: /* GE (!(N ^ V)) */
    case 13: /* LT (N ^ V) */
        /* Logic operations clear V, which simplifies this to N.  */
        if (op != CC_OP_LOGIC) {
            break;
        }
        /* fallthru */
    case 10: /* PL (!N) */
    case 11: /* MI (N) */
        /* Several cases represent N normally.  */
        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
            op == CC_OP_LOGIC) {
            c->v1 = QREG_CC_N;
            tcond = TCG_COND_LT;
            goto done;
        }
        break;
    case 6: /* NE (!Z) */
    case 7: /* EQ (Z) */
        /* Some cases fold Z into N.  */
        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
            op == CC_OP_LOGIC) {
            tcond = TCG_COND_EQ;
            c->v1 = QREG_CC_N;
            goto done;
        }
        break;
    case 4: /* CC (!C) */
    case 5: /* CS (C) */
        /* Some cases fold C into X.  */
        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL) {
            tcond = TCG_COND_NE;
            c->v1 = QREG_CC_X;
            goto done;
        }
        /* fallthru */
    case 8: /* VC (!V) */
    case 9: /* VS (V) */
        /* Logic operations clear V and C.  */
        if (op == CC_OP_LOGIC) {
            tcond = TCG_COND_NEVER;
            c->v1 = c->v2;
            goto done;
        }
        break;
    }

    /* Otherwise, flush flag state to CC_OP_FLAGS.  */
    gen_flush_flags(s);

    switch (cond) {
    case 0: /* T */
    case 1: /* F */
    default:
        /* Invalid, or handled above.  */
        abort();
    case 2: /* HI (!C && !Z) -> !(C || Z)*/
    case 3: /* LS (C || Z) */
        c->v1 = tmp = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
        tcg_gen_or_i32(tmp, tmp, QREG_CC_C);
        tcond = TCG_COND_NE;
        break;
    case 4: /* CC (!C) */
    case 5: /* CS (C) */
        c->v1 = QREG_CC_C;
        tcond = TCG_COND_NE;
        break;
    case 6: /* NE (!Z) */
    case 7: /* EQ (Z) */
        c->v1 = QREG_CC_Z;
        tcond = TCG_COND_EQ;
        break;
    case 8: /* VC (!V) */
    case 9: /* VS (V) */
        c->v1 = QREG_CC_V;
        tcond = TCG_COND_LT;
        break;
    case 10: /* PL (!N) */
    case 11: /* MI (N) */
        c->v1 = QREG_CC_N;
        tcond = TCG_COND_LT;
        break;
    case 12: /* GE (!(N ^ V)) */
    case 13: /* LT (N ^ V) */
        c->v1 = tmp = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_xor_i32(tmp, QREG_CC_N, QREG_CC_V);
        tcond = TCG_COND_LT;
        break;
    case 14: /* GT (!(Z || (N ^ V))) */
    case 15: /* LE (Z || (N ^ V)) */
        c->v1 = tmp = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
        tcg_gen_neg_i32(tmp, tmp);
        tmp2 = tcg_temp_new();
        tcg_gen_xor_i32(tmp2, QREG_CC_N, QREG_CC_V);
        tcg_gen_or_i32(tmp, tmp, tmp2);
        tcg_temp_free(tmp2);
        tcond = TCG_COND_LT;
        break;
    }

 done:
    /* The even member of each condition pair is the odd one negated.  */
    if ((cond & 1) == 0) {
        tcond = tcg_invert_cond(tcond);
    }
    c->tcond = tcond;
}
1449 
1450 static void free_cond(DisasCompare *c)
1451 {
1452     if (!c->g1) {
1453         tcg_temp_free(c->v1);
1454     }
1455     if (!c->g2) {
1456         tcg_temp_free(c->v2);
1457     }
1458 }
1459 
1460 static void gen_jmpcc(DisasContext *s, int cond, TCGLabel *l1)
1461 {
1462   DisasCompare c;
1463 
1464   gen_cc_cond(&c, s, cond);
1465   update_cc_op(s);
1466   tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
1467   free_cond(&c);
1468 }
1469 
/* Force a TB lookup after an instruction that changes the CPU state.  */
static void gen_exit_tb(DisasContext *s)
{
    /* Commit any lazily-tracked condition-code state.  */
    update_cc_op(s);
    tcg_gen_movi_i32(QREG_PC, s->pc);
    s->base.is_jmp = DISAS_EXIT;
}
1477 
/*
 * Evaluate the EA in `insn` as a load of `opsize` into `result`,
 * sign- or zero-extending per `op_sign`; on an invalid addressing
 * mode, raise an address fault and return from the enclosing function.
 */
#define SRC_EA(env, result, opsize, op_sign, addrp) do {                \
        result = gen_ea(env, s, insn, opsize, NULL_QREG, addrp,         \
                        op_sign ? EA_LOADS : EA_LOADU, IS_USER(s));     \
        if (IS_NULL_QREG(result)) {                                     \
            gen_addr_fault(s);                                          \
            return;                                                     \
        }                                                               \
    } while (0)
1486 
/*
 * Store `val` of size `opsize` to the EA in `insn`; on an invalid
 * addressing mode, raise an address fault and return from the
 * enclosing function.
 */
#define DEST_EA(env, insn, opsize, val, addrp) do {                     \
        TCGv ea_result = gen_ea(env, s, insn, opsize, val, addrp,       \
                                EA_STORE, IS_USER(s));                  \
        if (IS_NULL_QREG(ea_result)) {                                  \
            gen_addr_fault(s);                                          \
        }                                                               \
    } while (0)
1495 
/* Generate a jump to an immediate address.  */
static void gen_jmp_tb(DisasContext *s, int n, uint32_t dest)
{
    if (unlikely(s->ss_active)) {
        /* Single-stepping: update PC and raise a trace exception.  */
        update_cc_op(s);
        tcg_gen_movi_i32(QREG_PC, dest);
        gen_raise_exception(EXCP_TRACE);
    } else if (translator_use_goto_tb(&s->base, dest)) {
        /* Chain directly to the destination TB.  */
        tcg_gen_goto_tb(n);
        tcg_gen_movi_i32(QREG_PC, dest);
        tcg_gen_exit_tb(s->base.tb, n);
    } else {
        /* Otherwise exit and let the main loop find the target.  */
        gen_jmp_im(s, dest);
        tcg_gen_exit_tb(NULL, 0);
    }
    s->base.is_jmp = DISAS_NORETURN;
}
1513 
/* Scc: set the destination byte to 0xff if cc holds, else 0x00.  */
DISAS_INSN(scc)
{
    DisasCompare c;
    int cond;
    TCGv tmp;

    cond = (insn >> 8) & 0xf;
    gen_cc_cond(&c, s, cond);

    tmp = tcg_temp_new();
    tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
    free_cond(&c);

    /* Turn the 0/1 setcond result into 0x00/0xff.  */
    tcg_gen_neg_i32(tmp, tmp);
    DEST_EA(env, insn, OS_BYTE, tmp, NULL);
    tcg_temp_free(tmp);
}
1531 
/*
 * DBcc: if cc is false, decrement Dn.w and branch back by the 16-bit
 * displacement unless the counter has wrapped to -1.
 */
DISAS_INSN(dbcc)
{
    TCGLabel *l1;
    TCGv reg;
    TCGv tmp;
    int16_t offset;
    uint32_t base;

    reg = DREG(insn, 0);
    /* Displacement is relative to the word following the opcode.  */
    base = s->pc;
    offset = (int16_t)read_im16(env, s);
    l1 = gen_new_label();
    /* If the condition holds, fall straight through to the next insn.  */
    gen_jmpcc(s, (insn >> 8) & 0xf, l1);

    tmp = tcg_temp_new();
    tcg_gen_ext16s_i32(tmp, reg);
    tcg_gen_addi_i32(tmp, tmp, -1);
    gen_partset_reg(OS_WORD, reg, tmp);
    /* Loop terminates when the counter reaches -1.  */
    tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, -1, l1);
    gen_jmp_tb(s, 1, base + offset);
    gen_set_label(l1);
    gen_jmp_tb(s, 0, s->pc);
}
1555 
/* Undefined MAC opcode: raise a line-A exception.  */
DISAS_INSN(undef_mac)
{
    gen_exception(s, s->base.pc_next, EXCP_LINEA);
}
1560 
/* Undefined FPU opcode: raise a line-F exception.  */
DISAS_INSN(undef_fpu)
{
    gen_exception(s, s->base.pc_next, EXCP_LINEF);
}
1565 
/* Raise an illegal-instruction exception and log the opcode.  */
DISAS_INSN(undef)
{
    /*
     * ??? This is both instructions that are as yet unimplemented
     * for the 680x0 series, as well as those that are implemented
     * but actually illegal for CPU32 or pre-68020.
     */
    qemu_log_mask(LOG_UNIMP, "Illegal instruction: %04x @ %08x\n",
                  insn, s->base.pc_next);
    gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
}
1577 
1578 DISAS_INSN(mulw)
1579 {
1580     TCGv reg;
1581     TCGv tmp;
1582     TCGv src;
1583     int sign;
1584 
1585     sign = (insn & 0x100) != 0;
1586     reg = DREG(insn, 9);
1587     tmp = tcg_temp_new();
1588     if (sign)
1589         tcg_gen_ext16s_i32(tmp, reg);
1590     else
1591         tcg_gen_ext16u_i32(tmp, reg);
1592     SRC_EA(env, src, OS_WORD, sign, NULL);
1593     tcg_gen_mul_i32(tmp, tmp, src);
1594     tcg_gen_mov_i32(reg, tmp);
1595     gen_logic_cc(s, tmp, OS_LONG);
1596     tcg_temp_free(tmp);
1597 }
1598 
/* Word divide: the helper performs the division and sets the flags.  */
DISAS_INSN(divw)
{
    int sign;
    TCGv src;
    TCGv destr;

    /* divX.w <EA>,Dn    32/16 -> 16r:16q */

    sign = (insn & 0x100) != 0;

    /* dest.l / src.w */

    SRC_EA(env, src, OS_WORD, sign, NULL);
    destr = tcg_const_i32(REG(insn, 9));
    if (sign) {
        gen_helper_divsw(cpu_env, destr, src);
    } else {
        gen_helper_divuw(cpu_env, destr, src);
    }
    tcg_temp_free(destr);

    /* Flags were left in canonical form by the helper.  */
    set_cc_op(s, CC_OP_FLAGS);
}
1622 
/*
 * Long divide (DIVS.L/DIVU.L/DIVSL.L/DIVUL.L): the extension word
 * selects signed/unsigned and 64/32 vs 32/32 forms; helpers do the
 * division and set the flags.
 */
DISAS_INSN(divl)
{
    TCGv num, reg, den;
    int sign;
    uint16_t ext;

    ext = read_im16(env, s);

    sign = (ext & 0x0800) != 0;

    if (ext & 0x400) {
        /* 64-bit dividend form requires the QUAD_MULDIV feature.  */
        if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
            return;
        }

        /* divX.l <EA>, Dr:Dq    64/32 -> 32r:32q */

        SRC_EA(env, den, OS_LONG, 0, NULL);
        num = tcg_const_i32(REG(ext, 12));
        reg = tcg_const_i32(REG(ext, 0));
        if (sign) {
            gen_helper_divsll(cpu_env, num, reg, den);
        } else {
            gen_helper_divull(cpu_env, num, reg, den);
        }
        tcg_temp_free(reg);
        tcg_temp_free(num);
        set_cc_op(s, CC_OP_FLAGS);
        return;
    }

    /* divX.l <EA>, Dq        32/32 -> 32q     */
    /* divXl.l <EA>, Dr:Dq    32/32 -> 32r:32q */

    SRC_EA(env, den, OS_LONG, 0, NULL);
    num = tcg_const_i32(REG(ext, 12));
    reg = tcg_const_i32(REG(ext, 0));
    if (sign) {
        gen_helper_divsl(cpu_env, num, reg, den);
    } else {
        gen_helper_divul(cpu_env, num, reg, den);
    }
    tcg_temp_free(reg);
    tcg_temp_free(num);

    set_cc_op(s, CC_OP_FLAGS);
}
1671 
/*
 * Packed BCD addition: dest = dest10 + src10 + X, two decimal digits
 * per operand byte, computed branchlessly with the +0x66 carry trick.
 */
static void bcd_add(TCGv dest, TCGv src)
{
    TCGv t0, t1;

    /*
     * dest10 = dest10 + src10 + X
     *
     *        t1 = src
     *        t2 = t1 + 0x066
     *        t3 = t2 + dest + X
     *        t4 = t2 ^ dest
     *        t5 = t3 ^ t4
     *        t6 = ~t5 & 0x110
     *        t7 = (t6 >> 2) | (t6 >> 3)
     *        return t3 - t7
     */

    /*
     * t1 = (src + 0x066) + dest + X
     *    = result with some possible exceeding 0x6
     */

    t0 = tcg_const_i32(0x066);
    tcg_gen_add_i32(t0, t0, src);

    t1 = tcg_temp_new();
    tcg_gen_add_i32(t1, t0, dest);
    tcg_gen_add_i32(t1, t1, QREG_CC_X);

    /* we will remove exceeding 0x6 where there is no carry */

    /*
     * t0 = (src + 0x0066) ^ dest
     *    = t1 without carries
     */

    tcg_gen_xor_i32(t0, t0, dest);

    /*
     * extract the carries
     * t0 = t0 ^ t1
     *    = only the carries
     */

    tcg_gen_xor_i32(t0, t0, t1);

    /*
     * generate 0x1 where there is no carry
     * and for each 0x10, generate a 0x6
     */

    tcg_gen_shri_i32(t0, t0, 3);
    tcg_gen_not_i32(t0, t0);
    tcg_gen_andi_i32(t0, t0, 0x22);
    /* dest = t0 * 3, i.e. 0x22 expanded to a 0x66 correction */
    tcg_gen_add_i32(dest, t0, t0);
    tcg_gen_add_i32(dest, dest, t0);
    tcg_temp_free(t0);

    /*
     * remove the exceeding 0x6
     * for digits that have not generated a carry
     */

    tcg_gen_sub_i32(dest, t1, dest);
    tcg_temp_free(t1);
}
1738 
/*
 * Packed BCD subtraction: dest = dest10 - src10 - X, expressed as a
 * BCD addition of the ten's complement (see the identity below).
 */
static void bcd_sub(TCGv dest, TCGv src)
{
    TCGv t0, t1, t2;

    /*
     *  dest10 = dest10 - src10 - X
     *         = bcd_add(dest + 1 - X, 0x199 - src)
     */

    /* t0 = 0x066 + (0x199 - src) */

    t0 = tcg_temp_new();
    tcg_gen_subfi_i32(t0, 0x1ff, src);

    /* t1 = t0 + dest + 1 - X*/

    t1 = tcg_temp_new();
    tcg_gen_add_i32(t1, t0, dest);
    tcg_gen_addi_i32(t1, t1, 1);
    tcg_gen_sub_i32(t1, t1, QREG_CC_X);

    /* t2 = t0 ^ dest */

    t2 = tcg_temp_new();
    tcg_gen_xor_i32(t2, t0, dest);

    /* t0 = t1 ^ t2 */

    tcg_gen_xor_i32(t0, t1, t2);

    /*
     * t2 = ~t0 & 0x110
     * t0 = (t2 >> 2) | (t2 >> 3)
     *
     * to fit on 8bit operands, changed in:
     *
     * t2 = ~(t0 >> 3) & 0x22
     * t0 = t2 + t2
     * t0 = t0 + t2
     */

    tcg_gen_shri_i32(t2, t0, 3);
    tcg_gen_not_i32(t2, t2);
    tcg_gen_andi_i32(t2, t2, 0x22);
    tcg_gen_add_i32(t0, t2, t2);
    tcg_gen_add_i32(t0, t0, t2);
    tcg_temp_free(t2);

    /* return t1 - t0 */

    tcg_gen_sub_i32(dest, t1, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
1793 
/*
 * Update the flags after a BCD operation on VAL: Z is only ever
 * cleared (sticky !Z), C/X come from bit 8 (the decimal carry).
 */
static void bcd_flags(TCGv val)
{
    /* Accumulate non-zero-ness of the result byte into Z.  */
    tcg_gen_andi_i32(QREG_CC_C, val, 0x0ff);
    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_C);

    /* C = bit 8 of the 9-bit result.  */
    tcg_gen_extract_i32(QREG_CC_C, val, 8, 1);

    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
}
1803 
/* ABCD Dx,Dy: BCD add of the low bytes of two data registers.  */
DISAS_INSN(abcd_reg)
{
    TCGv src;
    TCGv dest;

    gen_flush_flags(s); /* !Z is sticky */

    src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
    dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
    bcd_add(dest, src);
    /* Only the low byte of the destination register is written.  */
    gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);

    bcd_flags(dest);
}
1818 
/* ABCD -(Ax),-(Ay): BCD add with predecrement memory operands.  */
DISAS_INSN(abcd_mem)
{
    TCGv src, dest, addr;

    gen_flush_flags(s); /* !Z is sticky */

    /* Indirect pre-decrement load (mode 4) */

    src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
                      NULL_QREG, NULL, EA_LOADU, IS_USER(s));
    dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
                       NULL_QREG, &addr, EA_LOADU, IS_USER(s));

    bcd_add(dest, src);

    /* Store back to the address computed for the destination load.  */
    gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
                EA_STORE, IS_USER(s));

    bcd_flags(dest);
}
1839 
/* SBCD Dx,Dy: BCD subtract of the low bytes of two data registers.  */
DISAS_INSN(sbcd_reg)
{
    TCGv src, dest;

    gen_flush_flags(s); /* !Z is sticky */

    src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
    dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);

    bcd_sub(dest, src);

    /* Only the low byte of the destination register is written.  */
    gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);

    bcd_flags(dest);
}
1855 
/* SBCD -(Ax),-(Ay): BCD subtract with predecrement memory operands.  */
DISAS_INSN(sbcd_mem)
{
    TCGv src, dest, addr;

    gen_flush_flags(s); /* !Z is sticky */

    /* Indirect pre-decrement load (mode 4) */

    src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
                      NULL_QREG, NULL, EA_LOADU, IS_USER(s));
    dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
                       NULL_QREG, &addr, EA_LOADU, IS_USER(s));

    bcd_sub(dest, src);

    /* Store back to the address computed for the destination load.  */
    gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
                EA_STORE, IS_USER(s));

    bcd_flags(dest);
}
1876 
/* NBCD <ea>: BCD negate, computed as 0 - operand - X.  */
DISAS_INSN(nbcd)
{
    TCGv src, dest;
    TCGv addr;

    gen_flush_flags(s); /* !Z is sticky */

    SRC_EA(env, src, OS_BYTE, 0, &addr);

    dest = tcg_const_i32(0);
    bcd_sub(dest, src);

    DEST_EA(env, insn, OS_BYTE, dest, &addr);

    bcd_flags(dest);

    tcg_temp_free(dest);
}
1895 
/*
 * ADD/SUB between a data register and an EA operand.  Bit 14 of the
 * opcode selects add vs sub, bit 8 selects the EA as the destination.
 */
DISAS_INSN(addsub)
{
    TCGv reg;
    TCGv dest;
    TCGv src;
    TCGv tmp;
    TCGv addr;
    int add;
    int opsize;

    add = (insn & 0x4000) != 0;
    opsize = insn_opsize(insn);
    reg = gen_extend(s, DREG(insn, 9), opsize, 1);
    dest = tcg_temp_new();
    if (insn & 0x100) {
        /* <ea> is the destination: <ea> = <ea> op Dn.  */
        SRC_EA(env, tmp, opsize, 1, &addr);
        src = reg;
    } else {
        /* Dn is the destination: Dn = Dn op <ea>.  */
        tmp = reg;
        SRC_EA(env, src, opsize, 1, NULL);
    }
    if (add) {
        tcg_gen_add_i32(dest, tmp, src);
        /* X = carry out of the addition.  */
        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, src);
        set_cc_op(s, CC_OP_ADDB + opsize);
    } else {
        /* X = borrow out of the subtraction.  */
        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, tmp, src);
        tcg_gen_sub_i32(dest, tmp, src);
        set_cc_op(s, CC_OP_SUBB + opsize);
    }
    gen_update_cc_add(dest, src, opsize);
    if (insn & 0x100) {
        DEST_EA(env, insn, opsize, dest, &addr);
    } else {
        gen_partset_reg(opsize, DREG(insn, 9), dest);
    }
    tcg_temp_free(dest);
}
1934 
/* Reverse the order of the bits in REG.  */
DISAS_INSN(bitrev)
{
    TCGv reg;
    reg = DREG(insn, 0);
    /* The work is done entirely by the bitrev helper.  */
    gen_helper_bitrev(reg, reg);
}
1942 
1943 DISAS_INSN(bitop_reg)
1944 {
1945     int opsize;
1946     int op;
1947     TCGv src1;
1948     TCGv src2;
1949     TCGv tmp;
1950     TCGv addr;
1951     TCGv dest;
1952 
1953     if ((insn & 0x38) != 0)
1954         opsize = OS_BYTE;
1955     else
1956         opsize = OS_LONG;
1957     op = (insn >> 6) & 3;
1958     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
1959 
1960     gen_flush_flags(s);
1961     src2 = tcg_temp_new();
1962     if (opsize == OS_BYTE)
1963         tcg_gen_andi_i32(src2, DREG(insn, 9), 7);
1964     else
1965         tcg_gen_andi_i32(src2, DREG(insn, 9), 31);
1966 
1967     tmp = tcg_const_i32(1);
1968     tcg_gen_shl_i32(tmp, tmp, src2);
1969     tcg_temp_free(src2);
1970 
1971     tcg_gen_and_i32(QREG_CC_Z, src1, tmp);
1972 
1973     dest = tcg_temp_new();
1974     switch (op) {
1975     case 1: /* bchg */
1976         tcg_gen_xor_i32(dest, src1, tmp);
1977         break;
1978     case 2: /* bclr */
1979         tcg_gen_andc_i32(dest, src1, tmp);
1980         break;
1981     case 3: /* bset */
1982         tcg_gen_or_i32(dest, src1, tmp);
1983         break;
1984     default: /* btst */
1985         break;
1986     }
1987     tcg_temp_free(tmp);
1988     if (op) {
1989         DEST_EA(env, insn, opsize, dest, &addr);
1990     }
1991     tcg_temp_free(dest);
1992 }
1993 
/* SATS Dx: saturate the register according to the overflow flag,
 * performed by the sats helper.  */
DISAS_INSN(sats)
{
    TCGv reg;
    reg = DREG(insn, 0);
    gen_flush_flags(s);
    gen_helper_sats(reg, reg, QREG_CC_V);
    gen_logic_cc(s, reg, OS_LONG);
}
2002 
/*
 * Push VAL (a long) onto the stack.  SP is only updated after the
 * store has been emitted.
 */
static void gen_push(DisasContext *s, TCGv val)
{
    TCGv tmp;

    tmp = tcg_temp_new();
    tcg_gen_subi_i32(tmp, QREG_SP, 4);
    gen_store(s, OS_LONG, tmp, val, IS_USER(s));
    tcg_gen_mov_i32(QREG_SP, tmp);
    tcg_temp_free(tmp);
}
2013 
2014 static TCGv mreg(int reg)
2015 {
2016     if (reg < 8) {
2017         /* Dx */
2018         return cpu_dregs[reg];
2019     }
2020     /* Ax */
2021     return cpu_aregs[reg & 7];
2022 }
2023 
/*
 * MOVEM: move multiple registers to or from memory.  The mask word
 * following the opcode selects D0-D7 (bits 0-7) and A0-A7 (bits 8-15);
 * insn bit 10 selects memory-to-register, bit 6 selects long vs word.
 * Only a subset of EA modes is legal, and which ones depends on the
 * transfer direction.
 */
DISAS_INSN(movem)
{
    TCGv addr, incr, tmp, r[16];
    int is_load = (insn & 0x0400) != 0;
    int opsize = (insn & 0x40) != 0 ? OS_LONG : OS_WORD;
    uint16_t mask = read_im16(env, s);
    int mode = extract32(insn, 3, 3);
    int reg0 = REG(insn, 0);
    int i;

    tmp = cpu_aregs[reg0];

    switch (mode) {
    case 0: /* data register direct */
    case 1: /* addr register direct */
    do_addr_fault:
        gen_addr_fault(s);
        return;

    case 2: /* indirect */
        break;

    case 3: /* indirect post-increment */
        if (!is_load) {
            /* post-increment is not allowed */
            goto do_addr_fault;
        }
        break;

    case 4: /* indirect pre-decrement */
        if (is_load) {
            /* pre-decrement is not allowed */
            goto do_addr_fault;
        }
        /*
         * We want a bare copy of the address reg, without any pre-decrement
         * adjustment, as gen_lea would provide.
         */
        break;

    default:
        tmp = gen_lea_mode(env, s, mode, reg0, opsize);
        if (IS_NULL_QREG(tmp)) {
            goto do_addr_fault;
        }
        break;
    }

    /* Work on a private copy of the address; writeback happens below. */
    addr = tcg_temp_new();
    tcg_gen_mov_i32(addr, tmp);
    incr = tcg_const_i32(opsize_bytes(opsize));

    if (is_load) {
        /*
         * memory to register: all loads are performed into temporaries
         * before any architectural register is written.
         */
        for (i = 0; i < 16; i++) {
            if (mask & (1 << i)) {
                r[i] = gen_load(s, opsize, addr, 1, IS_USER(s));
                tcg_gen_add_i32(addr, addr, incr);
            }
        }
        for (i = 0; i < 16; i++) {
            if (mask & (1 << i)) {
                tcg_gen_mov_i32(mreg(i), r[i]);
                tcg_temp_free(r[i]);
            }
        }
        if (mode == 3) {
            /* post-increment: movem (An)+,X */
            tcg_gen_mov_i32(cpu_aregs[reg0], addr);
        }
    } else {
        /* register to memory */
        if (mode == 4) {
            /* pre-decrement: movem X,-(An); registers stored A7 first */
            for (i = 15; i >= 0; i--) {
                if ((mask << i) & 0x8000) {
                    tcg_gen_sub_i32(addr, addr, incr);
                    if (reg0 + 8 == i &&
                        m68k_feature(s->env, M68K_FEATURE_EXT_FULL)) {
                        /*
                         * M68020+: if the addressing register is the
                         * register moved to memory, the value written
                         * is the initial value decremented by the size of
                         * the operation, regardless of how many actual
                         * stores have been performed until this point.
                         * M68000/M68010: the value is the initial value.
                         */
                        tmp = tcg_temp_new();
                        tcg_gen_sub_i32(tmp, cpu_aregs[reg0], incr);
                        gen_store(s, opsize, addr, tmp, IS_USER(s));
                        tcg_temp_free(tmp);
                    } else {
                        gen_store(s, opsize, addr, mreg(i), IS_USER(s));
                    }
                }
            }
            tcg_gen_mov_i32(cpu_aregs[reg0], addr);
        } else {
            for (i = 0; i < 16; i++) {
                if (mask & (1 << i)) {
                    gen_store(s, opsize, addr, mreg(i), IS_USER(s));
                    tcg_gen_add_i32(addr, addr, incr);
                }
            }
        }
    }

    tcg_temp_free(incr);
    tcg_temp_free(addr);
}
2134 
/*
 * MOVEP: transfer a data register to/from alternate bytes of memory,
 * addressed at 2-byte intervals starting at (An) + d16.  insn bit 6
 * selects long (4 bytes) vs word (2 bytes); bit 7 set means register
 * to memory.  Bytes are transferred most-significant first.
 */
DISAS_INSN(movep)
{
    uint8_t i;
    int16_t displ;
    TCGv reg;
    TCGv addr;
    TCGv abuf;
    TCGv dbuf;

    displ = read_im16(env, s);

    addr = AREG(insn, 0);
    reg = DREG(insn, 9);

    abuf = tcg_temp_new();
    tcg_gen_addi_i32(abuf, addr, displ);
    dbuf = tcg_temp_new();

    /* Number of bytes to transfer: 4 for long, 2 for word. */
    if (insn & 0x40) {
        i = 4;
    } else {
        i = 2;
    }

    if (insn & 0x80) {
        /* register to memory: store byte (i-1)*8..(i-1)*8+7 each pass */
        for ( ; i > 0 ; i--) {
            tcg_gen_shri_i32(dbuf, reg, (i - 1) * 8);
            tcg_gen_qemu_st8(dbuf, abuf, IS_USER(s));
            if (i > 1) {
                tcg_gen_addi_i32(abuf, abuf, 2);
            }
        }
    } else {
        /* memory to register: deposit each byte in place, high byte first */
        for ( ; i > 0 ; i--) {
            tcg_gen_qemu_ld8u(dbuf, abuf, IS_USER(s));
            tcg_gen_deposit_i32(reg, reg, dbuf, (i - 1) * 8, 8);
            if (i > 1) {
                tcg_gen_addi_i32(abuf, abuf, 2);
            }
        }
    }
    tcg_temp_free(abuf);
    tcg_temp_free(dbuf);
}
2179 
2180 DISAS_INSN(bitop_im)
2181 {
2182     int opsize;
2183     int op;
2184     TCGv src1;
2185     uint32_t mask;
2186     int bitnum;
2187     TCGv tmp;
2188     TCGv addr;
2189 
2190     if ((insn & 0x38) != 0)
2191         opsize = OS_BYTE;
2192     else
2193         opsize = OS_LONG;
2194     op = (insn >> 6) & 3;
2195 
2196     bitnum = read_im16(env, s);
2197     if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
2198         if (bitnum & 0xfe00) {
2199             disas_undef(env, s, insn);
2200             return;
2201         }
2202     } else {
2203         if (bitnum & 0xff00) {
2204             disas_undef(env, s, insn);
2205             return;
2206         }
2207     }
2208 
2209     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
2210 
2211     gen_flush_flags(s);
2212     if (opsize == OS_BYTE)
2213         bitnum &= 7;
2214     else
2215         bitnum &= 31;
2216     mask = 1 << bitnum;
2217 
2218    tcg_gen_andi_i32(QREG_CC_Z, src1, mask);
2219 
2220     if (op) {
2221         tmp = tcg_temp_new();
2222         switch (op) {
2223         case 1: /* bchg */
2224             tcg_gen_xori_i32(tmp, src1, mask);
2225             break;
2226         case 2: /* bclr */
2227             tcg_gen_andi_i32(tmp, src1, ~mask);
2228             break;
2229         case 3: /* bset */
2230             tcg_gen_ori_i32(tmp, src1, mask);
2231             break;
2232         default: /* btst */
2233             break;
2234         }
2235         DEST_EA(env, insn, opsize, tmp, &addr);
2236         tcg_temp_free(tmp);
2237     }
2238 }
2239 
/*
 * Return a new temporary holding the current CCR value, computed by
 * helper from the (synced) lazy condition-code state.  Caller frees.
 */
static TCGv gen_get_ccr(DisasContext *s)
{
    TCGv dest;

    update_cc_op(s);
    dest = tcg_temp_new();
    gen_helper_get_ccr(dest, cpu_env);
    return dest;
}
2249 
/*
 * Return a new temporary holding the full SR: the supervisor bits of
 * QREG_SR (masked to 0xffe0) merged with the computed CCR.  Caller frees.
 */
static TCGv gen_get_sr(DisasContext *s)
{
    TCGv ccr;
    TCGv sr;

    ccr = gen_get_ccr(s);
    sr = tcg_temp_new();
    tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
    tcg_gen_or_i32(sr, sr, ccr);
    tcg_temp_free(ccr);
    return sr;
}
2262 
/*
 * Set SR (or only CCR when ccr_only) from an immediate value.  For the
 * CCR-only case the five flag registers are written directly in their
 * internal representation: C and X as 0/1, N and V sign-replicated
 * (-1/0), and Z inverted (QREG_CC_Z is zero iff the Z flag is set).
 */
static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
{
    if (ccr_only) {
        tcg_gen_movi_i32(QREG_CC_C, val & CCF_C ? 1 : 0);
        tcg_gen_movi_i32(QREG_CC_V, val & CCF_V ? -1 : 0);
        tcg_gen_movi_i32(QREG_CC_Z, val & CCF_Z ? 0 : 1);
        tcg_gen_movi_i32(QREG_CC_N, val & CCF_N ? -1 : 0);
        tcg_gen_movi_i32(QREG_CC_X, val & CCF_X ? 1 : 0);
    } else {
        TCGv sr = tcg_const_i32(val);
        gen_helper_set_sr(cpu_env, sr);
        tcg_temp_free(sr);
    }
    set_cc_op(s, CC_OP_FLAGS);
}
2278 
/* Set SR (or only CCR when ccr_only) from a TCG value via helper. */
static void gen_set_sr(DisasContext *s, TCGv val, int ccr_only)
{
    if (ccr_only) {
        gen_helper_set_ccr(cpu_env, val);
    } else {
        gen_helper_set_sr(cpu_env, val);
    }
    set_cc_op(s, CC_OP_FLAGS);
}
2288 
/*
 * MOVE to SR/CCR: an EA field of 0x3c means immediate data, otherwise
 * a word is fetched from the effective address.
 */
static void gen_move_to_sr(CPUM68KState *env, DisasContext *s, uint16_t insn,
                           bool ccr_only)
{
    if ((insn & 0x3f) == 0x3c) {
        uint16_t val;
        val = read_im16(env, s);
        gen_set_sr_im(s, val, ccr_only);
    } else {
        TCGv src;
        SRC_EA(env, src, OS_WORD, 0, NULL);
        gen_set_sr(s, src, ccr_only);
    }
}
2302 
/*
 * Immediate arithmetic/logic group: op (insn bits 11:9) selects
 * 0 = ori, 1 = andi, 2 = subi, 3 = addi, 5 = eori, 6 = cmpi.
 * An EA field of 0x3c selects the SR/CCR destination form, which is
 * only valid for the logical ops (ori/andi/eori).
 */
DISAS_INSN(arith_im)
{
    int op;
    TCGv im;
    TCGv src1;
    TCGv dest;
    TCGv addr;
    int opsize;
    bool with_SR = ((insn & 0x3f) == 0x3c);

    op = (insn >> 9) & 7;
    opsize = insn_opsize(insn);
    /* Fetch the sign-extended immediate operand. */
    switch (opsize) {
    case OS_BYTE:
        im = tcg_const_i32((int8_t)read_im8(env, s));
        break;
    case OS_WORD:
        im = tcg_const_i32((int16_t)read_im16(env, s));
        break;
    case OS_LONG:
        im = tcg_const_i32(read_im32(env, s));
        break;
    default:
        g_assert_not_reached();
    }

    if (with_SR) {
        /* SR/CCR can only be used with andi/eori/ori */
        if (op == 2 || op == 3 || op == 6) {
            disas_undef(env, s, insn);
            return;
        }
        switch (opsize) {
        case OS_BYTE:
            /* byte size targets CCR */
            src1 = gen_get_ccr(s);
            break;
        case OS_WORD:
            /* word size targets the full SR: supervisor only */
            if (IS_USER(s)) {
                gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
                return;
            }
            src1 = gen_get_sr(s);
            break;
        default:
            /* OS_LONG; others already g_assert_not_reached.  */
            disas_undef(env, s, insn);
            return;
        }
    } else {
        /* cmpi (op == 6) never writes back, so no addr needed. */
        SRC_EA(env, src1, opsize, 1, (op == 6) ? NULL : &addr);
    }
    dest = tcg_temp_new();
    switch (op) {
    case 0: /* ori */
        tcg_gen_or_i32(dest, src1, im);
        if (with_SR) {
            gen_set_sr(s, dest, opsize == OS_BYTE);
        } else {
            DEST_EA(env, insn, opsize, dest, &addr);
            gen_logic_cc(s, dest, opsize);
        }
        break;
    case 1: /* andi */
        tcg_gen_and_i32(dest, src1, im);
        if (with_SR) {
            gen_set_sr(s, dest, opsize == OS_BYTE);
        } else {
            DEST_EA(env, insn, opsize, dest, &addr);
            gen_logic_cc(s, dest, opsize);
        }
        break;
    case 2: /* subi */
        /* X (borrow) is set when src1 < im, unsigned. */
        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, src1, im);
        tcg_gen_sub_i32(dest, src1, im);
        gen_update_cc_add(dest, im, opsize);
        set_cc_op(s, CC_OP_SUBB + opsize);
        DEST_EA(env, insn, opsize, dest, &addr);
        break;
    case 3: /* addi */
        tcg_gen_add_i32(dest, src1, im);
        gen_update_cc_add(dest, im, opsize);
        /* X (carry) is set when the result wrapped below im, unsigned. */
        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, im);
        set_cc_op(s, CC_OP_ADDB + opsize);
        DEST_EA(env, insn, opsize, dest, &addr);
        break;
    case 5: /* eori */
        tcg_gen_xor_i32(dest, src1, im);
        if (with_SR) {
            gen_set_sr(s, dest, opsize == OS_BYTE);
        } else {
            DEST_EA(env, insn, opsize, dest, &addr);
            gen_logic_cc(s, dest, opsize);
        }
        break;
    case 6: /* cmpi */
        gen_update_cc_cmp(s, src1, im, opsize);
        break;
    default:
        abort();
    }
    tcg_temp_free(im);
    tcg_temp_free(dest);
}
2406 
/*
 * CAS: single compare-and-swap, implemented with an atomic cmpxchg op.
 * Size is decoded from insn bits 10:9; the extension word names the
 * compare (Dc) and update (Du) registers.
 */
DISAS_INSN(cas)
{
    int opsize;
    TCGv addr;
    uint16_t ext;
    TCGv load;
    TCGv cmp;
    MemOp opc;

    switch ((insn >> 9) & 3) {
    case 1:
        opsize = OS_BYTE;
        opc = MO_SB;
        break;
    case 2:
        opsize = OS_WORD;
        opc = MO_TESW;
        break;
    case 3:
        opsize = OS_LONG;
        opc = MO_TESL;
        break;
    default:
        g_assert_not_reached();
    }

    ext = read_im16(env, s);

    /* cas Dc,Du,<EA> */

    addr = gen_lea(env, s, insn, opsize);
    if (IS_NULL_QREG(addr)) {
        gen_addr_fault(s);
        return;
    }

    /* Sign-extend Dc to match the sign-extending memory load. */
    cmp = gen_extend(s, DREG(ext, 0), opsize, 1);

    /*
     * if  <EA> == Dc then
     *     <EA> = Du
     *     Dc = <EA> (because <EA> == Dc)
     * else
     *     Dc = <EA>
     */

    load = tcg_temp_new();
    tcg_gen_atomic_cmpxchg_i32(load, addr, cmp, DREG(ext, 6),
                               IS_USER(s), opc);
    /* update flags before setting cmp to load */
    gen_update_cc_cmp(s, load, cmp, opsize);
    gen_partset_reg(opsize, DREG(ext, 0), load);

    tcg_temp_free(load);

    /* Complete the postincrement/predecrement address-register update. */
    switch (extract32(insn, 3, 3)) {
    case 3: /* Indirect postincrement.  */
        tcg_gen_addi_i32(AREG(insn, 0), addr, opsize_bytes(opsize));
        break;
    case 4: /* Indirect predecrement.  */
        tcg_gen_mov_i32(AREG(insn, 0), addr);
        break;
    }
}
2471 
/*
 * CAS2.W: double word-sized compare-and-swap on two independent
 * addresses, performed by helper.  There is no parallel-safe word
 * helper, so under CF_PARALLEL we exit to a serial context instead.
 */
DISAS_INSN(cas2w)
{
    uint16_t ext1, ext2;
    TCGv addr1, addr2;
    TCGv regs;

    /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */

    ext1 = read_im16(env, s);

    if (ext1 & 0x8000) {
        /* Address Register */
        addr1 = AREG(ext1, 12);
    } else {
        /* Data Register */
        addr1 = DREG(ext1, 12);
    }

    ext2 = read_im16(env, s);
    if (ext2 & 0x8000) {
        /* Address Register */
        addr2 = AREG(ext2, 12);
    } else {
        /* Data Register */
        addr2 = DREG(ext2, 12);
    }

    /*
     * if (R1) == Dc1 && (R2) == Dc2 then
     *     (R1) = Du1
     *     (R2) = Du2
     * else
     *     Dc1 = (R1)
     *     Dc2 = (R2)
     */

    /* Pack the four register numbers into one helper argument. */
    regs = tcg_const_i32(REG(ext2, 6) |
                         (REG(ext1, 6) << 3) |
                         (REG(ext2, 0) << 6) |
                         (REG(ext1, 0) << 9));
    if (tb_cflags(s->base.tb) & CF_PARALLEL) {
        gen_helper_exit_atomic(cpu_env);
    } else {
        gen_helper_cas2w(cpu_env, regs, addr1, addr2);
    }
    tcg_temp_free(regs);

    /* Note that cas2w also assigned to env->cc_op.  */
    s->cc_op = CC_OP_CMPW;
    s->cc_op_synced = 1;
}
2523 
/*
 * CAS2.L: double long-sized compare-and-swap on two independent
 * addresses; a dedicated parallel helper exists for CF_PARALLEL.
 */
DISAS_INSN(cas2l)
{
    uint16_t ext1, ext2;
    TCGv addr1, addr2, regs;

    /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */

    ext1 = read_im16(env, s);

    if (ext1 & 0x8000) {
        /* Address Register */
        addr1 = AREG(ext1, 12);
    } else {
        /* Data Register */
        addr1 = DREG(ext1, 12);
    }

    ext2 = read_im16(env, s);
    if (ext2 & 0x8000) {
        /* Address Register */
        addr2 = AREG(ext2, 12);
    } else {
        /* Data Register */
        addr2 = DREG(ext2, 12);
    }

    /*
     * if (R1) == Dc1 && (R2) == Dc2 then
     *     (R1) = Du1
     *     (R2) = Du2
     * else
     *     Dc1 = (R1)
     *     Dc2 = (R2)
     */

    /* Pack the four register numbers into one helper argument. */
    regs = tcg_const_i32(REG(ext2, 6) |
                         (REG(ext1, 6) << 3) |
                         (REG(ext2, 0) << 6) |
                         (REG(ext1, 0) << 9));
    if (tb_cflags(s->base.tb) & CF_PARALLEL) {
        gen_helper_cas2l_parallel(cpu_env, regs, addr1, addr2);
    } else {
        gen_helper_cas2l(cpu_env, regs, addr1, addr2);
    }
    tcg_temp_free(regs);

    /* Note that cas2l also assigned to env->cc_op.  */
    s->cc_op = CC_OP_CMPL;
    s->cc_op_synced = 1;
}
2574 
/* BYTEREV: reverse the byte order of a data register (no flags set). */
DISAS_INSN(byterev)
{
    TCGv reg;

    reg = DREG(insn, 0);
    tcg_gen_bswap32_i32(reg, reg);
}
2582 
/*
 * MOVE/MOVEA: size is encoded in insn bits 13:12 (1 = byte, 2 = long,
 * 3 = word).  A destination mode of 1 (op == 1) is MOVEA, which does
 * not affect the flags.
 */
DISAS_INSN(move)
{
    TCGv src;
    TCGv dest;
    int op;
    int opsize;

    switch (insn >> 12) {
    case 1: /* move.b */
        opsize = OS_BYTE;
        break;
    case 2: /* move.l */
        opsize = OS_LONG;
        break;
    case 3: /* move.w */
        opsize = OS_WORD;
        break;
    default:
        abort();
    }
    SRC_EA(env, src, opsize, 1, NULL);
    op = (insn >> 6) & 7;
    if (op == 1) {
        /* movea */
        /* The value will already have been sign extended.  */
        dest = AREG(insn, 9);
        tcg_gen_mov_i32(dest, src);
    } else {
        /* normal move */
        uint16_t dest_ea;
        /* Reassemble the destination mode/reg into an EA specifier. */
        dest_ea = ((insn >> 9) & 7) | (op << 3);
        DEST_EA(env, dest_ea, opsize, src, NULL);
        /* This will be correct because loads sign extend.  */
        gen_logic_cc(s, src, opsize);
    }
}
2619 
/* NEGX: negate with extend, i.e. dest = 0 - (src + X).  */
DISAS_INSN(negx)
{
    TCGv z;
    TCGv src;
    TCGv addr;
    int opsize;

    opsize = insn_opsize(insn);
    SRC_EA(env, src, opsize, 1, &addr);

    gen_flush_flags(s); /* compute old Z */

    /*
     * Perform subtract with borrow.
     * (X, N) =  -(src + X);
     */

    z = tcg_const_i32(0);
    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, z, QREG_CC_X, z);
    tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, z, z, QREG_CC_N, QREG_CC_X);
    tcg_temp_free(z);
    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);

    /* Keep only the borrow bit in X. */
    tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);

    /*
     * Compute signed-overflow for negation.  The normal formula for
     * subtraction is (res ^ src) & (src ^ dest), but with dest==0
     * this simplifies to res & src.
     */

    tcg_gen_and_i32(QREG_CC_V, QREG_CC_N, src);

    /* Copy the rest of the results into place.  */
    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
    tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);

    set_cc_op(s, CC_OP_FLAGS);

    /* result is in QREG_CC_N */

    DEST_EA(env, insn, opsize, QREG_CC_N, &addr);
}
2663 
/* LEA: load the effective address into an address register. */
DISAS_INSN(lea)
{
    TCGv reg;
    TCGv tmp;

    reg = AREG(insn, 9);
    tmp = gen_lea(env, s, insn, OS_LONG);
    if (IS_NULL_QREG(tmp)) {
        /* EA mode with no address (e.g. register direct). */
        gen_addr_fault(s);
        return;
    }
    tcg_gen_mov_i32(reg, tmp);
}
2677 
/* CLR: write zero to the EA and set the flags for a zero result. */
DISAS_INSN(clr)
{
    int opsize;
    TCGv zero;

    zero = tcg_const_i32(0);

    opsize = insn_opsize(insn);
    DEST_EA(env, insn, opsize, zero, NULL);
    gen_logic_cc(s, zero, opsize);
    tcg_temp_free(zero);
}
2690 
/* MOVE from CCR: store the computed CCR as a word to the EA. */
DISAS_INSN(move_from_ccr)
{
    TCGv ccr;

    ccr = gen_get_ccr(s);
    DEST_EA(env, insn, OS_WORD, ccr, NULL);
}
2698 
/* NEG: two's-complement negate the EA operand, updating all flags. */
DISAS_INSN(neg)
{
    TCGv src1;
    TCGv dest;
    TCGv addr;
    int opsize;

    opsize = insn_opsize(insn);
    SRC_EA(env, src1, opsize, 1, &addr);
    dest = tcg_temp_new();
    tcg_gen_neg_i32(dest, src1);
    set_cc_op(s, CC_OP_SUBB + opsize);
    gen_update_cc_add(dest, src1, opsize);
    /* X (and C) is set whenever the result is non-zero. */
    tcg_gen_setcondi_i32(TCG_COND_NE, QREG_CC_X, dest, 0);
    DEST_EA(env, insn, opsize, dest, &addr);
    tcg_temp_free(dest);
}
2716 
/* MOVE to CCR: shared implementation with MOVE to SR, ccr_only set. */
DISAS_INSN(move_to_ccr)
{
    gen_move_to_sr(env, s, insn, true);
}
2721 
/* NOT: bitwise complement of the EA operand; sets NZ flags. */
DISAS_INSN(not)
{
    TCGv src1;
    TCGv dest;
    TCGv addr;
    int opsize;

    opsize = insn_opsize(insn);
    SRC_EA(env, src1, opsize, 1, &addr);
    dest = tcg_temp_new();
    tcg_gen_not_i32(dest, src1);
    DEST_EA(env, insn, opsize, dest, &addr);
    gen_logic_cc(s, dest, opsize);
}
2736 
2737 DISAS_INSN(swap)
2738 {
2739     TCGv src1;
2740     TCGv src2;
2741     TCGv reg;
2742 
2743     src1 = tcg_temp_new();
2744     src2 = tcg_temp_new();
2745     reg = DREG(insn, 0);
2746     tcg_gen_shli_i32(src1, reg, 16);
2747     tcg_gen_shri_i32(src2, reg, 16);
2748     tcg_gen_or_i32(reg, src1, src2);
2749     tcg_temp_free(src2);
2750     tcg_temp_free(src1);
2751     gen_logic_cc(s, reg, OS_LONG);
2752 }
2753 
/* BKPT: raise a debug exception at the current instruction. */
DISAS_INSN(bkpt)
{
    gen_exception(s, s->base.pc_next, EXCP_DEBUG);
}
2758 
/* PEA: push the effective address onto the stack. */
DISAS_INSN(pea)
{
    TCGv tmp;

    tmp = gen_lea(env, s, insn, OS_LONG);
    if (IS_NULL_QREG(tmp)) {
        /* EA mode with no address (e.g. register direct). */
        gen_addr_fault(s);
        return;
    }
    gen_push(s, tmp);
}
2770 
/*
 * EXT/EXTB: sign extension.  op (insn bits 8:6): 2 = byte to word
 * (only the low word of the register is written), 3 = word to long,
 * otherwise byte to long.  NZ flags are set from the long result.
 */
DISAS_INSN(ext)
{
    int op;
    TCGv reg;
    TCGv tmp;

    reg = DREG(insn, 0);
    op = (insn >> 6) & 7;
    tmp = tcg_temp_new();
    if (op == 3)
        tcg_gen_ext16s_i32(tmp, reg);
    else
        tcg_gen_ext8s_i32(tmp, reg);
    if (op == 2)
        gen_partset_reg(OS_WORD, reg, tmp);
    else
        tcg_gen_mov_i32(reg, tmp);
    gen_logic_cc(s, tmp, OS_LONG);
    tcg_temp_free(tmp);
}
2791 
/* TST: set NZ flags from the EA operand without modifying it. */
DISAS_INSN(tst)
{
    int opsize;
    TCGv tmp;

    opsize = insn_opsize(insn);
    SRC_EA(env, tmp, opsize, 1, NULL);
    gen_logic_cc(s, tmp, opsize);
}
2801 
/* PULSE: has only externally-visible bus effects on real hardware. */
DISAS_INSN(pulse)
{
  /* Implemented as a NOP.  */
}
2806 
/* ILLEGAL: raise an illegal-instruction exception. */
DISAS_INSN(illegal)
{
    gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
}
2811 
/*
 * TAS: test the byte at the EA (setting NZ), then write it back with
 * the high bit set.
 */
/* ??? This should be atomic.  */
DISAS_INSN(tas)
{
    TCGv dest;
    TCGv src1;
    TCGv addr;

    dest = tcg_temp_new();
    SRC_EA(env, src1, OS_BYTE, 1, &addr);
    gen_logic_cc(s, src1, OS_BYTE);
    tcg_gen_ori_i32(dest, src1, 0x80);
    DEST_EA(env, insn, OS_BYTE, dest, &addr);
    tcg_temp_free(dest);
}
2826 
/*
 * MULU.L/MULS.L: 32x32 multiply.  ext bit 11 selects signed; ext bit
 * 10 selects the 64-bit result form (Dh:Dl), which requires the
 * QUAD_MULDIV feature.  On 680x0 the 32-bit form still computes the
 * high half to derive the overflow flag; ColdFire simply discards it.
 */
DISAS_INSN(mull)
{
    uint16_t ext;
    TCGv src1;
    int sign;

    ext = read_im16(env, s);

    sign = ext & 0x800;

    if (ext & 0x400) {
        /* 64-bit result form: Dh = ext bits 2:0, Dl = ext bits 14:12. */
        if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
            return;
        }

        SRC_EA(env, src1, OS_LONG, 0, NULL);

        if (sign) {
            tcg_gen_muls2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
        } else {
            tcg_gen_mulu2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
        }
        /* if Dl == Dh, 68040 returns low word */
        tcg_gen_mov_i32(DREG(ext, 0), QREG_CC_N);
        tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_Z);
        /* Z is set only when the whole 64-bit result is zero. */
        tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N);

        tcg_gen_movi_i32(QREG_CC_V, 0);
        tcg_gen_movi_i32(QREG_CC_C, 0);

        set_cc_op(s, CC_OP_FLAGS);
        return;
    }
    SRC_EA(env, src1, OS_LONG, 0, NULL);
    if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
        tcg_gen_movi_i32(QREG_CC_C, 0);
        if (sign) {
            tcg_gen_muls2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
            /* QREG_CC_V is -(QREG_CC_V != (QREG_CC_N >> 31)) */
            tcg_gen_sari_i32(QREG_CC_Z, QREG_CC_N, 31);
            tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_Z);
        } else {
            tcg_gen_mulu2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
            /* QREG_CC_V is -(QREG_CC_V != 0), use QREG_CC_C as 0 */
            tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_C);
        }
        /* Sign-replicate V (the flag is stored as -1/0). */
        tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
        tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_N);

        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);

        set_cc_op(s, CC_OP_FLAGS);
    } else {
        /*
         * The upper 32 bits of the product are discarded, so
         * muls.l and mulu.l are functionally equivalent.
         */
        tcg_gen_mul_i32(DREG(ext, 12), src1, DREG(ext, 12));
        gen_logic_cc(s, DREG(ext, 12), OS_LONG);
    }
}
2889 
/*
 * LINK: push An, copy the new SP into An, then add OFFSET to SP.
 * When An is A7 itself the intermediate copy is skipped, since A7
 * receives the final SP value anyway.
 */
static void gen_link(DisasContext *s, uint16_t insn, int32_t offset)
{
    TCGv reg;
    TCGv tmp;

    reg = AREG(insn, 0);
    tmp = tcg_temp_new();
    tcg_gen_subi_i32(tmp, QREG_SP, 4);
    gen_store(s, OS_LONG, tmp, reg, IS_USER(s));
    if ((insn & 7) != 7) {
        tcg_gen_mov_i32(reg, tmp);
    }
    tcg_gen_addi_i32(QREG_SP, tmp, offset);
    tcg_temp_free(tmp);
}
2905 
/* LINK.W: link with a sign-extended 16-bit displacement. */
DISAS_INSN(link)
{
    int16_t offset;

    offset = read_im16(env, s);
    gen_link(s, insn, offset);
}
2913 
/* LINK.L: link with a 32-bit displacement. */
DISAS_INSN(linkl)
{
    int32_t offset;

    offset = read_im32(env, s);
    gen_link(s, insn, offset);
}
2921 
/*
 * UNLK: restore An from the saved frame pointer at (An), and set
 * SP to the old An value plus 4 (popping the saved long).
 */
DISAS_INSN(unlk)
{
    TCGv src;
    TCGv reg;
    TCGv tmp;

    /* Copy An first, since the load below overwrites it. */
    src = tcg_temp_new();
    reg = AREG(insn, 0);
    tcg_gen_mov_i32(src, reg);
    tmp = gen_load(s, OS_LONG, src, 0, IS_USER(s));
    tcg_gen_mov_i32(reg, tmp);
    tcg_gen_addi_i32(QREG_SP, src, 4);
    tcg_temp_free(src);
    tcg_temp_free(tmp);
}
2937 
#if defined(CONFIG_SOFTMMU)
/* RESET: privileged; delegated entirely to the helper. */
DISAS_INSN(reset)
{
    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }

    gen_helper_reset(cpu_env);
}
#endif
2949 
/* NOP: nothing to emit. */
DISAS_INSN(nop)
{
}
2953 
/* RTD: return and deallocate - pop PC, then add (offset + 4) to SP. */
DISAS_INSN(rtd)
{
    TCGv tmp;
    int16_t offset = read_im16(env, s);

    tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
    tcg_gen_addi_i32(QREG_SP, QREG_SP, offset + 4);
    gen_jmp(s, tmp);
}
2963 
/* RTR: pop a CCR word, then a return PC, from the stack. */
DISAS_INSN(rtr)
{
    TCGv tmp;
    TCGv ccr;
    TCGv sp;

    sp = tcg_temp_new();
    ccr = gen_load(s, OS_WORD, QREG_SP, 0, IS_USER(s));
    tcg_gen_addi_i32(sp, QREG_SP, 2);
    tmp = gen_load(s, OS_LONG, sp, 0, IS_USER(s));
    tcg_gen_addi_i32(QREG_SP, sp, 4);
    tcg_temp_free(sp);

    /* Only the CCR part of SR is restored. */
    gen_set_sr(s, ccr, true);
    tcg_temp_free(ccr);

    gen_jmp(s, tmp);
}
2982 
/* RTS: pop the return PC from the stack and jump to it. */
DISAS_INSN(rts)
{
    TCGv tmp;

    tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
    tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
    gen_jmp(s, tmp);
}
2991 
/*
 * JMP/JSR: insn bit 6 clear selects JSR, which pushes the address of
 * the next instruction before jumping.
 */
DISAS_INSN(jump)
{
    TCGv tmp;

    /*
     * Load the target address first to ensure correct exception
     * behavior.
     */
    tmp = gen_lea(env, s, insn, OS_LONG);
    if (IS_NULL_QREG(tmp)) {
        gen_addr_fault(s);
        return;
    }
    if ((insn & 0x40) == 0) {
        /* jsr */
        gen_push(s, tcg_const_i32(s->pc));
    }
    gen_jmp(s, tmp);
}
3011 
/*
 * ADDQ/SUBQ: add or subtract a 3-bit immediate (0 encodes 8) to the
 * EA.  insn bit 8 selects subtract.  Address-register destinations
 * are always long sized and leave the flags untouched.
 */
DISAS_INSN(addsubq)
{
    TCGv src;
    TCGv dest;
    TCGv val;
    int imm;
    TCGv addr;
    int opsize;

    if ((insn & 070) == 010) {
        /* Operation on address register is always long.  */
        opsize = OS_LONG;
    } else {
        opsize = insn_opsize(insn);
    }
    SRC_EA(env, src, opsize, 1, &addr);
    imm = (insn >> 9) & 7;
    if (imm == 0) {
        /* An immediate field of 0 encodes the value 8. */
        imm = 8;
    }
    val = tcg_const_i32(imm);
    dest = tcg_temp_new();
    tcg_gen_mov_i32(dest, src);
    if ((insn & 0x38) == 0x08) {
        /*
         * Don't update condition codes if the destination is an
         * address register.
         */
        if (insn & 0x0100) {
            tcg_gen_sub_i32(dest, dest, val);
        } else {
            tcg_gen_add_i32(dest, dest, val);
        }
    } else {
        if (insn & 0x0100) {
            /* X (borrow) set when dest < val, unsigned. */
            tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
            tcg_gen_sub_i32(dest, dest, val);
            set_cc_op(s, CC_OP_SUBB + opsize);
        } else {
            tcg_gen_add_i32(dest, dest, val);
            /* X (carry) set when the result wrapped below val, unsigned. */
            tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
            set_cc_op(s, CC_OP_ADDB + opsize);
        }
        gen_update_cc_add(dest, val, opsize);
    }
    tcg_temp_free(val);
    DEST_EA(env, insn, opsize, dest, &addr);
    tcg_temp_free(dest);
}
3061 
/* TPF (trapf): a NOP that skips over its 0, 1 or 2 extension words. */
DISAS_INSN(tpf)
{
    switch (insn & 7) {
    case 2: /* One extension word.  */
        s->pc += 2;
        break;
    case 3: /* Two extension words.  */
        s->pc += 4;
        break;
    case 4: /* No extension words.  */
        break;
    default:
        disas_undef(env, s, insn);
    }
}
3077 
/*
 * BRA/BSR/Bcc: op (insn bits 11:8) is 0 for BRA, 1 for BSR, and the
 * condition code otherwise.  The 8-bit displacement in the opcode
 * escapes to a 16-bit (0x00) or 32-bit (0xff) extension word.
 * Displacements are relative to the end of the opcode word.
 */
DISAS_INSN(branch)
{
    int32_t offset;
    uint32_t base;
    int op;

    base = s->pc;
    op = (insn >> 8) & 0xf;
    offset = (int8_t)insn;
    if (offset == 0) {
        offset = (int16_t)read_im16(env, s);
    } else if (offset == -1) {
        offset = read_im32(env, s);
    }
    if (op == 1) {
        /* bsr */
        gen_push(s, tcg_const_i32(s->pc));
    }
    if (op > 1) {
        /* Bcc */
        /* Branch around the taken case when the inverted condition holds. */
        TCGLabel *l1 = gen_new_label();
        gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
        gen_jmp_tb(s, 1, base + offset);
        gen_set_label(l1);
        gen_jmp_tb(s, 0, s->pc);
    } else {
        /* Unconditional branch.  */
        update_cc_op(s);
        gen_jmp_tb(s, 0, base + offset);
    }
}
3109 
/* MOVEQ: load a sign-extended 8-bit immediate into Dn; sets NZ flags. */
DISAS_INSN(moveq)
{
    tcg_gen_movi_i32(DREG(insn, 9), (int8_t)insn);
    gen_logic_cc(s, DREG(insn, 9), OS_LONG);
}
3115 
/*
 * MVS/MVZ (ColdFire): move with sign or zero extension.  insn bit 6
 * selects word vs byte; a clear bit 7 selects sign extension.
 */
DISAS_INSN(mvzs)
{
    int opsize;
    TCGv src;
    TCGv reg;

    if (insn & 0x40)
        opsize = OS_WORD;
    else
        opsize = OS_BYTE;
    /* SRC_EA performs the extension: sign when bit 7 is clear. */
    SRC_EA(env, src, opsize, (insn & 0x80) == 0, NULL);
    reg = DREG(insn, 9);
    tcg_gen_mov_i32(reg, src);
    gen_logic_cc(s, src, opsize);
}
3131 
/*
 * OR: insn bit 8 set means Dn OR <ea> -> <ea>; clear means
 * <ea> OR Dn -> Dn (only the sized part of Dn is written).
 */
DISAS_INSN(or)
{
    TCGv reg;
    TCGv dest;
    TCGv src;
    TCGv addr;
    int opsize;

    opsize = insn_opsize(insn);
    reg = gen_extend(s, DREG(insn, 9), opsize, 0);
    dest = tcg_temp_new();
    if (insn & 0x100) {
        SRC_EA(env, src, opsize, 0, &addr);
        tcg_gen_or_i32(dest, src, reg);
        DEST_EA(env, insn, opsize, dest, &addr);
    } else {
        SRC_EA(env, src, opsize, 0, NULL);
        tcg_gen_or_i32(dest, src, reg);
        gen_partset_reg(opsize, DREG(insn, 9), dest);
    }
    gen_logic_cc(s, dest, opsize);
    tcg_temp_free(dest);
}
3155 
3156 DISAS_INSN(suba)
3157 {
3158     TCGv src;
3159     TCGv reg;
3160 
3161     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3162     reg = AREG(insn, 9);
3163     tcg_gen_sub_i32(reg, reg, src);
3164 }
3165 
static inline void gen_subx(DisasContext *s, TCGv src, TCGv dest, int opsize)
{
    TCGv tmp;

    gen_flush_flags(s); /* compute old Z */

    /*
     * Perform subtract with borrow.
     * (X, N) = dest - (src + X);
     */

    tmp = tcg_const_i32(0);
    /* Widen src + X into the (N, X) pair, then subtract it from dest.  */
    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, tmp, QREG_CC_X, tmp);
    tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, dest, tmp, QREG_CC_N, QREG_CC_X);
    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
    /* The borrow out of the subtract is bit 0 of the high word.  */
    tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);

    /* Compute signed-overflow for subtract.  */

    /* V = (N ^ dest) & (dest ^ src): sign flipped and operand signs differed. */
    tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, dest);
    tcg_gen_xor_i32(tmp, dest, src);
    tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, tmp);
    tcg_temp_free(tmp);

    /* Copy the rest of the results into place.  */
    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
    tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);

    set_cc_op(s, CC_OP_FLAGS);

    /* result is in QREG_CC_N */
}
3198 
3199 DISAS_INSN(subx_reg)
3200 {
3201     TCGv dest;
3202     TCGv src;
3203     int opsize;
3204 
3205     opsize = insn_opsize(insn);
3206 
3207     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3208     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3209 
3210     gen_subx(s, src, dest, opsize);
3211 
3212     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3213 }
3214 
DISAS_INSN(subx_mem)
{
    /*
     * subx -(Ay),-(Ax): both operands use predecrement addressing;
     * each address register is updated before its load.
     */
    TCGv src;
    TCGv addr_src;
    TCGv dest;
    TCGv addr_dest;
    int opsize;

    opsize = insn_opsize(insn);

    addr_src = AREG(insn, 0);
    tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
    src = gen_load(s, opsize, addr_src, 1, IS_USER(s));

    addr_dest = AREG(insn, 9);
    tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
    dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));

    gen_subx(s, src, dest, opsize);

    /* gen_subx left the result in QREG_CC_N.  */
    gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));

    tcg_temp_free(dest);
    tcg_temp_free(src);
}
3240 
3241 DISAS_INSN(mov3q)
3242 {
3243     TCGv src;
3244     int val;
3245 
3246     val = (insn >> 9) & 7;
3247     if (val == 0)
3248         val = -1;
3249     src = tcg_const_i32(val);
3250     gen_logic_cc(s, src, OS_LONG);
3251     DEST_EA(env, insn, OS_LONG, src, NULL);
3252     tcg_temp_free(src);
3253 }
3254 
3255 DISAS_INSN(cmp)
3256 {
3257     TCGv src;
3258     TCGv reg;
3259     int opsize;
3260 
3261     opsize = insn_opsize(insn);
3262     SRC_EA(env, src, opsize, 1, NULL);
3263     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
3264     gen_update_cc_cmp(s, reg, src, opsize);
3265 }
3266 
3267 DISAS_INSN(cmpa)
3268 {
3269     int opsize;
3270     TCGv src;
3271     TCGv reg;
3272 
3273     if (insn & 0x100) {
3274         opsize = OS_LONG;
3275     } else {
3276         opsize = OS_WORD;
3277     }
3278     SRC_EA(env, src, opsize, 1, NULL);
3279     reg = AREG(insn, 9);
3280     gen_update_cc_cmp(s, reg, src, OS_LONG);
3281 }
3282 
DISAS_INSN(cmpm)
{
    /* cmpm (Ay)+,(Ax)+: compare memory with memory, post-incrementing both. */
    int opsize = insn_opsize(insn);
    TCGv src, dst;

    /* Post-increment load (mode 3) from Ay.  */
    src = gen_ea_mode(env, s, 3, REG(insn, 0), opsize,
                      NULL_QREG, NULL, EA_LOADS, IS_USER(s));
    /* Post-increment load (mode 3) from Ax.  */
    dst = gen_ea_mode(env, s, 3, REG(insn, 9), opsize,
                      NULL_QREG, NULL, EA_LOADS, IS_USER(s));

    gen_update_cc_cmp(s, dst, src, opsize);
}
3297 
DISAS_INSN(eor)
{
    /* eor Dn,<ea>: exclusive-or into the EA destination, setting NZVC.  */
    TCGv src;
    TCGv dest;
    TCGv addr;
    int opsize;

    opsize = insn_opsize(insn);

    /* Read-modify-write: SRC_EA captures the address for DEST_EA below.  */
    SRC_EA(env, src, opsize, 0, &addr);
    dest = tcg_temp_new();
    tcg_gen_xor_i32(dest, src, DREG(insn, 9));
    gen_logic_cc(s, dest, opsize);
    DEST_EA(env, insn, opsize, dest, &addr);
    tcg_temp_free(dest);
}
3314 
3315 static void do_exg(TCGv reg1, TCGv reg2)
3316 {
3317     TCGv temp = tcg_temp_new();
3318     tcg_gen_mov_i32(temp, reg1);
3319     tcg_gen_mov_i32(reg1, reg2);
3320     tcg_gen_mov_i32(reg2, temp);
3321     tcg_temp_free(temp);
3322 }
3323 
DISAS_INSN(exg_dd)
{
    /* exg Dx,Dy: exchange two data registers */
    do_exg(DREG(insn, 9), DREG(insn, 0));
}
3329 
DISAS_INSN(exg_aa)
{
    /* exg Ax,Ay: exchange two address registers */
    do_exg(AREG(insn, 9), AREG(insn, 0));
}
3335 
DISAS_INSN(exg_da)
{
    /* exg Dx,Ay: exchange a data register with an address register */
    do_exg(DREG(insn, 9), AREG(insn, 0));
}
3341 
DISAS_INSN(and)
{
    /* and.b/.w/.l: bit 8 selects Dn&<ea> -> <ea> vs <ea>&Dn -> Dn.  */
    TCGv src;
    TCGv reg;
    TCGv dest;
    TCGv addr;
    int opsize;

    dest = tcg_temp_new();

    opsize = insn_opsize(insn);
    reg = DREG(insn, 9);
    if (insn & 0x100) {
        /* <ea> is the destination: read-modify-write through addr.  */
        SRC_EA(env, src, opsize, 0, &addr);
        tcg_gen_and_i32(dest, src, reg);
        DEST_EA(env, insn, opsize, dest, &addr);
    } else {
        /* Dn is the destination; only the low opsize bits are updated.  */
        SRC_EA(env, src, opsize, 0, NULL);
        tcg_gen_and_i32(dest, src, reg);
        gen_partset_reg(opsize, reg, dest);
    }
    gen_logic_cc(s, dest, opsize);
    tcg_temp_free(dest);
}
3366 
3367 DISAS_INSN(adda)
3368 {
3369     TCGv src;
3370     TCGv reg;
3371 
3372     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3373     reg = AREG(insn, 9);
3374     tcg_gen_add_i32(reg, reg, src);
3375 }
3376 
static inline void gen_addx(DisasContext *s, TCGv src, TCGv dest, int opsize)
{
    TCGv tmp;

    gen_flush_flags(s); /* compute old Z */

    /*
     * Perform addition with carry.
     * (X, N) = src + dest + X;
     */

    tmp = tcg_const_i32(0);
    /* Widen X + dest, then add src; the carry accumulates in QREG_CC_X.  */
    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_X, tmp, dest, tmp);
    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_N, QREG_CC_X, src, tmp);
    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);

    /* Compute signed-overflow for addition.  */

    /* V = (N ^ src) & ~(dest ^ src): operand signs agreed, result differs. */
    tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
    tcg_gen_xor_i32(tmp, dest, src);
    tcg_gen_andc_i32(QREG_CC_V, QREG_CC_V, tmp);
    tcg_temp_free(tmp);

    /* Copy the rest of the results into place.  */
    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
    tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);

    set_cc_op(s, CC_OP_FLAGS);

    /* result is in QREG_CC_N */
}
3408 
3409 DISAS_INSN(addx_reg)
3410 {
3411     TCGv dest;
3412     TCGv src;
3413     int opsize;
3414 
3415     opsize = insn_opsize(insn);
3416 
3417     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3418     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3419 
3420     gen_addx(s, src, dest, opsize);
3421 
3422     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3423 }
3424 
DISAS_INSN(addx_mem)
{
    /*
     * addx -(Ay),-(Ax): both operands use predecrement addressing;
     * each address register is updated before its load.
     */
    TCGv src;
    TCGv addr_src;
    TCGv dest;
    TCGv addr_dest;
    int opsize;

    opsize = insn_opsize(insn);

    addr_src = AREG(insn, 0);
    tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
    src = gen_load(s, opsize, addr_src, 1, IS_USER(s));

    addr_dest = AREG(insn, 9);
    tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
    dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));

    gen_addx(s, src, dest, opsize);

    /* gen_addx left the result in QREG_CC_N.  */
    gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));

    tcg_temp_free(dest);
    tcg_temp_free(src);
}
3450 
static inline void shift_im(DisasContext *s, uint16_t insn, int opsize)
{
    /*
     * Immediate-count shifts (asl/asr/lsl/lsr #imm,Dn).  The 3-bit count
     * field encodes 1..8, with 0 meaning 8.
     */
    int count = (insn >> 9) & 7;
    int logical = insn & 8;
    int left = insn & 0x100;
    int bits = opsize_bytes(opsize) * 8;
    TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);

    if (count == 0) {
        count = 8;
    }

    tcg_gen_movi_i32(QREG_CC_V, 0);
    if (left) {
        /* C is the last bit shifted out of the operand.  */
        tcg_gen_shri_i32(QREG_CC_C, reg, bits - count);
        tcg_gen_shli_i32(QREG_CC_N, reg, count);

        /*
         * Note that ColdFire always clears V (done above),
         * while M68000 sets if the most significant bit is changed at
         * any time during the shift operation.
         */
        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
            /* if shift count >= bits, V is (reg != 0) */
            if (count >= bits) {
                tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, reg, QREG_CC_V);
            } else {
                /* V set if the sign differs from any bit shifted through it */
                TCGv t0 = tcg_temp_new();
                tcg_gen_sari_i32(QREG_CC_V, reg, bits - 1);
                tcg_gen_sari_i32(t0, reg, bits - count - 1);
                tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, t0);
                tcg_temp_free(t0);
            }
            /* Convert the 0/1 setcond result into the 0/-1 flag form.  */
            tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
        }
    } else {
        /* Right shift: the last bit shifted out is bit (count - 1).  */
        tcg_gen_shri_i32(QREG_CC_C, reg, count - 1);
        if (logical) {
            tcg_gen_shri_i32(QREG_CC_N, reg, count);
        } else {
            tcg_gen_sari_i32(QREG_CC_N, reg, count);
        }
    }

    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
    tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);

    gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
    set_cc_op(s, CC_OP_FLAGS);
}
3503 
static inline void shift_reg(DisasContext *s, uint16_t insn, int opsize)
{
    /*
     * Register-count shifts (asl/asr/lsl/lsr Dx,Dy).  The shift is done
     * in 64 bits so the last bit shifted out (the carry) can be read
     * directly from the widened result.
     */
    int logical = insn & 8;
    int left = insn & 0x100;
    int bits = opsize_bytes(opsize) * 8;
    TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
    TCGv s32;
    TCGv_i64 t64, s64;

    t64 = tcg_temp_new_i64();
    s64 = tcg_temp_new_i64();
    s32 = tcg_temp_new();

    /*
     * Note that m68k truncates the shift count modulo 64, not 32.
     * In addition, a 64-bit shift makes it easy to find "the last
     * bit shifted out", for the carry flag.
     */
    tcg_gen_andi_i32(s32, DREG(insn, 9), 63);
    tcg_gen_extu_i32_i64(s64, s32);
    tcg_gen_extu_i32_i64(t64, reg);

    /* Optimistically set V=0.  Also used as a zero source below.  */
    tcg_gen_movi_i32(QREG_CC_V, 0);
    if (left) {
        tcg_gen_shl_i64(t64, t64, s64);

        if (opsize == OS_LONG) {
            tcg_gen_extr_i64_i32(QREG_CC_N, QREG_CC_C, t64);
            /* Note that C=0 if shift count is 0, and we get that for free.  */
        } else {
            /* Byte/word: the carry sits at bit 'bits' of the low half,
               but must be forced to 0 when the count was zero.  */
            TCGv zero = tcg_const_i32(0);
            tcg_gen_extrl_i64_i32(QREG_CC_N, t64);
            tcg_gen_shri_i32(QREG_CC_C, QREG_CC_N, bits);
            tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
                                s32, zero, zero, QREG_CC_C);
            tcg_temp_free(zero);
        }
        tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);

        /* X = C, but only if the shift count was non-zero.  */
        tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
                            QREG_CC_C, QREG_CC_X);

        /*
         * M68000 sets V if the most significant bit is changed at
         * any time during the shift operation.  Do this via creating
         * an extension of the sign bit, comparing, and discarding
         * the bits below the sign bit.  I.e.
         *     int64_t s = (intN_t)reg;
         *     int64_t t = (int64_t)(intN_t)reg << count;
         *     V = ((s ^ t) & (-1 << (bits - 1))) != 0
         */
        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
            TCGv_i64 tt = tcg_const_i64(32);
            /* if shift is greater than 32, use 32 */
            tcg_gen_movcond_i64(TCG_COND_GT, s64, s64, tt, tt, s64);
            tcg_temp_free_i64(tt);
            /* Sign extend the input to 64 bits; re-do the shift.  */
            tcg_gen_ext_i32_i64(t64, reg);
            tcg_gen_shl_i64(s64, t64, s64);
            /* Clear all bits that are unchanged.  */
            tcg_gen_xor_i64(t64, t64, s64);
            /* Ignore the bits below the sign bit.  */
            tcg_gen_andi_i64(t64, t64, -1ULL << (bits - 1));
            /* If any bits remain set, we have overflow.  */
            tcg_gen_setcondi_i64(TCG_COND_NE, t64, t64, 0);
            tcg_gen_extrl_i64_i32(QREG_CC_V, t64);
            tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
        }
    } else {
        /* Right shift: pre-shift the value into the high half so the
           last bit shifted out lands in bit 31 of the low half.  */
        tcg_gen_shli_i64(t64, t64, 32);
        if (logical) {
            tcg_gen_shr_i64(t64, t64, s64);
        } else {
            tcg_gen_sar_i64(t64, t64, s64);
        }
        tcg_gen_extr_i64_i32(QREG_CC_C, QREG_CC_N, t64);

        /* Note that C=0 if shift count is 0, and we get that for free.  */
        tcg_gen_shri_i32(QREG_CC_C, QREG_CC_C, 31);

        /* X = C, but only if the shift count was non-zero.  */
        tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
                            QREG_CC_C, QREG_CC_X);
    }
    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);

    tcg_temp_free(s32);
    tcg_temp_free_i64(s64);
    tcg_temp_free_i64(t64);

    /* Write back the result.  */
    gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
    set_cc_op(s, CC_OP_FLAGS);
}
3601 
DISAS_INSN(shift8_im)
{
    /* asl/asr/lsl/lsr.b #imm,Dn */
    shift_im(s, insn, OS_BYTE);
}
3606 
DISAS_INSN(shift16_im)
{
    /* asl/asr/lsl/lsr.w #imm,Dn */
    shift_im(s, insn, OS_WORD);
}
3611 
DISAS_INSN(shift_im)
{
    /* asl/asr/lsl/lsr.l #imm,Dn */
    shift_im(s, insn, OS_LONG);
}
3616 
DISAS_INSN(shift8_reg)
{
    /* asl/asr/lsl/lsr.b Dx,Dy */
    shift_reg(s, insn, OS_BYTE);
}
3621 
DISAS_INSN(shift16_reg)
{
    /* asl/asr/lsl/lsr.w Dx,Dy */
    shift_reg(s, insn, OS_WORD);
}
3626 
DISAS_INSN(shift_reg)
{
    /* asl/asr/lsl/lsr.l Dx,Dy */
    shift_reg(s, insn, OS_LONG);
}
3631 
DISAS_INSN(shift_mem)
{
    /* Memory shifts are always word-sized with an implicit count of 1.  */
    int logical = insn & 8;
    int left = insn & 0x100;
    TCGv src;
    TCGv addr;

    SRC_EA(env, src, OS_WORD, !logical, &addr);
    tcg_gen_movi_i32(QREG_CC_V, 0);
    if (left) {
        /* C is the bit shifted out: the old bit 15.  */
        tcg_gen_shri_i32(QREG_CC_C, src, 15);
        tcg_gen_shli_i32(QREG_CC_N, src, 1);

        /*
         * Note that ColdFire always clears V,
         * while M68000 sets if the most significant bit is changed at
         * any time during the shift operation
         */
        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
            /* With a count of 1, V is simply old-sign XOR new-sign.  */
            src = gen_extend(s, src, OS_WORD, 1);
            tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
        }
    } else {
        /* C is the bit shifted out: the old bit 0.  */
        tcg_gen_mov_i32(QREG_CC_C, src);
        if (logical) {
            tcg_gen_shri_i32(QREG_CC_N, src, 1);
        } else {
            tcg_gen_sari_i32(QREG_CC_N, src, 1);
        }
    }

    gen_ext(QREG_CC_N, QREG_CC_N, OS_WORD, 1);
    tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);

    DEST_EA(env, insn, OS_WORD, QREG_CC_N, &addr);
    set_cc_op(s, CC_OP_FLAGS);
}
3671 
3672 static void rotate(TCGv reg, TCGv shift, int left, int size)
3673 {
3674     switch (size) {
3675     case 8:
3676         /* Replicate the 8-bit input so that a 32-bit rotate works.  */
3677         tcg_gen_ext8u_i32(reg, reg);
3678         tcg_gen_muli_i32(reg, reg, 0x01010101);
3679         goto do_long;
3680     case 16:
3681         /* Replicate the 16-bit input so that a 32-bit rotate works.  */
3682         tcg_gen_deposit_i32(reg, reg, reg, 16, 16);
3683         goto do_long;
3684     do_long:
3685     default:
3686         if (left) {
3687             tcg_gen_rotl_i32(reg, reg, shift);
3688         } else {
3689             tcg_gen_rotr_i32(reg, reg, shift);
3690         }
3691     }
3692 
3693     /* compute flags */
3694 
3695     switch (size) {
3696     case 8:
3697         tcg_gen_ext8s_i32(reg, reg);
3698         break;
3699     case 16:
3700         tcg_gen_ext16s_i32(reg, reg);
3701         break;
3702     default:
3703         break;
3704     }
3705 
3706     /* QREG_CC_X is not affected */
3707 
3708     tcg_gen_mov_i32(QREG_CC_N, reg);
3709     tcg_gen_mov_i32(QREG_CC_Z, reg);
3710 
3711     if (left) {
3712         tcg_gen_andi_i32(QREG_CC_C, reg, 1);
3713     } else {
3714         tcg_gen_shri_i32(QREG_CC_C, reg, 31);
3715     }
3716 
3717     tcg_gen_movi_i32(QREG_CC_V, 0); /* always cleared */
3718 }
3719 
3720 static void rotate_x_flags(TCGv reg, TCGv X, int size)
3721 {
3722     switch (size) {
3723     case 8:
3724         tcg_gen_ext8s_i32(reg, reg);
3725         break;
3726     case 16:
3727         tcg_gen_ext16s_i32(reg, reg);
3728         break;
3729     default:
3730         break;
3731     }
3732     tcg_gen_mov_i32(QREG_CC_N, reg);
3733     tcg_gen_mov_i32(QREG_CC_Z, reg);
3734     tcg_gen_mov_i32(QREG_CC_X, X);
3735     tcg_gen_mov_i32(QREG_CC_C, X);
3736     tcg_gen_movi_i32(QREG_CC_V, 0);
3737 }
3738 
/* Result of rotate_x() is valid if 0 <= shift <= size */
static TCGv rotate_x(TCGv reg, TCGv shift, int left, int size)
{
    /*
     * Rotate through the X flag (roxl/roxr) for 8- and 16-bit operands.
     * The (size+1)-bit rotation is composed from two shifts of the value
     * plus a shift of the old X bit; the returned temp holds the new X.
     */
    TCGv X, shl, shr, shx, sz, zero;

    sz = tcg_const_i32(size);

    shr = tcg_temp_new();
    shl = tcg_temp_new();
    shx = tcg_temp_new();
    if (left) {
        tcg_gen_mov_i32(shl, shift);      /* shl = shift */
        tcg_gen_movi_i32(shr, size + 1);
        tcg_gen_sub_i32(shr, shr, shift); /* shr = size + 1 - shift */
        tcg_gen_subi_i32(shx, shift, 1);  /* shx = shift - 1 */
        /* shx = shx < 0 ? size : shx; */
        zero = tcg_const_i32(0);
        tcg_gen_movcond_i32(TCG_COND_LT, shx, shx, zero, sz, shx);
        tcg_temp_free(zero);
    } else {
        tcg_gen_mov_i32(shr, shift);      /* shr = shift */
        tcg_gen_movi_i32(shl, size + 1);
        tcg_gen_sub_i32(shl, shl, shift); /* shl = size + 1 - shift */
        tcg_gen_sub_i32(shx, sz, shift); /* shx = size - shift */
    }
    tcg_temp_free_i32(sz);

    /* reg = (reg << shl) | (reg >> shr) | (x << shx); */

    tcg_gen_shl_i32(shl, reg, shl);
    tcg_gen_shr_i32(shr, reg, shr);
    tcg_gen_or_i32(reg, shl, shr);
    tcg_temp_free(shl);
    tcg_temp_free(shr);
    tcg_gen_shl_i32(shx, QREG_CC_X, shx);
    tcg_gen_or_i32(reg, reg, shx);
    tcg_temp_free(shx);

    /* X = (reg >> size) & 1 */

    X = tcg_temp_new();
    tcg_gen_extract_i32(X, reg, size, 1);

    return X;
}
3784 
/* Result of rotate32_x() is valid if 0 <= shift < 33 */
static TCGv rotate32_x(TCGv reg, TCGv shift, int left)
{
    /*
     * 32-bit rotate through X (roxl/roxr.l): form a 64-bit value that
     * contains the operand and the X bit, rotate it, then split the
     * result back into the new value and the new X.  The returned temp
     * holds the new X.
     */
    TCGv_i64 t0, shift64;
    TCGv X, lo, hi, zero;

    shift64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(shift64, shift);

    t0 = tcg_temp_new_i64();

    X = tcg_temp_new();
    lo = tcg_temp_new();
    hi = tcg_temp_new();

    if (left) {
        /* create [reg:X:..] */

        tcg_gen_shli_i32(lo, QREG_CC_X, 31);
        tcg_gen_concat_i32_i64(t0, lo, reg);

        /* rotate */

        tcg_gen_rotl_i64(t0, t0, shift64);
        tcg_temp_free_i64(shift64);

        /* result is [reg:..:reg:X] */

        tcg_gen_extr_i64_i32(lo, hi, t0);
        tcg_gen_andi_i32(X, lo, 1);

        tcg_gen_shri_i32(lo, lo, 1);
    } else {
        /* create [..:X:reg] */

        tcg_gen_concat_i32_i64(t0, reg, QREG_CC_X);

        tcg_gen_rotr_i64(t0, t0, shift64);
        tcg_temp_free_i64(shift64);

        /* result is value: [X:reg:..:reg] */

        tcg_gen_extr_i64_i32(lo, hi, t0);

        /* extract X */

        tcg_gen_shri_i32(X, hi, 31);

        /* extract result */

        tcg_gen_shli_i32(hi, hi, 1);
    }
    tcg_temp_free_i64(t0);
    tcg_gen_or_i32(lo, lo, hi);
    tcg_temp_free(hi);

    /* if shift == 0, register and X are not affected */

    zero = tcg_const_i32(0);
    tcg_gen_movcond_i32(TCG_COND_EQ, X, shift, zero, QREG_CC_X, X);
    tcg_gen_movcond_i32(TCG_COND_EQ, reg, shift, zero, reg, lo);
    tcg_temp_free(zero);
    tcg_temp_free(lo);

    return X;
}
3851 
3852 DISAS_INSN(rotate_im)
3853 {
3854     TCGv shift;
3855     int tmp;
3856     int left = (insn & 0x100);
3857 
3858     tmp = (insn >> 9) & 7;
3859     if (tmp == 0) {
3860         tmp = 8;
3861     }
3862 
3863     shift = tcg_const_i32(tmp);
3864     if (insn & 8) {
3865         rotate(DREG(insn, 0), shift, left, 32);
3866     } else {
3867         TCGv X = rotate32_x(DREG(insn, 0), shift, left);
3868         rotate_x_flags(DREG(insn, 0), X, 32);
3869         tcg_temp_free(X);
3870     }
3871     tcg_temp_free(shift);
3872 
3873     set_cc_op(s, CC_OP_FLAGS);
3874 }
3875 
DISAS_INSN(rotate8_im)
{
    /* rol/ror/roxl/roxr.b #imm,Dn; an immediate count of 0 encodes 8.  */
    int left = (insn & 0x100);
    TCGv reg;
    TCGv shift;
    int tmp;

    reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);

    tmp = (insn >> 9) & 7;
    if (tmp == 0) {
        tmp = 8;
    }

    shift = tcg_const_i32(tmp);
    if (insn & 8) {
        /* rotate without extend */
        rotate(reg, shift, left, 8);
    } else {
        /* rotate through X */
        TCGv X = rotate_x(reg, shift, left, 8);
        rotate_x_flags(reg, X, 8);
        tcg_temp_free(X);
    }
    tcg_temp_free(shift);
    gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
    set_cc_op(s, CC_OP_FLAGS);
}
3902 
DISAS_INSN(rotate16_im)
{
    /* rol/ror/roxl/roxr.w #imm,Dn; an immediate count of 0 encodes 8.  */
    int left = (insn & 0x100);
    TCGv reg;
    TCGv shift;
    int tmp;

    reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
    tmp = (insn >> 9) & 7;
    if (tmp == 0) {
        tmp = 8;
    }

    shift = tcg_const_i32(tmp);
    if (insn & 8) {
        /* rotate without extend */
        rotate(reg, shift, left, 16);
    } else {
        /* rotate through X */
        TCGv X = rotate_x(reg, shift, left, 16);
        rotate_x_flags(reg, X, 16);
        tcg_temp_free(X);
    }
    tcg_temp_free(shift);
    gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
    set_cc_op(s, CC_OP_FLAGS);
}
3928 
DISAS_INSN(rotate_reg)
{
    /* rol/ror/roxl/roxr.l Dx,Dy: the rotate count comes from a register. */
    TCGv reg;
    TCGv src;
    TCGv t0, t1;
    int left = (insn & 0x100);

    reg = DREG(insn, 0);
    src = DREG(insn, 9);
    /* shift in [0..63] */
    t0 = tcg_temp_new();
    tcg_gen_andi_i32(t0, src, 63);
    t1 = tcg_temp_new_i32();
    if (insn & 8) {
        /* rotate without extend: the count is used modulo 32 */
        tcg_gen_andi_i32(t1, src, 31);
        rotate(reg, t1, left, 32);
        /* if shift == 0, clear C */
        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
                            t0, QREG_CC_V /* 0 */,
                            QREG_CC_V /* 0 */, QREG_CC_C);
    } else {
        /* rotate through X uses the count modulo (size + 1) */
        TCGv X;
        /* modulo 33 */
        tcg_gen_movi_i32(t1, 33);
        tcg_gen_remu_i32(t1, t0, t1);
        X = rotate32_x(DREG(insn, 0), t1, left);
        rotate_x_flags(DREG(insn, 0), X, 32);
        tcg_temp_free(X);
    }
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    set_cc_op(s, CC_OP_FLAGS);
}
3962 
DISAS_INSN(rotate8_reg)
{
    /* rol/ror/roxl/roxr.b Dx,Dy: the rotate count comes from a register. */
    TCGv reg;
    TCGv src;
    TCGv t0, t1;
    int left = (insn & 0x100);

    reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
    src = DREG(insn, 9);
    /* shift in [0..63] */
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, src, 63);
    t1 = tcg_temp_new_i32();
    if (insn & 8) {
        /* rotate without extend: the count is used modulo 8 */
        tcg_gen_andi_i32(t1, src, 7);
        rotate(reg, t1, left, 8);
        /* if shift == 0, clear C */
        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
                            t0, QREG_CC_V /* 0 */,
                            QREG_CC_V /* 0 */, QREG_CC_C);
    } else {
        /* rotate through X uses the count modulo (size + 1) */
        TCGv X;
        /* modulo 9 */
        tcg_gen_movi_i32(t1, 9);
        tcg_gen_remu_i32(t1, t0, t1);
        X = rotate_x(reg, t1, left, 8);
        rotate_x_flags(reg, X, 8);
        tcg_temp_free(X);
    }
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
    set_cc_op(s, CC_OP_FLAGS);
}
3997 
DISAS_INSN(rotate16_reg)
{
    /* rol/ror/roxl/roxr.w Dx,Dy: the rotate count comes from a register. */
    TCGv reg;
    TCGv src;
    TCGv t0, t1;
    int left = (insn & 0x100);

    reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
    src = DREG(insn, 9);
    /* shift in [0..63] */
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, src, 63);
    t1 = tcg_temp_new_i32();
    if (insn & 8) {
        /* rotate without extend: the count is used modulo 16 */
        tcg_gen_andi_i32(t1, src, 15);
        rotate(reg, t1, left, 16);
        /* if shift == 0, clear C */
        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
                            t0, QREG_CC_V /* 0 */,
                            QREG_CC_V /* 0 */, QREG_CC_C);
    } else {
        /* rotate through X uses the count modulo (size + 1) */
        TCGv X;
        /* modulo 17 */
        tcg_gen_movi_i32(t1, 17);
        tcg_gen_remu_i32(t1, t0, t1);
        X = rotate_x(reg, t1, left, 16);
        rotate_x_flags(reg, X, 16);
        tcg_temp_free(X);
    }
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
    set_cc_op(s, CC_OP_FLAGS);
}
4032 
DISAS_INSN(rotate_mem)
{
    /*
     * Memory rotates are word-sized with an implicit count of 1;
     * bit 9 set selects plain rotate, clear selects rotate through X.
     */
    TCGv src;
    TCGv addr;
    TCGv shift;
    int left = (insn & 0x100);

    SRC_EA(env, src, OS_WORD, 0, &addr);

    shift = tcg_const_i32(1);
    if (insn & 0x0200) {
        rotate(src, shift, left, 16);
    } else {
        TCGv X = rotate_x(src, shift, left, 16);
        rotate_x_flags(src, X, 16);
        tcg_temp_free(X);
    }
    tcg_temp_free(shift);
    DEST_EA(env, insn, OS_WORD, src, &addr);
    set_cc_op(s, CC_OP_FLAGS);
}
4054 
DISAS_INSN(bfext_reg)
{
    /* bfextu/bfexts with a data-register operand (insn bit 9: sign).  */
    int ext = read_im16(env, s);
    int is_sign = insn & 0x200;
    TCGv src = DREG(insn, 0);
    TCGv dst = DREG(ext, 12);
    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;  /* a width of 0 encodes 32 */
    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
    int pos = 32 - ofs - len;        /* little bit-endian */
    TCGv tmp = tcg_temp_new();
    TCGv shift;

    /*
     * In general, we're going to rotate the field so that it's at the
     * top of the word and then right-shift by the complement of the
     * width to extend the field.
     */
    if (ext & 0x20) {
        /* Variable width.  */
        if (ext & 0x800) {
            /* Variable offset.  */
            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
            tcg_gen_rotl_i32(tmp, src, tmp);
        } else {
            tcg_gen_rotli_i32(tmp, src, ofs);
        }

        /* shift = 32 - width (mod 32): extends the top-justified field.  */
        shift = tcg_temp_new();
        tcg_gen_neg_i32(shift, DREG(ext, 0));
        tcg_gen_andi_i32(shift, shift, 31);
        tcg_gen_sar_i32(QREG_CC_N, tmp, shift);
        if (is_sign) {
            tcg_gen_mov_i32(dst, QREG_CC_N);
        } else {
            tcg_gen_shr_i32(dst, tmp, shift);
        }
        tcg_temp_free(shift);
    } else {
        /* Immediate width.  */
        if (ext & 0x800) {
            /* Variable offset */
            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
            tcg_gen_rotl_i32(tmp, src, tmp);
            src = tmp;
            pos = 32 - len;
        } else {
            /*
             * Immediate offset.  If the field doesn't wrap around the
             * end of the word, rely on (s)extract completely.
             */
            if (pos < 0) {
                tcg_gen_rotli_i32(tmp, src, ofs);
                src = tmp;
                pos = 32 - len;
            }
        }

        /* N is always the sign-extended field, for the flags.  */
        tcg_gen_sextract_i32(QREG_CC_N, src, pos, len);
        if (is_sign) {
            tcg_gen_mov_i32(dst, QREG_CC_N);
        } else {
            tcg_gen_extract_i32(dst, src, pos, len);
        }
    }

    tcg_temp_free(tmp);
    set_cc_op(s, CC_OP_LOGIC);
}
4123 
DISAS_INSN(bfext_mem)
{
    /*
     * bfextu/bfexts with a memory operand.  Offset and width come either
     * from the extension word or from data registers; the actual bit
     * field load is done by a helper.
     */
    int ext = read_im16(env, s);
    int is_sign = insn & 0x200;
    TCGv dest = DREG(ext, 12);
    TCGv addr, len, ofs;

    addr = gen_lea(env, s, insn, OS_UNSIZED);
    if (IS_NULL_QREG(addr)) {
        gen_addr_fault(s);
        return;
    }

    if (ext & 0x20) {
        /* Variable width from Dn.  */
        len = DREG(ext, 0);
    } else {
        len = tcg_const_i32(extract32(ext, 0, 5));
    }
    if (ext & 0x800) {
        /* Variable offset from Dn.  */
        ofs = DREG(ext, 6);
    } else {
        ofs = tcg_const_i32(extract32(ext, 6, 5));
    }

    if (is_sign) {
        gen_helper_bfexts_mem(dest, cpu_env, addr, ofs, len);
        tcg_gen_mov_i32(QREG_CC_N, dest);
    } else {
        /* The 64-bit helper result is split: low half to the destination
           register, high half to N for the flags.  */
        TCGv_i64 tmp = tcg_temp_new_i64();
        gen_helper_bfextu_mem(tmp, cpu_env, addr, ofs, len);
        tcg_gen_extr_i64_i32(dest, QREG_CC_N, tmp);
        tcg_temp_free_i64(tmp);
    }
    set_cc_op(s, CC_OP_LOGIC);

    /* Only free the constants allocated above, not the live registers.  */
    if (!(ext & 0x20)) {
        tcg_temp_free(len);
    }
    if (!(ext & 0x800)) {
        tcg_temp_free(ofs);
    }
}
4166 
/*
 * BFCHG/BFCLR/BFSET/BFTST/BFFFO with a data-register operand.
 * The field is represented by a mask rotated into position; the flag
 * value is built by left-justifying the field in QREG_CC_N.
 */
DISAS_INSN(bfop_reg)
{
    int ext = read_im16(env, s);
    TCGv src = DREG(insn, 0);
    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
    TCGv mask, tofs, tlen;

    /* tofs/tlen are only needed to feed the bfffo helper. */
    tofs = NULL;
    tlen = NULL;
    if ((insn & 0x0f00) == 0x0d00) { /* bfffo */
        tofs = tcg_temp_new();
        tlen = tcg_temp_new();
    }

    if ((ext & 0x820) == 0) {
        /* Immediate width and offset.  */
        uint32_t maski = 0x7fffffffu >> (len - 1);
        /* A plain shift suffices unless the field wraps around bit 0. */
        if (ofs + len <= 32) {
            tcg_gen_shli_i32(QREG_CC_N, src, ofs);
        } else {
            tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
        }
        tcg_gen_andi_i32(QREG_CC_N, QREG_CC_N, ~maski);
        mask = tcg_const_i32(ror32(maski, ofs));
        if (tofs) {
            tcg_gen_movi_i32(tofs, ofs);
            tcg_gen_movi_i32(tlen, len);
        }
    } else {
        TCGv tmp = tcg_temp_new();
        if (ext & 0x20) {
            /* Variable width */
            tcg_gen_subi_i32(tmp, DREG(ext, 0), 1);
            tcg_gen_andi_i32(tmp, tmp, 31);
            mask = tcg_const_i32(0x7fffffffu);
            tcg_gen_shr_i32(mask, mask, tmp);
            if (tlen) {
                tcg_gen_addi_i32(tlen, tmp, 1);
            }
        } else {
            /* Immediate width */
            mask = tcg_const_i32(0x7fffffffu >> (len - 1));
            if (tlen) {
                tcg_gen_movi_i32(tlen, len);
            }
        }
        if (ext & 0x800) {
            /* Variable offset */
            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
            tcg_gen_rotl_i32(QREG_CC_N, src, tmp);
            tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
            tcg_gen_rotr_i32(mask, mask, tmp);
            if (tofs) {
                tcg_gen_mov_i32(tofs, tmp);
            }
        } else {
            /* Immediate offset (and variable width) */
            tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
            tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
            tcg_gen_rotri_i32(mask, mask, ofs);
            if (tofs) {
                tcg_gen_movi_i32(tofs, ofs);
            }
        }
        tcg_temp_free(tmp);
    }
    /* Flags are computed from the left-justified field in QREG_CC_N. */
    set_cc_op(s, CC_OP_LOGIC);

    /* Apply the operation to the register through the rotated mask. */
    switch (insn & 0x0f00) {
    case 0x0a00: /* bfchg */
        tcg_gen_eqv_i32(src, src, mask);
        break;
    case 0x0c00: /* bfclr */
        tcg_gen_and_i32(src, src, mask);
        break;
    case 0x0d00: /* bfffo */
        gen_helper_bfffo_reg(DREG(ext, 12), QREG_CC_N, tofs, tlen);
        tcg_temp_free(tlen);
        tcg_temp_free(tofs);
        break;
    case 0x0e00: /* bfset */
        tcg_gen_orc_i32(src, src, mask);
        break;
    case 0x0800: /* bftst */
        /* flags already set; no other work to do.  */
        break;
    default:
        g_assert_not_reached();
    }
    tcg_temp_free(mask);
}
4259 
/*
 * BFCHG/BFCLR/BFSET/BFTST/BFFFO with a memory operand; the field
 * read/modify/write is done entirely in helpers.
 */
DISAS_INSN(bfop_mem)
{
    int ext = read_im16(env, s);
    TCGv addr, len, ofs;
    TCGv_i64 t64;

    addr = gen_lea(env, s, insn, OS_UNSIZED);
    if (IS_NULL_QREG(addr)) {
        gen_addr_fault(s);
        return;
    }

    /* Width: data register if bit 5 of ext is set, else 5-bit immediate. */
    if (ext & 0x20) {
        len = DREG(ext, 0);
    } else {
        len = tcg_const_i32(extract32(ext, 0, 5));
    }
    /* Offset: data register if bit 11 of ext is set, else 5-bit immediate. */
    if (ext & 0x800) {
        ofs = DREG(ext, 6);
    } else {
        ofs = tcg_const_i32(extract32(ext, 6, 5));
    }

    switch (insn & 0x0f00) {
    case 0x0a00: /* bfchg */
        gen_helper_bfchg_mem(QREG_CC_N, cpu_env, addr, ofs, len);
        break;
    case 0x0c00: /* bfclr */
        gen_helper_bfclr_mem(QREG_CC_N, cpu_env, addr, ofs, len);
        break;
    case 0x0d00: /* bfffo */
        /*
         * The helper returns a 64-bit pair: low half is the bfffo
         * result (written to Dn), high half is the flag value (CC_N).
         */
        t64 = tcg_temp_new_i64();
        gen_helper_bfffo_mem(t64, cpu_env, addr, ofs, len);
        tcg_gen_extr_i64_i32(DREG(ext, 12), QREG_CC_N, t64);
        tcg_temp_free_i64(t64);
        break;
    case 0x0e00: /* bfset */
        gen_helper_bfset_mem(QREG_CC_N, cpu_env, addr, ofs, len);
        break;
    case 0x0800: /* bftst */
        /* bftst only needs the flags, so reuse the signed-extract helper. */
        gen_helper_bfexts_mem(QREG_CC_N, cpu_env, addr, ofs, len);
        break;
    default:
        g_assert_not_reached();
    }
    set_cc_op(s, CC_OP_LOGIC);

    /* Only free len/ofs when they are constants allocated above. */
    if (!(ext & 0x20)) {
        tcg_temp_free(len);
    }
    if (!(ext & 0x800)) {
        tcg_temp_free(ofs);
    }
}
4314 
/*
 * BFINS with a data-register destination: insert the low 'len' bits of
 * the source Dn into the named field of the destination Dn.  Flags are
 * computed from the source value left-justified in QREG_CC_N.
 */
DISAS_INSN(bfins_reg)
{
    int ext = read_im16(env, s);
    TCGv dst = DREG(insn, 0);
    TCGv src = DREG(ext, 12);
    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
    int pos = 32 - ofs - len;        /* little bit-endian */
    TCGv tmp;

    tmp = tcg_temp_new();

    /* Left-justify the inserted value in CC_N for the flag computation. */
    if (ext & 0x20) {
        /* Variable width */
        tcg_gen_neg_i32(tmp, DREG(ext, 0));
        tcg_gen_andi_i32(tmp, tmp, 31);
        tcg_gen_shl_i32(QREG_CC_N, src, tmp);
    } else {
        /* Immediate width */
        tcg_gen_shli_i32(QREG_CC_N, src, 32 - len);
    }
    set_cc_op(s, CC_OP_LOGIC);

    /* Immediate width and offset */
    if ((ext & 0x820) == 0) {
        /* Check for suitability for deposit.  */
        if (pos >= 0) {
            /* Field does not wrap past bit 0: a plain deposit works. */
            tcg_gen_deposit_i32(dst, dst, src, pos, len);
        } else {
            /* Field wraps around bit 0: merge via mask and rotate. */
            uint32_t maski = -2U << (len - 1);
            uint32_t roti = (ofs + len) & 31;
            tcg_gen_andi_i32(tmp, src, ~maski);
            tcg_gen_rotri_i32(tmp, tmp, roti);
            tcg_gen_andi_i32(dst, dst, ror32(maski, roti));
            tcg_gen_or_i32(dst, dst, tmp);
        }
    } else {
        TCGv mask = tcg_temp_new();
        TCGv rot = tcg_temp_new();

        if (ext & 0x20) {
            /* Variable width */
            tcg_gen_subi_i32(rot, DREG(ext, 0), 1);
            tcg_gen_andi_i32(rot, rot, 31);
            tcg_gen_movi_i32(mask, -2);
            tcg_gen_shl_i32(mask, mask, rot);
            tcg_gen_mov_i32(rot, DREG(ext, 0));
            tcg_gen_andc_i32(tmp, src, mask);
        } else {
            /* Immediate width (variable offset) */
            uint32_t maski = -2U << (len - 1);
            tcg_gen_andi_i32(tmp, src, ~maski);
            tcg_gen_movi_i32(mask, maski);
            tcg_gen_movi_i32(rot, len & 31);
        }
        if (ext & 0x800) {
            /* Variable offset */
            tcg_gen_add_i32(rot, rot, DREG(ext, 6));
        } else {
            /* Immediate offset (variable width) */
            tcg_gen_addi_i32(rot, rot, ofs);
        }
        /* Rotate value and keep-mask into place, then merge into dst. */
        tcg_gen_andi_i32(rot, rot, 31);
        tcg_gen_rotr_i32(mask, mask, rot);
        tcg_gen_rotr_i32(tmp, tmp, rot);
        tcg_gen_and_i32(dst, dst, mask);
        tcg_gen_or_i32(dst, dst, tmp);

        tcg_temp_free(rot);
        tcg_temp_free(mask);
    }
    tcg_temp_free(tmp);
}
4388 
/*
 * BFINS with a memory operand: insert the low bits of Dn into a bit
 * field in memory; the read/modify/write is done by the helper.
 */
DISAS_INSN(bfins_mem)
{
    int ext = read_im16(env, s);
    TCGv src = DREG(ext, 12);
    TCGv addr, len, ofs;

    addr = gen_lea(env, s, insn, OS_UNSIZED);
    if (IS_NULL_QREG(addr)) {
        gen_addr_fault(s);
        return;
    }

    /* Width: data register if bit 5 of ext is set, else 5-bit immediate. */
    if (ext & 0x20) {
        len = DREG(ext, 0);
    } else {
        len = tcg_const_i32(extract32(ext, 0, 5));
    }
    /* Offset: data register if bit 11 of ext is set, else 5-bit immediate. */
    if (ext & 0x800) {
        ofs = DREG(ext, 6);
    } else {
        ofs = tcg_const_i32(extract32(ext, 6, 5));
    }

    gen_helper_bfins_mem(QREG_CC_N, cpu_env, addr, src, ofs, len);
    set_cc_op(s, CC_OP_LOGIC);

    /* Only free len/ofs when they are constants allocated above. */
    if (!(ext & 0x20)) {
        tcg_temp_free(len);
    }
    if (!(ext & 0x800)) {
        tcg_temp_free(ofs);
    }
}
4422 
4423 DISAS_INSN(ff1)
4424 {
4425     TCGv reg;
4426     reg = DREG(insn, 0);
4427     gen_logic_cc(s, reg, OS_LONG);
4428     gen_helper_ff1(reg, reg);
4429 }
4430 
/*
 * CHK: bounds-check Dn against the EA operand via a helper.
 * Size field 3 selects word; size 2 selects long, but only on CPUs
 * with the CHK2 feature.  Anything else is an illegal instruction.
 */
DISAS_INSN(chk)
{
    TCGv src, reg;
    int opsize;

    switch ((insn >> 7) & 3) {
    case 3:
        opsize = OS_WORD;
        break;
    case 2:
        if (m68k_feature(env, M68K_FEATURE_CHK2)) {
            opsize = OS_LONG;
            break;
        }
        /* fallthru */
    default:
        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
        return;
    }
    SRC_EA(env, src, opsize, 1, NULL);
    /* Sign-extend the checked register to the operand size. */
    reg = gen_extend(s, DREG(insn, 9), opsize, 1);

    /* Flush the flags first: the helper may raise an exception. */
    gen_flush_flags(s);
    gen_helper_chk(cpu_env, reg, src);
}
4456 
/*
 * CHK2: check a register against a pair of bounds loaded from
 * consecutive memory locations at the EA.  Bit 11 of the extension
 * word must be set; otherwise the encoding is illegal here.
 */
DISAS_INSN(chk2)
{
    uint16_t ext;
    TCGv addr1, addr2, bound1, bound2, reg;
    int opsize;

    /* Bits 10:9 of the opcode encode the operand size. */
    switch ((insn >> 9) & 3) {
    case 0:
        opsize = OS_BYTE;
        break;
    case 1:
        opsize = OS_WORD;
        break;
    case 2:
        opsize = OS_LONG;
        break;
    default:
        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
        return;
    }

    ext = read_im16(env, s);
    if ((ext & 0x0800) == 0) {
        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
        return;
    }

    /*
     * NOTE(review): unlike the bitfield insns above, the gen_lea result
     * is not checked for NULL_QREG here — presumably the decode table
     * only routes control-alterable EA modes to this insn; verify.
     */
    addr1 = gen_lea(env, s, insn, OS_UNSIZED);
    addr2 = tcg_temp_new();
    tcg_gen_addi_i32(addr2, addr1, opsize_bytes(opsize));

    /* Load the two bounds: the first at EA, the second at EA + size. */
    bound1 = gen_load(s, opsize, addr1, 1, IS_USER(s));
    tcg_temp_free(addr1);
    bound2 = gen_load(s, opsize, addr2, 1, IS_USER(s));
    tcg_temp_free(addr2);

    reg = tcg_temp_new();
    if (ext & 0x8000) {
        /* Address register: used at full 32-bit width. */
        tcg_gen_mov_i32(reg, AREG(ext, 12));
    } else {
        /* Data register: sign-extended to the operand size. */
        gen_ext(reg, DREG(ext, 12), opsize, 1);
    }

    /* Flush the flags first: the helper may raise an exception. */
    gen_flush_flags(s);
    gen_helper_chk2(cpu_env, reg, bound1, bound2);
    tcg_temp_free(reg);
    tcg_temp_free(bound1);
    tcg_temp_free(bound2);
}
4506 
4507 static void m68k_copy_line(TCGv dst, TCGv src, int index)
4508 {
4509     TCGv addr;
4510     TCGv_i64 t0, t1;
4511 
4512     addr = tcg_temp_new();
4513 
4514     t0 = tcg_temp_new_i64();
4515     t1 = tcg_temp_new_i64();
4516 
4517     tcg_gen_andi_i32(addr, src, ~15);
4518     tcg_gen_qemu_ld64(t0, addr, index);
4519     tcg_gen_addi_i32(addr, addr, 8);
4520     tcg_gen_qemu_ld64(t1, addr, index);
4521 
4522     tcg_gen_andi_i32(addr, dst, ~15);
4523     tcg_gen_qemu_st64(t0, addr, index);
4524     tcg_gen_addi_i32(addr, addr, 8);
4525     tcg_gen_qemu_st64(t1, addr, index);
4526 
4527     tcg_temp_free_i64(t0);
4528     tcg_temp_free_i64(t1);
4529     tcg_temp_free(addr);
4530 }
4531 
4532 DISAS_INSN(move16_reg)
4533 {
4534     int index = IS_USER(s);
4535     TCGv tmp;
4536     uint16_t ext;
4537 
4538     ext = read_im16(env, s);
4539     if ((ext & (1 << 15)) == 0) {
4540         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4541     }
4542 
4543     m68k_copy_line(AREG(ext, 12), AREG(insn, 0), index);
4544 
4545     /* Ax can be Ay, so save Ay before incrementing Ax */
4546     tmp = tcg_temp_new();
4547     tcg_gen_mov_i32(tmp, AREG(ext, 12));
4548     tcg_gen_addi_i32(AREG(insn, 0), AREG(insn, 0), 16);
4549     tcg_gen_addi_i32(AREG(ext, 12), tmp, 16);
4550     tcg_temp_free(tmp);
4551 }
4552 
/*
 * MOVE16 with an absolute long address on one side: copy a 16-byte
 * line between (Ay) and (xxx).L.  Bit 3 of the opcode selects the
 * direction, and the (Ay)+ forms post-increment Ay by 16.
 */
DISAS_INSN(move16_mem)
{
    int index = IS_USER(s);
    TCGv reg, addr;

    reg = AREG(insn, 0);
    addr = tcg_const_i32(read_im32(env, s));

    if ((insn >> 3) & 1) {
        /* MOVE16 (xxx).L, (Ay) */
        m68k_copy_line(reg, addr, index);
    } else {
        /* MOVE16 (Ay), (xxx).L */
        m68k_copy_line(addr, reg, index);
    }

    tcg_temp_free(addr);

    /* Bit 4 clear means the (Ay)+ form: bump Ay by one line. */
    if (((insn >> 3) & 2) == 0) {
        /* (Ay)+ */
        tcg_gen_addi_i32(reg, reg, 16);
    }
}
4576 
/*
 * STRLDSR (ColdFire): push the current SR, then load SR from an
 * immediate.  The insn must be followed by the exact word 0x46fc
 * (move-to-SR immediate); the new SR must keep the S bit set and the
 * whole sequence is supervisor-only.
 */
DISAS_INSN(strldsr)
{
    uint16_t ext;
    uint32_t addr;

    /* Remember the insn address for any exception raised below. */
    addr = s->pc - 2;
    ext = read_im16(env, s);
    if (ext != 0x46FC) {
        gen_exception(s, addr, EXCP_ILLEGAL);
        return;
    }
    ext = read_im16(env, s);
    if (IS_USER(s) || (ext & SR_S) == 0) {
        gen_exception(s, addr, EXCP_PRIVILEGE);
        return;
    }
    /* Push the old SR, then install the immediate as the new SR. */
    gen_push(s, gen_get_sr(s));
    gen_set_sr_im(s, ext, 0);
}
4596 
/*
 * MOVE from SR.  Privileged on every CPU except the original 68000,
 * where user code is allowed to read SR.
 */
DISAS_INSN(move_from_sr)
{
    TCGv sr;

    if (IS_USER(s) && !m68k_feature(env, M68K_FEATURE_M68000)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }
    sr = gen_get_sr(s);
    DEST_EA(env, insn, OS_WORD, sr, NULL);
}
4608 
4609 #if defined(CONFIG_SOFTMMU)
/*
 * MOVES: move to/from an alternate address space, selecting the MMU
 * index from the SFC (loads) or DFC (stores) control register.
 * Supervisor-only.
 */
DISAS_INSN(moves)
{
    int opsize;
    uint16_t ext;
    TCGv reg;
    TCGv addr;
    int extend;

    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }

    ext = read_im16(env, s);

    opsize = insn_opsize(insn);

    /* Bit 15 of the extension word selects An vs Dn. */
    if (ext & 0x8000) {
        /* address register */
        reg = AREG(ext, 12);
        extend = 1;
    } else {
        /* data register */
        reg = DREG(ext, 12);
        extend = 0;
    }

    addr = gen_lea(env, s, insn, opsize);
    if (IS_NULL_QREG(addr)) {
        gen_addr_fault(s);
        return;
    }

    /* Bit 11 gives the direction: set means register to memory. */
    if (ext & 0x0800) {
        /* from reg to ea */
        gen_store(s, opsize, addr, reg, DFC_INDEX(s));
    } else {
        /* from ea to reg */
        TCGv tmp = gen_load(s, opsize, addr, 0, SFC_INDEX(s));
        if (extend) {
            /* Loads into An are sign-extended to the full register. */
            gen_ext(reg, tmp, opsize, 1);
        } else {
            /* Loads into Dn only modify the low opsize bits. */
            gen_partset_reg(opsize, reg, tmp);
        }
        tcg_temp_free(tmp);
    }
    /* Writeback for the post-increment / pre-decrement EA modes. */
    switch (extract32(insn, 3, 3)) {
    case 3: /* Indirect postincrement.  */
        /* A byte access through %sp still adjusts by 2 to keep it even. */
        tcg_gen_addi_i32(AREG(insn, 0), addr,
                         REG(insn, 0) == 7 && opsize == OS_BYTE
                         ? 2
                         : opsize_bytes(opsize));
        break;
    case 4: /* Indirect predecrement.  */
        tcg_gen_mov_i32(AREG(insn, 0), addr);
        break;
    }
}
4668 
4669 DISAS_INSN(move_to_sr)
4670 {
4671     if (IS_USER(s)) {
4672         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4673         return;
4674     }
4675     gen_move_to_sr(env, s, insn, false);
4676     gen_exit_tb(s);
4677 }
4678 
4679 DISAS_INSN(move_from_usp)
4680 {
4681     if (IS_USER(s)) {
4682         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4683         return;
4684     }
4685     tcg_gen_ld_i32(AREG(insn, 0), cpu_env,
4686                    offsetof(CPUM68KState, sp[M68K_USP]));
4687 }
4688 
4689 DISAS_INSN(move_to_usp)
4690 {
4691     if (IS_USER(s)) {
4692         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4693         return;
4694     }
4695     tcg_gen_st_i32(AREG(insn, 0), cpu_env,
4696                    offsetof(CPUM68KState, sp[M68K_USP]));
4697 }
4698 
4699 DISAS_INSN(halt)
4700 {
4701     if (IS_USER(s)) {
4702         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4703         return;
4704     }
4705 
4706     gen_exception(s, s->pc, EXCP_HALT_INSN);
4707 }
4708 
/*
 * STOP #imm: privileged.  Load SR from the immediate, mark the CPU
 * halted and leave the TB via the HLT exception.
 */
DISAS_INSN(stop)
{
    uint16_t ext;

    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }

    ext = read_im16(env, s);

    gen_set_sr_im(s, ext, 0);
    tcg_gen_movi_i32(cpu_halted, 1);
    gen_exception(s, s->pc, EXCP_HLT);
}
4724 
4725 DISAS_INSN(rte)
4726 {
4727     if (IS_USER(s)) {
4728         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4729         return;
4730     }
4731     gen_exception(s, s->base.pc_next, EXCP_RTE);
4732 }
4733 
/*
 * ColdFire MOVEC: move a general register into a control register.
 * Only the register-to-control direction exists here.  Supervisor-only;
 * the TB is ended since the write may affect translation state.
 */
DISAS_INSN(cf_movec)
{
    uint16_t ext;
    TCGv reg;

    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }

    ext = read_im16(env, s);

    /* Bit 15 of the extension word selects An vs Dn as the source. */
    if (ext & 0x8000) {
        reg = AREG(ext, 12);
    } else {
        reg = DREG(ext, 12);
    }
    /* The low 12 bits of the extension word name the control register. */
    gen_helper_cf_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
    gen_exit_tb(s);
}
4754 
/*
 * 680x0 MOVEC: move to/from a control register; bit 0 of the opcode
 * gives the direction.  Supervisor-only; the TB is ended since the
 * access may affect translation state.
 */
DISAS_INSN(m68k_movec)
{
    uint16_t ext;
    TCGv reg;

    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }

    ext = read_im16(env, s);

    /* Bit 15 of the extension word selects An vs Dn. */
    if (ext & 0x8000) {
        reg = AREG(ext, 12);
    } else {
        reg = DREG(ext, 12);
    }
    /* The low 12 bits of the extension word name the control register. */
    if (insn & 1) {
        gen_helper_m68k_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
    } else {
        gen_helper_m68k_movec_from(reg, cpu_env, tcg_const_i32(ext & 0xfff));
    }
    gen_exit_tb(s);
}
4779 
4780 DISAS_INSN(intouch)
4781 {
4782     if (IS_USER(s)) {
4783         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4784         return;
4785     }
4786     /* ICache fetch.  Implement as no-op.  */
4787 }
4788 
4789 DISAS_INSN(cpushl)
4790 {
4791     if (IS_USER(s)) {
4792         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4793         return;
4794     }
4795     /* Cache push/invalidate.  Implement as no-op.  */
4796 }
4797 
4798 DISAS_INSN(cpush)
4799 {
4800     if (IS_USER(s)) {
4801         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4802         return;
4803     }
4804     /* Cache push/invalidate.  Implement as no-op.  */
4805 }
4806 
4807 DISAS_INSN(cinv)
4808 {
4809     if (IS_USER(s)) {
4810         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4811         return;
4812     }
4813     /* Invalidate cache line.  Implement as no-op.  */
4814 }
4815 
4816 #if defined(CONFIG_SOFTMMU)
4817 DISAS_INSN(pflush)
4818 {
4819     TCGv opmode;
4820 
4821     if (IS_USER(s)) {
4822         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4823         return;
4824     }
4825 
4826     opmode = tcg_const_i32((insn >> 3) & 3);
4827     gen_helper_pflush(cpu_env, AREG(insn, 0), opmode);
4828     tcg_temp_free(opmode);
4829 }
4830 
4831 DISAS_INSN(ptest)
4832 {
4833     TCGv is_read;
4834 
4835     if (IS_USER(s)) {
4836         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4837         return;
4838     }
4839     is_read = tcg_const_i32((insn >> 5) & 1);
4840     gen_helper_ptest(cpu_env, AREG(insn, 0), is_read);
4841     tcg_temp_free(is_read);
4842 }
4843 #endif
4844 
/* WDDATA: unconditionally raise a privilege violation. */
DISAS_INSN(wddata)
{
    gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
}
4849 
/* WDEBUG: privileged and unimplemented — aborts emulation if reached. */
DISAS_INSN(wdebug)
{
    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }
    /* TODO: Implement wdebug.  */
    cpu_abort(env_cpu(env), "WDEBUG not implemented");
}
4859 #endif
4860 
4861 DISAS_INSN(trap)
4862 {
4863     gen_exception(s, s->base.pc_next, EXCP_TRAP0 + (insn & 0xf));
4864 }
4865 
/*
 * Read FP control register 'reg' (M68K_FPIAR/FPSR/FPCR) into 'res'.
 * FPIAR always reads as zero here.
 */
static void gen_load_fcr(DisasContext *s, TCGv res, int reg)
{
    switch (reg) {
    case M68K_FPIAR:
        tcg_gen_movi_i32(res, 0);
        break;
    case M68K_FPSR:
        tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpsr));
        break;
    case M68K_FPCR:
        tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpcr));
        break;
    }
}
4880 
/*
 * Write 'val' to FP control register 'reg'.  Writes to FPIAR are
 * discarded; FPCR goes through the set_fpcr helper rather than a
 * plain store.
 */
static void gen_store_fcr(DisasContext *s, TCGv val, int reg)
{
    switch (reg) {
    case M68K_FPIAR:
        break;
    case M68K_FPSR:
        tcg_gen_st_i32(val, cpu_env, offsetof(CPUM68KState, fpsr));
        break;
    case M68K_FPCR:
        gen_helper_set_fpcr(cpu_env, val);
        break;
    }
}
4894 
4895 static void gen_qemu_store_fcr(DisasContext *s, TCGv addr, int reg)
4896 {
4897     int index = IS_USER(s);
4898     TCGv tmp;
4899 
4900     tmp = tcg_temp_new();
4901     gen_load_fcr(s, tmp, reg);
4902     tcg_gen_qemu_st32(tmp, addr, index);
4903     tcg_temp_free(tmp);
4904 }
4905 
4906 static void gen_qemu_load_fcr(DisasContext *s, TCGv addr, int reg)
4907 {
4908     int index = IS_USER(s);
4909     TCGv tmp;
4910 
4911     tmp = tcg_temp_new();
4912     tcg_gen_qemu_ld32u(tmp, addr, index);
4913     gen_store_fcr(s, tmp, reg);
4914     tcg_temp_free(tmp);
4915 }
4916 
4917 
/*
 * FMOVE(M) of the FP control registers (FPCR/FPSR/FPIAR).  'mask' is
 * the register-list field of the extension word; 'is_write' is the
 * register-to-EA direction.  Dn, An and immediate EA modes only allow
 * a single control register; the remaining modes transfer a list.
 */
static void gen_op_fmove_fcr(CPUM68KState *env, DisasContext *s,
                             uint32_t insn, uint32_t ext)
{
    int mask = (ext >> 10) & 7;
    int is_write = (ext >> 13) & 1;
    int mode = extract32(insn, 3, 3);
    int i;
    TCGv addr, tmp;

    switch (mode) {
    case 0: /* Dn */
        if (mask != M68K_FPIAR && mask != M68K_FPSR && mask != M68K_FPCR) {
            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
            return;
        }
        if (is_write) {
            gen_load_fcr(s, DREG(insn, 0), mask);
        } else {
            gen_store_fcr(s, DREG(insn, 0), mask);
        }
        return;
    case 1: /* An, only with FPIAR */
        if (mask != M68K_FPIAR) {
            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
            return;
        }
        if (is_write) {
            gen_load_fcr(s, AREG(insn, 0), mask);
        } else {
            gen_store_fcr(s, AREG(insn, 0), mask);
        }
        return;
    case 7: /* Immediate */
        if (REG(insn, 0) == 4) {
            /* Only a single-register move from the immediate is legal. */
            if (is_write ||
                (mask != M68K_FPIAR && mask != M68K_FPSR &&
                 mask != M68K_FPCR)) {
                gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
                return;
            }
            tmp = tcg_const_i32(read_im32(env, s));
            gen_store_fcr(s, tmp, mask);
            tcg_temp_free(tmp);
            return;
        }
        break;
    default:
        break;
    }

    /* All remaining modes are memory EAs. */
    tmp = gen_lea(env, s, insn, OS_LONG);
    if (IS_NULL_QREG(tmp)) {
        gen_addr_fault(s);
        return;
    }

    /* Work on a copy so the EA base register is not modified in place. */
    addr = tcg_temp_new();
    tcg_gen_mov_i32(addr, tmp);

    /*
     * mask:
     *
     * 0b100 Floating-Point Control Register
     * 0b010 Floating-Point Status Register
     * 0b001 Floating-Point Instruction Address Register
     *
     */

    if (is_write && mode == 4) {
        /* Predecrement store: walk the list in reverse, address downwards. */
        for (i = 2; i >= 0; i--, mask >>= 1) {
            if (mask & 1) {
                gen_qemu_store_fcr(s, addr, 1 << i);
                if (mask != 1) {
                    tcg_gen_subi_i32(addr, addr, opsize_bytes(OS_LONG));
                }
            }
       }
       tcg_gen_mov_i32(AREG(insn, 0), addr);
    } else {
        /* All other modes walk forwards through the register list. */
        for (i = 0; i < 3; i++, mask >>= 1) {
            if (mask & 1) {
                if (is_write) {
                    gen_qemu_store_fcr(s, addr, 1 << i);
                } else {
                    gen_qemu_load_fcr(s, addr, 1 << i);
                }
                if (mask != 1 || mode == 3) {
                    tcg_gen_addi_i32(addr, addr, opsize_bytes(OS_LONG));
                }
            }
        }
        if (mode == 3) {
            /* Postincrement mode writes the final address back to An. */
            tcg_gen_mov_i32(AREG(insn, 0), addr);
        }
    }
    tcg_temp_free_i32(addr);
}
5015 
/*
 * FMOVEM: move multiple FP data registers to/from memory.  Bit 13 of
 * the extension word gives the direction; the register list is either
 * static (low byte of ext) or dynamic (taken from a data register).
 */
static void gen_op_fmovem(CPUM68KState *env, DisasContext *s,
                          uint32_t insn, uint32_t ext)
{
    int opsize;
    TCGv addr, tmp;
    int mode = (ext >> 11) & 0x3;
    int is_load = ((ext & 0x2000) == 0);

    /* Full FPUs transfer 96-bit extended values; ColdFire uses doubles. */
    if (m68k_feature(s->env, M68K_FEATURE_FPU)) {
        opsize = OS_EXTENDED;
    } else {
        opsize = OS_DOUBLE;  /* FIXME */
    }

    addr = gen_lea(env, s, insn, opsize);
    if (IS_NULL_QREG(addr)) {
        gen_addr_fault(s);
        return;
    }

    /* tmp carries the register list in; the helpers return the final address. */
    tmp = tcg_temp_new();
    if (mode & 0x1) {
        /* Dynamic register list */
        tcg_gen_ext8u_i32(tmp, DREG(ext, 4));
    } else {
        /* Static register list */
        tcg_gen_movi_i32(tmp, ext & 0xff);
    }

    if (!is_load && (mode & 2) == 0) {
        /*
         * predecrement addressing mode
         * only available to store register to memory
         */
        if (opsize == OS_EXTENDED) {
            gen_helper_fmovemx_st_predec(tmp, cpu_env, addr, tmp);
        } else {
            gen_helper_fmovemd_st_predec(tmp, cpu_env, addr, tmp);
        }
    } else {
        /* postincrement addressing mode */
        if (opsize == OS_EXTENDED) {
            if (is_load) {
                gen_helper_fmovemx_ld_postinc(tmp, cpu_env, addr, tmp);
            } else {
                gen_helper_fmovemx_st_postinc(tmp, cpu_env, addr, tmp);
            }
        } else {
            if (is_load) {
                gen_helper_fmovemd_ld_postinc(tmp, cpu_env, addr, tmp);
            } else {
                gen_helper_fmovemd_st_postinc(tmp, cpu_env, addr, tmp);
            }
        }
    }
    /* Write the updated address back for the -(An) and (An)+ EA modes. */
    if ((insn & 070) == 030 || (insn & 070) == 040) {
        tcg_gen_mov_i32(AREG(insn, 0), tmp);
    }
    tcg_temp_free(tmp);
}
5076 
5077 /*
5078  * ??? FP exceptions are not implemented.  Most exceptions are deferred until
5079  * immediately before the next FP instruction is executed.
5080  */
5081 DISAS_INSN(fpu)
5082 {
5083     uint16_t ext;
5084     int opmode;
5085     int opsize;
5086     TCGv_ptr cpu_src, cpu_dest;
5087 
5088     ext = read_im16(env, s);
5089     opmode = ext & 0x7f;
5090     switch ((ext >> 13) & 7) {
5091     case 0:
5092         break;
5093     case 1:
5094         goto undef;
5095     case 2:
5096         if (insn == 0xf200 && (ext & 0xfc00) == 0x5c00) {
5097             /* fmovecr */
5098             TCGv rom_offset = tcg_const_i32(opmode);
5099             cpu_dest = gen_fp_ptr(REG(ext, 7));
5100             gen_helper_fconst(cpu_env, cpu_dest, rom_offset);
5101             tcg_temp_free_ptr(cpu_dest);
5102             tcg_temp_free(rom_offset);
5103             return;
5104         }
5105         break;
5106     case 3: /* fmove out */
5107         cpu_src = gen_fp_ptr(REG(ext, 7));
5108         opsize = ext_opsize(ext, 10);
5109         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
5110                       EA_STORE, IS_USER(s)) == -1) {
5111             gen_addr_fault(s);
5112         }
5113         gen_helper_ftst(cpu_env, cpu_src);
5114         tcg_temp_free_ptr(cpu_src);
5115         return;
5116     case 4: /* fmove to control register.  */
5117     case 5: /* fmove from control register.  */
5118         gen_op_fmove_fcr(env, s, insn, ext);
5119         return;
5120     case 6: /* fmovem */
5121     case 7:
5122         if ((ext & 0x1000) == 0 && !m68k_feature(s->env, M68K_FEATURE_FPU)) {
5123             goto undef;
5124         }
5125         gen_op_fmovem(env, s, insn, ext);
5126         return;
5127     }
5128     if (ext & (1 << 14)) {
5129         /* Source effective address.  */
5130         opsize = ext_opsize(ext, 10);
5131         cpu_src = gen_fp_result_ptr();
5132         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
5133                       EA_LOADS, IS_USER(s)) == -1) {
5134             gen_addr_fault(s);
5135             return;
5136         }
5137     } else {
5138         /* Source register.  */
5139         opsize = OS_EXTENDED;
5140         cpu_src = gen_fp_ptr(REG(ext, 10));
5141     }
5142     cpu_dest = gen_fp_ptr(REG(ext, 7));
5143     switch (opmode) {
5144     case 0: /* fmove */
5145         gen_fp_move(cpu_dest, cpu_src);
5146         break;
5147     case 0x40: /* fsmove */
5148         gen_helper_fsround(cpu_env, cpu_dest, cpu_src);
5149         break;
5150     case 0x44: /* fdmove */
5151         gen_helper_fdround(cpu_env, cpu_dest, cpu_src);
5152         break;
5153     case 1: /* fint */
5154         gen_helper_firound(cpu_env, cpu_dest, cpu_src);
5155         break;
5156     case 2: /* fsinh */
5157         gen_helper_fsinh(cpu_env, cpu_dest, cpu_src);
5158         break;
5159     case 3: /* fintrz */
5160         gen_helper_fitrunc(cpu_env, cpu_dest, cpu_src);
5161         break;
5162     case 4: /* fsqrt */
5163         gen_helper_fsqrt(cpu_env, cpu_dest, cpu_src);
5164         break;
5165     case 0x41: /* fssqrt */
5166         gen_helper_fssqrt(cpu_env, cpu_dest, cpu_src);
5167         break;
5168     case 0x45: /* fdsqrt */
5169         gen_helper_fdsqrt(cpu_env, cpu_dest, cpu_src);
5170         break;
5171     case 0x06: /* flognp1 */
5172         gen_helper_flognp1(cpu_env, cpu_dest, cpu_src);
5173         break;
5174     case 0x08: /* fetoxm1 */
5175         gen_helper_fetoxm1(cpu_env, cpu_dest, cpu_src);
5176         break;
5177     case 0x09: /* ftanh */
5178         gen_helper_ftanh(cpu_env, cpu_dest, cpu_src);
5179         break;
5180     case 0x0a: /* fatan */
5181         gen_helper_fatan(cpu_env, cpu_dest, cpu_src);
5182         break;
5183     case 0x0c: /* fasin */
5184         gen_helper_fasin(cpu_env, cpu_dest, cpu_src);
5185         break;
5186     case 0x0d: /* fatanh */
5187         gen_helper_fatanh(cpu_env, cpu_dest, cpu_src);
5188         break;
5189     case 0x0e: /* fsin */
5190         gen_helper_fsin(cpu_env, cpu_dest, cpu_src);
5191         break;
5192     case 0x0f: /* ftan */
5193         gen_helper_ftan(cpu_env, cpu_dest, cpu_src);
5194         break;
5195     case 0x10: /* fetox */
5196         gen_helper_fetox(cpu_env, cpu_dest, cpu_src);
5197         break;
5198     case 0x11: /* ftwotox */
5199         gen_helper_ftwotox(cpu_env, cpu_dest, cpu_src);
5200         break;
5201     case 0x12: /* ftentox */
5202         gen_helper_ftentox(cpu_env, cpu_dest, cpu_src);
5203         break;
5204     case 0x14: /* flogn */
5205         gen_helper_flogn(cpu_env, cpu_dest, cpu_src);
5206         break;
5207     case 0x15: /* flog10 */
5208         gen_helper_flog10(cpu_env, cpu_dest, cpu_src);
5209         break;
5210     case 0x16: /* flog2 */
5211         gen_helper_flog2(cpu_env, cpu_dest, cpu_src);
5212         break;
5213     case 0x18: /* fabs */
5214         gen_helper_fabs(cpu_env, cpu_dest, cpu_src);
5215         break;
5216     case 0x58: /* fsabs */
5217         gen_helper_fsabs(cpu_env, cpu_dest, cpu_src);
5218         break;
5219     case 0x5c: /* fdabs */
5220         gen_helper_fdabs(cpu_env, cpu_dest, cpu_src);
5221         break;
5222     case 0x19: /* fcosh */
5223         gen_helper_fcosh(cpu_env, cpu_dest, cpu_src);
5224         break;
5225     case 0x1a: /* fneg */
5226         gen_helper_fneg(cpu_env, cpu_dest, cpu_src);
5227         break;
5228     case 0x5a: /* fsneg */
5229         gen_helper_fsneg(cpu_env, cpu_dest, cpu_src);
5230         break;
5231     case 0x5e: /* fdneg */
5232         gen_helper_fdneg(cpu_env, cpu_dest, cpu_src);
5233         break;
5234     case 0x1c: /* facos */
5235         gen_helper_facos(cpu_env, cpu_dest, cpu_src);
5236         break;
5237     case 0x1d: /* fcos */
5238         gen_helper_fcos(cpu_env, cpu_dest, cpu_src);
5239         break;
5240     case 0x1e: /* fgetexp */
5241         gen_helper_fgetexp(cpu_env, cpu_dest, cpu_src);
5242         break;
5243     case 0x1f: /* fgetman */
5244         gen_helper_fgetman(cpu_env, cpu_dest, cpu_src);
5245         break;
5246     case 0x20: /* fdiv */
5247         gen_helper_fdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5248         break;
5249     case 0x60: /* fsdiv */
5250         gen_helper_fsdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5251         break;
5252     case 0x64: /* fddiv */
5253         gen_helper_fddiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5254         break;
5255     case 0x21: /* fmod */
5256         gen_helper_fmod(cpu_env, cpu_dest, cpu_src, cpu_dest);
5257         break;
5258     case 0x22: /* fadd */
5259         gen_helper_fadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5260         break;
5261     case 0x62: /* fsadd */
5262         gen_helper_fsadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5263         break;
5264     case 0x66: /* fdadd */
5265         gen_helper_fdadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5266         break;
5267     case 0x23: /* fmul */
5268         gen_helper_fmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5269         break;
5270     case 0x63: /* fsmul */
5271         gen_helper_fsmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5272         break;
5273     case 0x67: /* fdmul */
5274         gen_helper_fdmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5275         break;
5276     case 0x24: /* fsgldiv */
5277         gen_helper_fsgldiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5278         break;
5279     case 0x25: /* frem */
5280         gen_helper_frem(cpu_env, cpu_dest, cpu_src, cpu_dest);
5281         break;
5282     case 0x26: /* fscale */
5283         gen_helper_fscale(cpu_env, cpu_dest, cpu_src, cpu_dest);
5284         break;
5285     case 0x27: /* fsglmul */
5286         gen_helper_fsglmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5287         break;
5288     case 0x28: /* fsub */
5289         gen_helper_fsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5290         break;
5291     case 0x68: /* fssub */
5292         gen_helper_fssub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5293         break;
5294     case 0x6c: /* fdsub */
5295         gen_helper_fdsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5296         break;
5297     case 0x30: case 0x31: case 0x32:
5298     case 0x33: case 0x34: case 0x35:
5299     case 0x36: case 0x37: {
5300             TCGv_ptr cpu_dest2 = gen_fp_ptr(REG(ext, 0));
5301             gen_helper_fsincos(cpu_env, cpu_dest, cpu_dest2, cpu_src);
5302             tcg_temp_free_ptr(cpu_dest2);
5303         }
5304         break;
5305     case 0x38: /* fcmp */
5306         gen_helper_fcmp(cpu_env, cpu_src, cpu_dest);
5307         return;
5308     case 0x3a: /* ftst */
5309         gen_helper_ftst(cpu_env, cpu_src);
5310         return;
5311     default:
5312         goto undef;
5313     }
5314     tcg_temp_free_ptr(cpu_src);
5315     gen_helper_ftst(cpu_env, cpu_dest);
5316     tcg_temp_free_ptr(cpu_dest);
5317     return;
5318 undef:
5319     /* FIXME: Is this right for offset addressing modes?  */
5320     s->pc -= 2;
5321     disas_undef_fpu(env, s, insn);
5322 }
5323 
/*
 * Build a DisasCompare for FPU condition code @cond (0..31).
 *
 * The condition is evaluated from the FPSR condition-code bits:
 * A (set for unordered results, per the case comments below), Z and N.
 * On return, the condition holds when "v1 tcond v2" is true.
 * Conditions 16..31 are the signaling variants, which should additionally
 * raise BSUN on unordered operands (not implemented; see TODO below).
 * NOTE(review): c->g1/c->g2 appear to mark v1/v2 as values that must not
 * be freed by free_cond() -- confirm against free_cond's definition.
 */
static void gen_fcc_cond(DisasCompare *c, DisasContext *s, int cond)
{
    TCGv fpsr;

    /* Defaults: v2 is the constant 0, v1 is filled in per condition. */
    c->g1 = 1;
    c->v2 = tcg_const_i32(0);
    c->g2 = 0;
    /* TODO: Raise BSUN exception.  */
    fpsr = tcg_temp_new();
    gen_load_fcr(s, fpsr, M68K_FPSR);
    switch (cond) {
    case 0:  /* False */
    case 16: /* Signaling False */
        c->v1 = c->v2;
        c->tcond = TCG_COND_NEVER;
        break;
    case 1:  /* EQual Z */
    case 17: /* Signaling EQual Z */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
        c->tcond = TCG_COND_NE;
        break;
    case 2:  /* Ordered Greater Than !(A || Z || N) */
    case 18: /* Greater Than !(A || Z || N) */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr,
                         FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
        c->tcond = TCG_COND_EQ;
        break;
    case 3:  /* Ordered Greater than or Equal Z || !(A || N) */
    case 19: /* Greater than or Equal Z || !(A || N) */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        /* Shift the A bit up into the N bit position so that A and N can
           be combined with a single OR/XOR below. */
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
        tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_Z | FPSR_CC_N);
        tcg_gen_or_i32(c->v1, c->v1, fpsr);
        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 4:  /* Ordered Less Than !(!N || A || Z); */
    case 20: /* Less Than !(!N || A || Z); */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        /* Invert N, then test that N, A and Z are all clear. */
        tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_N | FPSR_CC_A | FPSR_CC_Z);
        c->tcond = TCG_COND_EQ;
        break;
    case 5:  /* Ordered Less than or Equal Z || (N && !A) */
    case 21: /* Less than or Equal Z || (N && !A) */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        /* Align A with the N bit, then mask it off N with andc. */
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
        tcg_gen_andc_i32(c->v1, fpsr, c->v1);
        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_Z | FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 6:  /* Ordered Greater or Less than !(A || Z) */
    case 22: /* Greater or Less than !(A || Z) */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
        c->tcond = TCG_COND_EQ;
        break;
    case 7:  /* Ordered !A */
    case 23: /* Greater, Less or Equal !A */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
        c->tcond = TCG_COND_EQ;
        break;
    case 8:  /* Unordered A */
    case 24: /* Not Greater, Less or Equal A */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
        c->tcond = TCG_COND_NE;
        break;
    case 9:  /* Unordered or Equal A || Z */
    case 25: /* Not Greater or Less then A || Z */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
        c->tcond = TCG_COND_NE;
        break;
    case 10: /* Unordered or Greater Than A || !(N || Z)) */
    case 26: /* Not Less or Equal A || !(N || Z)) */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        /* Align Z with the N bit position, combine with A and N. */
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
        tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_A | FPSR_CC_N);
        tcg_gen_or_i32(c->v1, c->v1, fpsr);
        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 11: /* Unordered or Greater or Equal A || Z || !N */
    case 27: /* Not Less Than A || Z || !N */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 12: /* Unordered or Less Than A || (N && !Z) */
    case 28: /* Not Greater than or Equal A || (N && !Z) */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        /* Align Z with N, strip N when Z is set, then test A|N. */
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
        tcg_gen_andc_i32(c->v1, fpsr, c->v1);
        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_A | FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 13: /* Unordered or Less or Equal A || Z || N */
    case 29: /* Not Greater Than A || Z || N */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 14: /* Not Equal !Z */
    case 30: /* Signaling Not Equal !Z */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
        c->tcond = TCG_COND_EQ;
        break;
    case 15: /* True */
    case 31: /* Signaling True */
        c->v1 = c->v2;
        c->tcond = TCG_COND_ALWAYS;
        break;
    }
    tcg_temp_free(fpsr);
}
5463 
5464 static void gen_fjmpcc(DisasContext *s, int cond, TCGLabel *l1)
5465 {
5466     DisasCompare c;
5467 
5468     gen_fcc_cond(&c, s, cond);
5469     update_cc_op(s);
5470     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
5471     free_cond(&c);
5472 }
5473 
5474 DISAS_INSN(fbcc)
5475 {
5476     uint32_t offset;
5477     uint32_t base;
5478     TCGLabel *l1;
5479 
5480     base = s->pc;
5481     offset = (int16_t)read_im16(env, s);
5482     if (insn & (1 << 6)) {
5483         offset = (offset << 16) | read_im16(env, s);
5484     }
5485 
5486     l1 = gen_new_label();
5487     update_cc_op(s);
5488     gen_fjmpcc(s, insn & 0x3f, l1);
5489     gen_jmp_tb(s, 0, s->pc);
5490     gen_set_label(l1);
5491     gen_jmp_tb(s, 1, base + offset);
5492 }
5493 
5494 DISAS_INSN(fscc)
5495 {
5496     DisasCompare c;
5497     int cond;
5498     TCGv tmp;
5499     uint16_t ext;
5500 
5501     ext = read_im16(env, s);
5502     cond = ext & 0x3f;
5503     gen_fcc_cond(&c, s, cond);
5504 
5505     tmp = tcg_temp_new();
5506     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
5507     free_cond(&c);
5508 
5509     tcg_gen_neg_i32(tmp, tmp);
5510     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
5511     tcg_temp_free(tmp);
5512 }
5513 
5514 #if defined(CONFIG_SOFTMMU)
5515 DISAS_INSN(frestore)
5516 {
5517     TCGv addr;
5518 
5519     if (IS_USER(s)) {
5520         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5521         return;
5522     }
5523     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5524         SRC_EA(env, addr, OS_LONG, 0, NULL);
5525         /* FIXME: check the state frame */
5526     } else {
5527         disas_undef(env, s, insn);
5528     }
5529 }
5530 
5531 DISAS_INSN(fsave)
5532 {
5533     if (IS_USER(s)) {
5534         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5535         return;
5536     }
5537 
5538     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5539         /* always write IDLE */
5540         TCGv idle = tcg_const_i32(0x41000000);
5541         DEST_EA(env, insn, OS_LONG, idle, NULL);
5542         tcg_temp_free(idle);
5543     } else {
5544         disas_undef(env, s, insn);
5545     }
5546 }
5547 #endif
5548 
5549 static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
5550 {
5551     TCGv tmp = tcg_temp_new();
5552     if (s->env->macsr & MACSR_FI) {
5553         if (upper)
5554             tcg_gen_andi_i32(tmp, val, 0xffff0000);
5555         else
5556             tcg_gen_shli_i32(tmp, val, 16);
5557     } else if (s->env->macsr & MACSR_SU) {
5558         if (upper)
5559             tcg_gen_sari_i32(tmp, val, 16);
5560         else
5561             tcg_gen_ext16s_i32(tmp, val);
5562     } else {
5563         if (upper)
5564             tcg_gen_shri_i32(tmp, val, 16);
5565         else
5566             tcg_gen_ext16u_i32(tmp, val);
5567     }
5568     return tmp;
5569 }
5570 
5571 static void gen_mac_clear_flags(void)
5572 {
5573     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
5574                      ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
5575 }
5576 
/*
 * ColdFire MAC/EMAC multiply-accumulate instruction, optionally combined
 * with a parallel load (MAC with load when insn bits 4-5 are set) and,
 * on EMAC_B, a dual-accumulate variant that adds the product into a
 * second accumulator selected by the extension word.
 * The multiply/saturate behavior is chosen at translation time from the
 * MACSR mode bits (FI = fractional, SU = signed, else unsigned).
 */
DISAS_INSN(mac)
{
    TCGv rx;
    TCGv ry;
    uint16_t ext;
    int acc;
    TCGv tmp;
    TCGv addr;
    TCGv loadval;
    int dual;
    TCGv saved_flags;

    /* Lazily allocate the 64-bit product temporary, once per TB. */
    if (!s->done_mac) {
        s->mactmp = tcg_temp_new_i64();
        s->done_mac = 1;
    }

    ext = read_im16(env, s);

    /* Accumulator index: low bit from insn, high bit from the ext word. */
    acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
    dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
    if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
        disas_undef(env, s, insn);
        return;
    }
    if (insn & 0x30) {
        /* MAC with load.  */
        tmp = gen_lea(env, s, insn, OS_LONG);
        addr = tcg_temp_new();
        tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
        /*
         * Load the value now to ensure correct exception behavior.
         * Perform writeback after reading the MAC inputs.
         */
        loadval = gen_load(s, OS_LONG, addr, 0, IS_USER(s));

        acc ^= 1;
        /* Register fields come from the extension word in this form. */
        rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
        ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
    } else {
        loadval = addr = NULL_QREG;
        rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
        ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    }

    gen_mac_clear_flags();
#if 0
    l1 = -1;
    /* Disabled because conditional branches clobber temporary vars.  */
    if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
        /* Skip the multiply if we know we will ignore it.  */
        l1 = gen_new_label();
        tmp = tcg_temp_new();
        tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
        gen_op_jmp_nz32(tmp, l1);
    }
#endif

    if ((ext & 0x0800) == 0) {
        /* Word.  Bits 6/7 of ext select the upper or lower half. */
        rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
        ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
    }
    if (s->env->macsr & MACSR_FI) {
        gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
    } else {
        if (s->env->macsr & MACSR_SU)
            gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
        else
            gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
        /* Apply the scale factor encoded in ext bits 9-10 (<<1 or >>1). */
        switch ((ext >> 9) & 3) {
        case 1:
            tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
            break;
        case 3:
            tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
            break;
        }
    }

    if (dual) {
        /* Save the overflow flag from the multiply.  */
        saved_flags = tcg_temp_new();
        tcg_gen_mov_i32(saved_flags, QREG_MACSR);
    } else {
        saved_flags = NULL_QREG;
    }

#if 0
    /* Disabled because conditional branches clobber temporary vars.  */
    if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
        /* Skip the accumulate if the value is already saturated.  */
        l1 = gen_new_label();
        tmp = tcg_temp_new();
        gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
        gen_op_jmp_nz32(tmp, l1);
    }
#endif

    /* Bit 8 of the opcode selects MSAC (subtract) vs MAC (add). */
    if (insn & 0x100)
        tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
    else
        tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);

    if (s->env->macsr & MACSR_FI)
        gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
    else if (s->env->macsr & MACSR_SU)
        gen_helper_macsats(cpu_env, tcg_const_i32(acc));
    else
        gen_helper_macsatu(cpu_env, tcg_const_i32(acc));

#if 0
    /* Disabled because conditional branches clobber temporary vars.  */
    if (l1 != -1)
        gen_set_label(l1);
#endif

    if (dual) {
        /* Dual accumulate variant.  */
        acc = (ext >> 2) & 3;
        /* Restore the overflow flag from the multiplier.  */
        tcg_gen_mov_i32(QREG_MACSR, saved_flags);
#if 0
        /* Disabled because conditional branches clobber temporary vars.  */
        if ((s->env->macsr & MACSR_OMC) != 0) {
            /* Skip the accumulate if the value is already saturated.  */
            l1 = gen_new_label();
            tmp = tcg_temp_new();
            gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
            gen_op_jmp_nz32(tmp, l1);
        }
#endif
        if (ext & 2)
            tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
        else
            tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
        if (s->env->macsr & MACSR_FI)
            gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
        else if (s->env->macsr & MACSR_SU)
            gen_helper_macsats(cpu_env, tcg_const_i32(acc));
        else
            gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
#if 0
        /* Disabled because conditional branches clobber temporary vars.  */
        if (l1 != -1)
            gen_set_label(l1);
#endif
    }
    gen_helper_mac_set_flags(cpu_env, tcg_const_i32(acc));

    if (insn & 0x30) {
        /* Complete the parallel load: write the loaded value into rw,
           then perform the deferred address-register writeback. */
        TCGv rw;
        rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
        tcg_gen_mov_i32(rw, loadval);
        /*
         * FIXME: Should address writeback happen with the masked or
         * unmasked value?
         */
        switch ((insn >> 3) & 7) {
        case 3: /* Post-increment.  */
            tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
            break;
        case 4: /* Pre-decrement.  */
            tcg_gen_mov_i32(AREG(insn, 0), addr);
        }
        tcg_temp_free(loadval);
    }
}
5745 
5746 DISAS_INSN(from_mac)
5747 {
5748     TCGv rx;
5749     TCGv_i64 acc;
5750     int accnum;
5751 
5752     rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5753     accnum = (insn >> 9) & 3;
5754     acc = MACREG(accnum);
5755     if (s->env->macsr & MACSR_FI) {
5756         gen_helper_get_macf(rx, cpu_env, acc);
5757     } else if ((s->env->macsr & MACSR_OMC) == 0) {
5758         tcg_gen_extrl_i64_i32(rx, acc);
5759     } else if (s->env->macsr & MACSR_SU) {
5760         gen_helper_get_macs(rx, acc);
5761     } else {
5762         gen_helper_get_macu(rx, acc);
5763     }
5764     if (insn & 0x40) {
5765         tcg_gen_movi_i64(acc, 0);
5766         tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5767     }
5768 }
5769 
5770 DISAS_INSN(move_mac)
5771 {
5772     /* FIXME: This can be done without a helper.  */
5773     int src;
5774     TCGv dest;
5775     src = insn & 3;
5776     dest = tcg_const_i32((insn >> 9) & 3);
5777     gen_helper_mac_move(cpu_env, dest, tcg_const_i32(src));
5778     gen_mac_clear_flags();
5779     gen_helper_mac_set_flags(cpu_env, dest);
5780 }
5781 
5782 DISAS_INSN(from_macsr)
5783 {
5784     TCGv reg;
5785 
5786     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5787     tcg_gen_mov_i32(reg, QREG_MACSR);
5788 }
5789 
5790 DISAS_INSN(from_mask)
5791 {
5792     TCGv reg;
5793     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5794     tcg_gen_mov_i32(reg, QREG_MAC_MASK);
5795 }
5796 
5797 DISAS_INSN(from_mext)
5798 {
5799     TCGv reg;
5800     TCGv acc;
5801     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5802     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5803     if (s->env->macsr & MACSR_FI)
5804         gen_helper_get_mac_extf(reg, cpu_env, acc);
5805     else
5806         gen_helper_get_mac_exti(reg, cpu_env, acc);
5807 }
5808 
5809 DISAS_INSN(macsr_to_ccr)
5810 {
5811     TCGv tmp = tcg_temp_new();
5812     tcg_gen_andi_i32(tmp, QREG_MACSR, 0xf);
5813     gen_helper_set_sr(cpu_env, tmp);
5814     tcg_temp_free(tmp);
5815     set_cc_op(s, CC_OP_FLAGS);
5816 }
5817 
5818 DISAS_INSN(to_mac)
5819 {
5820     TCGv_i64 acc;
5821     TCGv val;
5822     int accnum;
5823     accnum = (insn >> 9) & 3;
5824     acc = MACREG(accnum);
5825     SRC_EA(env, val, OS_LONG, 0, NULL);
5826     if (s->env->macsr & MACSR_FI) {
5827         tcg_gen_ext_i32_i64(acc, val);
5828         tcg_gen_shli_i64(acc, acc, 8);
5829     } else if (s->env->macsr & MACSR_SU) {
5830         tcg_gen_ext_i32_i64(acc, val);
5831     } else {
5832         tcg_gen_extu_i32_i64(acc, val);
5833     }
5834     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5835     gen_mac_clear_flags();
5836     gen_helper_mac_set_flags(cpu_env, tcg_const_i32(accnum));
5837 }
5838 
5839 DISAS_INSN(to_macsr)
5840 {
5841     TCGv val;
5842     SRC_EA(env, val, OS_LONG, 0, NULL);
5843     gen_helper_set_macsr(cpu_env, val);
5844     gen_exit_tb(s);
5845 }
5846 
5847 DISAS_INSN(to_mask)
5848 {
5849     TCGv val;
5850     SRC_EA(env, val, OS_LONG, 0, NULL);
5851     tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
5852 }
5853 
5854 DISAS_INSN(to_mext)
5855 {
5856     TCGv val;
5857     TCGv acc;
5858     SRC_EA(env, val, OS_LONG, 0, NULL);
5859     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5860     if (s->env->macsr & MACSR_FI)
5861         gen_helper_set_mac_extf(cpu_env, val, acc);
5862     else if (s->env->macsr & MACSR_SU)
5863         gen_helper_set_mac_exts(cpu_env, val, acc);
5864     else
5865         gen_helper_set_mac_extu(cpu_env, val, acc);
5866 }
5867 
5868 static disas_proc opcode_table[65536];
5869 
5870 static void
5871 register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
5872 {
5873   int i;
5874   int from;
5875   int to;
5876 
5877   /* Sanity check.  All set bits must be included in the mask.  */
5878   if (opcode & ~mask) {
5879       fprintf(stderr,
5880               "qemu internal error: bogus opcode definition %04x/%04x\n",
5881               opcode, mask);
5882       abort();
5883   }
5884   /*
5885    * This could probably be cleverer.  For now just optimize the case where
5886    * the top bits are known.
5887    */
5888   /* Find the first zero bit in the mask.  */
5889   i = 0x8000;
5890   while ((i & mask) != 0)
5891       i >>= 1;
5892   /* Iterate over all combinations of this and lower bits.  */
5893   if (i == 0)
5894       i = 1;
5895   else
5896       i <<= 1;
5897   from = opcode & ~(i - 1);
5898   to = from + i;
5899   for (i = from; i < to; i++) {
5900       if ((i & mask) == opcode)
5901           opcode_table[i] = proc;
5902   }
5903 }
5904 
5905 /*
5906  * Register m68k opcode handlers.  Order is important.
5907  * Later insn override earlier ones.
5908  */
void register_m68k_insns (CPUM68KState *env)
{
    /*
     * Build the opcode table only once to avoid
     * multithreading issues.
     */
    if (opcode_table[0] != NULL) {
        return;
    }

    /*
     * use BASE() for instruction available
     * for CF_ISA_A and M68000.
     */
#define BASE(name, opcode, mask) \
    register_opcode(disas_##name, 0x##opcode, 0x##mask)
#define INSN(name, opcode, mask, feature) do { \
    if (m68k_feature(env, M68K_FEATURE_##feature)) \
        BASE(name, opcode, mask); \
    } while(0)
    /* Line 0000: immediate arithmetic, bit operations, CAS, MOVES. */
    BASE(undef,     0000, 0000);
    INSN(arith_im,  0080, fff8, CF_ISA_A);
    INSN(arith_im,  0000, ff00, M68000);
    INSN(chk2,      00c0, f9c0, CHK2);
    INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
    BASE(bitop_reg, 0100, f1c0);
    BASE(bitop_reg, 0140, f1c0);
    BASE(bitop_reg, 0180, f1c0);
    BASE(bitop_reg, 01c0, f1c0);
    INSN(movep,     0108, f138, MOVEP);
    INSN(arith_im,  0280, fff8, CF_ISA_A);
    INSN(arith_im,  0200, ff00, M68000);
    INSN(undef,     02c0, ffc0, M68000);
    INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
    INSN(arith_im,  0480, fff8, CF_ISA_A);
    INSN(arith_im,  0400, ff00, M68000);
    INSN(undef,     04c0, ffc0, M68000);
    INSN(arith_im,  0600, ff00, M68000);
    INSN(undef,     06c0, ffc0, M68000);
    INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
    INSN(arith_im,  0680, fff8, CF_ISA_A);
    INSN(arith_im,  0c00, ff38, CF_ISA_A);
    INSN(arith_im,  0c00, ff00, M68000);
    BASE(bitop_im,  0800, ffc0);
    BASE(bitop_im,  0840, ffc0);
    BASE(bitop_im,  0880, ffc0);
    BASE(bitop_im,  08c0, ffc0);
    INSN(arith_im,  0a80, fff8, CF_ISA_A);
    INSN(arith_im,  0a00, ff00, M68000);
#if defined(CONFIG_SOFTMMU)
    INSN(moves,     0e00, ff00, M68000);
#endif
    INSN(cas,       0ac0, ffc0, CAS);
    INSN(cas,       0cc0, ffc0, CAS);
    INSN(cas,       0ec0, ffc0, CAS);
    INSN(cas2w,     0cfc, ffff, CAS);
    INSN(cas2l,     0efc, ffff, CAS);
    /* Lines 1000-3000: move.b/.l/.w. */
    BASE(move,      1000, f000);
    BASE(move,      2000, f000);
    BASE(move,      3000, f000);
    /* Line 4000: miscellaneous (negx, clr, movem, jumps, system). */
    INSN(chk,       4000, f040, M68000);
    INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
    INSN(negx,      4080, fff8, CF_ISA_A);
    INSN(negx,      4000, ff00, M68000);
    INSN(undef,     40c0, ffc0, M68000);
    INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
    INSN(move_from_sr, 40c0, ffc0, M68000);
    BASE(lea,       41c0, f1c0);
    BASE(clr,       4200, ff00);
    BASE(undef,     42c0, ffc0);
    INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
    INSN(move_from_ccr, 42c0, ffc0, M68000);
    INSN(neg,       4480, fff8, CF_ISA_A);
    INSN(neg,       4400, ff00, M68000);
    INSN(undef,     44c0, ffc0, M68000);
    BASE(move_to_ccr, 44c0, ffc0);
    INSN(not,       4680, fff8, CF_ISA_A);
    INSN(not,       4600, ff00, M68000);
#if defined(CONFIG_SOFTMMU)
    BASE(move_to_sr, 46c0, ffc0);
#endif
    INSN(nbcd,      4800, ffc0, M68000);
    INSN(linkl,     4808, fff8, M68000);
    BASE(pea,       4840, ffc0);
    BASE(swap,      4840, fff8);
    INSN(bkpt,      4848, fff8, BKPT);
    INSN(movem,     48d0, fbf8, CF_ISA_A);
    INSN(movem,     48e8, fbf8, CF_ISA_A);
    INSN(movem,     4880, fb80, M68000);
    BASE(ext,       4880, fff8);
    BASE(ext,       48c0, fff8);
    BASE(ext,       49c0, fff8);
    BASE(tst,       4a00, ff00);
    INSN(tas,       4ac0, ffc0, CF_ISA_B);
    INSN(tas,       4ac0, ffc0, M68000);
#if defined(CONFIG_SOFTMMU)
    INSN(halt,      4ac8, ffff, CF_ISA_A);
#endif
    INSN(pulse,     4acc, ffff, CF_ISA_A);
    BASE(illegal,   4afc, ffff);
    INSN(mull,      4c00, ffc0, CF_ISA_A);
    INSN(mull,      4c00, ffc0, LONG_MULDIV);
    INSN(divl,      4c40, ffc0, CF_ISA_A);
    INSN(divl,      4c40, ffc0, LONG_MULDIV);
    INSN(sats,      4c80, fff8, CF_ISA_B);
    BASE(trap,      4e40, fff0);
    BASE(link,      4e50, fff8);
    BASE(unlk,      4e58, fff8);
#if defined(CONFIG_SOFTMMU)
    INSN(move_to_usp, 4e60, fff8, USP);
    INSN(move_from_usp, 4e68, fff8, USP);
    INSN(reset,     4e70, ffff, M68000);
    BASE(stop,      4e72, ffff);
    BASE(rte,       4e73, ffff);
    INSN(cf_movec,  4e7b, ffff, CF_ISA_A);
    INSN(m68k_movec, 4e7a, fffe, MOVEC);
#endif
    BASE(nop,       4e71, ffff);
    INSN(rtd,       4e74, ffff, RTD);
    BASE(rts,       4e75, ffff);
    INSN(rtr,       4e77, ffff, M68000);
    BASE(jump,      4e80, ffc0);
    BASE(jump,      4ec0, ffc0);
    /* Line 5000: addq/subq, Scc, DBcc, TPF. */
    INSN(addsubq,   5000, f080, M68000);
    BASE(addsubq,   5080, f0c0);
    INSN(scc,       50c0, f0f8, CF_ISA_A); /* Scc.B Dx   */
    INSN(scc,       50c0, f0c0, M68000);   /* Scc.B <EA> */
    INSN(dbcc,      50c8, f0f8, M68000);
    INSN(tpf,       51f8, fff8, CF_ISA_A);

    /* Branch instructions.  */
    BASE(branch,    6000, f000);
    /* Disable long branch instructions, then add back the ones we want.  */
    BASE(undef,     60ff, f0ff); /* All long branches.  */
    INSN(branch,    60ff, f0ff, CF_ISA_B);
    INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
    INSN(branch,    60ff, ffff, BRAL);
    INSN(branch,    60ff, f0ff, BCCL);

    /* Lines 7000-9000: moveq, or/div/sbcd, sub/subx/suba. */
    BASE(moveq,     7000, f100);
    INSN(mvzs,      7100, f100, CF_ISA_B);
    BASE(or,        8000, f000);
    BASE(divw,      80c0, f0c0);
    INSN(sbcd_reg,  8100, f1f8, M68000);
    INSN(sbcd_mem,  8108, f1f8, M68000);
    BASE(addsub,    9000, f000);
    INSN(undef,     90c0, f0c0, CF_ISA_A);
    INSN(subx_reg,  9180, f1f8, CF_ISA_A);
    INSN(subx_reg,  9100, f138, M68000);
    INSN(subx_mem,  9108, f138, M68000);
    INSN(suba,      91c0, f1c0, CF_ISA_A);
    INSN(suba,      90c0, f0c0, M68000);

    /* Line a000: ColdFire MAC/EMAC instructions, mov3q. */
    BASE(undef_mac, a000, f000);
    INSN(mac,       a000, f100, CF_EMAC);
    INSN(from_mac,  a180, f9b0, CF_EMAC);
    INSN(move_mac,  a110, f9fc, CF_EMAC);
    INSN(from_macsr,a980, f9f0, CF_EMAC);
    INSN(from_mask, ad80, fff0, CF_EMAC);
    INSN(from_mext, ab80, fbf0, CF_EMAC);
    INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
    INSN(to_mac,    a100, f9c0, CF_EMAC);
    INSN(to_macsr,  a900, ffc0, CF_EMAC);
    INSN(to_mext,   ab00, fbc0, CF_EMAC);
    INSN(to_mask,   ad00, ffc0, CF_EMAC);

    INSN(mov3q,     a140, f1c0, CF_ISA_B);
    /* Line b000: cmp/cmpa/cmpm, eor. */
    INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
    INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
    INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
    INSN(cmp,       b080, f1c0, CF_ISA_A);
    INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
    INSN(cmp,       b000, f100, M68000);
    INSN(eor,       b100, f100, M68000);
    INSN(cmpm,      b108, f138, M68000);
    INSN(cmpa,      b0c0, f0c0, M68000);
    INSN(eor,       b180, f1c0, CF_ISA_A);
    /* Lines c000-d000: and/mul/bcd/exg, add/addx/adda. */
    BASE(and,       c000, f000);
    INSN(exg_dd,    c140, f1f8, M68000);
    INSN(exg_aa,    c148, f1f8, M68000);
    INSN(exg_da,    c188, f1f8, M68000);
    BASE(mulw,      c0c0, f0c0);
    INSN(abcd_reg,  c100, f1f8, M68000);
    INSN(abcd_mem,  c108, f1f8, M68000);
    BASE(addsub,    d000, f000);
    INSN(undef,     d0c0, f0c0, CF_ISA_A);
    INSN(addx_reg,      d180, f1f8, CF_ISA_A);
    INSN(addx_reg,  d100, f138, M68000);
    INSN(addx_mem,  d108, f138, M68000);
    INSN(adda,      d1c0, f1c0, CF_ISA_A);
    INSN(adda,      d0c0, f0c0, M68000);
    /* Line e000: shifts, rotates and bitfield operations. */
    INSN(shift_im,  e080, f0f0, CF_ISA_A);
    INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
    INSN(shift8_im, e000, f0f0, M68000);
    INSN(shift16_im, e040, f0f0, M68000);
    INSN(shift_im,  e080, f0f0, M68000);
    INSN(shift8_reg, e020, f0f0, M68000);
    INSN(shift16_reg, e060, f0f0, M68000);
    INSN(shift_reg, e0a0, f0f0, M68000);
    INSN(shift_mem, e0c0, fcc0, M68000);
    INSN(rotate_im, e090, f0f0, M68000);
    INSN(rotate8_im, e010, f0f0, M68000);
    INSN(rotate16_im, e050, f0f0, M68000);
    INSN(rotate_reg, e0b0, f0f0, M68000);
    INSN(rotate8_reg, e030, f0f0, M68000);
    INSN(rotate16_reg, e070, f0f0, M68000);
    INSN(rotate_mem, e4c0, fcc0, M68000);
    INSN(bfext_mem, e9c0, fdc0, BITFIELD);  /* bfextu & bfexts */
    INSN(bfext_reg, e9c0, fdf8, BITFIELD);
    INSN(bfins_mem, efc0, ffc0, BITFIELD);
    INSN(bfins_reg, efc0, fff8, BITFIELD);
    INSN(bfop_mem, eac0, ffc0, BITFIELD);   /* bfchg */
    INSN(bfop_reg, eac0, fff8, BITFIELD);   /* bfchg */
    INSN(bfop_mem, ecc0, ffc0, BITFIELD);   /* bfclr */
    INSN(bfop_reg, ecc0, fff8, BITFIELD);   /* bfclr */
    INSN(bfop_mem, edc0, ffc0, BITFIELD);   /* bfffo */
    INSN(bfop_reg, edc0, fff8, BITFIELD);   /* bfffo */
    INSN(bfop_mem, eec0, ffc0, BITFIELD);   /* bfset */
    INSN(bfop_reg, eec0, fff8, BITFIELD);   /* bfset */
    INSN(bfop_mem, e8c0, ffc0, BITFIELD);   /* bftst */
    INSN(bfop_reg, e8c0, fff8, BITFIELD);   /* bftst */
    /* Line f000: coprocessor (FPU), cache control, MMU, move16. */
    BASE(undef_fpu, f000, f000);
    INSN(fpu,       f200, ffc0, CF_FPU);
    INSN(fbcc,      f280, ffc0, CF_FPU);
    INSN(fpu,       f200, ffc0, FPU);
    INSN(fscc,      f240, ffc0, FPU);
    INSN(fbcc,      f280, ff80, FPU);
#if defined(CONFIG_SOFTMMU)
    INSN(frestore,  f340, ffc0, CF_FPU);
    INSN(fsave,     f300, ffc0, CF_FPU);
    INSN(frestore,  f340, ffc0, FPU);
    INSN(fsave,     f300, ffc0, FPU);
    INSN(intouch,   f340, ffc0, CF_ISA_A);
    INSN(cpushl,    f428, ff38, CF_ISA_A);
    INSN(cpush,     f420, ff20, M68040);
    INSN(cinv,      f400, ff20, M68040);
    INSN(pflush,    f500, ffe0, M68040);
    INSN(ptest,     f548, ffd8, M68040);
    INSN(wddata,    fb00, ff00, CF_ISA_A);
    INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
#endif
    INSN(move16_mem, f600, ffe0, M68040);
    INSN(move16_reg, f620, fff8, M68040);
#undef INSN
}
6154 
6155 static void m68k_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
6156 {
6157     DisasContext *dc = container_of(dcbase, DisasContext, base);
6158     CPUM68KState *env = cpu->env_ptr;
6159 
6160     dc->env = env;
6161     dc->pc = dc->base.pc_first;
6162     dc->cc_op = CC_OP_DYNAMIC;
6163     dc->cc_op_synced = 1;
6164     dc->done_mac = 0;
6165     dc->writeback_mask = 0;
6166     init_release_array(dc);
6167 
6168     dc->ss_active = (M68K_SR_TRACE(env->sr) == M68K_SR_TRACE_ANY_INS);
6169     /* If architectural single step active, limit to 1 */
6170     if (dc->ss_active) {
6171         dc->base.max_insns = 1;
6172     }
6173 }
6174 
static void m68k_tr_tb_start(DisasContextBase *dcbase, CPUState *cpu)
{
    /* No per-TB setup is required for m68k. */
}
6178 
static void m68k_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    /* Record pc and cc_op so restore_state_to_opc() can recover them. */
    tcg_gen_insn_start(dc->base.pc_next, dc->cc_op);
}
6184 
6185 static void m68k_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
6186 {
6187     DisasContext *dc = container_of(dcbase, DisasContext, base);
6188     CPUM68KState *env = cpu->env_ptr;
6189     uint16_t insn = read_im16(env, dc);
6190 
6191     opcode_table[insn](env, dc, insn);
6192     do_writebacks(dc);
6193     do_release(dc);
6194 
6195     dc->base.pc_next = dc->pc;
6196 
6197     if (dc->base.is_jmp == DISAS_NEXT) {
6198         /*
6199          * Stop translation when the next insn might touch a new page.
6200          * This ensures that prefetch aborts at the right place.
6201          *
6202          * We cannot determine the size of the next insn without
6203          * completely decoding it.  However, the maximum insn size
6204          * is 32 bytes, so end if we do not have that much remaining.
6205          * This may produce several small TBs at the end of each page,
6206          * but they will all be linked with goto_tb.
6207          *
6208          * ??? ColdFire maximum is 4 bytes; MC68000's maximum is also
6209          * smaller than MC68020's.
6210          */
6211         target_ulong start_page_offset
6212             = dc->pc - (dc->base.pc_first & TARGET_PAGE_MASK);
6213 
6214         if (start_page_offset >= TARGET_PAGE_SIZE - 32) {
6215             dc->base.is_jmp = DISAS_TOO_MANY;
6216         }
6217     }
6218 }
6219 
6220 static void m68k_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
6221 {
6222     DisasContext *dc = container_of(dcbase, DisasContext, base);
6223 
6224     switch (dc->base.is_jmp) {
6225     case DISAS_NORETURN:
6226         break;
6227     case DISAS_TOO_MANY:
6228         update_cc_op(dc);
6229         if (dc->ss_active) {
6230             tcg_gen_movi_i32(QREG_PC, dc->pc);
6231             gen_raise_exception(EXCP_TRACE);
6232         } else {
6233             gen_jmp_tb(dc, 0, dc->pc);
6234         }
6235         break;
6236     case DISAS_JUMP:
6237         /* We updated CC_OP and PC in gen_jmp/gen_jmp_im.  */
6238         if (dc->ss_active) {
6239             gen_raise_exception(EXCP_TRACE);
6240         } else {
6241             tcg_gen_lookup_and_goto_ptr();
6242         }
6243         break;
6244     case DISAS_EXIT:
6245         /*
6246          * We updated CC_OP and PC in gen_exit_tb, but also modified
6247          * other state that may require returning to the main loop.
6248          */
6249         if (dc->ss_active) {
6250             gen_raise_exception(EXCP_TRACE);
6251         } else {
6252             tcg_gen_exit_tb(NULL, 0);
6253         }
6254         break;
6255     default:
6256         g_assert_not_reached();
6257     }
6258 }
6259 
6260 static void m68k_tr_disas_log(const DisasContextBase *dcbase, CPUState *cpu)
6261 {
6262     qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
6263     log_target_disas(cpu, dcbase->pc_first, dcbase->tb->size);
6264 }
6265 
/* Hooks invoked by the generic translator_loop() for m68k. */
static const TranslatorOps m68k_tr_ops = {
    .init_disas_context = m68k_tr_init_disas_context,
    .tb_start           = m68k_tr_tb_start,
    .insn_start         = m68k_tr_insn_start,
    .translate_insn     = m68k_tr_translate_insn,
    .tb_stop            = m68k_tr_tb_stop,
    .disas_log          = m68k_tr_disas_log,
};
6274 
6275 void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int max_insns)
6276 {
6277     DisasContext dc;
6278     translator_loop(&m68k_tr_ops, &dc.base, cpu, tb, max_insns);
6279 }
6280 
6281 static double floatx80_to_double(CPUM68KState *env, uint16_t high, uint64_t low)
6282 {
6283     floatx80 a = { .high = high, .low = low };
6284     union {
6285         float64 f64;
6286         double d;
6287     } u;
6288 
6289     u.f64 = floatx80_to_float64(a, &env->fp_status);
6290     return u.d;
6291 }
6292 
/* Dump the CPU register state to @f (e.g. for the monitor/logging). */
void m68k_cpu_dump_state(CPUState *cs, FILE *f, int flags)
{
    M68kCPU *cpu = M68K_CPU(cs);
    CPUM68KState *env = &cpu->env;
    int i;
    uint16_t sr;
    /* Data, address and FP registers; FP shown raw and as a host double. */
    for (i = 0; i < 8; i++) {
        qemu_fprintf(f, "D%d = %08x   A%d = %08x   "
                     "F%d = %04x %016"PRIx64"  (%12g)\n",
                     i, env->dregs[i], i, env->aregs[i],
                     i, env->fregs[i].l.upper, env->fregs[i].l.lower,
                     floatx80_to_double(env, env->fregs[i].l.upper,
                                        env->fregs[i].l.lower));
    }
    qemu_fprintf(f, "PC = %08x   ", env->pc);
    /* The live CCR bits are kept outside env->sr; merge them for display. */
    sr = env->sr | cpu_m68k_get_ccr(env);
    qemu_fprintf(f, "SR = %04x T:%x I:%x %c%c %c%c%c%c%c\n",
                 sr, (sr & SR_T) >> SR_T_SHIFT, (sr & SR_I) >> SR_I_SHIFT,
                 (sr & SR_S) ? 'S' : 'U', (sr & SR_M) ? '%' : 'I',
                 (sr & CCF_X) ? 'X' : '-', (sr & CCF_N) ? 'N' : '-',
                 (sr & CCF_Z) ? 'Z' : '-', (sr & CCF_V) ? 'V' : '-',
                 (sr & CCF_C) ? 'C' : '-');
    /* FP status register condition-code byte. */
    qemu_fprintf(f, "FPSR = %08x %c%c%c%c ", env->fpsr,
                 (env->fpsr & FPSR_CC_A) ? 'A' : '-',
                 (env->fpsr & FPSR_CC_I) ? 'I' : '-',
                 (env->fpsr & FPSR_CC_Z) ? 'Z' : '-',
                 (env->fpsr & FPSR_CC_N) ? 'N' : '-');
    qemu_fprintf(f, "\n                                "
                 "FPCR =     %04x ", env->fpcr);
    /* Decode the FPCR rounding-precision field. */
    switch (env->fpcr & FPCR_PREC_MASK) {
    case FPCR_PREC_X:
        qemu_fprintf(f, "X ");
        break;
    case FPCR_PREC_S:
        qemu_fprintf(f, "S ");
        break;
    case FPCR_PREC_D:
        qemu_fprintf(f, "D ");
        break;
    }
    /* Decode the FPCR rounding-mode field. */
    switch (env->fpcr & FPCR_RND_MASK) {
    case FPCR_RND_N:
        qemu_fprintf(f, "RN ");
        break;
    case FPCR_RND_Z:
        qemu_fprintf(f, "RZ ");
        break;
    case FPCR_RND_M:
        qemu_fprintf(f, "RM ");
        break;
    case FPCR_RND_P:
        qemu_fprintf(f, "RP ");
        break;
    }
    qemu_fprintf(f, "\n");
#ifdef CONFIG_SOFTMMU
    /* System-mode extras: stack pointers (arrow marks the active one),
       VBR, alternate function codes, and MMU state. */
    qemu_fprintf(f, "%sA7(MSP) = %08x %sA7(USP) = %08x %sA7(ISP) = %08x\n",
                 env->current_sp == M68K_SSP ? "->" : "  ", env->sp[M68K_SSP],
                 env->current_sp == M68K_USP ? "->" : "  ", env->sp[M68K_USP],
                 env->current_sp == M68K_ISP ? "->" : "  ", env->sp[M68K_ISP]);
    qemu_fprintf(f, "VBR = 0x%08x\n", env->vbr);
    qemu_fprintf(f, "SFC = %x DFC %x\n", env->sfc, env->dfc);
    qemu_fprintf(f, "SSW %08x TCR %08x URP %08x SRP %08x\n",
                 env->mmu.ssw, env->mmu.tcr, env->mmu.urp, env->mmu.srp);
    qemu_fprintf(f, "DTTR0/1: %08x/%08x ITTR0/1: %08x/%08x\n",
                 env->mmu.ttr[M68K_DTTR0], env->mmu.ttr[M68K_DTTR1],
                 env->mmu.ttr[M68K_ITTR0], env->mmu.ttr[M68K_ITTR1]);
    qemu_fprintf(f, "MMUSR %08x, fault at %08x\n",
                 env->mmu.mmusr, env->mmu.ar);
#endif
}
6364 
6365 void restore_state_to_opc(CPUM68KState *env, TranslationBlock *tb,
6366                           target_ulong *data)
6367 {
6368     int cc_op = data[1];
6369     env->pc = data[0];
6370     if (cc_op != CC_OP_DYNAMIC) {
6371         env->cc_op = cc_op;
6372     }
6373 }
6374