xref: /openbmc/qemu/target/m68k/translate.c (revision 14776ab5)
1 /*
2  *  m68k translation
3  *
4  *  Copyright (c) 2005-2007 CodeSourcery
5  *  Written by Paul Brook
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "disas/disas.h"
24 #include "exec/exec-all.h"
25 #include "tcg-op.h"
26 #include "qemu/log.h"
27 #include "qemu/qemu-print.h"
28 #include "exec/cpu_ldst.h"
29 #include "exec/translator.h"
30 
31 #include "exec/helper-proto.h"
32 #include "exec/helper-gen.h"
33 
34 #include "trace-tcg.h"
35 #include "exec/log.h"
36 #include "fpu/softfloat.h"
37 
38 
39 //#define DEBUG_DISPATCH 1
40 
41 #define DEFO32(name, offset) static TCGv QREG_##name;
42 #define DEFO64(name, offset) static TCGv_i64 QREG_##name;
43 #include "qregs.def"
44 #undef DEFO32
45 #undef DEFO64
46 
47 static TCGv_i32 cpu_halted;
48 static TCGv_i32 cpu_exception_index;
49 
50 static char cpu_reg_names[2 * 8 * 3 + 5 * 4];
51 static TCGv cpu_dregs[8];
52 static TCGv cpu_aregs[8];
53 static TCGv_i64 cpu_macc[4];
54 
55 #define REG(insn, pos)  (((insn) >> (pos)) & 7)
56 #define DREG(insn, pos) cpu_dregs[REG(insn, pos)]
57 #define AREG(insn, pos) get_areg(s, REG(insn, pos))
58 #define MACREG(acc)     cpu_macc[acc]
59 #define QREG_SP         get_areg(s, 7)
60 
61 static TCGv NULL_QREG;
62 #define IS_NULL_QREG(t) (t == NULL_QREG)
63 /* Used to distinguish stores from bad addressing modes.  */
64 static TCGv store_dummy;
65 
66 #include "exec/gen-icount.h"
67 
void m68k_tcg_init(void)
{
    char *p;
    int i;

    /*
     * Instantiate the fixed TCG globals declared in qregs.def (PC, SR,
     * CC_* etc.), each backed by a CPUM68KState field at the given offset.
     */
#define DEFO32(name, offset) \
    QREG_##name = tcg_global_mem_new_i32(cpu_env, \
        offsetof(CPUM68KState, offset), #name);
#define DEFO64(name, offset) \
    QREG_##name = tcg_global_mem_new_i64(cpu_env, \
        offsetof(CPUM68KState, offset), #name);
#include "qregs.def"
#undef DEFO32
#undef DEFO64

    /*
     * cpu_env points at M68kCPU.env; the negative offsetof walks back to
     * the embedded CPUState so its halted/exception_index fields can be
     * exposed as TCG globals.
     */
    cpu_halted = tcg_global_mem_new_i32(cpu_env,
                                        -offsetof(M68kCPU, env) +
                                        offsetof(CPUState, halted), "HALTED");
    cpu_exception_index = tcg_global_mem_new_i32(cpu_env,
                                                 -offsetof(M68kCPU, env) +
                                                 offsetof(CPUState, exception_index),
                                                 "EXCEPTION");

    /* Build "Dn"/"An"/"ACCn" names in-place inside cpu_reg_names.  */
    p = cpu_reg_names;
    for (i = 0; i < 8; i++) {
        sprintf(p, "D%d", i);
        cpu_dregs[i] = tcg_global_mem_new(cpu_env,
                                          offsetof(CPUM68KState, dregs[i]), p);
        p += 3;
        sprintf(p, "A%d", i);
        cpu_aregs[i] = tcg_global_mem_new(cpu_env,
                                          offsetof(CPUM68KState, aregs[i]), p);
        p += 3;
    }
    for (i = 0; i < 4; i++) {
        sprintf(p, "ACC%d", i);
        cpu_macc[i] = tcg_global_mem_new_i64(cpu_env,
                                         offsetof(CPUM68KState, macc[i]), p);
        p += 5;
    }

    /*
     * Sentinel globals at bogus negative offsets: never loaded or stored,
     * only compared against (see IS_NULL_QREG and store_dummy usage).
     */
    NULL_QREG = tcg_global_mem_new(cpu_env, -4, "NULL");
    store_dummy = tcg_global_mem_new(cpu_env, -8, "NULL");
}
112 
/* internal defines */
typedef struct DisasContext {
    DisasContextBase base;
    CPUM68KState *env;
    target_ulong pc;         /* address of the next insn word to fetch */
    CCOp cc_op; /* Current CC operation */
    int cc_op_synced;        /* nonzero if env CC_OP matches cc_op */
    TCGv_i64 mactmp;         /* scratch i64 — used by MAC insns (not in view) */
    int done_mac;            /* presumably: mactmp allocated — confirm in MAC code */
    int writeback_mask;      /* bitmask of Aregs with a delayed writeback */
    TCGv writeback[8];       /* shadow value per delayed Areg */
#define MAX_TO_RELEASE 8
    int release_count;       /* number of temps queued in release[] */
    TCGv release[MAX_TO_RELEASE];  /* temps freed at end of insn (do_release) */
} DisasContext;
128 
129 static void init_release_array(DisasContext *s)
130 {
131 #ifdef CONFIG_DEBUG_TCG
132     memset(s->release, 0, sizeof(s->release));
133 #endif
134     s->release_count = 0;
135 }
136 
137 static void do_release(DisasContext *s)
138 {
139     int i;
140     for (i = 0; i < s->release_count; i++) {
141         tcg_temp_free(s->release[i]);
142     }
143     init_release_array(s);
144 }
145 
146 static TCGv mark_to_release(DisasContext *s, TCGv tmp)
147 {
148     g_assert(s->release_count < MAX_TO_RELEASE);
149     return s->release[s->release_count++] = tmp;
150 }
151 
152 static TCGv get_areg(DisasContext *s, unsigned regno)
153 {
154     if (s->writeback_mask & (1 << regno)) {
155         return s->writeback[regno];
156     } else {
157         return cpu_aregs[regno];
158     }
159 }
160 
161 static void delay_set_areg(DisasContext *s, unsigned regno,
162                            TCGv val, bool give_temp)
163 {
164     if (s->writeback_mask & (1 << regno)) {
165         if (give_temp) {
166             tcg_temp_free(s->writeback[regno]);
167             s->writeback[regno] = val;
168         } else {
169             tcg_gen_mov_i32(s->writeback[regno], val);
170         }
171     } else {
172         s->writeback_mask |= 1 << regno;
173         if (give_temp) {
174             s->writeback[regno] = val;
175         } else {
176             TCGv tmp = tcg_temp_new();
177             s->writeback[regno] = tmp;
178             tcg_gen_mov_i32(tmp, val);
179         }
180     }
181 }
182 
183 static void do_writebacks(DisasContext *s)
184 {
185     unsigned mask = s->writeback_mask;
186     if (mask) {
187         s->writeback_mask = 0;
188         do {
189             unsigned regno = ctz32(mask);
190             tcg_gen_mov_i32(cpu_aregs[regno], s->writeback[regno]);
191             tcg_temp_free(s->writeback[regno]);
192             mask &= mask - 1;
193         } while (mask);
194     }
195 }
196 
197 /* is_jmp field values */
198 #define DISAS_JUMP      DISAS_TARGET_0 /* only pc was modified dynamically */
199 #define DISAS_EXIT      DISAS_TARGET_1 /* cpu state was modified dynamically */
200 
201 #if defined(CONFIG_USER_ONLY)
202 #define IS_USER(s) 1
203 #else
204 #define IS_USER(s)   (!(s->base.tb->flags & TB_FLAGS_MSR_S))
205 #define SFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_SFC_S) ? \
206                       MMU_KERNEL_IDX : MMU_USER_IDX)
207 #define DFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_DFC_S) ? \
208                       MMU_KERNEL_IDX : MMU_USER_IDX)
209 #endif
210 
211 typedef void (*disas_proc)(CPUM68KState *env, DisasContext *s, uint16_t insn);
212 
213 #ifdef DEBUG_DISPATCH
214 #define DISAS_INSN(name)                                                \
215     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
216                                   uint16_t insn);                       \
217     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
218                              uint16_t insn)                             \
219     {                                                                   \
220         qemu_log("Dispatch " #name "\n");                               \
221         real_disas_##name(env, s, insn);                                \
222     }                                                                   \
223     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
224                                   uint16_t insn)
225 #else
226 #define DISAS_INSN(name)                                                \
227     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
228                              uint16_t insn)
229 #endif
230 
/*
 * For each CC_OP, the set of flags whose inputs are live in the QREG_CC_*
 * globals.  set_cc_op discards any flag not live under the new op.
 * X and N are never listed as dead (see set_cc_op).
 */
static const uint8_t cc_op_live[CC_OP_NB] = {
    [CC_OP_DYNAMIC] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
    [CC_OP_FLAGS] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
    [CC_OP_ADDB ... CC_OP_ADDL] = CCF_X | CCF_N | CCF_V,
    [CC_OP_SUBB ... CC_OP_SUBL] = CCF_X | CCF_N | CCF_V,
    [CC_OP_CMPB ... CC_OP_CMPL] = CCF_X | CCF_N | CCF_V,
    [CC_OP_LOGIC] = CCF_X | CCF_N
};
239 
240 static void set_cc_op(DisasContext *s, CCOp op)
241 {
242     CCOp old_op = s->cc_op;
243     int dead;
244 
245     if (old_op == op) {
246         return;
247     }
248     s->cc_op = op;
249     s->cc_op_synced = 0;
250 
251     /*
252      * Discard CC computation that will no longer be used.
253      * Note that X and N are never dead.
254      */
255     dead = cc_op_live[old_op] & ~cc_op_live[op];
256     if (dead & CCF_C) {
257         tcg_gen_discard_i32(QREG_CC_C);
258     }
259     if (dead & CCF_Z) {
260         tcg_gen_discard_i32(QREG_CC_Z);
261     }
262     if (dead & CCF_V) {
263         tcg_gen_discard_i32(QREG_CC_V);
264     }
265 }
266 
267 /* Update the CPU env CC_OP state.  */
268 static void update_cc_op(DisasContext *s)
269 {
270     if (!s->cc_op_synced) {
271         s->cc_op_synced = 1;
272         tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
273     }
274 }
275 
/* Generate a jump to an immediate address.  */
static void gen_jmp_im(DisasContext *s, uint32_t dest)
{
    /* Sync the lazy CC_OP before leaving the TB.  */
    update_cc_op(s);
    tcg_gen_movi_i32(QREG_PC, dest);
    s->base.is_jmp = DISAS_JUMP;
}
283 
/* Generate a jump to the address in qreg DEST.  */
static void gen_jmp(DisasContext *s, TCGv dest)
{
    /* Sync the lazy CC_OP before leaving the TB.  */
    update_cc_op(s);
    tcg_gen_mov_i32(QREG_PC, dest);
    s->base.is_jmp = DISAS_JUMP;
}
291 
/* Raise exception NR with the guest PC set to DEST; terminates the TB.  */
static void gen_exception(DisasContext *s, uint32_t dest, int nr)
{
    TCGv_i32 tmp;

    /* Make CC_OP and PC consistent before the helper can longjmp out.  */
    update_cc_op(s);
    tcg_gen_movi_i32(QREG_PC, dest);

    tmp = tcg_const_i32(nr);
    gen_helper_raise_exception(cpu_env, tmp);
    tcg_temp_free_i32(tmp);

    s->base.is_jmp = DISAS_NORETURN;
}
305 
/* Raise an address error exception at the current instruction.  */
static inline void gen_addr_fault(DisasContext *s)
{
    gen_exception(s, s->base.pc_next, EXCP_ADDRESS);
}
310 
311 /*
312  * Generate a load from the specified address.  Narrow values are
313  *  sign extended to full register width.
314  */
315 static inline TCGv gen_load(DisasContext *s, int opsize, TCGv addr,
316                             int sign, int index)
317 {
318     TCGv tmp;
319     tmp = tcg_temp_new_i32();
320     switch(opsize) {
321     case OS_BYTE:
322         if (sign)
323             tcg_gen_qemu_ld8s(tmp, addr, index);
324         else
325             tcg_gen_qemu_ld8u(tmp, addr, index);
326         break;
327     case OS_WORD:
328         if (sign)
329             tcg_gen_qemu_ld16s(tmp, addr, index);
330         else
331             tcg_gen_qemu_ld16u(tmp, addr, index);
332         break;
333     case OS_LONG:
334         tcg_gen_qemu_ld32u(tmp, addr, index);
335         break;
336     default:
337         g_assert_not_reached();
338     }
339     return tmp;
340 }
341 
342 /* Generate a store.  */
343 static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val,
344                              int index)
345 {
346     switch(opsize) {
347     case OS_BYTE:
348         tcg_gen_qemu_st8(val, addr, index);
349         break;
350     case OS_WORD:
351         tcg_gen_qemu_st16(val, addr, index);
352         break;
353     case OS_LONG:
354         tcg_gen_qemu_st32(val, addr, index);
355         break;
356     default:
357         g_assert_not_reached();
358     }
359 }
360 
/* Direction/extension selector for gen_ldst and the gen_ea family.  */
typedef enum {
    EA_STORE,   /* write VAL to the effective address */
    EA_LOADU,   /* read, zero extended */
    EA_LOADS    /* read, sign extended */
} ea_what;
366 
/*
 * Perform the access described by WHAT: EA_LOADU generates an unsigned
 * load, EA_LOADS a signed load, EA_STORE a store of VAL.  Loads return
 * a fresh temp queued for release; stores return store_dummy so callers
 * can distinguish a successful store from a bad addressing mode.
 */
static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
                     ea_what what, int index)
{
    if (what == EA_STORE) {
        gen_store(s, opsize, addr, val, index);
        return store_dummy;
    } else {
        return mark_to_release(s, gen_load(s, opsize, addr,
                                           what == EA_LOADS, index));
    }
}
382 
383 /* Read a 16-bit immediate constant */
384 static inline uint16_t read_im16(CPUM68KState *env, DisasContext *s)
385 {
386     uint16_t im;
387     im = cpu_lduw_code(env, s->pc);
388     s->pc += 2;
389     return im;
390 }
391 
/*
 * Read an 8-bit immediate constant.  Byte immediates occupy a full
 * 16-bit extension word; the high byte is dropped by the truncation
 * to uint8_t on return.
 */
static inline uint8_t read_im8(CPUM68KState *env, DisasContext *s)
{
    return read_im16(env, s);
}
397 
398 /* Read a 32-bit immediate constant.  */
399 static inline uint32_t read_im32(CPUM68KState *env, DisasContext *s)
400 {
401     uint32_t im;
402     im = read_im16(env, s) << 16;
403     im |= 0xffff & read_im16(env, s);
404     return im;
405 }
406 
407 /* Read a 64-bit immediate constant.  */
408 static inline uint64_t read_im64(CPUM68KState *env, DisasContext *s)
409 {
410     uint64_t im;
411     im = (uint64_t)read_im32(env, s) << 32;
412     im |= (uint64_t)read_im32(env, s);
413     return im;
414 }
415 
/*
 * Calculate an address index from extension word EXT: an A or D register,
 * optionally sign-extended from a word, scaled by 1/2/4/8.  May clobber
 * TMP and return it as the result.
 */
static TCGv gen_addr_index(DisasContext *s, uint16_t ext, TCGv tmp)
{
    TCGv add;
    int scale;

    /* Bit 15 selects address vs data register; bits 12-14 the regno.  */
    add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
    if ((ext & 0x800) == 0) {
        /* Word-sized index: sign extend into TMP.  */
        tcg_gen_ext16s_i32(tmp, add);
        add = tmp;
    }
    scale = (ext >> 9) & 3;
    if (scale != 0) {
        /* Scale factor 2/4/8 encoded as a shift count.  */
        tcg_gen_shli_i32(tmp, add, scale);
        add = tmp;
    }
    return add;
}
434 
/*
 * Handle a base + index + displacement effective address.
 * A NULL_QREG base means pc-relative.
 */
static TCGv gen_lea_indexed(CPUM68KState *env, DisasContext *s, TCGv base)
{
    uint32_t offset;
    uint16_t ext;
    TCGv add;
    TCGv tmp;
    uint32_t bd, od;

    /* PC value for pc-relative modes: address of the extension word.  */
    offset = s->pc;
    ext = read_im16(env, s);

    /* Word-sized index requires the WORD_INDEX feature.  */
    if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
        return NULL_QREG;

    if (m68k_feature(s->env, M68K_FEATURE_M68000) &&
        !m68k_feature(s->env, M68K_FEATURE_SCALED_INDEX)) {
        /* CPUs without scaled indexing ignore the scale field.  */
        ext &= ~(3 << 9);
    }

    if (ext & 0x100) {
        /* full extension word format */
        if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
            return NULL_QREG;

        if ((ext & 0x30) > 0x10) {
            /* base displacement: word (0x20) or long (0x30) */
            if ((ext & 0x30) == 0x20) {
                bd = (int16_t)read_im16(env, s);
            } else {
                bd = read_im32(env, s);
            }
        } else {
            bd = 0;
        }
        tmp = mark_to_release(s, tcg_temp_new());
        if ((ext & 0x44) == 0) {
            /* pre-index */
            add = gen_addr_index(s, ext, tmp);
        } else {
            /* index suppressed or post-indexed */
            add = NULL_QREG;
        }
        if ((ext & 0x80) == 0) {
            /* base not suppressed */
            if (IS_NULL_QREG(base)) {
                /* pc-relative: fold PC and displacement into a constant */
                base = mark_to_release(s, tcg_const_i32(offset + bd));
                bd = 0;
            }
            if (!IS_NULL_QREG(add)) {
                tcg_gen_add_i32(tmp, add, base);
                add = tmp;
            } else {
                add = base;
            }
        }
        if (!IS_NULL_QREG(add)) {
            if (bd != 0) {
                tcg_gen_addi_i32(tmp, add, bd);
                add = tmp;
            }
        } else {
            add = mark_to_release(s, tcg_const_i32(bd));
        }
        if ((ext & 3) != 0) {
            /* memory indirect */
            base = mark_to_release(s, gen_load(s, OS_LONG, add, 0, IS_USER(s)));
            if ((ext & 0x44) == 4) {
                /* post-index: index applied after the indirection */
                add = gen_addr_index(s, ext, tmp);
                tcg_gen_add_i32(tmp, add, base);
                add = tmp;
            } else {
                add = base;
            }
            if ((ext & 3) > 1) {
                /* outer displacement: word (2) or long (3) */
                if ((ext & 3) == 2) {
                    od = (int16_t)read_im16(env, s);
                } else {
                    od = read_im32(env, s);
                }
            } else {
                od = 0;
            }
            if (od != 0) {
                tcg_gen_addi_i32(tmp, add, od);
                add = tmp;
            }
        }
    } else {
        /* brief extension word format: index + 8-bit displacement */
        tmp = mark_to_release(s, tcg_temp_new());
        add = gen_addr_index(s, ext, tmp);
        if (!IS_NULL_QREG(base)) {
            tcg_gen_add_i32(tmp, add, base);
            if ((int8_t)ext)
                tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
        } else {
            /* pc-relative */
            tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
        }
        add = tmp;
    }
    return add;
}
541 
/* Sign or zero extend a value.  */

static inline void gen_ext(TCGv res, TCGv val, int opsize, int sign)
{
    switch (opsize) {
    case OS_BYTE:
        if (sign) {
            tcg_gen_ext8s_i32(res, val);
        } else {
            tcg_gen_ext8u_i32(res, val);
        }
        break;
    case OS_WORD:
        if (sign) {
            tcg_gen_ext16s_i32(res, val);
        } else {
            tcg_gen_ext16u_i32(res, val);
        }
        break;
    case OS_LONG:
        /* Already full register width; plain move.  */
        tcg_gen_mov_i32(res, val);
        break;
    default:
        g_assert_not_reached();
    }
}
568 
/* Evaluate all the CC flags.  */

static void gen_flush_flags(DisasContext *s)
{
    TCGv t0, t1;

    switch (s->cc_op) {
    case CC_OP_FLAGS:
        /* Already in canonical form; nothing to do.  */
        return;

    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
        /* N holds the result and V the second operand (gen_update_cc_add).  */
        tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
        /* Compute signed overflow for addition.  */
        t0 = tcg_temp_new();
        t1 = tcg_temp_new();
        /* t0 = first operand, recovered as result - operand.  */
        tcg_gen_sub_i32(t0, QREG_CC_N, QREG_CC_V);
        gen_ext(t0, t0, s->cc_op - CC_OP_ADDB, 1);
        tcg_gen_xor_i32(t1, QREG_CC_N, QREG_CC_V);
        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
        tcg_temp_free(t0);
        /* V = (res ^ op2) & ~(op2 ^ op1): operands agree, result differs.  */
        tcg_gen_andc_i32(QREG_CC_V, t1, QREG_CC_V);
        tcg_temp_free(t1);
        break;

    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
        tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
        /* Compute signed overflow for subtraction.  */
        t0 = tcg_temp_new();
        t1 = tcg_temp_new();
        /* t0 = first operand, recovered as result + operand.  */
        tcg_gen_add_i32(t0, QREG_CC_N, QREG_CC_V);
        gen_ext(t0, t0, s->cc_op - CC_OP_SUBB, 1);
        tcg_gen_xor_i32(t1, QREG_CC_N, t0);
        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
        tcg_temp_free(t0);
        tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t1);
        tcg_temp_free(t1);
        break;

    case CC_OP_CMPB:
    case CC_OP_CMPW:
    case CC_OP_CMPL:
        /* N holds the first operand, V the second (gen_update_cc_cmp).  */
        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_C, QREG_CC_N, QREG_CC_V);
        tcg_gen_sub_i32(QREG_CC_Z, QREG_CC_N, QREG_CC_V);
        gen_ext(QREG_CC_Z, QREG_CC_Z, s->cc_op - CC_OP_CMPB, 1);
        /* Compute signed overflow for subtraction.  */
        t0 = tcg_temp_new();
        tcg_gen_xor_i32(t0, QREG_CC_Z, QREG_CC_N);
        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, QREG_CC_N);
        tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t0);
        tcg_temp_free(t0);
        /* Canonical N is the (extended) difference just computed.  */
        tcg_gen_mov_i32(QREG_CC_N, QREG_CC_Z);
        break;

    case CC_OP_LOGIC:
        /* Logical ops clear C and V; N already holds the result.  */
        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
        tcg_gen_movi_i32(QREG_CC_C, 0);
        tcg_gen_movi_i32(QREG_CC_V, 0);
        break;

    case CC_OP_DYNAMIC:
        /* cc_op is only known at runtime; defer to the helper.  */
        gen_helper_flush_flags(cpu_env, QREG_CC_OP);
        s->cc_op_synced = 1;
        break;

    default:
        t0 = tcg_const_i32(s->cc_op);
        gen_helper_flush_flags(cpu_env, t0);
        tcg_temp_free(t0);
        s->cc_op_synced = 1;
        break;
    }

    /* Note that flush_flags also assigned to env->cc_op.  */
    s->cc_op = CC_OP_FLAGS;
}
650 
651 static inline TCGv gen_extend(DisasContext *s, TCGv val, int opsize, int sign)
652 {
653     TCGv tmp;
654 
655     if (opsize == OS_LONG) {
656         tmp = val;
657     } else {
658         tmp = mark_to_release(s, tcg_temp_new());
659         gen_ext(tmp, val, opsize, sign);
660     }
661 
662     return tmp;
663 }
664 
/* Set flags for a logical result: N from the sign-extended value,
   remaining flags materialized lazily via CC_OP_LOGIC.  */
static void gen_logic_cc(DisasContext *s, TCGv val, int opsize)
{
    gen_ext(QREG_CC_N, val, opsize, 1);
    set_cc_op(s, CC_OP_LOGIC);
}
670 
/* Stash compare operands (N=dest, V=src) for lazy evaluation by
   gen_flush_flags under CC_OP_CMP{B,W,L}.  */
static void gen_update_cc_cmp(DisasContext *s, TCGv dest, TCGv src, int opsize)
{
    tcg_gen_mov_i32(QREG_CC_N, dest);
    tcg_gen_mov_i32(QREG_CC_V, src);
    set_cc_op(s, CC_OP_CMPB + opsize);
}
677 
/* Stash add/sub result (N, sign-extended) and operand (V) for lazy
   flag evaluation; caller sets the matching cc_op.  */
static void gen_update_cc_add(TCGv dest, TCGv src, int opsize)
{
    gen_ext(QREG_CC_N, dest, opsize, 1);
    tcg_gen_mov_i32(QREG_CC_V, src);
}
683 
684 static inline int opsize_bytes(int opsize)
685 {
686     switch (opsize) {
687     case OS_BYTE: return 1;
688     case OS_WORD: return 2;
689     case OS_LONG: return 4;
690     case OS_SINGLE: return 4;
691     case OS_DOUBLE: return 8;
692     case OS_EXTENDED: return 12;
693     case OS_PACKED: return 12;
694     default:
695         g_assert_not_reached();
696     }
697 }
698 
699 static inline int insn_opsize(int insn)
700 {
701     switch ((insn >> 6) & 3) {
702     case 0: return OS_BYTE;
703     case 1: return OS_WORD;
704     case 2: return OS_LONG;
705     default:
706         g_assert_not_reached();
707     }
708 }
709 
/* Decode the 3-bit FPU operand-format field at bit POS of extension
   word EXT.  Value 7 is not a valid encoding.  */
static inline int ext_opsize(int ext, int pos)
{
    switch ((ext >> pos) & 7) {
    case 0: return OS_LONG;
    case 1: return OS_SINGLE;
    case 2: return OS_EXTENDED;
    case 3: return OS_PACKED;
    case 4: return OS_WORD;
    case 5: return OS_DOUBLE;
    case 6: return OS_BYTE;
    default:
        g_assert_not_reached();
    }
}
724 
725 /*
726  * Assign value to a register.  If the width is less than the register width
727  * only the low part of the register is set.
728  */
729 static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
730 {
731     TCGv tmp;
732     switch (opsize) {
733     case OS_BYTE:
734         tcg_gen_andi_i32(reg, reg, 0xffffff00);
735         tmp = tcg_temp_new();
736         tcg_gen_ext8u_i32(tmp, val);
737         tcg_gen_or_i32(reg, reg, tmp);
738         tcg_temp_free(tmp);
739         break;
740     case OS_WORD:
741         tcg_gen_andi_i32(reg, reg, 0xffff0000);
742         tmp = tcg_temp_new();
743         tcg_gen_ext16u_i32(tmp, val);
744         tcg_gen_or_i32(reg, reg, tmp);
745         tcg_temp_free(tmp);
746         break;
747     case OS_LONG:
748     case OS_SINGLE:
749         tcg_gen_mov_i32(reg, val);
750         break;
751     default:
752         g_assert_not_reached();
753     }
754 }
755 
/*
 * Generate code for an "effective address".  Does not adjust the base
 * register for autoincrement addressing modes.
 */
static TCGv gen_lea_mode(CPUM68KState *env, DisasContext *s,
                         int mode, int reg0, int opsize)
{
    TCGv reg;
    TCGv tmp;
    uint16_t ext;
    uint32_t offset;

    switch (mode) {
    case 0: /* Data register direct.  */
    case 1: /* Address register direct.  */
        /* Register-direct modes have no memory address.  */
        return NULL_QREG;
    case 3: /* Indirect postincrement.  */
        if (opsize == OS_UNSIZED) {
            return NULL_QREG;
        }
        /* fallthru */
    case 2: /* Indirect register */
        return get_areg(s, reg0);
    case 4: /* Indirect predecrement.  */
        if (opsize == OS_UNSIZED) {
            return NULL_QREG;
        }
        reg = get_areg(s, reg0);
        tmp = mark_to_release(s, tcg_temp_new());
        /* A byte-sized -(A7) keeps the stack pointer word aligned.  */
        if (reg0 == 7 && opsize == OS_BYTE &&
            m68k_feature(s->env, M68K_FEATURE_M68000)) {
            tcg_gen_subi_i32(tmp, reg, 2);
        } else {
            tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
        }
        return tmp;
    case 5: /* Indirect displacement.  */
        reg = get_areg(s, reg0);
        tmp = mark_to_release(s, tcg_temp_new());
        ext = read_im16(env, s);
        tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
        return tmp;
    case 6: /* Indirect index + displacement.  */
        reg = get_areg(s, reg0);
        return gen_lea_indexed(env, s, reg);
    case 7: /* Other */
        switch (reg0) {
        case 0: /* Absolute short.  */
            offset = (int16_t)read_im16(env, s);
            return mark_to_release(s, tcg_const_i32(offset));
        case 1: /* Absolute long.  */
            offset = read_im32(env, s);
            return mark_to_release(s, tcg_const_i32(offset));
        case 2: /* pc displacement  */
            offset = s->pc;
            offset += (int16_t)read_im16(env, s);
            return mark_to_release(s, tcg_const_i32(offset));
        case 3: /* pc index+displacement.  */
            return gen_lea_indexed(env, s, NULL_QREG);
        case 4: /* Immediate.  */
        default:
            /* Immediates have no address.  */
            return NULL_QREG;
        }
    }
    /* Should never happen.  */
    return NULL_QREG;
}
823 
824 static TCGv gen_lea(CPUM68KState *env, DisasContext *s, uint16_t insn,
825                     int opsize)
826 {
827     int mode = extract32(insn, 3, 3);
828     int reg0 = REG(insn, 0);
829     return gen_lea_mode(env, s, mode, reg0, opsize);
830 }
831 
/*
 * Generate code to load/store a value from/into an EA.  If WHAT is
 * EA_STORE this is a write; otherwise it is a read (EA_LOADS sign
 * extends, EA_LOADU zero extends).  ADDRP is non-null for readwrite
 * operands: on the read pass the computed address is stored there, and
 * the store pass reuses it instead of recomputing.
 */
static TCGv gen_ea_mode(CPUM68KState *env, DisasContext *s, int mode, int reg0,
                        int opsize, TCGv val, TCGv *addrp, ea_what what,
                        int index)
{
    TCGv reg, tmp, result;
    int32_t offset;

    switch (mode) {
    case 0: /* Data register direct.  */
        reg = cpu_dregs[reg0];
        if (what == EA_STORE) {
            gen_partset_reg(opsize, reg, val);
            return store_dummy;
        } else {
            return gen_extend(s, reg, opsize, what == EA_LOADS);
        }
    case 1: /* Address register direct.  */
        reg = get_areg(s, reg0);
        if (what == EA_STORE) {
            tcg_gen_mov_i32(reg, val);
            return store_dummy;
        } else {
            return gen_extend(s, reg, opsize, what == EA_LOADS);
        }
    case 2: /* Indirect register */
        reg = get_areg(s, reg0);
        return gen_ldst(s, opsize, reg, val, what, index);
    case 3: /* Indirect postincrement.  */
        reg = get_areg(s, reg0);
        result = gen_ldst(s, opsize, reg, val, what, index);
        /* Only increment on the final (or only) access.  */
        if (what == EA_STORE || !addrp) {
            /* NB: this tmp intentionally shadows the outer declaration.  */
            TCGv tmp = tcg_temp_new();
            /* A byte-sized (A7)+ keeps the stack pointer word aligned.  */
            if (reg0 == 7 && opsize == OS_BYTE &&
                m68k_feature(s->env, M68K_FEATURE_M68000)) {
                tcg_gen_addi_i32(tmp, reg, 2);
            } else {
                tcg_gen_addi_i32(tmp, reg, opsize_bytes(opsize));
            }
            delay_set_areg(s, reg0, tmp, true);
        }
        return result;
    case 4: /* Indirect predecrement.  */
        if (addrp && what == EA_STORE) {
            /* Reuse the address computed on the read pass.  */
            tmp = *addrp;
        } else {
            tmp = gen_lea_mode(env, s, mode, reg0, opsize);
            if (IS_NULL_QREG(tmp)) {
                return tmp;
            }
            if (addrp) {
                *addrp = tmp;
            }
        }
        result = gen_ldst(s, opsize, tmp, val, what, index);
        if (what == EA_STORE || !addrp) {
            delay_set_areg(s, reg0, tmp, false);
        }
        return result;
    case 5: /* Indirect displacement.  */
    case 6: /* Indirect index + displacement.  */
    do_indirect:
        if (addrp && what == EA_STORE) {
            tmp = *addrp;
        } else {
            tmp = gen_lea_mode(env, s, mode, reg0, opsize);
            if (IS_NULL_QREG(tmp)) {
                return tmp;
            }
            if (addrp) {
                *addrp = tmp;
            }
        }
        return gen_ldst(s, opsize, tmp, val, what, index);
    case 7: /* Other */
        switch (reg0) {
        case 0: /* Absolute short.  */
        case 1: /* Absolute long.  */
        case 2: /* pc displacement  */
        case 3: /* pc index+displacement.  */
            goto do_indirect;
        case 4: /* Immediate.  */
            /* Sign extend values for consistency.  */
            switch (opsize) {
            case OS_BYTE:
                if (what == EA_LOADS) {
                    offset = (int8_t)read_im8(env, s);
                } else {
                    offset = read_im8(env, s);
                }
                break;
            case OS_WORD:
                if (what == EA_LOADS) {
                    offset = (int16_t)read_im16(env, s);
                } else {
                    offset = read_im16(env, s);
                }
                break;
            case OS_LONG:
                offset = read_im32(env, s);
                break;
            default:
                g_assert_not_reached();
            }
            return mark_to_release(s, tcg_const_i32(offset));
        default:
            return NULL_QREG;
        }
    }
    /* Should never happen.  */
    return NULL_QREG;
}
948 
949 static TCGv gen_ea(CPUM68KState *env, DisasContext *s, uint16_t insn,
950                    int opsize, TCGv val, TCGv *addrp, ea_what what, int index)
951 {
952     int mode = extract32(insn, 3, 3);
953     int reg0 = REG(insn, 0);
954     return gen_ea_mode(env, s, mode, reg0, opsize, val, addrp, what, index);
955 }
956 
/* Return a pointer temp addressing FP register FREG within cpu_env.  */
static TCGv_ptr gen_fp_ptr(int freg)
{
    TCGv_ptr fp = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fregs[freg]));
    return fp;
}
963 
/* Return a pointer temp addressing the fp_result scratch register.  */
static TCGv_ptr gen_fp_result_ptr(void)
{
    TCGv_ptr fp = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fp_result));
    return fp;
}
970 
/*
 * Copy one FP register to another: the 16-bit exponent/sign word
 * (l.upper) and the 64-bit mantissa (l.lower) are moved separately.
 */
static void gen_fp_move(TCGv_ptr dest, TCGv_ptr src)
{
    TCGv t32;
    TCGv_i64 t64;

    t32 = tcg_temp_new();
    tcg_gen_ld16u_i32(t32, src, offsetof(FPReg, l.upper));
    tcg_gen_st16_i32(t32, dest, offsetof(FPReg, l.upper));
    tcg_temp_free(t32);

    t64 = tcg_temp_new_i64();
    tcg_gen_ld_i64(t64, src, offsetof(FPReg, l.lower));
    tcg_gen_st_i64(t64, dest, offsetof(FPReg, l.lower));
    tcg_temp_free_i64(t64);
}
986 
/*
 * Load a value of size OPSIZE from memory at ADDR (mmu index INDEX)
 * and convert it into the FP register pointed to by FP via the
 * ext* helpers.  OS_EXTENDED is rejected on ColdFire FPUs and
 * OS_PACKED is not implemented; both raise EXCP_FP_UNIMP.
 */
static void gen_load_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
                        int index)
{
    TCGv tmp;
    TCGv_i64 t64;

    t64 = tcg_temp_new_i64();
    tmp = tcg_temp_new();
    switch (opsize) {
    case OS_BYTE:
        tcg_gen_qemu_ld8s(tmp, addr, index);
        gen_helper_exts32(cpu_env, fp, tmp);
        break;
    case OS_WORD:
        tcg_gen_qemu_ld16s(tmp, addr, index);
        gen_helper_exts32(cpu_env, fp, tmp);
        break;
    case OS_LONG:
        tcg_gen_qemu_ld32u(tmp, addr, index);
        gen_helper_exts32(cpu_env, fp, tmp);
        break;
    case OS_SINGLE:
        tcg_gen_qemu_ld32u(tmp, addr, index);
        gen_helper_extf32(cpu_env, fp, tmp);
        break;
    case OS_DOUBLE:
        tcg_gen_qemu_ld64(t64, addr, index);
        gen_helper_extf64(cpu_env, fp, t64);
        break;
    case OS_EXTENDED:
        if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
            gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
            break;
        }
        /* Sign/exponent is the upper 16 bits of the first long word...  */
        tcg_gen_qemu_ld32u(tmp, addr, index);
        tcg_gen_shri_i32(tmp, tmp, 16);
        tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
        /* ...followed by the 64-bit mantissa at ADDR + 4.  */
        tcg_gen_addi_i32(tmp, addr, 4);
        tcg_gen_qemu_ld64(t64, tmp, index);
        tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
        break;
    case OS_PACKED:
        /*
         * unimplemented data type on 68040/ColdFire
         * FIXME if needed for another FPU
         */
        gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
        break;
    default:
        g_assert_not_reached();
    }
    tcg_temp_free(tmp);
    tcg_temp_free_i64(t64);
}
1041 
/*
 * Convert the FP register pointed to by FP to size OPSIZE via the
 * red* helpers and store it to memory at ADDR (mmu index INDEX).
 * OS_EXTENDED is rejected on ColdFire FPUs and OS_PACKED is not
 * implemented; both raise EXCP_FP_UNIMP.
 */
static void gen_store_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
                         int index)
{
    TCGv tmp;
    TCGv_i64 t64;

    t64 = tcg_temp_new_i64();
    tmp = tcg_temp_new();
    switch (opsize) {
    case OS_BYTE:
        gen_helper_reds32(tmp, cpu_env, fp);
        tcg_gen_qemu_st8(tmp, addr, index);
        break;
    case OS_WORD:
        gen_helper_reds32(tmp, cpu_env, fp);
        tcg_gen_qemu_st16(tmp, addr, index);
        break;
    case OS_LONG:
        gen_helper_reds32(tmp, cpu_env, fp);
        tcg_gen_qemu_st32(tmp, addr, index);
        break;
    case OS_SINGLE:
        gen_helper_redf32(tmp, cpu_env, fp);
        tcg_gen_qemu_st32(tmp, addr, index);
        break;
    case OS_DOUBLE:
        gen_helper_redf64(t64, cpu_env, fp);
        tcg_gen_qemu_st64(t64, addr, index);
        break;
    case OS_EXTENDED:
        if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
            gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
            break;
        }
        /* Sign/exponent in the upper 16 bits of the first long word...  */
        tcg_gen_ld16u_i32(tmp, fp, offsetof(FPReg, l.upper));
        tcg_gen_shli_i32(tmp, tmp, 16);
        tcg_gen_qemu_st32(tmp, addr, index);
        /* ...followed by the 64-bit mantissa at ADDR + 4.  */
        tcg_gen_addi_i32(tmp, addr, 4);
        tcg_gen_ld_i64(t64, fp, offsetof(FPReg, l.lower));
        tcg_gen_qemu_st64(t64, tmp, index);
        break;
    case OS_PACKED:
        /*
         * unimplemented data type on 68040/ColdFire
         * FIXME if needed for another FPU
         */
        gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
        break;
    default:
        g_assert_not_reached();
    }
    tcg_temp_free(tmp);
    tcg_temp_free_i64(t64);
}
1096 
1097 static void gen_ldst_fp(DisasContext *s, int opsize, TCGv addr,
1098                         TCGv_ptr fp, ea_what what, int index)
1099 {
1100     if (what == EA_STORE) {
1101         gen_store_fp(s, opsize, addr, fp, index);
1102     } else {
1103         gen_load_fp(s, opsize, addr, fp, index);
1104     }
1105 }
1106 
/*
 * Generate an FP-operand effective address access for the given
 * MODE/REG0 addressing mode.  Loads convert into the FPReg pointed
 * to by FP; stores convert out of it.  Returns 0 on success, -1 for
 * addressing modes that are invalid for the access (e.g. address
 * register direct, or storing to an immediate).
 */
static int gen_ea_mode_fp(CPUM68KState *env, DisasContext *s, int mode,
                          int reg0, int opsize, TCGv_ptr fp, ea_what what,
                          int index)
{
    TCGv reg, addr, tmp;
    TCGv_i64 t64;

    switch (mode) {
    case 0: /* Data register direct.  */
        reg = cpu_dregs[reg0];
        if (what == EA_STORE) {
            switch (opsize) {
            case OS_BYTE:
            case OS_WORD:
            case OS_LONG:
                gen_helper_reds32(reg, cpu_env, fp);
                break;
            case OS_SINGLE:
                gen_helper_redf32(reg, cpu_env, fp);
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            /* Loads sign-extend sub-long sizes before conversion.  */
            tmp = tcg_temp_new();
            switch (opsize) {
            case OS_BYTE:
                tcg_gen_ext8s_i32(tmp, reg);
                gen_helper_exts32(cpu_env, fp, tmp);
                break;
            case OS_WORD:
                tcg_gen_ext16s_i32(tmp, reg);
                gen_helper_exts32(cpu_env, fp, tmp);
                break;
            case OS_LONG:
                gen_helper_exts32(cpu_env, fp, reg);
                break;
            case OS_SINGLE:
                gen_helper_extf32(cpu_env, fp, reg);
                break;
            default:
                g_assert_not_reached();
            }
            tcg_temp_free(tmp);
        }
        return 0;
    case 1: /* Address register direct.  */
        return -1;
    case 2: /* Indirect register */
        addr = get_areg(s, reg0);
        gen_ldst_fp(s, opsize, addr, fp, what, index);
        return 0;
    case 3: /* Indirect postincrement.  */
        addr = cpu_aregs[reg0];
        gen_ldst_fp(s, opsize, addr, fp, what, index);
        /* Bump the address register after the access.  */
        tcg_gen_addi_i32(addr, addr, opsize_bytes(opsize));
        return 0;
    case 4: /* Indirect predecrement.  */
        addr = gen_lea_mode(env, s, mode, reg0, opsize);
        if (IS_NULL_QREG(addr)) {
            return -1;
        }
        gen_ldst_fp(s, opsize, addr, fp, what, index);
        /* Write back the decremented address after the access.  */
        tcg_gen_mov_i32(cpu_aregs[reg0], addr);
        return 0;
    case 5: /* Indirect displacement.  */
    case 6: /* Indirect index + displacement.  */
    do_indirect:
        addr = gen_lea_mode(env, s, mode, reg0, opsize);
        if (IS_NULL_QREG(addr)) {
            return -1;
        }
        gen_ldst_fp(s, opsize, addr, fp, what, index);
        return 0;
    case 7: /* Other */
        switch (reg0) {
        case 0: /* Absolute short.  */
        case 1: /* Absolute long.  */
        case 2: /* pc displacement  */
        case 3: /* pc index+displacement.  */
            goto do_indirect;
        case 4: /* Immediate.  */
            /* Immediates can only be a source.  */
            if (what == EA_STORE) {
                return -1;
            }
            switch (opsize) {
            case OS_BYTE:
                tmp = tcg_const_i32((int8_t)read_im8(env, s));
                gen_helper_exts32(cpu_env, fp, tmp);
                tcg_temp_free(tmp);
                break;
            case OS_WORD:
                tmp = tcg_const_i32((int16_t)read_im16(env, s));
                gen_helper_exts32(cpu_env, fp, tmp);
                tcg_temp_free(tmp);
                break;
            case OS_LONG:
                tmp = tcg_const_i32(read_im32(env, s));
                gen_helper_exts32(cpu_env, fp, tmp);
                tcg_temp_free(tmp);
                break;
            case OS_SINGLE:
                tmp = tcg_const_i32(read_im32(env, s));
                gen_helper_extf32(cpu_env, fp, tmp);
                tcg_temp_free(tmp);
                break;
            case OS_DOUBLE:
                t64 = tcg_const_i64(read_im64(env, s));
                gen_helper_extf64(cpu_env, fp, t64);
                tcg_temp_free_i64(t64);
                break;
            case OS_EXTENDED:
                if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
                    gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
                    break;
                }
                /* Sign/exponent word, then the 64-bit mantissa.  */
                tmp = tcg_const_i32(read_im32(env, s) >> 16);
                tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
                tcg_temp_free(tmp);
                t64 = tcg_const_i64(read_im64(env, s));
                tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
                tcg_temp_free_i64(t64);
                break;
            case OS_PACKED:
                /*
                 * unimplemented data type on 68040/ColdFire
                 * FIXME if needed for another FPU
                 */
                gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
                break;
            default:
                g_assert_not_reached();
            }
            return 0;
        default:
            return -1;
        }
    }
    return -1;
}
1247 
1248 static int gen_ea_fp(CPUM68KState *env, DisasContext *s, uint16_t insn,
1249                        int opsize, TCGv_ptr fp, ea_what what, int index)
1250 {
1251     int mode = extract32(insn, 3, 3);
1252     int reg0 = REG(insn, 0);
1253     return gen_ea_mode_fp(env, s, mode, reg0, opsize, fp, what, index);
1254 }
1255 
/*
 * A comparison prepared by gen_cc_cond: the condition holds when
 * "tcond(v1, v2)" is true.  g1/g2 mark v1/v2 as global TCG values
 * that must not be freed by free_cond().
 */
typedef struct {
    TCGCond tcond;   /* comparison to apply to v1, v2 */
    bool g1;         /* v1 is a global; do not free */
    bool g2;         /* v2 is a global; do not free */
    TCGv v1;
    TCGv v2;
} DisasCompare;
1263 
/*
 * Prepare DisasCompare C for m68k condition code COND (0-15),
 * exploiting the current lazy cc_op to avoid materialising the full
 * flag state where possible.  Each condition pair is computed in the
 * sense of the odd ("true") encoding; the final step inverts tcond
 * for the even encoding.
 */
static void gen_cc_cond(DisasCompare *c, DisasContext *s, int cond)
{
    TCGv tmp, tmp2;
    TCGCond tcond;
    CCOp op = s->cc_op;

    /* The CC_OP_CMP form can handle most normal comparisons directly.  */
    if (op == CC_OP_CMPB || op == CC_OP_CMPW || op == CC_OP_CMPL) {
        c->g1 = c->g2 = 1;
        c->v1 = QREG_CC_N;
        c->v2 = QREG_CC_V;
        switch (cond) {
        case 2: /* HI */
        case 3: /* LS */
            tcond = TCG_COND_LEU;
            goto done;
        case 4: /* CC */
        case 5: /* CS */
            tcond = TCG_COND_LTU;
            goto done;
        case 6: /* NE */
        case 7: /* EQ */
            tcond = TCG_COND_EQ;
            goto done;
        case 10: /* PL */
        case 11: /* MI */
            /* PL/MI need the actual sign of the result, so recompute
               the subtraction at the comparison's operand size.  */
            c->g1 = c->g2 = 0;
            c->v2 = tcg_const_i32(0);
            c->v1 = tmp = tcg_temp_new();
            tcg_gen_sub_i32(tmp, QREG_CC_N, QREG_CC_V);
            gen_ext(tmp, tmp, op - CC_OP_CMPB, 1);
            /* fallthru */
        case 12: /* GE */
        case 13: /* LT */
            tcond = TCG_COND_LT;
            goto done;
        case 14: /* GT */
        case 15: /* LE */
            tcond = TCG_COND_LE;
            goto done;
        }
    }

    /* Default comparison: v1 against constant zero.  */
    c->g1 = 1;
    c->g2 = 0;
    c->v2 = tcg_const_i32(0);

    switch (cond) {
    case 0: /* T */
    case 1: /* F */
        c->v1 = c->v2;
        tcond = TCG_COND_NEVER;
        goto done;
    case 14: /* GT (!(Z || (N ^ V))) */
    case 15: /* LE (Z || (N ^ V)) */
        /*
         * Logic operations clear V, which simplifies LE to (Z || N),
         * and since Z and N are co-located, this becomes a normal
         * comparison vs N.
         */
        if (op == CC_OP_LOGIC) {
            c->v1 = QREG_CC_N;
            tcond = TCG_COND_LE;
            goto done;
        }
        break;
    case 12: /* GE (!(N ^ V)) */
    case 13: /* LT (N ^ V) */
        /* Logic operations clear V, which simplifies this to N.  */
        if (op != CC_OP_LOGIC) {
            break;
        }
        /* fallthru */
    case 10: /* PL (!N) */
    case 11: /* MI (N) */
        /* Several cases represent N normally.  */
        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
            op == CC_OP_LOGIC) {
            c->v1 = QREG_CC_N;
            tcond = TCG_COND_LT;
            goto done;
        }
        break;
    case 6: /* NE (!Z) */
    case 7: /* EQ (Z) */
        /* Some cases fold Z into N.  */
        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
            op == CC_OP_LOGIC) {
            tcond = TCG_COND_EQ;
            c->v1 = QREG_CC_N;
            goto done;
        }
        break;
    case 4: /* CC (!C) */
    case 5: /* CS (C) */
        /* Some cases fold C into X.  */
        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL) {
            tcond = TCG_COND_NE;
            c->v1 = QREG_CC_X;
            goto done;
        }
        /* fallthru */
    case 8: /* VC (!V) */
    case 9: /* VS (V) */
        /* Logic operations clear V and C.  */
        if (op == CC_OP_LOGIC) {
            tcond = TCG_COND_NEVER;
            c->v1 = c->v2;
            goto done;
        }
        break;
    }

    /* Otherwise, flush flag state to CC_OP_FLAGS.  */
    gen_flush_flags(s);

    switch (cond) {
    case 0: /* T */
    case 1: /* F */
    default:
        /* Invalid, or handled above.  */
        abort();
    case 2: /* HI (!C && !Z) -> !(C || Z)*/
    case 3: /* LS (C || Z) */
        c->v1 = tmp = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
        tcg_gen_or_i32(tmp, tmp, QREG_CC_C);
        tcond = TCG_COND_NE;
        break;
    case 4: /* CC (!C) */
    case 5: /* CS (C) */
        c->v1 = QREG_CC_C;
        tcond = TCG_COND_NE;
        break;
    case 6: /* NE (!Z) */
    case 7: /* EQ (Z) */
        c->v1 = QREG_CC_Z;
        tcond = TCG_COND_EQ;
        break;
    case 8: /* VC (!V) */
    case 9: /* VS (V) */
        c->v1 = QREG_CC_V;
        tcond = TCG_COND_LT;
        break;
    case 10: /* PL (!N) */
    case 11: /* MI (N) */
        c->v1 = QREG_CC_N;
        tcond = TCG_COND_LT;
        break;
    case 12: /* GE (!(N ^ V)) */
    case 13: /* LT (N ^ V) */
        c->v1 = tmp = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_xor_i32(tmp, QREG_CC_N, QREG_CC_V);
        tcond = TCG_COND_LT;
        break;
    case 14: /* GT (!(Z || (N ^ V))) */
    case 15: /* LE (Z || (N ^ V)) */
        /* Fold Z in by forcing the sign bit when Z is set.  */
        c->v1 = tmp = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
        tcg_gen_neg_i32(tmp, tmp);
        tmp2 = tcg_temp_new();
        tcg_gen_xor_i32(tmp2, QREG_CC_N, QREG_CC_V);
        tcg_gen_or_i32(tmp, tmp, tmp2);
        tcg_temp_free(tmp2);
        tcond = TCG_COND_LT;
        break;
    }

 done:
    /* Even condition codes are the negation of the odd ones.  */
    if ((cond & 1) == 0) {
        tcond = tcg_invert_cond(tcond);
    }
    c->tcond = tcond;
}
1444 
1445 static void free_cond(DisasCompare *c)
1446 {
1447     if (!c->g1) {
1448         tcg_temp_free(c->v1);
1449     }
1450     if (!c->g2) {
1451         tcg_temp_free(c->v2);
1452     }
1453 }
1454 
1455 static void gen_jmpcc(DisasContext *s, int cond, TCGLabel *l1)
1456 {
1457   DisasCompare c;
1458 
1459   gen_cc_cond(&c, s, cond);
1460   update_cc_op(s);
1461   tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
1462   free_cond(&c);
1463 }
1464 
/* Force a TB lookup after an instruction that changes the CPU state.  */
static void gen_exit_tb(DisasContext *s)
{
    /* Flush the lazy condition-code state before leaving the TB.  */
    update_cc_op(s);
    tcg_gen_movi_i32(QREG_PC, s->pc);
    /* DISAS_EXIT requests a full lookup instead of direct TB chaining.  */
    s->base.is_jmp = DISAS_EXIT;
}
1472 
/*
 * Evaluate the EA encoded in the local "insn" as a source operand of
 * size OPSIZE into RESULT, sign- or zero-extending per OP_SIGN.
 * NOTE: on an invalid EA this raises an address fault and returns
 * from the enclosing DISAS_INSN function.
 */
#define SRC_EA(env, result, opsize, op_sign, addrp) do {                \
        result = gen_ea(env, s, insn, opsize, NULL_QREG, addrp,         \
                        op_sign ? EA_LOADS : EA_LOADU, IS_USER(s));     \
        if (IS_NULL_QREG(result)) {                                     \
            gen_addr_fault(s);                                          \
            return;                                                     \
        }                                                               \
    } while (0)
1481 
/*
 * Store VAL of size OPSIZE to the EA encoded in INSN.
 * NOTE: on an invalid EA this raises an address fault and returns
 * from the enclosing DISAS_INSN function.
 */
#define DEST_EA(env, insn, opsize, val, addrp) do {                     \
        TCGv ea_result = gen_ea(env, s, insn, opsize, val, addrp,       \
                                EA_STORE, IS_USER(s));                  \
        if (IS_NULL_QREG(ea_result)) {                                  \
            gen_addr_fault(s);                                          \
            return;                                                     \
        }                                                               \
    } while (0)
1490 
1491 static inline bool use_goto_tb(DisasContext *s, uint32_t dest)
1492 {
1493 #ifndef CONFIG_USER_ONLY
1494     return (s->base.pc_first & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)
1495         || (s->base.pc_next & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
1496 #else
1497     return true;
1498 #endif
1499 }
1500 
/*
 * Generate a jump to an immediate address.  Slot N is the goto_tb
 * index used when direct chaining is possible; single-stepping
 * instead raises EXCP_DEBUG at the destination.
 */
static void gen_jmp_tb(DisasContext *s, int n, uint32_t dest)
{
    if (unlikely(s->base.singlestep_enabled)) {
        gen_exception(s, dest, EXCP_DEBUG);
    } else if (use_goto_tb(s, dest)) {
        /* Chain directly to the destination TB.  */
        tcg_gen_goto_tb(n);
        tcg_gen_movi_i32(QREG_PC, dest);
        tcg_gen_exit_tb(s->base.tb, n);
    } else {
        /* Cross-page (or otherwise unchainable): indirect jump.  */
        gen_jmp_im(s, dest);
        tcg_gen_exit_tb(NULL, 0);
    }
    s->base.is_jmp = DISAS_NORETURN;
}
1516 
1517 DISAS_INSN(scc)
1518 {
1519     DisasCompare c;
1520     int cond;
1521     TCGv tmp;
1522 
1523     cond = (insn >> 8) & 0xf;
1524     gen_cc_cond(&c, s, cond);
1525 
1526     tmp = tcg_temp_new();
1527     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
1528     free_cond(&c);
1529 
1530     tcg_gen_neg_i32(tmp, tmp);
1531     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
1532     tcg_temp_free(tmp);
1533 }
1534 
/*
 * DBcc: if the condition is false, decrement the low word of Dn and
 * branch to base+offset unless the counter reaches -1; otherwise
 * (condition true, or counter expired) fall through.
 */
DISAS_INSN(dbcc)
{
    TCGLabel *l1;
    TCGv reg;
    TCGv tmp;
    int16_t offset;
    uint32_t base;

    reg = DREG(insn, 0);
    base = s->pc;
    offset = (int16_t)read_im16(env, s);
    l1 = gen_new_label();
    /* Condition true: skip the decrement-and-branch.  */
    gen_jmpcc(s, (insn >> 8) & 0xf, l1);

    tmp = tcg_temp_new();
    tcg_gen_ext16s_i32(tmp, reg);
    tcg_gen_addi_i32(tmp, tmp, -1);
    /* Only the low word of Dn is updated.  */
    gen_partset_reg(OS_WORD, reg, tmp);
    tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, -1, l1);
    gen_jmp_tb(s, 1, base + offset);
    gen_set_label(l1);
    gen_jmp_tb(s, 0, s->pc);
}
1558 
/* Undefined line-A (MAC) instruction: raise the line-A exception.  */
DISAS_INSN(undef_mac)
{
    gen_exception(s, s->base.pc_next, EXCP_LINEA);
}
1563 
/* Undefined line-F (FPU) instruction: raise the line-F exception.  */
DISAS_INSN(undef_fpu)
{
    gen_exception(s, s->base.pc_next, EXCP_LINEF);
}
1568 
/* Raise EXCP_ILLEGAL for an unrecognized opcode, logging it first.  */
DISAS_INSN(undef)
{
    /*
     * ??? This is both instructions that are as yet unimplemented
     * for the 680x0 series, as well as those that are implemented
     * but actually illegal for CPU32 or pre-68020.
     */
    qemu_log_mask(LOG_UNIMP, "Illegal instruction: %04x @ %08x\n",
                  insn, s->base.pc_next);
    gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
}
1580 
1581 DISAS_INSN(mulw)
1582 {
1583     TCGv reg;
1584     TCGv tmp;
1585     TCGv src;
1586     int sign;
1587 
1588     sign = (insn & 0x100) != 0;
1589     reg = DREG(insn, 9);
1590     tmp = tcg_temp_new();
1591     if (sign)
1592         tcg_gen_ext16s_i32(tmp, reg);
1593     else
1594         tcg_gen_ext16u_i32(tmp, reg);
1595     SRC_EA(env, src, OS_WORD, sign, NULL);
1596     tcg_gen_mul_i32(tmp, tmp, src);
1597     tcg_gen_mov_i32(reg, tmp);
1598     gen_logic_cc(s, tmp, OS_LONG);
1599     tcg_temp_free(tmp);
1600 }
1601 
/*
 * DIVS.W/DIVU.W: the division itself is performed in a helper, which
 * receives the destination register by number.
 */
DISAS_INSN(divw)
{
    int sign;
    TCGv src;
    TCGv destr;

    /* divX.w <EA>,Dn    32/16 -> 16r:16q */

    sign = (insn & 0x100) != 0;

    /* dest.l / src.w */

    SRC_EA(env, src, OS_WORD, sign, NULL);
    destr = tcg_const_i32(REG(insn, 9));
    if (sign) {
        gen_helper_divsw(cpu_env, destr, src);
    } else {
        gen_helper_divuw(cpu_env, destr, src);
    }
    tcg_temp_free(destr);

    /* Flags are computed by the helper; mark them as up to date.  */
    set_cc_op(s, CC_OP_FLAGS);
}
1625 
/*
 * Long division.  Bit 10 of the extension word selects the 64-bit
 * dividend form (Dr:Dq), which requires the QUAD_MULDIV feature;
 * bit 11 selects signed vs unsigned.  The helpers receive the
 * numerator/remainder registers by number.
 */
DISAS_INSN(divl)
{
    TCGv num, reg, den;
    int sign;
    uint16_t ext;

    ext = read_im16(env, s);

    sign = (ext & 0x0800) != 0;

    if (ext & 0x400) {
        if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
            return;
        }

        /* divX.l <EA>, Dr:Dq    64/32 -> 32r:32q */

        SRC_EA(env, den, OS_LONG, 0, NULL);
        num = tcg_const_i32(REG(ext, 12));
        reg = tcg_const_i32(REG(ext, 0));
        if (sign) {
            gen_helper_divsll(cpu_env, num, reg, den);
        } else {
            gen_helper_divull(cpu_env, num, reg, den);
        }
        tcg_temp_free(reg);
        tcg_temp_free(num);
        set_cc_op(s, CC_OP_FLAGS);
        return;
    }

    /* divX.l <EA>, Dq        32/32 -> 32q     */
    /* divXl.l <EA>, Dr:Dq    32/32 -> 32r:32q */

    SRC_EA(env, den, OS_LONG, 0, NULL);
    num = tcg_const_i32(REG(ext, 12));
    reg = tcg_const_i32(REG(ext, 0));
    if (sign) {
        gen_helper_divsl(cpu_env, num, reg, den);
    } else {
        gen_helper_divul(cpu_env, num, reg, den);
    }
    tcg_temp_free(reg);
    tcg_temp_free(num);

    set_cc_op(s, CC_OP_FLAGS);
}
1674 
/*
 * Packed BCD addition: dest10 = dest10 + src10 + X, carried out on
 * two BCD digits at once in a 32-bit value.
 */
static void bcd_add(TCGv dest, TCGv src)
{
    TCGv t0, t1;

    /*
     * dest10 = dest10 + src10 + X
     *
     *        t1 = src
     *        t2 = t1 + 0x066
     *        t3 = t2 + dest + X
     *        t4 = t2 ^ dest
     *        t5 = t3 ^ t4
     *        t6 = ~t5 & 0x110
     *        t7 = (t6 >> 2) | (t6 >> 3)
     *        return t3 - t7
     */

    /*
     * t1 = (src + 0x066) + dest + X
     *    = result with some possible exceeding 0x6
     */

    t0 = tcg_const_i32(0x066);
    tcg_gen_add_i32(t0, t0, src);

    t1 = tcg_temp_new();
    tcg_gen_add_i32(t1, t0, dest);
    tcg_gen_add_i32(t1, t1, QREG_CC_X);

    /* we will remove exceeding 0x6 where there is no carry */

    /*
     * t0 = (src + 0x0066) ^ dest
     *    = t1 without carries
     */

    tcg_gen_xor_i32(t0, t0, dest);

    /*
     * extract the carries
     * t0 = t0 ^ t1
     *    = only the carries
     */

    tcg_gen_xor_i32(t0, t0, t1);

    /*
     * generate 0x1 where there is no carry
     * and for each 0x10, generate a 0x6
     */

    tcg_gen_shri_i32(t0, t0, 3);
    tcg_gen_not_i32(t0, t0);
    tcg_gen_andi_i32(t0, t0, 0x22);
    /* dest = t0 * 3, i.e. 0x22 mask scaled to 0x66 adjustments */
    tcg_gen_add_i32(dest, t0, t0);
    tcg_gen_add_i32(dest, dest, t0);
    tcg_temp_free(t0);

    /*
     * remove the exceeding 0x6
     * for digits that have not generated a carry
     */

    tcg_gen_sub_i32(dest, t1, dest);
    tcg_temp_free(t1);
}
1741 
/*
 * Packed BCD subtraction: dest10 = dest10 - src10 - X, expressed as
 * a BCD addition with the ten's complement of src.
 */
static void bcd_sub(TCGv dest, TCGv src)
{
    TCGv t0, t1, t2;

    /*
     *  dest10 = dest10 - src10 - X
     *         = bcd_add(dest + 1 - X, 0x199 - src)
     */

    /* t0 = 0x066 + (0x199 - src) */

    t0 = tcg_temp_new();
    tcg_gen_subfi_i32(t0, 0x1ff, src);

    /* t1 = t0 + dest + 1 - X*/

    t1 = tcg_temp_new();
    tcg_gen_add_i32(t1, t0, dest);
    tcg_gen_addi_i32(t1, t1, 1);
    tcg_gen_sub_i32(t1, t1, QREG_CC_X);

    /* t2 = t0 ^ dest */

    t2 = tcg_temp_new();
    tcg_gen_xor_i32(t2, t0, dest);

    /* t0 = t1 ^ t2 */

    tcg_gen_xor_i32(t0, t1, t2);

    /*
     * t2 = ~t0 & 0x110
     * t0 = (t2 >> 2) | (t2 >> 3)
     *
     * to fit on 8bit operands, changed in:
     *
     * t2 = ~(t0 >> 3) & 0x22
     * t0 = t2 + t2
     * t0 = t0 + t2
     */

    tcg_gen_shri_i32(t2, t0, 3);
    tcg_gen_not_i32(t2, t2);
    tcg_gen_andi_i32(t2, t2, 0x22);
    tcg_gen_add_i32(t0, t2, t2);
    tcg_gen_add_i32(t0, t0, t2);
    tcg_temp_free(t2);

    /* return t1 - t0 */

    tcg_gen_sub_i32(dest, t1, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
1796 
/*
 * Set flags after a BCD operation on VAL: Z is sticky (only ever
 * cleared, never set, by OR-ing in the result byte), and the carry
 * out of bit 8 becomes both C and X.
 */
static void bcd_flags(TCGv val)
{
    tcg_gen_andi_i32(QREG_CC_C, val, 0x0ff);
    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_C);

    tcg_gen_extract_i32(QREG_CC_C, val, 8, 1);

    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
}
1806 
/* ABCD Dx,Dy: BCD add of the low bytes of two data registers.  */
DISAS_INSN(abcd_reg)
{
    TCGv src;
    TCGv dest;

    gen_flush_flags(s); /* !Z is sticky */

    src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
    dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
    bcd_add(dest, src);
    /* Only the low byte of the destination register is written.  */
    gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);

    bcd_flags(dest);
}
1821 
/* ABCD -(Ax),-(Ay): BCD add of two memory bytes, pre-decrement mode.  */
DISAS_INSN(abcd_mem)
{
    TCGv src, dest, addr;

    gen_flush_flags(s); /* !Z is sticky */

    /* Indirect pre-decrement load (mode 4) */

    src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
                      NULL_QREG, NULL, EA_LOADU, IS_USER(s));
    /* The destination address is kept for the store-back below.  */
    dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
                       NULL_QREG, &addr, EA_LOADU, IS_USER(s));

    bcd_add(dest, src);

    gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
                EA_STORE, IS_USER(s));

    bcd_flags(dest);
}
1842 
/* SBCD Dx,Dy: BCD subtract of the low bytes of two data registers.  */
DISAS_INSN(sbcd_reg)
{
    TCGv src, dest;

    gen_flush_flags(s); /* !Z is sticky */

    src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
    dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);

    bcd_sub(dest, src);

    /* Only the low byte of the destination register is written.  */
    gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);

    bcd_flags(dest);
}
1858 
/* SBCD -(Ax),-(Ay): BCD subtract of two memory bytes, pre-decrement.  */
DISAS_INSN(sbcd_mem)
{
    TCGv src, dest, addr;

    gen_flush_flags(s); /* !Z is sticky */

    /* Indirect pre-decrement load (mode 4) */

    src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
                      NULL_QREG, NULL, EA_LOADU, IS_USER(s));
    /* The destination address is kept for the store-back below.  */
    dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
                       NULL_QREG, &addr, EA_LOADU, IS_USER(s));

    bcd_sub(dest, src);

    gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
                EA_STORE, IS_USER(s));

    bcd_flags(dest);
}
1879 
/* NBCD <ea>: BCD negate, computed as 0 - src - X in BCD.  */
DISAS_INSN(nbcd)
{
    TCGv src, dest;
    TCGv addr;

    gen_flush_flags(s); /* !Z is sticky */

    SRC_EA(env, src, OS_BYTE, 0, &addr);

    dest = tcg_const_i32(0);
    bcd_sub(dest, src);

    DEST_EA(env, insn, OS_BYTE, dest, &addr);

    bcd_flags(dest);

    tcg_temp_free(dest);
}
1898 
/*
 * ADD/SUB: bit 14 of the insn selects add vs sub; bit 8 selects the
 * direction (set: Dn op <ea> -> <ea>; clear: <ea> op Dn -> Dn).
 */
DISAS_INSN(addsub)
{
    TCGv reg;
    TCGv dest;
    TCGv src;
    TCGv tmp;
    TCGv addr;
    int add;
    int opsize;

    add = (insn & 0x4000) != 0;
    opsize = insn_opsize(insn);
    reg = gen_extend(s, DREG(insn, 9), opsize, 1);
    dest = tcg_temp_new();
    if (insn & 0x100) {
        /* <ea> is the destination; keep its address for store-back.  */
        SRC_EA(env, tmp, opsize, 1, &addr);
        src = reg;
    } else {
        tmp = reg;
        SRC_EA(env, src, opsize, 1, NULL);
    }
    if (add) {
        tcg_gen_add_i32(dest, tmp, src);
        /* Carry out: unsigned result < addend.  */
        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, src);
        set_cc_op(s, CC_OP_ADDB + opsize);
    } else {
        /* Borrow out: minuend unsigned-less-than subtrahend.  */
        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, tmp, src);
        tcg_gen_sub_i32(dest, tmp, src);
        set_cc_op(s, CC_OP_SUBB + opsize);
    }
    gen_update_cc_add(dest, src, opsize);
    if (insn & 0x100) {
        DEST_EA(env, insn, opsize, dest, &addr);
    } else {
        gen_partset_reg(opsize, DREG(insn, 9), dest);
    }
    tcg_temp_free(dest);
}
1937 
1938 /* Reverse the order of the bits in REG.  */
1939 DISAS_INSN(bitrev)
1940 {
1941     TCGv reg;
1942     reg = DREG(insn, 0);
1943     gen_helper_bitrev(reg, reg);
1944 }
1945 
1946 DISAS_INSN(bitop_reg)
1947 {
1948     int opsize;
1949     int op;
1950     TCGv src1;
1951     TCGv src2;
1952     TCGv tmp;
1953     TCGv addr;
1954     TCGv dest;
1955 
1956     if ((insn & 0x38) != 0)
1957         opsize = OS_BYTE;
1958     else
1959         opsize = OS_LONG;
1960     op = (insn >> 6) & 3;
1961     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
1962 
1963     gen_flush_flags(s);
1964     src2 = tcg_temp_new();
1965     if (opsize == OS_BYTE)
1966         tcg_gen_andi_i32(src2, DREG(insn, 9), 7);
1967     else
1968         tcg_gen_andi_i32(src2, DREG(insn, 9), 31);
1969 
1970     tmp = tcg_const_i32(1);
1971     tcg_gen_shl_i32(tmp, tmp, src2);
1972     tcg_temp_free(src2);
1973 
1974     tcg_gen_and_i32(QREG_CC_Z, src1, tmp);
1975 
1976     dest = tcg_temp_new();
1977     switch (op) {
1978     case 1: /* bchg */
1979         tcg_gen_xor_i32(dest, src1, tmp);
1980         break;
1981     case 2: /* bclr */
1982         tcg_gen_andc_i32(dest, src1, tmp);
1983         break;
1984     case 3: /* bset */
1985         tcg_gen_or_i32(dest, src1, tmp);
1986         break;
1987     default: /* btst */
1988         break;
1989     }
1990     tcg_temp_free(tmp);
1991     if (op) {
1992         DEST_EA(env, insn, opsize, dest, &addr);
1993     }
1994     tcg_temp_free(dest);
1995 }
1996 
1997 DISAS_INSN(sats)
1998 {
1999     TCGv reg;
2000     reg = DREG(insn, 0);
2001     gen_flush_flags(s);
2002     gen_helper_sats(reg, reg, QREG_CC_V);
2003     gen_logic_cc(s, reg, OS_LONG);
2004 }
2005 
/* Push the long word VAL onto the stack, pre-decrementing SP by 4.  */
static void gen_push(DisasContext *s, TCGv val)
{
    TCGv tmp;

    tmp = tcg_temp_new();
    tcg_gen_subi_i32(tmp, QREG_SP, 4);
    /* Perform the store first so SP is unmodified if the store faults. */
    gen_store(s, OS_LONG, tmp, val, IS_USER(s));
    tcg_gen_mov_i32(QREG_SP, tmp);
    tcg_temp_free(tmp);
}
2016 
2017 static TCGv mreg(int reg)
2018 {
2019     if (reg < 8) {
2020         /* Dx */
2021         return cpu_dregs[reg];
2022     }
2023     /* Ax */
2024     return cpu_aregs[reg & 7];
2025 }
2026 
/*
 * movem: move multiple registers to or from memory.  A 16-bit mask
 * extension word selects the registers: bits 0-7 are D0-D7 and bits
 * 8-15 are A0-A7 (for pre-decrement mode the mask is scanned from the
 * opposite end below, which implements the hardware's reversed mask).
 */
DISAS_INSN(movem)
{
    TCGv addr, incr, tmp, r[16];
    int is_load = (insn & 0x0400) != 0;
    int opsize = (insn & 0x40) != 0 ? OS_LONG : OS_WORD;
    uint16_t mask = read_im16(env, s);
    int mode = extract32(insn, 3, 3);
    int reg0 = REG(insn, 0);
    int i;

    tmp = cpu_aregs[reg0];

    switch (mode) {
    case 0: /* data register direct */
    case 1: /* addr register direct */
    do_addr_fault:
        gen_addr_fault(s);
        return;

    case 2: /* indirect */
        break;

    case 3: /* indirect post-increment */
        if (!is_load) {
            /* post-increment is not allowed */
            goto do_addr_fault;
        }
        break;

    case 4: /* indirect pre-decrement */
        if (is_load) {
            /* pre-decrement is not allowed */
            goto do_addr_fault;
        }
        /*
         * We want a bare copy of the address reg, without any pre-decrement
         * adjustment, as gen_lea would provide.
         */
        break;

    default:
        tmp = gen_lea_mode(env, s, mode, reg0, opsize);
        if (IS_NULL_QREG(tmp)) {
            goto do_addr_fault;
        }
        break;
    }

    addr = tcg_temp_new();
    tcg_gen_mov_i32(addr, tmp);
    incr = tcg_const_i32(opsize_bytes(opsize));

    if (is_load) {
        /*
         * memory to register: fetch every value first, then commit
         * them, so that no register is modified before all loads have
         * completed.
         */
        for (i = 0; i < 16; i++) {
            if (mask & (1 << i)) {
                r[i] = gen_load(s, opsize, addr, 1, IS_USER(s));
                tcg_gen_add_i32(addr, addr, incr);
            }
        }
        for (i = 0; i < 16; i++) {
            if (mask & (1 << i)) {
                tcg_gen_mov_i32(mreg(i), r[i]);
                tcg_temp_free(r[i]);
            }
        }
        if (mode == 3) {
            /* post-increment: movem (An)+,X */
            tcg_gen_mov_i32(cpu_aregs[reg0], addr);
        }
    } else {
        /* register to memory */
        if (mode == 4) {
            /* pre-decrement: movem X,-(An); stores run A7 down to D0 */
            for (i = 15; i >= 0; i--) {
                if ((mask << i) & 0x8000) {
                    tcg_gen_sub_i32(addr, addr, incr);
                    if (reg0 + 8 == i &&
                        m68k_feature(s->env, M68K_FEATURE_EXT_FULL)) {
                        /*
                         * M68020+: if the addressing register is the
                         * register moved to memory, the value written
                         * is the initial value decremented by the size of
                         * the operation, regardless of how many actual
                         * stores have been performed until this point.
                         * M68000/M68010: the value is the initial value.
                         */
                        tmp = tcg_temp_new();
                        tcg_gen_sub_i32(tmp, cpu_aregs[reg0], incr);
                        gen_store(s, opsize, addr, tmp, IS_USER(s));
                        tcg_temp_free(tmp);
                    } else {
                        gen_store(s, opsize, addr, mreg(i), IS_USER(s));
                    }
                }
            }
            tcg_gen_mov_i32(cpu_aregs[reg0], addr);
        } else {
            for (i = 0; i < 16; i++) {
                if (mask & (1 << i)) {
                    gen_store(s, opsize, addr, mreg(i), IS_USER(s));
                    tcg_gen_add_i32(addr, addr, incr);
                }
            }
        }
    }

    tcg_temp_free(incr);
    tcg_temp_free(addr);
}
2137 
/*
 * movep: transfer a word (2 bytes) or long (4 bytes) between a data
 * register and alternate memory bytes at d16(An), most significant
 * byte first, advancing the address by 2 between accesses.
 * Bit 6 of the insn selects long vs word; bit 7 selects the
 * register-to-memory direction.
 */
DISAS_INSN(movep)
{
    uint8_t i;
    int16_t displ;
    TCGv reg;
    TCGv addr;
    TCGv abuf;
    TCGv dbuf;

    displ = read_im16(env, s);

    addr = AREG(insn, 0);
    reg = DREG(insn, 9);

    abuf = tcg_temp_new();
    tcg_gen_addi_i32(abuf, addr, displ);
    dbuf = tcg_temp_new();

    /* i counts the bytes still to transfer (4 for long, 2 for word). */
    if (insn & 0x40) {
        i = 4;
    } else {
        i = 2;
    }

    if (insn & 0x80) {
        /* register to memory: emit bytes high-to-low at even strides */
        for ( ; i > 0 ; i--) {
            tcg_gen_shri_i32(dbuf, reg, (i - 1) * 8);
            tcg_gen_qemu_st8(dbuf, abuf, IS_USER(s));
            if (i > 1) {
                tcg_gen_addi_i32(abuf, abuf, 2);
            }
        }
    } else {
        /* memory to register: deposit each byte into its final position */
        for ( ; i > 0 ; i--) {
            tcg_gen_qemu_ld8u(dbuf, abuf, IS_USER(s));
            tcg_gen_deposit_i32(reg, reg, dbuf, (i - 1) * 8, 8);
            if (i > 1) {
                tcg_gen_addi_i32(abuf, abuf, 2);
            }
        }
    }
    tcg_temp_free(abuf);
    tcg_temp_free(dbuf);
}
2182 
2183 DISAS_INSN(bitop_im)
2184 {
2185     int opsize;
2186     int op;
2187     TCGv src1;
2188     uint32_t mask;
2189     int bitnum;
2190     TCGv tmp;
2191     TCGv addr;
2192 
2193     if ((insn & 0x38) != 0)
2194         opsize = OS_BYTE;
2195     else
2196         opsize = OS_LONG;
2197     op = (insn >> 6) & 3;
2198 
2199     bitnum = read_im16(env, s);
2200     if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
2201         if (bitnum & 0xfe00) {
2202             disas_undef(env, s, insn);
2203             return;
2204         }
2205     } else {
2206         if (bitnum & 0xff00) {
2207             disas_undef(env, s, insn);
2208             return;
2209         }
2210     }
2211 
2212     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
2213 
2214     gen_flush_flags(s);
2215     if (opsize == OS_BYTE)
2216         bitnum &= 7;
2217     else
2218         bitnum &= 31;
2219     mask = 1 << bitnum;
2220 
2221    tcg_gen_andi_i32(QREG_CC_Z, src1, mask);
2222 
2223     if (op) {
2224         tmp = tcg_temp_new();
2225         switch (op) {
2226         case 1: /* bchg */
2227             tcg_gen_xori_i32(tmp, src1, mask);
2228             break;
2229         case 2: /* bclr */
2230             tcg_gen_andi_i32(tmp, src1, ~mask);
2231             break;
2232         case 3: /* bset */
2233             tcg_gen_ori_i32(tmp, src1, mask);
2234             break;
2235         default: /* btst */
2236             break;
2237         }
2238         DEST_EA(env, insn, opsize, tmp, &addr);
2239         tcg_temp_free(tmp);
2240     }
2241 }
2242 
/*
 * Return a new temporary holding the CCR value, computed by helper
 * from the live condition-code state.  Caller frees the temporary.
 */
static TCGv gen_get_ccr(DisasContext *s)
{
    TCGv dest;

    update_cc_op(s); /* the helper reads env->cc_op */
    dest = tcg_temp_new();
    gen_helper_get_ccr(dest, cpu_env);
    return dest;
}
2252 
/*
 * Return a new temporary holding the full SR: the supervisor bits from
 * QREG_SR combined with the freshly computed CCR.  Caller frees it.
 */
static TCGv gen_get_sr(DisasContext *s)
{
    TCGv ccr;
    TCGv sr;

    ccr = gen_get_ccr(s);
    sr = tcg_temp_new();
    tcg_gen_andi_i32(sr, QREG_SR, 0xffe0); /* drop the stale CCR bits */
    tcg_gen_or_i32(sr, sr, ccr);
    tcg_temp_free(ccr);
    return sr;
}
2265 
/*
 * Load an immediate value into CCR (ccr_only) or the whole SR.
 * For CCR the flag registers are set directly; note V and N use the
 * all-ones/-all-zeroes encoding while Z stores the *inverted* bit.
 */
static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
{
    if (ccr_only) {
        tcg_gen_movi_i32(QREG_CC_C, val & CCF_C ? 1 : 0);
        tcg_gen_movi_i32(QREG_CC_V, val & CCF_V ? -1 : 0);
        tcg_gen_movi_i32(QREG_CC_Z, val & CCF_Z ? 0 : 1);
        tcg_gen_movi_i32(QREG_CC_N, val & CCF_N ? -1 : 0);
        tcg_gen_movi_i32(QREG_CC_X, val & CCF_X ? 1 : 0);
    } else {
        TCGv sr = tcg_const_i32(val);
        gen_helper_set_sr(cpu_env, sr);
        tcg_temp_free(sr);
    }
    set_cc_op(s, CC_OP_FLAGS);
}
2281 
/* Load a runtime value into CCR (ccr_only) or the whole SR via helper.  */
static void gen_set_sr(DisasContext *s, TCGv val, int ccr_only)
{
    if (ccr_only) {
        gen_helper_set_ccr(cpu_env, val);
    } else {
        gen_helper_set_sr(cpu_env, val);
    }
    set_cc_op(s, CC_OP_FLAGS);
}
2291 
/*
 * Common body for move-to-SR/CCR: EA mode 0x3c is the immediate form,
 * anything else is read as a word from the effective address.
 */
static void gen_move_to_sr(CPUM68KState *env, DisasContext *s, uint16_t insn,
                           bool ccr_only)
{
    if ((insn & 0x3f) == 0x3c) {
        uint16_t val;
        val = read_im16(env, s);
        gen_set_sr_im(s, val, ccr_only);
    } else {
        TCGv src;
        SRC_EA(env, src, OS_WORD, 0, NULL);
        gen_set_sr(s, src, ccr_only);
    }
}
2305 
/*
 * Immediate arithmetic/logic group: ori/andi/subi/addi/eori/cmpi #imm,<ea>,
 * selected by op = bits 11:9 (0/1/2/3/5/6 respectively).  The logical
 * forms with EA mode 0x3c instead target CCR (byte) or SR (word,
 * privileged).
 */
DISAS_INSN(arith_im)
{
    int op;
    TCGv im;
    TCGv src1;
    TCGv dest;
    TCGv addr;
    int opsize;
    bool with_SR = ((insn & 0x3f) == 0x3c);

    op = (insn >> 9) & 7;
    opsize = insn_opsize(insn);
    /* Fetch the (sign-extended) immediate operand. */
    switch (opsize) {
    case OS_BYTE:
        im = tcg_const_i32((int8_t)read_im8(env, s));
        break;
    case OS_WORD:
        im = tcg_const_i32((int16_t)read_im16(env, s));
        break;
    case OS_LONG:
        im = tcg_const_i32(read_im32(env, s));
        break;
    default:
        g_assert_not_reached();
    }

    if (with_SR) {
        /* SR/CCR can only be used with andi/eori/ori */
        if (op == 2 || op == 3 || op == 6) {
            disas_undef(env, s, insn);
            return;
        }
        switch (opsize) {
        case OS_BYTE:
            src1 = gen_get_ccr(s);
            break;
        case OS_WORD:
            if (IS_USER(s)) {
                gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
                return;
            }
            src1 = gen_get_sr(s);
            break;
        default:
            /* OS_LONG; others already g_assert_not_reached.  */
            disas_undef(env, s, insn);
            return;
        }
    } else {
        /* cmpi never writes back, so it needs no saved address. */
        SRC_EA(env, src1, opsize, 1, (op == 6) ? NULL : &addr);
    }
    dest = tcg_temp_new();
    switch (op) {
    case 0: /* ori */
        tcg_gen_or_i32(dest, src1, im);
        if (with_SR) {
            gen_set_sr(s, dest, opsize == OS_BYTE);
        } else {
            DEST_EA(env, insn, opsize, dest, &addr);
            gen_logic_cc(s, dest, opsize);
        }
        break;
    case 1: /* andi */
        tcg_gen_and_i32(dest, src1, im);
        if (with_SR) {
            gen_set_sr(s, dest, opsize == OS_BYTE);
        } else {
            DEST_EA(env, insn, opsize, dest, &addr);
            gen_logic_cc(s, dest, opsize);
        }
        break;
    case 2: /* subi */
        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, src1, im);
        tcg_gen_sub_i32(dest, src1, im);
        gen_update_cc_add(dest, im, opsize);
        set_cc_op(s, CC_OP_SUBB + opsize);
        DEST_EA(env, insn, opsize, dest, &addr);
        break;
    case 3: /* addi */
        tcg_gen_add_i32(dest, src1, im);
        gen_update_cc_add(dest, im, opsize);
        /* carry out iff the result is unsigned-less-than the addend */
        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, im);
        set_cc_op(s, CC_OP_ADDB + opsize);
        DEST_EA(env, insn, opsize, dest, &addr);
        break;
    case 5: /* eori */
        tcg_gen_xor_i32(dest, src1, im);
        if (with_SR) {
            gen_set_sr(s, dest, opsize == OS_BYTE);
        } else {
            DEST_EA(env, insn, opsize, dest, &addr);
            gen_logic_cc(s, dest, opsize);
        }
        break;
    case 6: /* cmpi */
        gen_update_cc_cmp(s, src1, im, opsize);
        break;
    default:
        abort();
    }
    tcg_temp_free(im);
    tcg_temp_free(dest);
}
2409 
/*
 * cas: single compare-and-swap Dc,Du,<ea>, implemented with an atomic
 * cmpxchg.  Flags are set as for cmp of the loaded value against Dc.
 */
DISAS_INSN(cas)
{
    int opsize;
    TCGv addr;
    uint16_t ext;
    TCGv load;
    TCGv cmp;
    MemOp opc;

    /* Size field in bits 10:9; 0 is reserved. */
    switch ((insn >> 9) & 3) {
    case 1:
        opsize = OS_BYTE;
        opc = MO_SB;
        break;
    case 2:
        opsize = OS_WORD;
        opc = MO_TESW;
        break;
    case 3:
        opsize = OS_LONG;
        opc = MO_TESL;
        break;
    default:
        g_assert_not_reached();
    }

    ext = read_im16(env, s);

    /* cas Dc,Du,<EA> */

    addr = gen_lea(env, s, insn, opsize);
    if (IS_NULL_QREG(addr)) {
        gen_addr_fault(s);
        return;
    }

    cmp = gen_extend(s, DREG(ext, 0), opsize, 1);

    /*
     * if  <EA> == Dc then
     *     <EA> = Du
     *     Dc = <EA> (because <EA> == Dc)
     * else
     *     Dc = <EA>
     */

    load = tcg_temp_new();
    tcg_gen_atomic_cmpxchg_i32(load, addr, cmp, DREG(ext, 6),
                               IS_USER(s), opc);
    /* update flags before setting cmp to load */
    gen_update_cc_cmp(s, load, cmp, opsize);
    gen_partset_reg(opsize, DREG(ext, 0), load);

    tcg_temp_free(load);

    /* Complete any pending address-register update for (An)+ / -(An). */
    switch (extract32(insn, 3, 3)) {
    case 3: /* Indirect postincrement.  */
        tcg_gen_addi_i32(AREG(insn, 0), addr, opsize_bytes(opsize));
        break;
    case 4: /* Indirect predecrement.  */
        tcg_gen_mov_i32(AREG(insn, 0), addr);
        break;
    }
}
2474 
/*
 * cas2w: double word-sized compare-and-swap on two addresses.
 * The two extension words each supply the address register (bit 15
 * selects An vs Dn), Du and Dc fields; the packed register numbers are
 * passed to the helper as a single constant.
 */
DISAS_INSN(cas2w)
{
    uint16_t ext1, ext2;
    TCGv addr1, addr2;
    TCGv regs;

    /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */

    ext1 = read_im16(env, s);

    if (ext1 & 0x8000) {
        /* Address Register */
        addr1 = AREG(ext1, 12);
    } else {
        /* Data Register */
        addr1 = DREG(ext1, 12);
    }

    ext2 = read_im16(env, s);
    if (ext2 & 0x8000) {
        /* Address Register */
        addr2 = AREG(ext2, 12);
    } else {
        /* Data Register */
        addr2 = DREG(ext2, 12);
    }

    /*
     * if (R1) == Dc1 && (R2) == Dc2 then
     *     (R1) = Du1
     *     (R2) = Du2
     * else
     *     Dc1 = (R1)
     *     Dc2 = (R2)
     */

    regs = tcg_const_i32(REG(ext2, 6) |
                         (REG(ext1, 6) << 3) |
                         (REG(ext2, 0) << 6) |
                         (REG(ext1, 0) << 9));
    if (tb_cflags(s->base.tb) & CF_PARALLEL) {
        /* No parallel helper; retry the TB outside the parallel context. */
        gen_helper_exit_atomic(cpu_env);
    } else {
        gen_helper_cas2w(cpu_env, regs, addr1, addr2);
    }
    tcg_temp_free(regs);

    /* Note that cas2w also assigned to env->cc_op.  */
    s->cc_op = CC_OP_CMPW;
    s->cc_op_synced = 1;
}
2526 
/*
 * cas2l: double long-sized compare-and-swap on two addresses; same
 * extension-word layout as cas2w, but a dedicated parallel helper
 * exists for the CF_PARALLEL case.
 */
DISAS_INSN(cas2l)
{
    uint16_t ext1, ext2;
    TCGv addr1, addr2, regs;

    /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */

    ext1 = read_im16(env, s);

    if (ext1 & 0x8000) {
        /* Address Register */
        addr1 = AREG(ext1, 12);
    } else {
        /* Data Register */
        addr1 = DREG(ext1, 12);
    }

    ext2 = read_im16(env, s);
    if (ext2 & 0x8000) {
        /* Address Register */
        addr2 = AREG(ext2, 12);
    } else {
        /* Data Register */
        addr2 = DREG(ext2, 12);
    }

    /*
     * if (R1) == Dc1 && (R2) == Dc2 then
     *     (R1) = Du1
     *     (R2) = Du2
     * else
     *     Dc1 = (R1)
     *     Dc2 = (R2)
     */

    regs = tcg_const_i32(REG(ext2, 6) |
                         (REG(ext1, 6) << 3) |
                         (REG(ext2, 0) << 6) |
                         (REG(ext1, 0) << 9));
    if (tb_cflags(s->base.tb) & CF_PARALLEL) {
        gen_helper_cas2l_parallel(cpu_env, regs, addr1, addr2);
    } else {
        gen_helper_cas2l(cpu_env, regs, addr1, addr2);
    }
    tcg_temp_free(regs);

    /* Note that cas2l also assigned to env->cc_op.  */
    s->cc_op = CC_OP_CMPL;
    s->cc_op_synced = 1;
}
2577 
2578 DISAS_INSN(byterev)
2579 {
2580     TCGv reg;
2581 
2582     reg = DREG(insn, 0);
2583     tcg_gen_bswap32_i32(reg, reg);
2584 }
2585 
/*
 * move/movea: the general data-move instruction.  The destination EA
 * is encoded with its mode and register fields swapped relative to a
 * source EA, so it is re-packed before reuse of DEST_EA.  movea
 * (destination mode 1) targets an address register and sets no flags.
 */
DISAS_INSN(move)
{
    TCGv src;
    TCGv dest;
    int op;
    int opsize;

    switch (insn >> 12) {
    case 1: /* move.b */
        opsize = OS_BYTE;
        break;
    case 2: /* move.l */
        opsize = OS_LONG;
        break;
    case 3: /* move.w */
        opsize = OS_WORD;
        break;
    default:
        abort();
    }
    SRC_EA(env, src, opsize, 1, NULL);
    op = (insn >> 6) & 7;
    if (op == 1) {
        /* movea */
        /* The value will already have been sign extended.  */
        dest = AREG(insn, 9);
        tcg_gen_mov_i32(dest, src);
    } else {
        /* normal move */
        uint16_t dest_ea;
        dest_ea = ((insn >> 9) & 7) | (op << 3);
        DEST_EA(env, dest_ea, opsize, src, NULL);
        /* This will be correct because loads sign extend.  */
        gen_logic_cc(s, src, opsize);
    }
}
2622 
/*
 * negx: negate with extend, i.e. dest = 0 - (src + X).  Z is sticky
 * (only ever cleared, never set) as the architecture requires for the
 * extended-arithmetic group.
 */
DISAS_INSN(negx)
{
    TCGv z;
    TCGv src;
    TCGv addr;
    int opsize;

    opsize = insn_opsize(insn);
    SRC_EA(env, src, opsize, 1, &addr);

    gen_flush_flags(s); /* compute old Z */

    /*
     * Perform subtract with borrow.
     * (X, N) =  -(src + X);
     */

    z = tcg_const_i32(0);
    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, z, QREG_CC_X, z);
    tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, z, z, QREG_CC_N, QREG_CC_X);
    tcg_temp_free(z);
    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);

    tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);

    /*
     * Compute signed-overflow for negation.  The normal formula for
     * subtraction is (res ^ src) & (src ^ dest), but with dest==0
     * this simplifies to res & src.
     */

    tcg_gen_and_i32(QREG_CC_V, QREG_CC_N, src);

    /* Copy the rest of the results into place.  */
    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
    tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);

    set_cc_op(s, CC_OP_FLAGS);

    /* result is in QREG_CC_N */

    DEST_EA(env, insn, opsize, QREG_CC_N, &addr);
}
2666 
2667 DISAS_INSN(lea)
2668 {
2669     TCGv reg;
2670     TCGv tmp;
2671 
2672     reg = AREG(insn, 9);
2673     tmp = gen_lea(env, s, insn, OS_LONG);
2674     if (IS_NULL_QREG(tmp)) {
2675         gen_addr_fault(s);
2676         return;
2677     }
2678     tcg_gen_mov_i32(reg, tmp);
2679 }
2680 
/* clr: store zero to the destination and set flags for a zero result.  */
DISAS_INSN(clr)
{
    int opsize;
    TCGv zero;

    zero = tcg_const_i32(0);

    opsize = insn_opsize(insn);
    DEST_EA(env, insn, opsize, zero, NULL);
    gen_logic_cc(s, zero, opsize);
    tcg_temp_free(zero);
}
2693 
/* move CCR,<ea>: store the computed CCR as a word to the destination.  */
DISAS_INSN(move_from_ccr)
{
    TCGv ccr;

    ccr = gen_get_ccr(s);
    DEST_EA(env, insn, OS_WORD, ccr, NULL);
}
2701 
/* neg: two's-complement negation, with subtract-style flags and X set
 * whenever the result is non-zero (i.e. a borrow occurred).  */
DISAS_INSN(neg)
{
    TCGv src1;
    TCGv dest;
    TCGv addr;
    int opsize;

    opsize = insn_opsize(insn);
    SRC_EA(env, src1, opsize, 1, &addr);
    dest = tcg_temp_new();
    tcg_gen_neg_i32(dest, src1);
    set_cc_op(s, CC_OP_SUBB + opsize);
    gen_update_cc_add(dest, src1, opsize);
    tcg_gen_setcondi_i32(TCG_COND_NE, QREG_CC_X, dest, 0);
    DEST_EA(env, insn, opsize, dest, &addr);
    tcg_temp_free(dest);
}
2719 
/* move <ea>,CCR: delegate to the shared SR/CCR move helper.  */
DISAS_INSN(move_to_ccr)
{
    gen_move_to_sr(env, s, insn, true);
}
2724 
/* not: bitwise complement of the destination, setting NZ (V=C=0).  */
DISAS_INSN(not)
{
    TCGv src1;
    TCGv dest;
    TCGv addr;
    int opsize;

    opsize = insn_opsize(insn);
    SRC_EA(env, src1, opsize, 1, &addr);
    dest = tcg_temp_new();
    tcg_gen_not_i32(dest, src1);
    DEST_EA(env, insn, opsize, dest, &addr);
    gen_logic_cc(s, dest, opsize);
}
2739 
2740 DISAS_INSN(swap)
2741 {
2742     TCGv src1;
2743     TCGv src2;
2744     TCGv reg;
2745 
2746     src1 = tcg_temp_new();
2747     src2 = tcg_temp_new();
2748     reg = DREG(insn, 0);
2749     tcg_gen_shli_i32(src1, reg, 16);
2750     tcg_gen_shri_i32(src2, reg, 16);
2751     tcg_gen_or_i32(reg, src1, src2);
2752     tcg_temp_free(src2);
2753     tcg_temp_free(src1);
2754     gen_logic_cc(s, reg, OS_LONG);
2755 }
2756 
/* bkpt: raise a debug exception at the current instruction.  */
DISAS_INSN(bkpt)
{
    gen_exception(s, s->base.pc_next, EXCP_DEBUG);
}
2761 
2762 DISAS_INSN(pea)
2763 {
2764     TCGv tmp;
2765 
2766     tmp = gen_lea(env, s, insn, OS_LONG);
2767     if (IS_NULL_QREG(tmp)) {
2768         gen_addr_fault(s);
2769         return;
2770     }
2771     gen_push(s, tmp);
2772 }
2773 
2774 DISAS_INSN(ext)
2775 {
2776     int op;
2777     TCGv reg;
2778     TCGv tmp;
2779 
2780     reg = DREG(insn, 0);
2781     op = (insn >> 6) & 7;
2782     tmp = tcg_temp_new();
2783     if (op == 3)
2784         tcg_gen_ext16s_i32(tmp, reg);
2785     else
2786         tcg_gen_ext8s_i32(tmp, reg);
2787     if (op == 2)
2788         gen_partset_reg(OS_WORD, reg, tmp);
2789     else
2790         tcg_gen_mov_i32(reg, tmp);
2791     gen_logic_cc(s, tmp, OS_LONG);
2792     tcg_temp_free(tmp);
2793 }
2794 
/* tst: set NZ from the operand (V=C=0); no writeback.  */
DISAS_INSN(tst)
{
    int opsize;
    TCGv tmp;

    opsize = insn_opsize(insn);
    SRC_EA(env, tmp, opsize, 1, NULL);
    gen_logic_cc(s, tmp, opsize);
}
2804 
DISAS_INSN(pulse)
{
    /* Implemented as a NOP.  */
}
2809 
/* illegal: raise an illegal-instruction exception.  */
DISAS_INSN(illegal)
{
    gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
}
2814 
/*
 * tas: test-and-set the high bit of a byte operand; flags are set from
 * the original value.
 * ??? This should be atomic — the read and the write-back are emitted
 * as separate memory operations here.
 */
DISAS_INSN(tas)
{
    TCGv dest;
    TCGv src1;
    TCGv addr;

    dest = tcg_temp_new();
    SRC_EA(env, src1, OS_BYTE, 1, &addr);
    gen_logic_cc(s, src1, OS_BYTE);
    tcg_gen_ori_i32(dest, src1, 0x80);
    DEST_EA(env, insn, OS_BYTE, dest, &addr);
    tcg_temp_free(dest);
}
2829 
/*
 * mull: 32-bit multiply (muls.l/mulu.l).  Extension-word bit 11 selects
 * signed, bit 10 selects the 64-bit form (Dh:Dl result, requires the
 * QUAD_MULDIV feature).  On 680x0 the 32-bit form additionally computes
 * V from the discarded high half; on ColdFire only NZ are derived from
 * the low 32-bit product.
 */
DISAS_INSN(mull)
{
    uint16_t ext;
    TCGv src1;
    int sign;

    ext = read_im16(env, s);

    sign = ext & 0x800;

    if (ext & 0x400) {
        /* 64-bit form: Dh:Dl <- src * Dl */
        if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
            return;
        }

        SRC_EA(env, src1, OS_LONG, 0, NULL);

        if (sign) {
            tcg_gen_muls2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
        } else {
            tcg_gen_mulu2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
        }
        /* if Dl == Dh, 68040 returns low word */
        tcg_gen_mov_i32(DREG(ext, 0), QREG_CC_N);
        tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_Z);
        /* Z is set iff the whole 64-bit product is zero. */
        tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N);

        tcg_gen_movi_i32(QREG_CC_V, 0);
        tcg_gen_movi_i32(QREG_CC_C, 0);

        set_cc_op(s, CC_OP_FLAGS);
        return;
    }
    SRC_EA(env, src1, OS_LONG, 0, NULL);
    if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
        tcg_gen_movi_i32(QREG_CC_C, 0);
        if (sign) {
            tcg_gen_muls2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
            /* QREG_CC_V is -(QREG_CC_V != (QREG_CC_N >> 31)) */
            tcg_gen_sari_i32(QREG_CC_Z, QREG_CC_N, 31);
            tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_Z);
        } else {
            tcg_gen_mulu2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
            /* QREG_CC_V is -(QREG_CC_V != 0), use QREG_CC_C as 0 */
            tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_C);
        }
        tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
        tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_N);

        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);

        set_cc_op(s, CC_OP_FLAGS);
    } else {
        /*
         * The upper 32 bits of the product are discarded, so
         * muls.l and mulu.l are functionally equivalent.
         */
        tcg_gen_mul_i32(DREG(ext, 12), src1, DREG(ext, 12));
        gen_logic_cc(s, DREG(ext, 12), OS_LONG);
    }
}
2892 
/*
 * Common body for link/linkl: push An, load An with the new frame
 * pointer (the SP after the push), then add OFFSET to SP.
 */
static void gen_link(DisasContext *s, uint16_t insn, int32_t offset)
{
    TCGv reg;
    TCGv tmp;

    reg = AREG(insn, 0);
    tmp = tcg_temp_new();
    tcg_gen_subi_i32(tmp, QREG_SP, 4);
    gen_store(s, OS_LONG, tmp, reg, IS_USER(s));
    /* link a7 skips the frame-pointer update (An and SP are the same). */
    if ((insn & 7) != 7) {
        tcg_gen_mov_i32(reg, tmp);
    }
    tcg_gen_addi_i32(QREG_SP, tmp, offset);
    tcg_temp_free(tmp);
}
2908 
2909 DISAS_INSN(link)
2910 {
2911     int16_t offset;
2912 
2913     offset = read_im16(env, s);
2914     gen_link(s, insn, offset);
2915 }
2916 
2917 DISAS_INSN(linkl)
2918 {
2919     int32_t offset;
2920 
2921     offset = read_im32(env, s);
2922     gen_link(s, insn, offset);
2923 }
2924 
/*
 * unlk: SP = An + 4, An = (An).  The frame pointer is copied first so
 * the load uses the original An even when An is A7.
 */
DISAS_INSN(unlk)
{
    TCGv src;
    TCGv reg;
    TCGv tmp;

    src = tcg_temp_new();
    reg = AREG(insn, 0);
    tcg_gen_mov_i32(src, reg);
    tmp = gen_load(s, OS_LONG, src, 0, IS_USER(s));
    tcg_gen_mov_i32(reg, tmp);
    tcg_gen_addi_i32(QREG_SP, src, 4);
    tcg_temp_free(src);
    tcg_temp_free(tmp);
}
2940 
#if defined(CONFIG_SOFTMMU)
/* reset: privileged; dispatches to the system-reset helper.  */
DISAS_INSN(reset)
{
    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }

    gen_helper_reset(cpu_env);
}
#endif
2952 
/* nop: no operation.  */
DISAS_INSN(nop)
{
}
2956 
/* rtd: return and deallocate — pop the return address, then add the
 * 16-bit displacement (plus the popped 4 bytes) to SP.  */
DISAS_INSN(rtd)
{
    TCGv tmp;
    int16_t offset = read_im16(env, s);

    tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
    tcg_gen_addi_i32(QREG_SP, QREG_SP, offset + 4);
    gen_jmp(s, tmp);
}
2966 
2967 DISAS_INSN(rts)
2968 {
2969     TCGv tmp;
2970 
2971     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2972     tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
2973     gen_jmp(s, tmp);
2974 }
2975 
/* jmp/jsr: jump (and optionally push the return address) to an EA.
 * Bit 6 clear selects jsr.  */
DISAS_INSN(jump)
{
    TCGv tmp;

    /*
     * Load the target address first to ensure correct exception
     * behavior.
     */
    tmp = gen_lea(env, s, insn, OS_LONG);
    if (IS_NULL_QREG(tmp)) {
        gen_addr_fault(s);
        return;
    }
    if ((insn & 0x40) == 0) {
        /* jsr */
        gen_push(s, tcg_const_i32(s->pc));
    }
    gen_jmp(s, tmp);
}
2995 
/*
 * addq/subq: add or subtract a quick immediate (1-8, encoded in bits
 * 11:9 with 0 meaning 8).  Bit 8 selects subtract.  Address-register
 * destinations are always long-sized and leave the flags untouched.
 */
DISAS_INSN(addsubq)
{
    TCGv src;
    TCGv dest;
    TCGv val;
    int imm;
    TCGv addr;
    int opsize;

    if ((insn & 070) == 010) {
        /* Operation on address register is always long.  */
        opsize = OS_LONG;
    } else {
        opsize = insn_opsize(insn);
    }
    SRC_EA(env, src, opsize, 1, &addr);
    imm = (insn >> 9) & 7;
    if (imm == 0) {
        imm = 8;
    }
    val = tcg_const_i32(imm);
    dest = tcg_temp_new();
    tcg_gen_mov_i32(dest, src);
    if ((insn & 0x38) == 0x08) {
        /*
         * Don't update condition codes if the destination is an
         * address register.
         */
        if (insn & 0x0100) {
            tcg_gen_sub_i32(dest, dest, val);
        } else {
            tcg_gen_add_i32(dest, dest, val);
        }
    } else {
        if (insn & 0x0100) {
            tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
            tcg_gen_sub_i32(dest, dest, val);
            set_cc_op(s, CC_OP_SUBB + opsize);
        } else {
            tcg_gen_add_i32(dest, dest, val);
            /* carry out iff the sum is unsigned-less-than the addend */
            tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
            set_cc_op(s, CC_OP_ADDB + opsize);
        }
        gen_update_cc_add(dest, val, opsize);
    }
    tcg_temp_free(val);
    DEST_EA(env, insn, opsize, dest, &addr);
    tcg_temp_free(dest);
}
3045 
/*
 * tpf (trapf): a NOP that skips over its 0, 1 or 2 extension words,
 * selected by the low three bits of the opcode.
 */
DISAS_INSN(tpf)
{
    switch (insn & 7) {
    case 2: /* One extension word.  */
        s->pc += 2;
        break;
    case 3: /* Two extension words.  */
        s->pc += 4;
        break;
    case 4: /* No extension words.  */
        break;
    default:
        disas_undef(env, s, insn);
    }
}
3061 
/*
 * bra/bsr/Bcc: the branch family.  The 8-bit displacement in the
 * opcode is 0 for a 16-bit extension word and -1 (0xff) for a 32-bit
 * one.  op == 0 is bra, op == 1 is bsr, op >= 2 is a conditional Bcc.
 */
DISAS_INSN(branch)
{
    int32_t offset;
    uint32_t base;
    int op;

    /* Displacements are relative to the address after the opcode word. */
    base = s->pc;
    op = (insn >> 8) & 0xf;
    offset = (int8_t)insn;
    if (offset == 0) {
        offset = (int16_t)read_im16(env, s);
    } else if (offset == -1) {
        offset = read_im32(env, s);
    }
    if (op == 1) {
        /* bsr */
        gen_push(s, tcg_const_i32(s->pc));
    }
    if (op > 1) {
        /* Bcc */
        TCGLabel *l1 = gen_new_label();
        /* Branch on the inverted condition to the fall-through path. */
        gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
        gen_jmp_tb(s, 1, base + offset);
        gen_set_label(l1);
        gen_jmp_tb(s, 0, s->pc);
    } else {
        /* Unconditional branch.  */
        update_cc_op(s);
        gen_jmp_tb(s, 0, base + offset);
    }
}
3093 
3094 DISAS_INSN(moveq)
3095 {
3096     tcg_gen_movi_i32(DREG(insn, 9), (int8_t)insn);
3097     gen_logic_cc(s, DREG(insn, 9), OS_LONG);
3098 }
3099 
3100 DISAS_INSN(mvzs)
3101 {
3102     int opsize;
3103     TCGv src;
3104     TCGv reg;
3105 
3106     if (insn & 0x40)
3107         opsize = OS_WORD;
3108     else
3109         opsize = OS_BYTE;
3110     SRC_EA(env, src, opsize, (insn & 0x80) == 0, NULL);
3111     reg = DREG(insn, 9);
3112     tcg_gen_mov_i32(reg, src);
3113     gen_logic_cc(s, src, opsize);
3114 }
3115 
/*
 * or: bitwise OR between Dn and an EA.  Bit 8 selects the Dn,<ea>
 * direction (result to memory); otherwise the result goes to Dn,
 * written as a partial register update of the operand size.
 */
DISAS_INSN(or)
{
    TCGv reg;
    TCGv dest;
    TCGv src;
    TCGv addr;
    int opsize;

    opsize = insn_opsize(insn);
    reg = gen_extend(s, DREG(insn, 9), opsize, 0);
    dest = tcg_temp_new();
    if (insn & 0x100) {
        SRC_EA(env, src, opsize, 0, &addr);
        tcg_gen_or_i32(dest, src, reg);
        DEST_EA(env, insn, opsize, dest, &addr);
    } else {
        SRC_EA(env, src, opsize, 0, NULL);
        tcg_gen_or_i32(dest, src, reg);
        gen_partset_reg(opsize, DREG(insn, 9), dest);
    }
    gen_logic_cc(s, dest, opsize);
    tcg_temp_free(dest);
}
3139 
3140 DISAS_INSN(suba)
3141 {
3142     TCGv src;
3143     TCGv reg;
3144 
3145     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3146     reg = AREG(insn, 9);
3147     tcg_gen_sub_i32(reg, reg, src);
3148 }
3149 
/*
 * Emit code for SUBX: dest - src - X, with the CCR updated in the
 * SUBX style (Z is only cleared, never set).  The numeric result is
 * left in QREG_CC_N for the caller to write back.
 */
static inline void gen_subx(DisasContext *s, TCGv src, TCGv dest, int opsize)
{
    TCGv tmp;

    gen_flush_flags(s); /* compute old Z */

    /*
     * Perform subtract with borrow.
     * (X, N) = dest - (src + X);
     */

    tmp = tcg_const_i32(0);
    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, tmp, QREG_CC_X, tmp);
    tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, dest, tmp, QREG_CC_N, QREG_CC_X);
    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
    /* The borrow ends up in bit 0 of the high half; isolate it.  */
    tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);

    /* Compute signed-overflow for subtract.  */

    tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, dest);
    tcg_gen_xor_i32(tmp, dest, src);
    tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, tmp);
    tcg_temp_free(tmp);

    /* Copy the rest of the results into place.  */
    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
    tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);

    set_cc_op(s, CC_OP_FLAGS);

    /* result is in QREG_CC_N */
}
3182 
3183 DISAS_INSN(subx_reg)
3184 {
3185     TCGv dest;
3186     TCGv src;
3187     int opsize;
3188 
3189     opsize = insn_opsize(insn);
3190 
3191     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3192     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3193 
3194     gen_subx(s, src, dest, opsize);
3195 
3196     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3197 }
3198 
/* SUBX -(Ay),-(Ax): memory-to-memory subtract with extend.  */
DISAS_INSN(subx_mem)
{
    TCGv src;
    TCGv addr_src;
    TCGv dest;
    TCGv addr_dest;
    int opsize;

    opsize = insn_opsize(insn);

    /* Predecrement Ay, then load the source operand.  */
    addr_src = AREG(insn, 0);
    tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
    src = gen_load(s, opsize, addr_src, 1, IS_USER(s));

    /* Predecrement Ax, then load the destination operand.  */
    addr_dest = AREG(insn, 9);
    tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
    dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));

    gen_subx(s, src, dest, opsize);

    /* gen_subx left the result in QREG_CC_N; store it back to (Ax).  */
    gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));

    tcg_temp_free(dest);
    tcg_temp_free(src);
}
3224 
3225 DISAS_INSN(mov3q)
3226 {
3227     TCGv src;
3228     int val;
3229 
3230     val = (insn >> 9) & 7;
3231     if (val == 0)
3232         val = -1;
3233     src = tcg_const_i32(val);
3234     gen_logic_cc(s, src, OS_LONG);
3235     DEST_EA(env, insn, OS_LONG, src, NULL);
3236     tcg_temp_free(src);
3237 }
3238 
3239 DISAS_INSN(cmp)
3240 {
3241     TCGv src;
3242     TCGv reg;
3243     int opsize;
3244 
3245     opsize = insn_opsize(insn);
3246     SRC_EA(env, src, opsize, 1, NULL);
3247     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
3248     gen_update_cc_cmp(s, reg, src, opsize);
3249 }
3250 
3251 DISAS_INSN(cmpa)
3252 {
3253     int opsize;
3254     TCGv src;
3255     TCGv reg;
3256 
3257     if (insn & 0x100) {
3258         opsize = OS_LONG;
3259     } else {
3260         opsize = OS_WORD;
3261     }
3262     SRC_EA(env, src, opsize, 1, NULL);
3263     reg = AREG(insn, 9);
3264     gen_update_cc_cmp(s, reg, src, OS_LONG);
3265 }
3266 
/* CMPM (Ay)+,(Ax)+: compare two memory operands with post-increment.  */
DISAS_INSN(cmpm)
{
    int opsize = insn_opsize(insn);
    TCGv src, dst;

    /* Post-increment load (mode 3) from Ay.  */
    src = gen_ea_mode(env, s, 3, REG(insn, 0), opsize,
                      NULL_QREG, NULL, EA_LOADS, IS_USER(s));
    /* Post-increment load (mode 3) from Ax.  */
    dst = gen_ea_mode(env, s, 3, REG(insn, 9), opsize,
                      NULL_QREG, NULL, EA_LOADS, IS_USER(s));

    gen_update_cc_cmp(s, dst, src, opsize);
}
3281 
3282 DISAS_INSN(eor)
3283 {
3284     TCGv src;
3285     TCGv dest;
3286     TCGv addr;
3287     int opsize;
3288 
3289     opsize = insn_opsize(insn);
3290 
3291     SRC_EA(env, src, opsize, 0, &addr);
3292     dest = tcg_temp_new();
3293     tcg_gen_xor_i32(dest, src, DREG(insn, 9));
3294     gen_logic_cc(s, dest, opsize);
3295     DEST_EA(env, insn, opsize, dest, &addr);
3296     tcg_temp_free(dest);
3297 }
3298 
3299 static void do_exg(TCGv reg1, TCGv reg2)
3300 {
3301     TCGv temp = tcg_temp_new();
3302     tcg_gen_mov_i32(temp, reg1);
3303     tcg_gen_mov_i32(reg1, reg2);
3304     tcg_gen_mov_i32(reg2, temp);
3305     tcg_temp_free(temp);
3306 }
3307 
/* EXG Dx,Dy.  */
DISAS_INSN(exg_dd)
{
    /* exchange Dx and Dy */
    do_exg(DREG(insn, 9), DREG(insn, 0));
}
3313 
/* EXG Ax,Ay.  */
DISAS_INSN(exg_aa)
{
    /* exchange Ax and Ay */
    do_exg(AREG(insn, 9), AREG(insn, 0));
}
3319 
/* EXG Dx,Ay.  */
DISAS_INSN(exg_da)
{
    /* exchange Dx and Ay */
    do_exg(DREG(insn, 9), AREG(insn, 0));
}
3325 
3326 DISAS_INSN(and)
3327 {
3328     TCGv src;
3329     TCGv reg;
3330     TCGv dest;
3331     TCGv addr;
3332     int opsize;
3333 
3334     dest = tcg_temp_new();
3335 
3336     opsize = insn_opsize(insn);
3337     reg = DREG(insn, 9);
3338     if (insn & 0x100) {
3339         SRC_EA(env, src, opsize, 0, &addr);
3340         tcg_gen_and_i32(dest, src, reg);
3341         DEST_EA(env, insn, opsize, dest, &addr);
3342     } else {
3343         SRC_EA(env, src, opsize, 0, NULL);
3344         tcg_gen_and_i32(dest, src, reg);
3345         gen_partset_reg(opsize, reg, dest);
3346     }
3347     gen_logic_cc(s, dest, opsize);
3348     tcg_temp_free(dest);
3349 }
3350 
3351 DISAS_INSN(adda)
3352 {
3353     TCGv src;
3354     TCGv reg;
3355 
3356     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3357     reg = AREG(insn, 9);
3358     tcg_gen_add_i32(reg, reg, src);
3359 }
3360 
/*
 * Emit code for ADDX: dest + src + X, with the CCR updated in the
 * ADDX style (Z is only cleared, never set).  The numeric result is
 * left in QREG_CC_N for the caller to write back.
 */
static inline void gen_addx(DisasContext *s, TCGv src, TCGv dest, int opsize)
{
    TCGv tmp;

    gen_flush_flags(s); /* compute old Z */

    /*
     * Perform addition with carry.
     * (X, N) = src + dest + X;
     */

    tmp = tcg_const_i32(0);
    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_X, tmp, dest, tmp);
    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_N, QREG_CC_X, src, tmp);
    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);

    /* Compute signed-overflow for addition.  */

    tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
    tcg_gen_xor_i32(tmp, dest, src);
    tcg_gen_andc_i32(QREG_CC_V, QREG_CC_V, tmp);
    tcg_temp_free(tmp);

    /* Copy the rest of the results into place.  */
    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
    tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);

    set_cc_op(s, CC_OP_FLAGS);

    /* result is in QREG_CC_N */
}
3392 
3393 DISAS_INSN(addx_reg)
3394 {
3395     TCGv dest;
3396     TCGv src;
3397     int opsize;
3398 
3399     opsize = insn_opsize(insn);
3400 
3401     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3402     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3403 
3404     gen_addx(s, src, dest, opsize);
3405 
3406     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3407 }
3408 
/* ADDX -(Ay),-(Ax): memory-to-memory add with extend.  */
DISAS_INSN(addx_mem)
{
    TCGv src;
    TCGv addr_src;
    TCGv dest;
    TCGv addr_dest;
    int opsize;

    opsize = insn_opsize(insn);

    /* Predecrement Ay, then load the source operand.  */
    addr_src = AREG(insn, 0);
    tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
    src = gen_load(s, opsize, addr_src, 1, IS_USER(s));

    /* Predecrement Ax, then load the destination operand.  */
    addr_dest = AREG(insn, 9);
    tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
    dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));

    gen_addx(s, src, dest, opsize);

    /* gen_addx left the result in QREG_CC_N; store it back to (Ax).  */
    gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));

    tcg_temp_free(dest);
    tcg_temp_free(src);
}
3434 
/*
 * Immediate-count shifts (ASx/LSx #imm,Dx).  A count field of 0
 * encodes a count of 8, so the effective count is 1..8.
 */
static inline void shift_im(DisasContext *s, uint16_t insn, int opsize)
{
    int count = (insn >> 9) & 7;
    int logical = insn & 8;
    int left = insn & 0x100;
    int bits = opsize_bytes(opsize) * 8;
    TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);

    if (count == 0) {
        count = 8;
    }

    tcg_gen_movi_i32(QREG_CC_V, 0);
    if (left) {
        /* The last bit shifted out is bit (bits - count) of the input.  */
        tcg_gen_shri_i32(QREG_CC_C, reg, bits - count);
        tcg_gen_shli_i32(QREG_CC_N, reg, count);

        /*
         * Note that ColdFire always clears V (done above),
         * while M68000 sets if the most significant bit is changed at
         * any time during the shift operation.
         */
        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
            /* if shift count >= bits, V is (reg != 0) */
            if (count >= bits) {
                tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, reg, QREG_CC_V);
            } else {
                TCGv t0 = tcg_temp_new();
                tcg_gen_sari_i32(QREG_CC_V, reg, bits - 1);
                tcg_gen_sari_i32(t0, reg, bits - count - 1);
                tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, t0);
                tcg_temp_free(t0);
            }
            /* Convert the 0/1 setcond result to the 0/-1 flag form.  */
            tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
        }
    } else {
        /* The last bit shifted out is bit (count - 1) of the input.  */
        tcg_gen_shri_i32(QREG_CC_C, reg, count - 1);
        if (logical) {
            tcg_gen_shri_i32(QREG_CC_N, reg, count);
        } else {
            tcg_gen_sari_i32(QREG_CC_N, reg, count);
        }
    }

    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
    tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);

    gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
    set_cc_op(s, CC_OP_FLAGS);
}
3487 
/*
 * Register-count shifts (ASx/LSx Dy,Dx).  The count comes from Dy,
 * taken modulo 64; the operation is done in 64 bits so the carry
 * ("last bit shifted out") falls out naturally.
 */
static inline void shift_reg(DisasContext *s, uint16_t insn, int opsize)
{
    int logical = insn & 8;
    int left = insn & 0x100;
    int bits = opsize_bytes(opsize) * 8;
    TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
    TCGv s32;
    TCGv_i64 t64, s64;

    t64 = tcg_temp_new_i64();
    s64 = tcg_temp_new_i64();
    s32 = tcg_temp_new();

    /*
     * Note that m68k truncates the shift count modulo 64, not 32.
     * In addition, a 64-bit shift makes it easy to find "the last
     * bit shifted out", for the carry flag.
     */
    tcg_gen_andi_i32(s32, DREG(insn, 9), 63);
    tcg_gen_extu_i32_i64(s64, s32);
    tcg_gen_extu_i32_i64(t64, reg);

    /* Optimistically set V=0.  Also used as a zero source below.  */
    tcg_gen_movi_i32(QREG_CC_V, 0);
    if (left) {
        tcg_gen_shl_i64(t64, t64, s64);

        if (opsize == OS_LONG) {
            tcg_gen_extr_i64_i32(QREG_CC_N, QREG_CC_C, t64);
            /* Note that C=0 if shift count is 0, and we get that for free.  */
        } else {
            /* Sub-long: carry is at bit `bits`, forced to 0 for count 0.  */
            TCGv zero = tcg_const_i32(0);
            tcg_gen_extrl_i64_i32(QREG_CC_N, t64);
            tcg_gen_shri_i32(QREG_CC_C, QREG_CC_N, bits);
            tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
                                s32, zero, zero, QREG_CC_C);
            tcg_temp_free(zero);
        }
        tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);

        /* X = C, but only if the shift count was non-zero.  */
        tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
                            QREG_CC_C, QREG_CC_X);

        /*
         * M68000 sets V if the most significant bit is changed at
         * any time during the shift operation.  Do this via creating
         * an extension of the sign bit, comparing, and discarding
         * the bits below the sign bit.  I.e.
         *     int64_t s = (intN_t)reg;
         *     int64_t t = (int64_t)(intN_t)reg << count;
         *     V = ((s ^ t) & (-1 << (bits - 1))) != 0
         */
        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
            TCGv_i64 tt = tcg_const_i64(32);
            /* if shift is greater than 32, use 32 */
            tcg_gen_movcond_i64(TCG_COND_GT, s64, s64, tt, tt, s64);
            tcg_temp_free_i64(tt);
            /* Sign extend the input to 64 bits; re-do the shift.  */
            tcg_gen_ext_i32_i64(t64, reg);
            tcg_gen_shl_i64(s64, t64, s64);
            /* Clear all bits that are unchanged.  */
            tcg_gen_xor_i64(t64, t64, s64);
            /* Ignore the bits below the sign bit.  */
            tcg_gen_andi_i64(t64, t64, -1ULL << (bits - 1));
            /* If any bits remain set, we have overflow.  */
            tcg_gen_setcondi_i64(TCG_COND_NE, t64, t64, 0);
            tcg_gen_extrl_i64_i32(QREG_CC_V, t64);
            tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
        }
    } else {
        /* Pre-shift left by 32 so the last bit out lands in bit 31
           of the low half after the right shift.  */
        tcg_gen_shli_i64(t64, t64, 32);
        if (logical) {
            tcg_gen_shr_i64(t64, t64, s64);
        } else {
            tcg_gen_sar_i64(t64, t64, s64);
        }
        tcg_gen_extr_i64_i32(QREG_CC_C, QREG_CC_N, t64);

        /* Note that C=0 if shift count is 0, and we get that for free.  */
        tcg_gen_shri_i32(QREG_CC_C, QREG_CC_C, 31);

        /* X = C, but only if the shift count was non-zero.  */
        tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
                            QREG_CC_C, QREG_CC_X);
    }
    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);

    tcg_temp_free(s32);
    tcg_temp_free_i64(s64);
    tcg_temp_free_i64(t64);

    /* Write back the result.  */
    gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
    set_cc_op(s, CC_OP_FLAGS);
}
3585 
/* Byte-sized immediate shift.  */
DISAS_INSN(shift8_im)
{
    shift_im(s, insn, OS_BYTE);
}
3590 
/* Word-sized immediate shift.  */
DISAS_INSN(shift16_im)
{
    shift_im(s, insn, OS_WORD);
}
3595 
/* Long-sized immediate shift.  */
DISAS_INSN(shift_im)
{
    shift_im(s, insn, OS_LONG);
}
3600 
/* Byte-sized register-count shift.  */
DISAS_INSN(shift8_reg)
{
    shift_reg(s, insn, OS_BYTE);
}
3605 
/* Word-sized register-count shift.  */
DISAS_INSN(shift16_reg)
{
    shift_reg(s, insn, OS_WORD);
}
3610 
/* Long-sized register-count shift.  */
DISAS_INSN(shift_reg)
{
    shift_reg(s, insn, OS_LONG);
}
3615 
/* Memory shifts always operate on a word and always by exactly one bit.  */
DISAS_INSN(shift_mem)
{
    int logical = insn & 8;
    int left = insn & 0x100;
    TCGv src;
    TCGv addr;

    SRC_EA(env, src, OS_WORD, !logical, &addr);
    tcg_gen_movi_i32(QREG_CC_V, 0);
    if (left) {
        /* Carry is the bit shifted out of the top of the word.  */
        tcg_gen_shri_i32(QREG_CC_C, src, 15);
        tcg_gen_shli_i32(QREG_CC_N, src, 1);

        /*
         * Note that ColdFire always clears V,
         * while M68000 sets if the most significant bit is changed at
         * any time during the shift operation
         */
        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
            src = gen_extend(s, src, OS_WORD, 1);
            tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
        }
    } else {
        /* Carry is the bit shifted out at the bottom (masked below).  */
        tcg_gen_mov_i32(QREG_CC_C, src);
        if (logical) {
            tcg_gen_shri_i32(QREG_CC_N, src, 1);
        } else {
            tcg_gen_sari_i32(QREG_CC_N, src, 1);
        }
    }

    gen_ext(QREG_CC_N, QREG_CC_N, OS_WORD, 1);
    tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);

    DEST_EA(env, insn, OS_WORD, QREG_CC_N, &addr);
    set_cc_op(s, CC_OP_FLAGS);
}
3655 
/*
 * Plain rotate (ROL/ROR) and CCR update: N and Z from the result,
 * C from the bit rotated around the end, V always cleared, X
 * unchanged.  Sub-word sizes replicate the input across 32 bits so
 * a single 32-bit rotate produces the right answer.
 */
static void rotate(TCGv reg, TCGv shift, int left, int size)
{
    switch (size) {
    case 8:
        /* Replicate the 8-bit input so that a 32-bit rotate works.  */
        tcg_gen_ext8u_i32(reg, reg);
        tcg_gen_muli_i32(reg, reg, 0x01010101);
        goto do_long;
    case 16:
        /* Replicate the 16-bit input so that a 32-bit rotate works.  */
        tcg_gen_deposit_i32(reg, reg, reg, 16, 16);
        goto do_long;
    do_long:
    default:
        if (left) {
            tcg_gen_rotl_i32(reg, reg, shift);
        } else {
            tcg_gen_rotr_i32(reg, reg, shift);
        }
    }

    /* compute flags */

    switch (size) {
    case 8:
        tcg_gen_ext8s_i32(reg, reg);
        break;
    case 16:
        tcg_gen_ext16s_i32(reg, reg);
        break;
    default:
        break;
    }

    /* QREG_CC_X is not affected */

    tcg_gen_mov_i32(QREG_CC_N, reg);
    tcg_gen_mov_i32(QREG_CC_Z, reg);

    /* C is the last bit rotated around: LSB for left, MSB for right.  */
    if (left) {
        tcg_gen_andi_i32(QREG_CC_C, reg, 1);
    } else {
        tcg_gen_shri_i32(QREG_CC_C, reg, 31);
    }

    tcg_gen_movi_i32(QREG_CC_V, 0); /* always cleared */
}
3703 
3704 static void rotate_x_flags(TCGv reg, TCGv X, int size)
3705 {
3706     switch (size) {
3707     case 8:
3708         tcg_gen_ext8s_i32(reg, reg);
3709         break;
3710     case 16:
3711         tcg_gen_ext16s_i32(reg, reg);
3712         break;
3713     default:
3714         break;
3715     }
3716     tcg_gen_mov_i32(QREG_CC_N, reg);
3717     tcg_gen_mov_i32(QREG_CC_Z, reg);
3718     tcg_gen_mov_i32(QREG_CC_X, X);
3719     tcg_gen_mov_i32(QREG_CC_C, X);
3720     tcg_gen_movi_i32(QREG_CC_V, 0);
3721 }
3722 
/*
 * Rotate-through-X (ROXL/ROXR) for 8- and 16-bit operands, done as
 * a pair of shifts plus an insertion of the old X bit.  Returns a
 * new temporary holding the new X value; the caller frees it.
 * Result of rotate_x() is valid if 0 <= shift <= size.
 */
static TCGv rotate_x(TCGv reg, TCGv shift, int left, int size)
{
    TCGv X, shl, shr, shx, sz, zero;

    sz = tcg_const_i32(size);

    shr = tcg_temp_new();
    shl = tcg_temp_new();
    shx = tcg_temp_new();
    if (left) {
        tcg_gen_mov_i32(shl, shift);      /* shl = shift */
        tcg_gen_movi_i32(shr, size + 1);
        tcg_gen_sub_i32(shr, shr, shift); /* shr = size + 1 - shift */
        tcg_gen_subi_i32(shx, shift, 1);  /* shx = shift - 1 */
        /* shx = shx < 0 ? size : shx; */
        zero = tcg_const_i32(0);
        tcg_gen_movcond_i32(TCG_COND_LT, shx, shx, zero, sz, shx);
        tcg_temp_free(zero);
    } else {
        tcg_gen_mov_i32(shr, shift);      /* shr = shift */
        tcg_gen_movi_i32(shl, size + 1);
        tcg_gen_sub_i32(shl, shl, shift); /* shl = size + 1 - shift */
        tcg_gen_sub_i32(shx, sz, shift); /* shx = size - shift */
    }
    tcg_temp_free_i32(sz);

    /* reg = (reg << shl) | (reg >> shr) | (x << shx); */

    tcg_gen_shl_i32(shl, reg, shl);
    tcg_gen_shr_i32(shr, reg, shr);
    tcg_gen_or_i32(reg, shl, shr);
    tcg_temp_free(shl);
    tcg_temp_free(shr);
    tcg_gen_shl_i32(shx, QREG_CC_X, shx);
    tcg_gen_or_i32(reg, reg, shx);
    tcg_temp_free(shx);

    /* X = (reg >> size) & 1 */

    X = tcg_temp_new();
    tcg_gen_extract_i32(X, reg, size, 1);

    return X;
}
3768 
/*
 * 32-bit rotate-through-X, done as a 64-bit rotate of the value
 * concatenated with the X bit.  Returns a new temporary holding the
 * new X value; the caller frees it.
 * Result of rotate32_x() is valid if 0 <= shift < 33.
 */
static TCGv rotate32_x(TCGv reg, TCGv shift, int left)
{
    TCGv_i64 t0, shift64;
    TCGv X, lo, hi, zero;

    shift64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(shift64, shift);

    t0 = tcg_temp_new_i64();

    X = tcg_temp_new();
    lo = tcg_temp_new();
    hi = tcg_temp_new();

    if (left) {
        /* create [reg:X:..] */

        tcg_gen_shli_i32(lo, QREG_CC_X, 31);
        tcg_gen_concat_i32_i64(t0, lo, reg);

        /* rotate */

        tcg_gen_rotl_i64(t0, t0, shift64);
        tcg_temp_free_i64(shift64);

        /* result is [reg:..:reg:X] */

        tcg_gen_extr_i64_i32(lo, hi, t0);
        tcg_gen_andi_i32(X, lo, 1);

        tcg_gen_shri_i32(lo, lo, 1);
    } else {
        /* create [..:X:reg] */

        tcg_gen_concat_i32_i64(t0, reg, QREG_CC_X);

        tcg_gen_rotr_i64(t0, t0, shift64);
        tcg_temp_free_i64(shift64);

        /* result is value: [X:reg:..:reg] */

        tcg_gen_extr_i64_i32(lo, hi, t0);

        /* extract X */

        tcg_gen_shri_i32(X, hi, 31);

        /* extract result */

        tcg_gen_shli_i32(hi, hi, 1);
    }
    tcg_temp_free_i64(t0);
    tcg_gen_or_i32(lo, lo, hi);
    tcg_temp_free(hi);

    /* if shift == 0, register and X are not affected */

    zero = tcg_const_i32(0);
    tcg_gen_movcond_i32(TCG_COND_EQ, X, shift, zero, QREG_CC_X, X);
    tcg_gen_movcond_i32(TCG_COND_EQ, reg, shift, zero, reg, lo);
    tcg_temp_free(zero);
    tcg_temp_free(lo);

    return X;
}
3835 
3836 DISAS_INSN(rotate_im)
3837 {
3838     TCGv shift;
3839     int tmp;
3840     int left = (insn & 0x100);
3841 
3842     tmp = (insn >> 9) & 7;
3843     if (tmp == 0) {
3844         tmp = 8;
3845     }
3846 
3847     shift = tcg_const_i32(tmp);
3848     if (insn & 8) {
3849         rotate(DREG(insn, 0), shift, left, 32);
3850     } else {
3851         TCGv X = rotate32_x(DREG(insn, 0), shift, left);
3852         rotate_x_flags(DREG(insn, 0), X, 32);
3853         tcg_temp_free(X);
3854     }
3855     tcg_temp_free(shift);
3856 
3857     set_cc_op(s, CC_OP_FLAGS);
3858 }
3859 
3860 DISAS_INSN(rotate8_im)
3861 {
3862     int left = (insn & 0x100);
3863     TCGv reg;
3864     TCGv shift;
3865     int tmp;
3866 
3867     reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
3868 
3869     tmp = (insn >> 9) & 7;
3870     if (tmp == 0) {
3871         tmp = 8;
3872     }
3873 
3874     shift = tcg_const_i32(tmp);
3875     if (insn & 8) {
3876         rotate(reg, shift, left, 8);
3877     } else {
3878         TCGv X = rotate_x(reg, shift, left, 8);
3879         rotate_x_flags(reg, X, 8);
3880         tcg_temp_free(X);
3881     }
3882     tcg_temp_free(shift);
3883     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3884     set_cc_op(s, CC_OP_FLAGS);
3885 }
3886 
3887 DISAS_INSN(rotate16_im)
3888 {
3889     int left = (insn & 0x100);
3890     TCGv reg;
3891     TCGv shift;
3892     int tmp;
3893 
3894     reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
3895     tmp = (insn >> 9) & 7;
3896     if (tmp == 0) {
3897         tmp = 8;
3898     }
3899 
3900     shift = tcg_const_i32(tmp);
3901     if (insn & 8) {
3902         rotate(reg, shift, left, 16);
3903     } else {
3904         TCGv X = rotate_x(reg, shift, left, 16);
3905         rotate_x_flags(reg, X, 16);
3906         tcg_temp_free(X);
3907     }
3908     tcg_temp_free(shift);
3909     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3910     set_cc_op(s, CC_OP_FLAGS);
3911 }
3912 
/* ROL/ROR/ROXL/ROXR Dy,Dx (long); the count comes from Dy mod 64.  */
DISAS_INSN(rotate_reg)
{
    TCGv reg;
    TCGv src;
    TCGv t0, t1;
    int left = (insn & 0x100);

    reg = DREG(insn, 0);
    src = DREG(insn, 9);
    /* shift in [0..63] */
    t0 = tcg_temp_new();
    tcg_gen_andi_i32(t0, src, 63);
    t1 = tcg_temp_new_i32();
    if (insn & 8) {
        /* Plain rotate only needs the count mod 32; QREG_CC_V is
           known to be 0 here and doubles as a zero source.  */
        tcg_gen_andi_i32(t1, src, 31);
        rotate(reg, t1, left, 32);
        /* if shift == 0, clear C */
        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
                            t0, QREG_CC_V /* 0 */,
                            QREG_CC_V /* 0 */, QREG_CC_C);
    } else {
        TCGv X;
        /* modulo 33 */
        tcg_gen_movi_i32(t1, 33);
        tcg_gen_remu_i32(t1, t0, t1);
        X = rotate32_x(DREG(insn, 0), t1, left);
        rotate_x_flags(DREG(insn, 0), X, 32);
        tcg_temp_free(X);
    }
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    set_cc_op(s, CC_OP_FLAGS);
}
3946 
/* ROL/ROR/ROXL/ROXR Dy,Dx (byte); the count comes from Dy mod 64.  */
DISAS_INSN(rotate8_reg)
{
    TCGv reg;
    TCGv src;
    TCGv t0, t1;
    int left = (insn & 0x100);

    reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
    src = DREG(insn, 9);
    /* shift in [0..63] */
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, src, 63);
    t1 = tcg_temp_new_i32();
    if (insn & 8) {
        /* Plain rotate only needs the count mod 8; QREG_CC_V is
           known to be 0 here and doubles as a zero source.  */
        tcg_gen_andi_i32(t1, src, 7);
        rotate(reg, t1, left, 8);
        /* if shift == 0, clear C */
        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
                            t0, QREG_CC_V /* 0 */,
                            QREG_CC_V /* 0 */, QREG_CC_C);
    } else {
        TCGv X;
        /* modulo 9 */
        tcg_gen_movi_i32(t1, 9);
        tcg_gen_remu_i32(t1, t0, t1);
        X = rotate_x(reg, t1, left, 8);
        rotate_x_flags(reg, X, 8);
        tcg_temp_free(X);
    }
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
    set_cc_op(s, CC_OP_FLAGS);
}
3981 
/* ROL/ROR/ROXL/ROXR Dy,Dx (word); the count comes from Dy mod 64.  */
DISAS_INSN(rotate16_reg)
{
    TCGv reg;
    TCGv src;
    TCGv t0, t1;
    int left = (insn & 0x100);

    reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
    src = DREG(insn, 9);
    /* shift in [0..63] */
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, src, 63);
    t1 = tcg_temp_new_i32();
    if (insn & 8) {
        /* Plain rotate only needs the count mod 16; QREG_CC_V is
           known to be 0 here and doubles as a zero source.  */
        tcg_gen_andi_i32(t1, src, 15);
        rotate(reg, t1, left, 16);
        /* if shift == 0, clear C */
        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
                            t0, QREG_CC_V /* 0 */,
                            QREG_CC_V /* 0 */, QREG_CC_C);
    } else {
        TCGv X;
        /* modulo 17 */
        tcg_gen_movi_i32(t1, 17);
        tcg_gen_remu_i32(t1, t0, t1);
        X = rotate_x(reg, t1, left, 16);
        rotate_x_flags(reg, X, 16);
        tcg_temp_free(X);
    }
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
    set_cc_op(s, CC_OP_FLAGS);
}
4016 
4017 DISAS_INSN(rotate_mem)
4018 {
4019     TCGv src;
4020     TCGv addr;
4021     TCGv shift;
4022     int left = (insn & 0x100);
4023 
4024     SRC_EA(env, src, OS_WORD, 0, &addr);
4025 
4026     shift = tcg_const_i32(1);
4027     if (insn & 0x0200) {
4028         rotate(src, shift, left, 16);
4029     } else {
4030         TCGv X = rotate_x(src, shift, left, 16);
4031         rotate_x_flags(src, X, 16);
4032         tcg_temp_free(X);
4033     }
4034     tcg_temp_free(shift);
4035     DEST_EA(env, insn, OS_WORD, src, &addr);
4036     set_cc_op(s, CC_OP_FLAGS);
4037 }
4038 
/* BFEXTU/BFEXTS with a data-register operand (bit 9 selects signed).  */
DISAS_INSN(bfext_reg)
{
    int ext = read_im16(env, s);
    int is_sign = insn & 0x200;
    TCGv src = DREG(insn, 0);
    TCGv dst = DREG(ext, 12);
    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
    int pos = 32 - ofs - len;        /* little bit-endian */
    TCGv tmp = tcg_temp_new();
    TCGv shift;

    /*
     * In general, we're going to rotate the field so that it's at the
     * top of the word and then right-shift by the complement of the
     * width to extend the field.
     */
    if (ext & 0x20) {
        /* Variable width.  */
        if (ext & 0x800) {
            /* Variable offset.  */
            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
            tcg_gen_rotl_i32(tmp, src, tmp);
        } else {
            tcg_gen_rotli_i32(tmp, src, ofs);
        }

        /* shift = 32 - width (mod 32); arithmetic shift sign-extends.  */
        shift = tcg_temp_new();
        tcg_gen_neg_i32(shift, DREG(ext, 0));
        tcg_gen_andi_i32(shift, shift, 31);
        tcg_gen_sar_i32(QREG_CC_N, tmp, shift);
        if (is_sign) {
            tcg_gen_mov_i32(dst, QREG_CC_N);
        } else {
            tcg_gen_shr_i32(dst, tmp, shift);
        }
        tcg_temp_free(shift);
    } else {
        /* Immediate width.  */
        if (ext & 0x800) {
            /* Variable offset */
            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
            tcg_gen_rotl_i32(tmp, src, tmp);
            src = tmp;
            pos = 32 - len;
        } else {
            /*
             * Immediate offset.  If the field doesn't wrap around the
             * end of the word, rely on (s)extract completely.
             */
            if (pos < 0) {
                tcg_gen_rotli_i32(tmp, src, ofs);
                src = tmp;
                pos = 32 - len;
            }
        }

        /* N/Z always come from the sign-extended field.  */
        tcg_gen_sextract_i32(QREG_CC_N, src, pos, len);
        if (is_sign) {
            tcg_gen_mov_i32(dst, QREG_CC_N);
        } else {
            tcg_gen_extract_i32(dst, src, pos, len);
        }
    }

    tcg_temp_free(tmp);
    set_cc_op(s, CC_OP_LOGIC);
}
4107 
/* BFEXTU/BFEXTS with a memory operand, handled by a helper.  */
DISAS_INSN(bfext_mem)
{
    int ext = read_im16(env, s);
    int is_sign = insn & 0x200;
    TCGv dest = DREG(ext, 12);
    TCGv addr, len, ofs;

    addr = gen_lea(env, s, insn, OS_UNSIZED);
    if (IS_NULL_QREG(addr)) {
        gen_addr_fault(s);
        return;
    }

    /* Width and offset each come from a register or a 5-bit immediate.  */
    if (ext & 0x20) {
        len = DREG(ext, 0);
    } else {
        len = tcg_const_i32(extract32(ext, 0, 5));
    }
    if (ext & 0x800) {
        ofs = DREG(ext, 6);
    } else {
        ofs = tcg_const_i32(extract32(ext, 6, 5));
    }

    if (is_sign) {
        gen_helper_bfexts_mem(dest, cpu_env, addr, ofs, len);
        tcg_gen_mov_i32(QREG_CC_N, dest);
    } else {
        /* The helper packs the result and the N flag into one i64.  */
        TCGv_i64 tmp = tcg_temp_new_i64();
        gen_helper_bfextu_mem(tmp, cpu_env, addr, ofs, len);
        tcg_gen_extr_i64_i32(dest, QREG_CC_N, tmp);
        tcg_temp_free_i64(tmp);
    }
    set_cc_op(s, CC_OP_LOGIC);

    /* Only free the temporaries we allocated, not live registers.  */
    if (!(ext & 0x20)) {
        tcg_temp_free(len);
    }
    if (!(ext & 0x800)) {
        tcg_temp_free(ofs);
    }
}
4150 
/*
 * Bitfield operations (BFCHG, BFCLR, BFSET, BFTST, BFFFO) on a data
 * register.  The extension word supplies the field offset and width,
 * each either immediate or taken from a data register (ext bit 11 and
 * bit 5 respectively).  The field value is left-aligned into QREG_CC_N
 * for the flags; "mask" holds the bits *outside* the field, rotated
 * into register alignment, for the read-modify-write variants.
 */
DISAS_INSN(bfop_reg)
{
    int ext = read_im16(env, s);
    TCGv src = DREG(insn, 0);
    /* A width encoding of 0 means 32 bits.  */
    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
    TCGv mask, tofs, tlen;

    /* BFFFO additionally needs the runtime offset and width.  */
    tofs = NULL;
    tlen = NULL;
    if ((insn & 0x0f00) == 0x0d00) { /* bfffo */
        tofs = tcg_temp_new();
        tlen = tcg_temp_new();
    }

    if ((ext & 0x820) == 0) {
        /* Immediate width and offset.  */
        /* maski: bits below a left-aligned field of LEN bits.  */
        uint32_t maski = 0x7fffffffu >> (len - 1);
        if (ofs + len <= 32) {
            /* Field does not wrap: a plain shift left-aligns it.  */
            tcg_gen_shli_i32(QREG_CC_N, src, ofs);
        } else {
            /* Field wraps past bit 0: rotate to left-align it.  */
            tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
        }
        /* Clear everything below the field in the flags value.  */
        tcg_gen_andi_i32(QREG_CC_N, QREG_CC_N, ~maski);
        /* Non-field bits, rotated back into register alignment.  */
        mask = tcg_const_i32(ror32(maski, ofs));
        if (tofs) {
            tcg_gen_movi_i32(tofs, ofs);
            tcg_gen_movi_i32(tlen, len);
        }
    } else {
        TCGv tmp = tcg_temp_new();
        if (ext & 0x20) {
            /* Variable width */
            tcg_gen_subi_i32(tmp, DREG(ext, 0), 1);
            tcg_gen_andi_i32(tmp, tmp, 31);
            /* mask = 0x7fffffff >> (width - 1): bits below the field.  */
            mask = tcg_const_i32(0x7fffffffu);
            tcg_gen_shr_i32(mask, mask, tmp);
            if (tlen) {
                tcg_gen_addi_i32(tlen, tmp, 1);
            }
        } else {
            /* Immediate width */
            mask = tcg_const_i32(0x7fffffffu >> (len - 1));
            if (tlen) {
                tcg_gen_movi_i32(tlen, len);
            }
        }
        if (ext & 0x800) {
            /* Variable offset */
            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
            tcg_gen_rotl_i32(QREG_CC_N, src, tmp);
            tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
            /* Rotate the mask into register alignment.  */
            tcg_gen_rotr_i32(mask, mask, tmp);
            if (tofs) {
                tcg_gen_mov_i32(tofs, tmp);
            }
        } else {
            /* Immediate offset (and variable width) */
            tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
            tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
            tcg_gen_rotri_i32(mask, mask, ofs);
            if (tofs) {
                tcg_gen_movi_i32(tofs, ofs);
            }
        }
        tcg_temp_free(tmp);
    }
    /* N and Z come from the left-aligned field value in QREG_CC_N.  */
    set_cc_op(s, CC_OP_LOGIC);

    switch (insn & 0x0f00) {
    case 0x0a00: /* bfchg */
        /* eqv: invert field bits (mask bit 0), keep the rest.  */
        tcg_gen_eqv_i32(src, src, mask);
        break;
    case 0x0c00: /* bfclr */
        tcg_gen_and_i32(src, src, mask);
        break;
    case 0x0d00: /* bfffo */
        gen_helper_bfffo_reg(DREG(ext, 12), QREG_CC_N, tofs, tlen);
        tcg_temp_free(tlen);
        tcg_temp_free(tofs);
        break;
    case 0x0e00: /* bfset */
        /* orc: set field bits (mask bit 0), keep the rest.  */
        tcg_gen_orc_i32(src, src, mask);
        break;
    case 0x0800: /* bftst */
        /* flags already set; no other work to do.  */
        break;
    default:
        g_assert_not_reached();
    }
    tcg_temp_free(mask);
}
4243 
4244 DISAS_INSN(bfop_mem)
4245 {
4246     int ext = read_im16(env, s);
4247     TCGv addr, len, ofs;
4248     TCGv_i64 t64;
4249 
4250     addr = gen_lea(env, s, insn, OS_UNSIZED);
4251     if (IS_NULL_QREG(addr)) {
4252         gen_addr_fault(s);
4253         return;
4254     }
4255 
4256     if (ext & 0x20) {
4257         len = DREG(ext, 0);
4258     } else {
4259         len = tcg_const_i32(extract32(ext, 0, 5));
4260     }
4261     if (ext & 0x800) {
4262         ofs = DREG(ext, 6);
4263     } else {
4264         ofs = tcg_const_i32(extract32(ext, 6, 5));
4265     }
4266 
4267     switch (insn & 0x0f00) {
4268     case 0x0a00: /* bfchg */
4269         gen_helper_bfchg_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4270         break;
4271     case 0x0c00: /* bfclr */
4272         gen_helper_bfclr_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4273         break;
4274     case 0x0d00: /* bfffo */
4275         t64 = tcg_temp_new_i64();
4276         gen_helper_bfffo_mem(t64, cpu_env, addr, ofs, len);
4277         tcg_gen_extr_i64_i32(DREG(ext, 12), QREG_CC_N, t64);
4278         tcg_temp_free_i64(t64);
4279         break;
4280     case 0x0e00: /* bfset */
4281         gen_helper_bfset_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4282         break;
4283     case 0x0800: /* bftst */
4284         gen_helper_bfexts_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4285         break;
4286     default:
4287         g_assert_not_reached();
4288     }
4289     set_cc_op(s, CC_OP_LOGIC);
4290 
4291     if (!(ext & 0x20)) {
4292         tcg_temp_free(len);
4293     }
4294     if (!(ext & 0x800)) {
4295         tcg_temp_free(ofs);
4296     }
4297 }
4298 
/*
 * BFINS with a data-register destination: insert the low bits of the
 * source register into the destination's bitfield.  Offset and width
 * come from the extension word, each immediate or from a register.
 * Flags are computed from the inserted value left-aligned in QREG_CC_N.
 */
DISAS_INSN(bfins_reg)
{
    int ext = read_im16(env, s);
    TCGv dst = DREG(insn, 0);
    TCGv src = DREG(ext, 12);
    /* A width encoding of 0 means 32 bits.  */
    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
    int pos = 32 - ofs - len;        /* little bit-endian */
    TCGv tmp;

    tmp = tcg_temp_new();

    /* Left-align the source value for the flags computation.  */
    if (ext & 0x20) {
        /* Variable width */
        tcg_gen_neg_i32(tmp, DREG(ext, 0));
        tcg_gen_andi_i32(tmp, tmp, 31);
        /* shift by (32 - width) mod 32 */
        tcg_gen_shl_i32(QREG_CC_N, src, tmp);
    } else {
        /* Immediate width */
        tcg_gen_shli_i32(QREG_CC_N, src, 32 - len);
    }
    set_cc_op(s, CC_OP_LOGIC);

    /* Immediate width and offset */
    if ((ext & 0x820) == 0) {
        /* Check for suitability for deposit.  */
        if (pos >= 0) {
            /* Field does not wrap: a plain deposit works.  */
            tcg_gen_deposit_i32(dst, dst, src, pos, len);
        } else {
            /* Field wraps past bit 0: mask, rotate and merge by hand.  */
            uint32_t maski = -2U << (len - 1);   /* bits above a low field */
            uint32_t roti = (ofs + len) & 31;
            tcg_gen_andi_i32(tmp, src, ~maski);
            tcg_gen_rotri_i32(tmp, tmp, roti);
            tcg_gen_andi_i32(dst, dst, ror32(maski, roti));
            tcg_gen_or_i32(dst, dst, tmp);
        }
    } else {
        TCGv mask = tcg_temp_new();
        TCGv rot = tcg_temp_new();

        if (ext & 0x20) {
            /* Variable width */
            tcg_gen_subi_i32(rot, DREG(ext, 0), 1);
            tcg_gen_andi_i32(rot, rot, 31);
            /* mask = -2 << (width - 1): bits outside a low-aligned field */
            tcg_gen_movi_i32(mask, -2);
            tcg_gen_shl_i32(mask, mask, rot);
            /* rot now becomes the raw width for the rotation below.  */
            tcg_gen_mov_i32(rot, DREG(ext, 0));
            tcg_gen_andc_i32(tmp, src, mask);
        } else {
            /* Immediate width (variable offset) */
            uint32_t maski = -2U << (len - 1);
            tcg_gen_andi_i32(tmp, src, ~maski);
            tcg_gen_movi_i32(mask, maski);
            tcg_gen_movi_i32(rot, len & 31);
        }
        if (ext & 0x800) {
            /* Variable offset */
            tcg_gen_add_i32(rot, rot, DREG(ext, 6));
        } else {
            /* Immediate offset (variable width) */
            tcg_gen_addi_i32(rot, rot, ofs);
        }
        /* Rotate mask and value into position, then merge into dst.  */
        tcg_gen_andi_i32(rot, rot, 31);
        tcg_gen_rotr_i32(mask, mask, rot);
        tcg_gen_rotr_i32(tmp, tmp, rot);
        tcg_gen_and_i32(dst, dst, mask);
        tcg_gen_or_i32(dst, dst, tmp);

        tcg_temp_free(rot);
        tcg_temp_free(mask);
    }
    tcg_temp_free(tmp);
}
4372 
4373 DISAS_INSN(bfins_mem)
4374 {
4375     int ext = read_im16(env, s);
4376     TCGv src = DREG(ext, 12);
4377     TCGv addr, len, ofs;
4378 
4379     addr = gen_lea(env, s, insn, OS_UNSIZED);
4380     if (IS_NULL_QREG(addr)) {
4381         gen_addr_fault(s);
4382         return;
4383     }
4384 
4385     if (ext & 0x20) {
4386         len = DREG(ext, 0);
4387     } else {
4388         len = tcg_const_i32(extract32(ext, 0, 5));
4389     }
4390     if (ext & 0x800) {
4391         ofs = DREG(ext, 6);
4392     } else {
4393         ofs = tcg_const_i32(extract32(ext, 6, 5));
4394     }
4395 
4396     gen_helper_bfins_mem(QREG_CC_N, cpu_env, addr, src, ofs, len);
4397     set_cc_op(s, CC_OP_LOGIC);
4398 
4399     if (!(ext & 0x20)) {
4400         tcg_temp_free(len);
4401     }
4402     if (!(ext & 0x800)) {
4403         tcg_temp_free(ofs);
4404     }
4405 }
4406 
/*
 * FF1: find-first-one on a data register, computed by a helper.
 * Note the flags are taken from the operand value, not the result.
 */
DISAS_INSN(ff1)
{
    TCGv reg;
    reg = DREG(insn, 0);
    gen_logic_cc(s, reg, OS_LONG);
    gen_helper_ff1(reg, reg);
}
4414 
/*
 * CHK: bound-check a data register against an EA operand via helper.
 * Size field (insn bits 8:7): 3 = word; 2 = long, accepted only on
 * CPUs with the CHK2 feature; other encodings are illegal.
 */
DISAS_INSN(chk)
{
    TCGv src, reg;
    int opsize;

    switch ((insn >> 7) & 3) {
    case 3:
        opsize = OS_WORD;
        break;
    case 2:
        if (m68k_feature(env, M68K_FEATURE_CHK2)) {
            opsize = OS_LONG;
            break;
        }
        /* fallthru */
    default:
        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
        return;
    }
    /* Both operands are sign-extended to 32 bits.  */
    SRC_EA(env, src, opsize, 1, NULL);
    reg = gen_extend(s, DREG(insn, 9), opsize, 1);

    /* Flush lazy flags before the helper call (it can raise a trap).  */
    gen_flush_flags(s);
    gen_helper_chk(cpu_env, reg, src);
}
4440 
/*
 * CHK2: bound-check a register against a lower/upper bound pair loaded
 * from consecutive memory locations.  Only the CHK2 form (extension
 * word bit 11 set) is accepted here; bit 11 clear raises ILLEGAL.
 */
DISAS_INSN(chk2)
{
    uint16_t ext;
    TCGv addr1, addr2, bound1, bound2, reg;
    int opsize;

    /* Operand size from insn bits 10:9.  */
    switch ((insn >> 9) & 3) {
    case 0:
        opsize = OS_BYTE;
        break;
    case 1:
        opsize = OS_WORD;
        break;
    case 2:
        opsize = OS_LONG;
        break;
    default:
        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
        return;
    }

    ext = read_im16(env, s);
    if ((ext & 0x0800) == 0) {
        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
        return;
    }

    /*
     * NOTE(review): unlike most handlers here, the gen_lea() result is
     * not checked with IS_NULL_QREG() -- presumably the decode table
     * only routes valid addressing modes to this handler; confirm.
     */
    addr1 = gen_lea(env, s, insn, OS_UNSIZED);
    addr2 = tcg_temp_new();
    tcg_gen_addi_i32(addr2, addr1, opsize_bytes(opsize));

    /* Load both bounds, sign-extended.  */
    bound1 = gen_load(s, opsize, addr1, 1, IS_USER(s));
    tcg_temp_free(addr1);
    bound2 = gen_load(s, opsize, addr2, 1, IS_USER(s));
    tcg_temp_free(addr2);

    reg = tcg_temp_new();
    if (ext & 0x8000) {
        /* Address register: used at full width.  */
        tcg_gen_mov_i32(reg, AREG(ext, 12));
    } else {
        /* Data register: sign-extended to the operand size.  */
        gen_ext(reg, DREG(ext, 12), opsize, 1);
    }

    /* Flush lazy flags before the helper call (it can raise a trap).  */
    gen_flush_flags(s);
    gen_helper_chk2(cpu_env, reg, bound1, bound2);
    tcg_temp_free(reg);
    tcg_temp_free(bound1);
    tcg_temp_free(bound2);
}
4490 
4491 static void m68k_copy_line(TCGv dst, TCGv src, int index)
4492 {
4493     TCGv addr;
4494     TCGv_i64 t0, t1;
4495 
4496     addr = tcg_temp_new();
4497 
4498     t0 = tcg_temp_new_i64();
4499     t1 = tcg_temp_new_i64();
4500 
4501     tcg_gen_andi_i32(addr, src, ~15);
4502     tcg_gen_qemu_ld64(t0, addr, index);
4503     tcg_gen_addi_i32(addr, addr, 8);
4504     tcg_gen_qemu_ld64(t1, addr, index);
4505 
4506     tcg_gen_andi_i32(addr, dst, ~15);
4507     tcg_gen_qemu_st64(t0, addr, index);
4508     tcg_gen_addi_i32(addr, addr, 8);
4509     tcg_gen_qemu_st64(t1, addr, index);
4510 
4511     tcg_temp_free_i64(t0);
4512     tcg_temp_free_i64(t1);
4513     tcg_temp_free(addr);
4514 }
4515 
4516 DISAS_INSN(move16_reg)
4517 {
4518     int index = IS_USER(s);
4519     TCGv tmp;
4520     uint16_t ext;
4521 
4522     ext = read_im16(env, s);
4523     if ((ext & (1 << 15)) == 0) {
4524         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4525     }
4526 
4527     m68k_copy_line(AREG(ext, 12), AREG(insn, 0), index);
4528 
4529     /* Ax can be Ay, so save Ay before incrementing Ax */
4530     tmp = tcg_temp_new();
4531     tcg_gen_mov_i32(tmp, AREG(ext, 12));
4532     tcg_gen_addi_i32(AREG(insn, 0), AREG(insn, 0), 16);
4533     tcg_gen_addi_i32(AREG(ext, 12), tmp, 16);
4534     tcg_temp_free(tmp);
4535 }
4536 
4537 DISAS_INSN(move16_mem)
4538 {
4539     int index = IS_USER(s);
4540     TCGv reg, addr;
4541 
4542     reg = AREG(insn, 0);
4543     addr = tcg_const_i32(read_im32(env, s));
4544 
4545     if ((insn >> 3) & 1) {
4546         /* MOVE16 (xxx).L, (Ay) */
4547         m68k_copy_line(reg, addr, index);
4548     } else {
4549         /* MOVE16 (Ay), (xxx).L */
4550         m68k_copy_line(addr, reg, index);
4551     }
4552 
4553     tcg_temp_free(addr);
4554 
4555     if (((insn >> 3) & 2) == 0) {
4556         /* (Ay)+ */
4557         tcg_gen_addi_i32(reg, reg, 16);
4558     }
4559 }
4560 
4561 DISAS_INSN(strldsr)
4562 {
4563     uint16_t ext;
4564     uint32_t addr;
4565 
4566     addr = s->pc - 2;
4567     ext = read_im16(env, s);
4568     if (ext != 0x46FC) {
4569         gen_exception(s, addr, EXCP_ILLEGAL);
4570         return;
4571     }
4572     ext = read_im16(env, s);
4573     if (IS_USER(s) || (ext & SR_S) == 0) {
4574         gen_exception(s, addr, EXCP_PRIVILEGE);
4575         return;
4576     }
4577     gen_push(s, gen_get_sr(s));
4578     gen_set_sr_im(s, ext, 0);
4579 }
4580 
/*
 * MOVE from SR: privileged unless the CPU has the M68000 feature
 * (the original 68000 allowed it in user mode).
 */
DISAS_INSN(move_from_sr)
{
    TCGv sr;

    if (IS_USER(s) && !m68k_feature(env, M68K_FEATURE_M68000)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }
    sr = gen_get_sr(s);
    DEST_EA(env, insn, OS_WORD, sr, NULL);
}
4592 
4593 #if defined(CONFIG_SOFTMMU)
/*
 * MOVES: privileged move to/from an alternate address space, using the
 * DFC (stores) or SFC (loads) function code to pick the MMU index.
 */
DISAS_INSN(moves)
{
    int opsize;
    uint16_t ext;
    TCGv reg;
    TCGv addr;
    int extend;

    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }

    ext = read_im16(env, s);

    opsize = insn_opsize(insn);

    if (ext & 0x8000) {
        /* address register */
        reg = AREG(ext, 12);
        extend = 1;
    } else {
        /* data register */
        reg = DREG(ext, 12);
        extend = 0;
    }

    addr = gen_lea(env, s, insn, opsize);
    if (IS_NULL_QREG(addr)) {
        gen_addr_fault(s);
        return;
    }

    if (ext & 0x0800) {
        /* from reg to ea */
        gen_store(s, opsize, addr, reg, DFC_INDEX(s));
    } else {
        /* from ea to reg */
        TCGv tmp = gen_load(s, opsize, addr, 0, SFC_INDEX(s));
        if (extend) {
            /* Loads into an address register are sign-extended.  */
            gen_ext(reg, tmp, opsize, 1);
        } else {
            /* Loads into a data register only replace the low bits.  */
            gen_partset_reg(opsize, reg, tmp);
        }
        tcg_temp_free(tmp);
    }
    /* Writeback of An for (An)+ and -(An) modes is done manually here.  */
    switch (extract32(insn, 3, 3)) {
    case 3: /* Indirect postincrement.  */
        /* Byte accesses through (A7)+ bump SP by 2 to keep it even.  */
        tcg_gen_addi_i32(AREG(insn, 0), addr,
                         REG(insn, 0) == 7 && opsize == OS_BYTE
                         ? 2
                         : opsize_bytes(opsize));
        break;
    case 4: /* Indirect predecrememnt.  */
        tcg_gen_mov_i32(AREG(insn, 0), addr);
        break;
    }
}
4652 
/* MOVE to SR: privileged; the TB is ended afterwards (gen_exit_tb).  */
DISAS_INSN(move_to_sr)
{
    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }
    gen_move_to_sr(env, s, insn, false);
    gen_exit_tb(s);
}
4662 
/* MOVE USP,An: privileged; read the saved user stack pointer.  */
DISAS_INSN(move_from_usp)
{
    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }
    tcg_gen_ld_i32(AREG(insn, 0), cpu_env,
                   offsetof(CPUM68KState, sp[M68K_USP]));
}
4672 
/* MOVE An,USP: privileged; write the saved user stack pointer.  */
DISAS_INSN(move_to_usp)
{
    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }
    tcg_gen_st_i32(AREG(insn, 0), cpu_env,
                   offsetof(CPUM68KState, sp[M68K_USP]));
}
4682 
/* HALT: privileged; implemented by raising EXCP_HALT_INSN.  */
DISAS_INSN(halt)
{
    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }

    gen_exception(s, s->pc, EXCP_HALT_INSN);
}
4692 
/*
 * STOP #imm: privileged.  Load SR from the immediate word, set the
 * cpu_halted flag and raise EXCP_HLT to stop executing.
 */
DISAS_INSN(stop)
{
    uint16_t ext;

    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }

    ext = read_im16(env, s);

    gen_set_sr_im(s, ext, 0);
    tcg_gen_movi_i32(cpu_halted, 1);
    gen_exception(s, s->pc, EXCP_HLT);
}
4708 
/* RTE: privileged; implemented by raising EXCP_RTE (handled elsewhere).  */
DISAS_INSN(rte)
{
    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }
    gen_exception(s, s->base.pc_next, EXCP_RTE);
}
4717 
4718 DISAS_INSN(cf_movec)
4719 {
4720     uint16_t ext;
4721     TCGv reg;
4722 
4723     if (IS_USER(s)) {
4724         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4725         return;
4726     }
4727 
4728     ext = read_im16(env, s);
4729 
4730     if (ext & 0x8000) {
4731         reg = AREG(ext, 12);
4732     } else {
4733         reg = DREG(ext, 12);
4734     }
4735     gen_helper_cf_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4736     gen_exit_tb(s);
4737 }
4738 
4739 DISAS_INSN(m68k_movec)
4740 {
4741     uint16_t ext;
4742     TCGv reg;
4743 
4744     if (IS_USER(s)) {
4745         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4746         return;
4747     }
4748 
4749     ext = read_im16(env, s);
4750 
4751     if (ext & 0x8000) {
4752         reg = AREG(ext, 12);
4753     } else {
4754         reg = DREG(ext, 12);
4755     }
4756     if (insn & 1) {
4757         gen_helper_m68k_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4758     } else {
4759         gen_helper_m68k_movec_from(reg, cpu_env, tcg_const_i32(ext & 0xfff));
4760     }
4761     gen_exit_tb(s);
4762 }
4763 
/* INTOUCH: privileged instruction-cache touch; no work needed here.  */
DISAS_INSN(intouch)
{
    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }
    /* ICache fetch.  Implement as no-op.  */
}
4772 
/* CPUSHL: privileged cache line push/invalidate; no-op in emulation.  */
DISAS_INSN(cpushl)
{
    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }
    /* Cache push/invalidate.  Implement as no-op.  */
}
4781 
/* CPUSH: privileged cache push/invalidate; no-op in emulation.  */
DISAS_INSN(cpush)
{
    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }
    /* Cache push/invalidate.  Implement as no-op.  */
}
4790 
/* CINV: privileged cache line invalidate; no-op in emulation.  */
DISAS_INSN(cinv)
{
    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }
    /* Invalidate cache line.  Implement as no-op.  */
}
4799 
4800 #if defined(CONFIG_SOFTMMU)
/*
 * PFLUSH: privileged MMU flush; opcode bits 4:3 select the variant,
 * which is dispatched on by the helper.
 */
DISAS_INSN(pflush)
{
    TCGv opmode;

    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }

    opmode = tcg_const_i32((insn >> 3) & 3);
    gen_helper_pflush(cpu_env, AREG(insn, 0), opmode);
    tcg_temp_free(opmode);
}
4814 
/*
 * PTEST: privileged MMU address test.  Opcode bit 5 distinguishes the
 * read form from the write form; the helper does the work.
 */
DISAS_INSN(ptest)
{
    TCGv is_read;

    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }
    is_read = tcg_const_i32((insn >> 5) & 1);
    gen_helper_ptest(cpu_env, AREG(insn, 0), is_read);
    tcg_temp_free(is_read);
}
4827 #endif
4828 
/* WDDATA: always treated as a privilege violation here.  */
DISAS_INSN(wddata)
{
    gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
}
4833 
/* WDEBUG: privileged; not implemented -- aborts emulation if reached.  */
DISAS_INSN(wdebug)
{
    if (IS_USER(s)) {
        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
        return;
    }
    /* TODO: Implement wdebug.  */
    cpu_abort(env_cpu(env), "WDEBUG not implemented");
}
4843 #endif
4844 
/* TRAP #n: raise exception EXCP_TRAP0 + n, n from insn bits 3:0.  */
DISAS_INSN(trap)
{
    gen_exception(s, s->base.pc_next, EXCP_TRAP0 + (insn & 0xf));
}
4849 
4850 static void gen_load_fcr(DisasContext *s, TCGv res, int reg)
4851 {
4852     switch (reg) {
4853     case M68K_FPIAR:
4854         tcg_gen_movi_i32(res, 0);
4855         break;
4856     case M68K_FPSR:
4857         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpsr));
4858         break;
4859     case M68K_FPCR:
4860         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpcr));
4861         break;
4862     }
4863 }
4864 
4865 static void gen_store_fcr(DisasContext *s, TCGv val, int reg)
4866 {
4867     switch (reg) {
4868     case M68K_FPIAR:
4869         break;
4870     case M68K_FPSR:
4871         tcg_gen_st_i32(val, cpu_env, offsetof(CPUM68KState, fpsr));
4872         break;
4873     case M68K_FPCR:
4874         gen_helper_set_fpcr(cpu_env, val);
4875         break;
4876     }
4877 }
4878 
4879 static void gen_qemu_store_fcr(DisasContext *s, TCGv addr, int reg)
4880 {
4881     int index = IS_USER(s);
4882     TCGv tmp;
4883 
4884     tmp = tcg_temp_new();
4885     gen_load_fcr(s, tmp, reg);
4886     tcg_gen_qemu_st32(tmp, addr, index);
4887     tcg_temp_free(tmp);
4888 }
4889 
4890 static void gen_qemu_load_fcr(DisasContext *s, TCGv addr, int reg)
4891 {
4892     int index = IS_USER(s);
4893     TCGv tmp;
4894 
4895     tmp = tcg_temp_new();
4896     tcg_gen_qemu_ld32u(tmp, addr, index);
4897     gen_store_fcr(s, tmp, reg);
4898     tcg_temp_free(tmp);
4899 }
4900 
4901 
/*
 * FMOVE/FMOVEM of the floating-point control registers (FPCR, FPSR,
 * FPIAR).  "mask" has one bit per register (see the table below);
 * "is_write" means a transfer from the FPU register(s) to the EA.
 * Register operands (Dn/An) only allow a single-register transfer.
 */
static void gen_op_fmove_fcr(CPUM68KState *env, DisasContext *s,
                             uint32_t insn, uint32_t ext)
{
    int mask = (ext >> 10) & 7;
    int is_write = (ext >> 13) & 1;
    int mode = extract32(insn, 3, 3);
    int i;
    TCGv addr, tmp;

    switch (mode) {
    case 0: /* Dn */
        /* Exactly one control register may be moved to/from Dn.  */
        if (mask != M68K_FPIAR && mask != M68K_FPSR && mask != M68K_FPCR) {
            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
            return;
        }
        if (is_write) {
            gen_load_fcr(s, DREG(insn, 0), mask);
        } else {
            gen_store_fcr(s, DREG(insn, 0), mask);
        }
        return;
    case 1: /* An, only with FPIAR */
        if (mask != M68K_FPIAR) {
            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
            return;
        }
        if (is_write) {
            gen_load_fcr(s, AREG(insn, 0), mask);
        } else {
            gen_store_fcr(s, AREG(insn, 0), mask);
        }
        return;
    default:
        break;
    }

    tmp = gen_lea(env, s, insn, OS_LONG);
    if (IS_NULL_QREG(tmp)) {
        gen_addr_fault(s);
        return;
    }

    /* Work on a private copy, as the address is advanced below.  */
    addr = tcg_temp_new();
    tcg_gen_mov_i32(addr, tmp);

    /*
     * mask:
     *
     * 0b100 Floating-Point Control Register
     * 0b010 Floating-Point Status Register
     * 0b001 Floating-Point Instruction Address Register
     *
     */

    if (is_write && mode == 4) {
        /* -(An): store in descending address order, FPIAR first.  */
        for (i = 2; i >= 0; i--, mask >>= 1) {
            if (mask & 1) {
                gen_qemu_store_fcr(s, addr, 1 << i);
                if (mask != 1) {
                    tcg_gen_subi_i32(addr, addr, opsize_bytes(OS_LONG));
                }
            }
       }
       tcg_gen_mov_i32(AREG(insn, 0), addr);
    } else {
        /* Other modes: ascending order, FPCR first.  */
        for (i = 0; i < 3; i++, mask >>= 1) {
            if (mask & 1) {
                if (is_write) {
                    gen_qemu_store_fcr(s, addr, 1 << i);
                } else {
                    gen_qemu_load_fcr(s, addr, 1 << i);
                }
                /* (An)+ always advances; others stop at the last reg.  */
                if (mask != 1 || mode == 3) {
                    tcg_gen_addi_i32(addr, addr, opsize_bytes(OS_LONG));
                }
            }
        }
        if (mode == 3) {
            /* (An)+ writeback.  */
            tcg_gen_mov_i32(AREG(insn, 0), addr);
        }
    }
    tcg_temp_free_i32(addr);
}
4985 
/*
 * FMOVEM: move multiple FP data registers to/from memory.  The register
 * list is either static (low byte of the extension word) or dynamic
 * (held in a data register, ext bit 11).  Helpers perform the actual
 * transfers and return the final address, which is written back to An
 * for the predecrement/postincrement addressing modes.
 */
static void gen_op_fmovem(CPUM68KState *env, DisasContext *s,
                          uint32_t insn, uint32_t ext)
{
    int opsize;
    TCGv addr, tmp;
    int mode = (ext >> 11) & 0x3;
    int is_load = ((ext & 0x2000) == 0);

    /* Full FPU moves 96-bit extended values; otherwise doubles.  */
    if (m68k_feature(s->env, M68K_FEATURE_FPU)) {
        opsize = OS_EXTENDED;
    } else {
        opsize = OS_DOUBLE;  /* FIXME */
    }

    addr = gen_lea(env, s, insn, opsize);
    if (IS_NULL_QREG(addr)) {
        gen_addr_fault(s);
        return;
    }

    tmp = tcg_temp_new();
    if (mode & 0x1) {
        /* Dynamic register list */
        tcg_gen_ext8u_i32(tmp, DREG(ext, 4));
    } else {
        /* Static register list */
        tcg_gen_movi_i32(tmp, ext & 0xff);
    }

    if (!is_load && (mode & 2) == 0) {
        /*
         * predecrement addressing mode
         * only available to store register to memory
         */
        if (opsize == OS_EXTENDED) {
            gen_helper_fmovemx_st_predec(tmp, cpu_env, addr, tmp);
        } else {
            gen_helper_fmovemd_st_predec(tmp, cpu_env, addr, tmp);
        }
    } else {
        /* postincrement addressing mode */
        if (opsize == OS_EXTENDED) {
            if (is_load) {
                gen_helper_fmovemx_ld_postinc(tmp, cpu_env, addr, tmp);
            } else {
                gen_helper_fmovemx_st_postinc(tmp, cpu_env, addr, tmp);
            }
        } else {
            if (is_load) {
                gen_helper_fmovemd_ld_postinc(tmp, cpu_env, addr, tmp);
            } else {
                gen_helper_fmovemd_st_postinc(tmp, cpu_env, addr, tmp);
            }
        }
    }
    /* Write the final address back for (An)+ / -(An) (octal 030/040).  */
    if ((insn & 070) == 030 || (insn & 070) == 040) {
        tcg_gen_mov_i32(AREG(insn, 0), tmp);
    }
    tcg_temp_free(tmp);
}
5046 
5047 /*
5048  * ??? FP exceptions are not implemented.  Most exceptions are deferred until
5049  * immediately before the next FP instruction is executed.
5050  */
5051 DISAS_INSN(fpu)
5052 {
5053     uint16_t ext;
5054     int opmode;
5055     int opsize;
5056     TCGv_ptr cpu_src, cpu_dest;
5057 
5058     ext = read_im16(env, s);
5059     opmode = ext & 0x7f;
5060     switch ((ext >> 13) & 7) {
5061     case 0:
5062         break;
5063     case 1:
5064         goto undef;
5065     case 2:
5066         if (insn == 0xf200 && (ext & 0xfc00) == 0x5c00) {
5067             /* fmovecr */
5068             TCGv rom_offset = tcg_const_i32(opmode);
5069             cpu_dest = gen_fp_ptr(REG(ext, 7));
5070             gen_helper_fconst(cpu_env, cpu_dest, rom_offset);
5071             tcg_temp_free_ptr(cpu_dest);
5072             tcg_temp_free(rom_offset);
5073             return;
5074         }
5075         break;
5076     case 3: /* fmove out */
5077         cpu_src = gen_fp_ptr(REG(ext, 7));
5078         opsize = ext_opsize(ext, 10);
5079         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
5080                       EA_STORE, IS_USER(s)) == -1) {
5081             gen_addr_fault(s);
5082         }
5083         gen_helper_ftst(cpu_env, cpu_src);
5084         tcg_temp_free_ptr(cpu_src);
5085         return;
5086     case 4: /* fmove to control register.  */
5087     case 5: /* fmove from control register.  */
5088         gen_op_fmove_fcr(env, s, insn, ext);
5089         return;
5090     case 6: /* fmovem */
5091     case 7:
5092         if ((ext & 0x1000) == 0 && !m68k_feature(s->env, M68K_FEATURE_FPU)) {
5093             goto undef;
5094         }
5095         gen_op_fmovem(env, s, insn, ext);
5096         return;
5097     }
5098     if (ext & (1 << 14)) {
5099         /* Source effective address.  */
5100         opsize = ext_opsize(ext, 10);
5101         cpu_src = gen_fp_result_ptr();
5102         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
5103                       EA_LOADS, IS_USER(s)) == -1) {
5104             gen_addr_fault(s);
5105             return;
5106         }
5107     } else {
5108         /* Source register.  */
5109         opsize = OS_EXTENDED;
5110         cpu_src = gen_fp_ptr(REG(ext, 10));
5111     }
5112     cpu_dest = gen_fp_ptr(REG(ext, 7));
5113     switch (opmode) {
5114     case 0: /* fmove */
5115         gen_fp_move(cpu_dest, cpu_src);
5116         break;
5117     case 0x40: /* fsmove */
5118         gen_helper_fsround(cpu_env, cpu_dest, cpu_src);
5119         break;
5120     case 0x44: /* fdmove */
5121         gen_helper_fdround(cpu_env, cpu_dest, cpu_src);
5122         break;
5123     case 1: /* fint */
5124         gen_helper_firound(cpu_env, cpu_dest, cpu_src);
5125         break;
5126     case 2: /* fsinh */
5127         gen_helper_fsinh(cpu_env, cpu_dest, cpu_src);
5128         break;
5129     case 3: /* fintrz */
5130         gen_helper_fitrunc(cpu_env, cpu_dest, cpu_src);
5131         break;
5132     case 4: /* fsqrt */
5133         gen_helper_fsqrt(cpu_env, cpu_dest, cpu_src);
5134         break;
5135     case 0x41: /* fssqrt */
5136         gen_helper_fssqrt(cpu_env, cpu_dest, cpu_src);
5137         break;
5138     case 0x45: /* fdsqrt */
5139         gen_helper_fdsqrt(cpu_env, cpu_dest, cpu_src);
5140         break;
5141     case 0x06: /* flognp1 */
5142         gen_helper_flognp1(cpu_env, cpu_dest, cpu_src);
5143         break;
5144     case 0x09: /* ftanh */
5145         gen_helper_ftanh(cpu_env, cpu_dest, cpu_src);
5146         break;
5147     case 0x0a: /* fatan */
5148         gen_helper_fatan(cpu_env, cpu_dest, cpu_src);
5149         break;
5150     case 0x0c: /* fasin */
5151         gen_helper_fasin(cpu_env, cpu_dest, cpu_src);
5152         break;
5153     case 0x0d: /* fatanh */
5154         gen_helper_fatanh(cpu_env, cpu_dest, cpu_src);
5155         break;
5156     case 0x0e: /* fsin */
5157         gen_helper_fsin(cpu_env, cpu_dest, cpu_src);
5158         break;
5159     case 0x0f: /* ftan */
5160         gen_helper_ftan(cpu_env, cpu_dest, cpu_src);
5161         break;
5162     case 0x10: /* fetox */
5163         gen_helper_fetox(cpu_env, cpu_dest, cpu_src);
5164         break;
5165     case 0x11: /* ftwotox */
5166         gen_helper_ftwotox(cpu_env, cpu_dest, cpu_src);
5167         break;
5168     case 0x12: /* ftentox */
5169         gen_helper_ftentox(cpu_env, cpu_dest, cpu_src);
5170         break;
5171     case 0x14: /* flogn */
5172         gen_helper_flogn(cpu_env, cpu_dest, cpu_src);
5173         break;
5174     case 0x15: /* flog10 */
5175         gen_helper_flog10(cpu_env, cpu_dest, cpu_src);
5176         break;
5177     case 0x16: /* flog2 */
5178         gen_helper_flog2(cpu_env, cpu_dest, cpu_src);
5179         break;
5180     case 0x18: /* fabs */
5181         gen_helper_fabs(cpu_env, cpu_dest, cpu_src);
5182         break;
5183     case 0x58: /* fsabs */
5184         gen_helper_fsabs(cpu_env, cpu_dest, cpu_src);
5185         break;
5186     case 0x5c: /* fdabs */
5187         gen_helper_fdabs(cpu_env, cpu_dest, cpu_src);
5188         break;
5189     case 0x19: /* fcosh */
5190         gen_helper_fcosh(cpu_env, cpu_dest, cpu_src);
5191         break;
5192     case 0x1a: /* fneg */
5193         gen_helper_fneg(cpu_env, cpu_dest, cpu_src);
5194         break;
5195     case 0x5a: /* fsneg */
5196         gen_helper_fsneg(cpu_env, cpu_dest, cpu_src);
5197         break;
5198     case 0x5e: /* fdneg */
5199         gen_helper_fdneg(cpu_env, cpu_dest, cpu_src);
5200         break;
5201     case 0x1c: /* facos */
5202         gen_helper_facos(cpu_env, cpu_dest, cpu_src);
5203         break;
5204     case 0x1d: /* fcos */
5205         gen_helper_fcos(cpu_env, cpu_dest, cpu_src);
5206         break;
5207     case 0x1e: /* fgetexp */
5208         gen_helper_fgetexp(cpu_env, cpu_dest, cpu_src);
5209         break;
5210     case 0x1f: /* fgetman */
5211         gen_helper_fgetman(cpu_env, cpu_dest, cpu_src);
5212         break;
5213     case 0x20: /* fdiv */
5214         gen_helper_fdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5215         break;
5216     case 0x60: /* fsdiv */
5217         gen_helper_fsdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5218         break;
5219     case 0x64: /* fddiv */
5220         gen_helper_fddiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5221         break;
5222     case 0x21: /* fmod */
5223         gen_helper_fmod(cpu_env, cpu_dest, cpu_src, cpu_dest);
5224         break;
5225     case 0x22: /* fadd */
5226         gen_helper_fadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5227         break;
5228     case 0x62: /* fsadd */
5229         gen_helper_fsadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5230         break;
5231     case 0x66: /* fdadd */
5232         gen_helper_fdadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5233         break;
5234     case 0x23: /* fmul */
5235         gen_helper_fmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5236         break;
5237     case 0x63: /* fsmul */
5238         gen_helper_fsmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5239         break;
5240     case 0x67: /* fdmul */
5241         gen_helper_fdmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5242         break;
5243     case 0x24: /* fsgldiv */
5244         gen_helper_fsgldiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5245         break;
5246     case 0x25: /* frem */
5247         gen_helper_frem(cpu_env, cpu_dest, cpu_src, cpu_dest);
5248         break;
5249     case 0x26: /* fscale */
5250         gen_helper_fscale(cpu_env, cpu_dest, cpu_src, cpu_dest);
5251         break;
5252     case 0x27: /* fsglmul */
5253         gen_helper_fsglmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5254         break;
5255     case 0x28: /* fsub */
5256         gen_helper_fsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5257         break;
5258     case 0x68: /* fssub */
5259         gen_helper_fssub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5260         break;
5261     case 0x6c: /* fdsub */
5262         gen_helper_fdsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5263         break;
5264     case 0x30: case 0x31: case 0x32:
5265     case 0x33: case 0x34: case 0x35:
5266     case 0x36: case 0x37: {
5267             TCGv_ptr cpu_dest2 = gen_fp_ptr(REG(ext, 0));
5268             gen_helper_fsincos(cpu_env, cpu_dest, cpu_dest2, cpu_src);
5269             tcg_temp_free_ptr(cpu_dest2);
5270         }
5271         break;
5272     case 0x38: /* fcmp */
5273         gen_helper_fcmp(cpu_env, cpu_src, cpu_dest);
5274         return;
5275     case 0x3a: /* ftst */
5276         gen_helper_ftst(cpu_env, cpu_src);
5277         return;
5278     default:
5279         goto undef;
5280     }
5281     tcg_temp_free_ptr(cpu_src);
5282     gen_helper_ftst(cpu_env, cpu_dest);
5283     tcg_temp_free_ptr(cpu_dest);
5284     return;
5285 undef:
5286     /* FIXME: Is this right for offset addressing modes?  */
5287     s->pc -= 2;
5288     disas_undef_fpu(env, s, insn);
5289 }
5290 
/*
 * Convert the 6-bit FPU condition code 'cond' into a DisasCompare:
 * on return, (c->v1 <tcond> c->v2) is true iff the condition holds.
 * The test is built from the FPSR condition-code flags FPSR_CC_N,
 * FPSR_CC_Z and FPSR_CC_A.  Conditions 16-31 are the signaling
 * variants of 0-15 and currently generate the same test (the BSUN
 * exception is not raised yet -- see TODO below).
 */
static void gen_fcc_cond(DisasCompare *c, DisasContext *s, int cond)
{
    TCGv fpsr;

    c->g1 = 1;
    c->v2 = tcg_const_i32(0);
    c->g2 = 0;
    /* TODO: Raise BSUN exception.  */
    fpsr = tcg_temp_new();
    gen_load_fcr(s, fpsr, M68K_FPSR);
    switch (cond) {
    case 0:  /* False */
    case 16: /* Signaling False */
        c->v1 = c->v2;
        c->tcond = TCG_COND_NEVER;
        break;
    case 1:  /* EQual Z */
    case 17: /* Signaling EQual Z */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
        c->tcond = TCG_COND_NE;
        break;
    case 2:  /* Ordered Greater Than !(A || Z || N) */
    case 18: /* Greater Than !(A || Z || N) */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr,
                         FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
        c->tcond = TCG_COND_EQ;
        break;
    case 3:  /* Ordered Greater than or Equal Z || !(A || N) */
    case 19: /* Greater than or Equal Z || !(A || N) */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        /* Shift the A flag into the N bit position so they can be ORed. */
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
        tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_Z | FPSR_CC_N);
        tcg_gen_or_i32(c->v1, c->v1, fpsr);
        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 4:  /* Ordered Less Than !(!N || A || Z); */
    case 20: /* Less Than !(!N || A || Z); */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_N | FPSR_CC_A | FPSR_CC_Z);
        c->tcond = TCG_COND_EQ;
        break;
    case 5:  /* Ordered Less than or Equal Z || (N && !A) */
    case 21: /* Less than or Equal Z || (N && !A) */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        /* Align A with N, then clear N where A is set (N && !A). */
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
        tcg_gen_andc_i32(c->v1, fpsr, c->v1);
        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_Z | FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 6:  /* Ordered Greater or Less than !(A || Z) */
    case 22: /* Greater or Less than !(A || Z) */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
        c->tcond = TCG_COND_EQ;
        break;
    case 7:  /* Ordered !A */
    case 23: /* Greater, Less or Equal !A */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
        c->tcond = TCG_COND_EQ;
        break;
    case 8:  /* Unordered A */
    case 24: /* Not Greater, Less or Equal A */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
        c->tcond = TCG_COND_NE;
        break;
    case 9:  /* Unordered or Equal A || Z */
    case 25: /* Not Greater or Less then A || Z */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
        c->tcond = TCG_COND_NE;
        break;
    case 10: /* Unordered or Greater Than A || !(N || Z)) */
    case 26: /* Not Less or Equal A || !(N || Z)) */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        /* Shift the Z flag into the N bit position so they can be ORed. */
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
        tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_A | FPSR_CC_N);
        tcg_gen_or_i32(c->v1, c->v1, fpsr);
        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 11: /* Unordered or Greater or Equal A || Z || !N */
    case 27: /* Not Less Than A || Z || !N */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 12: /* Unordered or Less Than A || (N && !Z) */
    case 28: /* Not Greater than or Equal A || (N && !Z) */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        /* Align Z with N, then clear N where Z is set (N && !Z). */
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
        tcg_gen_andc_i32(c->v1, fpsr, c->v1);
        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_A | FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 13: /* Unordered or Less or Equal A || Z || N */
    case 29: /* Not Greater Than A || Z || N */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
        c->tcond = TCG_COND_NE;
        break;
    case 14: /* Not Equal !Z */
    case 30: /* Signaling Not Equal !Z */
        c->v1 = tcg_temp_new();
        c->g1 = 0;
        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
        c->tcond = TCG_COND_EQ;
        break;
    case 15: /* True */
    case 31: /* Signaling True */
        c->v1 = c->v2;
        c->tcond = TCG_COND_ALWAYS;
        break;
    }
    tcg_temp_free(fpsr);
}
5430 
5431 static void gen_fjmpcc(DisasContext *s, int cond, TCGLabel *l1)
5432 {
5433     DisasCompare c;
5434 
5435     gen_fcc_cond(&c, s, cond);
5436     update_cc_op(s);
5437     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
5438     free_cond(&c);
5439 }
5440 
5441 DISAS_INSN(fbcc)
5442 {
5443     uint32_t offset;
5444     uint32_t base;
5445     TCGLabel *l1;
5446 
5447     base = s->pc;
5448     offset = (int16_t)read_im16(env, s);
5449     if (insn & (1 << 6)) {
5450         offset = (offset << 16) | read_im16(env, s);
5451     }
5452 
5453     l1 = gen_new_label();
5454     update_cc_op(s);
5455     gen_fjmpcc(s, insn & 0x3f, l1);
5456     gen_jmp_tb(s, 0, s->pc);
5457     gen_set_label(l1);
5458     gen_jmp_tb(s, 1, base + offset);
5459 }
5460 
5461 DISAS_INSN(fscc)
5462 {
5463     DisasCompare c;
5464     int cond;
5465     TCGv tmp;
5466     uint16_t ext;
5467 
5468     ext = read_im16(env, s);
5469     cond = ext & 0x3f;
5470     gen_fcc_cond(&c, s, cond);
5471 
5472     tmp = tcg_temp_new();
5473     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
5474     free_cond(&c);
5475 
5476     tcg_gen_neg_i32(tmp, tmp);
5477     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
5478     tcg_temp_free(tmp);
5479 }
5480 
5481 #if defined(CONFIG_SOFTMMU)
5482 DISAS_INSN(frestore)
5483 {
5484     TCGv addr;
5485 
5486     if (IS_USER(s)) {
5487         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5488         return;
5489     }
5490     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5491         SRC_EA(env, addr, OS_LONG, 0, NULL);
5492         /* FIXME: check the state frame */
5493     } else {
5494         disas_undef(env, s, insn);
5495     }
5496 }
5497 
5498 DISAS_INSN(fsave)
5499 {
5500     if (IS_USER(s)) {
5501         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5502         return;
5503     }
5504 
5505     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5506         /* always write IDLE */
5507         TCGv idle = tcg_const_i32(0x41000000);
5508         DEST_EA(env, insn, OS_LONG, idle, NULL);
5509         tcg_temp_free(idle);
5510     } else {
5511         disas_undef(env, s, insn);
5512     }
5513 }
5514 #endif
5515 
5516 static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
5517 {
5518     TCGv tmp = tcg_temp_new();
5519     if (s->env->macsr & MACSR_FI) {
5520         if (upper)
5521             tcg_gen_andi_i32(tmp, val, 0xffff0000);
5522         else
5523             tcg_gen_shli_i32(tmp, val, 16);
5524     } else if (s->env->macsr & MACSR_SU) {
5525         if (upper)
5526             tcg_gen_sari_i32(tmp, val, 16);
5527         else
5528             tcg_gen_ext16s_i32(tmp, val);
5529     } else {
5530         if (upper)
5531             tcg_gen_shri_i32(tmp, val, 16);
5532         else
5533             tcg_gen_ext16u_i32(tmp, val);
5534     }
5535     return tmp;
5536 }
5537 
/* Clear the V, Z, N and EV flag bits in MACSR. */
static void gen_mac_clear_flags(void)
{
    tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
                     ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
}
5543 
/*
 * ColdFire MAC/EMAC multiply-accumulate instruction.  Multiplies two
 * register operands (optionally 16-bit halves), adds or subtracts the
 * product to/from an accumulator with saturation, and optionally
 * performs a parallel memory load with register writeback.  The dual
 * accumulate form (EMAC_B) updates a second accumulator with the same
 * product.
 */
DISAS_INSN(mac)
{
    TCGv rx;
    TCGv ry;
    uint16_t ext;
    int acc;
    TCGv tmp;
    TCGv addr;
    TCGv loadval;
    int dual;
    TCGv saved_flags;

    /* Allocate the per-TB product temporary on first use. */
    if (!s->done_mac) {
        s->mactmp = tcg_temp_new_i64();
        s->done_mac = 1;
    }

    ext = read_im16(env, s);

    /* Accumulator index: insn bit 7 is bit 0, ext bit 4 is bit 1. */
    acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
    /* Dual accumulate is only valid with the EMAC_B feature. */
    dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
    if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
        disas_undef(env, s, insn);
        return;
    }
    if (insn & 0x30) {
        /* MAC with load.  */
        tmp = gen_lea(env, s, insn, OS_LONG);
        addr = tcg_temp_new();
        tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
        /*
         * Load the value now to ensure correct exception behavior.
         * Perform writeback after reading the MAC inputs.
         */
        loadval = gen_load(s, OS_LONG, addr, 0, IS_USER(s));

        acc ^= 1;
        rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
        ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
    } else {
        loadval = addr = NULL_QREG;
        rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
        ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    }

    gen_mac_clear_flags();
#if 0
    l1 = -1;
    /* Disabled because conditional branches clobber temporary vars.  */
    if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
        /* Skip the multiply if we know we will ignore it.  */
        l1 = gen_new_label();
        tmp = tcg_temp_new();
        tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
        gen_op_jmp_nz32(tmp, l1);
    }
#endif

    /* ext bit 11 selects 32x32 (set) vs 16x16 (clear) multiply. */
    if ((ext & 0x0800) == 0) {
        /* Word.  */
        rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
        ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
    }
    if (s->env->macsr & MACSR_FI) {
        gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
    } else {
        if (s->env->macsr & MACSR_SU)
            gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
        else
            gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
        /* Optional scale factor: <<1 or >>1, from ext bits 9-10. */
        switch ((ext >> 9) & 3) {
        case 1:
            tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
            break;
        case 3:
            tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
            break;
        }
    }

    if (dual) {
        /* Save the overflow flag from the multiply.  */
        saved_flags = tcg_temp_new();
        tcg_gen_mov_i32(saved_flags, QREG_MACSR);
    } else {
        saved_flags = NULL_QREG;
    }

#if 0
    /* Disabled because conditional branches clobber temporary vars.  */
    if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
        /* Skip the accumulate if the value is already saturated.  */
        l1 = gen_new_label();
        tmp = tcg_temp_new();
        gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
        gen_op_jmp_nz32(tmp, l1);
    }
#endif

    /* insn bit 8: subtract (MSAC) instead of add (MAC). */
    if (insn & 0x100)
        tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
    else
        tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);

    /* Saturate the accumulator according to the current operand mode. */
    if (s->env->macsr & MACSR_FI)
        gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
    else if (s->env->macsr & MACSR_SU)
        gen_helper_macsats(cpu_env, tcg_const_i32(acc));
    else
        gen_helper_macsatu(cpu_env, tcg_const_i32(acc));

#if 0
    /* Disabled because conditional branches clobber temporary vars.  */
    if (l1 != -1)
        gen_set_label(l1);
#endif

    if (dual) {
        /* Dual accumulate variant.  */
        acc = (ext >> 2) & 3;
        /* Restore the overflow flag from the multiplier.  */
        tcg_gen_mov_i32(QREG_MACSR, saved_flags);
#if 0
        /* Disabled because conditional branches clobber temporary vars.  */
        if ((s->env->macsr & MACSR_OMC) != 0) {
            /* Skip the accumulate if the value is already saturated.  */
            l1 = gen_new_label();
            tmp = tcg_temp_new();
            gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
            gen_op_jmp_nz32(tmp, l1);
        }
#endif
        if (ext & 2)
            tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
        else
            tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
        if (s->env->macsr & MACSR_FI)
            gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
        else if (s->env->macsr & MACSR_SU)
            gen_helper_macsats(cpu_env, tcg_const_i32(acc));
        else
            gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
#if 0
        /* Disabled because conditional branches clobber temporary vars.  */
        if (l1 != -1)
            gen_set_label(l1);
#endif
    }
    gen_helper_mac_set_flags(cpu_env, tcg_const_i32(acc));

    if (insn & 0x30) {
        /* Complete the parallel load: writeback and address update. */
        TCGv rw;
        rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
        tcg_gen_mov_i32(rw, loadval);
        /*
         * FIXME: Should address writeback happen with the masked or
         * unmasked value?
         */
        switch ((insn >> 3) & 7) {
        case 3: /* Post-increment.  */
            tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
            break;
        case 4: /* Pre-decrement.  */
            tcg_gen_mov_i32(AREG(insn, 0), addr);
        }
        tcg_temp_free(loadval);
    }
}
5712 
5713 DISAS_INSN(from_mac)
5714 {
5715     TCGv rx;
5716     TCGv_i64 acc;
5717     int accnum;
5718 
5719     rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5720     accnum = (insn >> 9) & 3;
5721     acc = MACREG(accnum);
5722     if (s->env->macsr & MACSR_FI) {
5723         gen_helper_get_macf(rx, cpu_env, acc);
5724     } else if ((s->env->macsr & MACSR_OMC) == 0) {
5725         tcg_gen_extrl_i64_i32(rx, acc);
5726     } else if (s->env->macsr & MACSR_SU) {
5727         gen_helper_get_macs(rx, acc);
5728     } else {
5729         gen_helper_get_macu(rx, acc);
5730     }
5731     if (insn & 0x40) {
5732         tcg_gen_movi_i64(acc, 0);
5733         tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5734     }
5735 }
5736 
5737 DISAS_INSN(move_mac)
5738 {
5739     /* FIXME: This can be done without a helper.  */
5740     int src;
5741     TCGv dest;
5742     src = insn & 3;
5743     dest = tcg_const_i32((insn >> 9) & 3);
5744     gen_helper_mac_move(cpu_env, dest, tcg_const_i32(src));
5745     gen_mac_clear_flags();
5746     gen_helper_mac_set_flags(cpu_env, dest);
5747 }
5748 
5749 DISAS_INSN(from_macsr)
5750 {
5751     TCGv reg;
5752 
5753     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5754     tcg_gen_mov_i32(reg, QREG_MACSR);
5755 }
5756 
5757 DISAS_INSN(from_mask)
5758 {
5759     TCGv reg;
5760     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5761     tcg_gen_mov_i32(reg, QREG_MAC_MASK);
5762 }
5763 
5764 DISAS_INSN(from_mext)
5765 {
5766     TCGv reg;
5767     TCGv acc;
5768     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5769     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5770     if (s->env->macsr & MACSR_FI)
5771         gen_helper_get_mac_extf(reg, cpu_env, acc);
5772     else
5773         gen_helper_get_mac_exti(reg, cpu_env, acc);
5774 }
5775 
5776 DISAS_INSN(macsr_to_ccr)
5777 {
5778     TCGv tmp = tcg_temp_new();
5779     tcg_gen_andi_i32(tmp, QREG_MACSR, 0xf);
5780     gen_helper_set_sr(cpu_env, tmp);
5781     tcg_temp_free(tmp);
5782     set_cc_op(s, CC_OP_FLAGS);
5783 }
5784 
5785 DISAS_INSN(to_mac)
5786 {
5787     TCGv_i64 acc;
5788     TCGv val;
5789     int accnum;
5790     accnum = (insn >> 9) & 3;
5791     acc = MACREG(accnum);
5792     SRC_EA(env, val, OS_LONG, 0, NULL);
5793     if (s->env->macsr & MACSR_FI) {
5794         tcg_gen_ext_i32_i64(acc, val);
5795         tcg_gen_shli_i64(acc, acc, 8);
5796     } else if (s->env->macsr & MACSR_SU) {
5797         tcg_gen_ext_i32_i64(acc, val);
5798     } else {
5799         tcg_gen_extu_i32_i64(acc, val);
5800     }
5801     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5802     gen_mac_clear_flags();
5803     gen_helper_mac_set_flags(cpu_env, tcg_const_i32(accnum));
5804 }
5805 
5806 DISAS_INSN(to_macsr)
5807 {
5808     TCGv val;
5809     SRC_EA(env, val, OS_LONG, 0, NULL);
5810     gen_helper_set_macsr(cpu_env, val);
5811     gen_exit_tb(s);
5812 }
5813 
5814 DISAS_INSN(to_mask)
5815 {
5816     TCGv val;
5817     SRC_EA(env, val, OS_LONG, 0, NULL);
5818     tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
5819 }
5820 
5821 DISAS_INSN(to_mext)
5822 {
5823     TCGv val;
5824     TCGv acc;
5825     SRC_EA(env, val, OS_LONG, 0, NULL);
5826     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5827     if (s->env->macsr & MACSR_FI)
5828         gen_helper_set_mac_extf(cpu_env, val, acc);
5829     else if (s->env->macsr & MACSR_SU)
5830         gen_helper_set_mac_exts(cpu_env, val, acc);
5831     else
5832         gen_helper_set_mac_extu(cpu_env, val, acc);
5833 }
5834 
5835 static disas_proc opcode_table[65536];
5836 
5837 static void
5838 register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
5839 {
5840   int i;
5841   int from;
5842   int to;
5843 
5844   /* Sanity check.  All set bits must be included in the mask.  */
5845   if (opcode & ~mask) {
5846       fprintf(stderr,
5847               "qemu internal error: bogus opcode definition %04x/%04x\n",
5848               opcode, mask);
5849       abort();
5850   }
5851   /*
5852    * This could probably be cleverer.  For now just optimize the case where
5853    * the top bits are known.
5854    */
5855   /* Find the first zero bit in the mask.  */
5856   i = 0x8000;
5857   while ((i & mask) != 0)
5858       i >>= 1;
5859   /* Iterate over all combinations of this and lower bits.  */
5860   if (i == 0)
5861       i = 1;
5862   else
5863       i <<= 1;
5864   from = opcode & ~(i - 1);
5865   to = from + i;
5866   for (i = from; i < to; i++) {
5867       if ((i & mask) == opcode)
5868           opcode_table[i] = proc;
5869   }
5870 }
5871 
5872 /*
5873  * Register m68k opcode handlers.  Order is important.
5874  * Later insn override earlier ones.
5875  */
void register_m68k_insns (CPUM68KState *env)
{
    /*
     * Build the opcode table only once to avoid
     * multithreading issues.
     */
    if (opcode_table[0] != NULL) {
        return;
    }

    /*
     * use BASE() for instruction available
     * for CF_ISA_A and M68000.
     */
#define BASE(name, opcode, mask) \
    register_opcode(disas_##name, 0x##opcode, 0x##mask)
#define INSN(name, opcode, mask, feature) do { \
    if (m68k_feature(env, M68K_FEATURE_##feature)) \
        BASE(name, opcode, mask); \
    } while(0)
    BASE(undef,     0000, 0000);
    INSN(arith_im,  0080, fff8, CF_ISA_A);
    INSN(arith_im,  0000, ff00, M68000);
    INSN(chk2,      00c0, f9c0, CHK2);
    INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
    BASE(bitop_reg, 0100, f1c0);
    BASE(bitop_reg, 0140, f1c0);
    BASE(bitop_reg, 0180, f1c0);
    BASE(bitop_reg, 01c0, f1c0);
    INSN(movep,     0108, f138, MOVEP);
    INSN(arith_im,  0280, fff8, CF_ISA_A);
    INSN(arith_im,  0200, ff00, M68000);
    INSN(undef,     02c0, ffc0, M68000);
    INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
    INSN(arith_im,  0480, fff8, CF_ISA_A);
    INSN(arith_im,  0400, ff00, M68000);
    INSN(undef,     04c0, ffc0, M68000);
    INSN(arith_im,  0600, ff00, M68000);
    INSN(undef,     06c0, ffc0, M68000);
    INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
    INSN(arith_im,  0680, fff8, CF_ISA_A);
    INSN(arith_im,  0c00, ff38, CF_ISA_A);
    INSN(arith_im,  0c00, ff00, M68000);
    BASE(bitop_im,  0800, ffc0);
    BASE(bitop_im,  0840, ffc0);
    BASE(bitop_im,  0880, ffc0);
    BASE(bitop_im,  08c0, ffc0);
    INSN(arith_im,  0a80, fff8, CF_ISA_A);
    INSN(arith_im,  0a00, ff00, M68000);
#if defined(CONFIG_SOFTMMU)
    INSN(moves,     0e00, ff00, M68000);
#endif
    INSN(cas,       0ac0, ffc0, CAS);
    INSN(cas,       0cc0, ffc0, CAS);
    INSN(cas,       0ec0, ffc0, CAS);
    INSN(cas2w,     0cfc, ffff, CAS);
    INSN(cas2l,     0efc, ffff, CAS);
    /* Move instructions (lines 1-3).  */
    BASE(move,      1000, f000);
    BASE(move,      2000, f000);
    BASE(move,      3000, f000);
    INSN(chk,       4000, f040, M68000);
    INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
    INSN(negx,      4080, fff8, CF_ISA_A);
    INSN(negx,      4000, ff00, M68000);
    INSN(undef,     40c0, ffc0, M68000);
    INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
    INSN(move_from_sr, 40c0, ffc0, M68000);
    BASE(lea,       41c0, f1c0);
    BASE(clr,       4200, ff00);
    BASE(undef,     42c0, ffc0);
    INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
    INSN(move_from_ccr, 42c0, ffc0, M68000);
    INSN(neg,       4480, fff8, CF_ISA_A);
    INSN(neg,       4400, ff00, M68000);
    INSN(undef,     44c0, ffc0, M68000);
    BASE(move_to_ccr, 44c0, ffc0);
    INSN(not,       4680, fff8, CF_ISA_A);
    INSN(not,       4600, ff00, M68000);
#if defined(CONFIG_SOFTMMU)
    BASE(move_to_sr, 46c0, ffc0);
#endif
    INSN(nbcd,      4800, ffc0, M68000);
    INSN(linkl,     4808, fff8, M68000);
    BASE(pea,       4840, ffc0);
    BASE(swap,      4840, fff8);
    INSN(bkpt,      4848, fff8, BKPT);
    INSN(movem,     48d0, fbf8, CF_ISA_A);
    INSN(movem,     48e8, fbf8, CF_ISA_A);
    INSN(movem,     4880, fb80, M68000);
    BASE(ext,       4880, fff8);
    BASE(ext,       48c0, fff8);
    BASE(ext,       49c0, fff8);
    BASE(tst,       4a00, ff00);
    INSN(tas,       4ac0, ffc0, CF_ISA_B);
    INSN(tas,       4ac0, ffc0, M68000);
#if defined(CONFIG_SOFTMMU)
    INSN(halt,      4ac8, ffff, CF_ISA_A);
#endif
    INSN(pulse,     4acc, ffff, CF_ISA_A);
    BASE(illegal,   4afc, ffff);
    INSN(mull,      4c00, ffc0, CF_ISA_A);
    INSN(mull,      4c00, ffc0, LONG_MULDIV);
    INSN(divl,      4c40, ffc0, CF_ISA_A);
    INSN(divl,      4c40, ffc0, LONG_MULDIV);
    INSN(sats,      4c80, fff8, CF_ISA_B);
    BASE(trap,      4e40, fff0);
    BASE(link,      4e50, fff8);
    BASE(unlk,      4e58, fff8);
#if defined(CONFIG_SOFTMMU)
    INSN(move_to_usp, 4e60, fff8, USP);
    INSN(move_from_usp, 4e68, fff8, USP);
    INSN(reset,     4e70, ffff, M68000);
    BASE(stop,      4e72, ffff);
    BASE(rte,       4e73, ffff);
    INSN(cf_movec,  4e7b, ffff, CF_ISA_A);
    INSN(m68k_movec, 4e7a, fffe, M68000);
#endif
    BASE(nop,       4e71, ffff);
    INSN(rtd,       4e74, ffff, RTD);
    BASE(rts,       4e75, ffff);
    BASE(jump,      4e80, ffc0);
    BASE(jump,      4ec0, ffc0);
    INSN(addsubq,   5000, f080, M68000);
    BASE(addsubq,   5080, f0c0);
    INSN(scc,       50c0, f0f8, CF_ISA_A); /* Scc.B Dx   */
    INSN(scc,       50c0, f0c0, M68000);   /* Scc.B <EA> */
    INSN(dbcc,      50c8, f0f8, M68000);
    INSN(tpf,       51f8, fff8, CF_ISA_A);

    /* Branch instructions.  */
    BASE(branch,    6000, f000);
    /* Disable long branch instructions, then add back the ones we want.  */
    BASE(undef,     60ff, f0ff); /* All long branches.  */
    INSN(branch,    60ff, f0ff, CF_ISA_B);
    INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
    INSN(branch,    60ff, ffff, BRAL);
    INSN(branch,    60ff, f0ff, BCCL);

    BASE(moveq,     7000, f100);
    INSN(mvzs,      7100, f100, CF_ISA_B);
    BASE(or,        8000, f000);
    BASE(divw,      80c0, f0c0);
    INSN(sbcd_reg,  8100, f1f8, M68000);
    INSN(sbcd_mem,  8108, f1f8, M68000);
    BASE(addsub,    9000, f000);
    INSN(undef,     90c0, f0c0, CF_ISA_A);
    INSN(subx_reg,  9180, f1f8, CF_ISA_A);
    INSN(subx_reg,  9100, f138, M68000);
    INSN(subx_mem,  9108, f138, M68000);
    INSN(suba,      91c0, f1c0, CF_ISA_A);
    INSN(suba,      90c0, f0c0, M68000);

    /* ColdFire MAC/EMAC (line A).  */
    BASE(undef_mac, a000, f000);
    INSN(mac,       a000, f100, CF_EMAC);
    INSN(from_mac,  a180, f9b0, CF_EMAC);
    INSN(move_mac,  a110, f9fc, CF_EMAC);
    INSN(from_macsr,a980, f9f0, CF_EMAC);
    INSN(from_mask, ad80, fff0, CF_EMAC);
    INSN(from_mext, ab80, fbf0, CF_EMAC);
    INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
    INSN(to_mac,    a100, f9c0, CF_EMAC);
    INSN(to_macsr,  a900, ffc0, CF_EMAC);
    INSN(to_mext,   ab00, fbc0, CF_EMAC);
    INSN(to_mask,   ad00, ffc0, CF_EMAC);

    INSN(mov3q,     a140, f1c0, CF_ISA_B);
    INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
    INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
    INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
    INSN(cmp,       b080, f1c0, CF_ISA_A);
    INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
    INSN(cmp,       b000, f100, M68000);
    INSN(eor,       b100, f100, M68000);
    INSN(cmpm,      b108, f138, M68000);
    INSN(cmpa,      b0c0, f0c0, M68000);
    INSN(eor,       b180, f1c0, CF_ISA_A);
    BASE(and,       c000, f000);
    INSN(exg_dd,    c140, f1f8, M68000);
    INSN(exg_aa,    c148, f1f8, M68000);
    INSN(exg_da,    c188, f1f8, M68000);
    BASE(mulw,      c0c0, f0c0);
    INSN(abcd_reg,  c100, f1f8, M68000);
    INSN(abcd_mem,  c108, f1f8, M68000);
    BASE(addsub,    d000, f000);
    INSN(undef,     d0c0, f0c0, CF_ISA_A);
    INSN(addx_reg,      d180, f1f8, CF_ISA_A);
    INSN(addx_reg,  d100, f138, M68000);
    INSN(addx_mem,  d108, f138, M68000);
    INSN(adda,      d1c0, f1c0, CF_ISA_A);
    INSN(adda,      d0c0, f0c0, M68000);
    /* Shifts and rotates (line E).  */
    INSN(shift_im,  e080, f0f0, CF_ISA_A);
    INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
    INSN(shift8_im, e000, f0f0, M68000);
    INSN(shift16_im, e040, f0f0, M68000);
    INSN(shift_im,  e080, f0f0, M68000);
    INSN(shift8_reg, e020, f0f0, M68000);
    INSN(shift16_reg, e060, f0f0, M68000);
    INSN(shift_reg, e0a0, f0f0, M68000);
    INSN(shift_mem, e0c0, fcc0, M68000);
    INSN(rotate_im, e090, f0f0, M68000);
    INSN(rotate8_im, e010, f0f0, M68000);
    INSN(rotate16_im, e050, f0f0, M68000);
    INSN(rotate_reg, e0b0, f0f0, M68000);
    INSN(rotate8_reg, e030, f0f0, M68000);
    INSN(rotate16_reg, e070, f0f0, M68000);
    INSN(rotate_mem, e4c0, fcc0, M68000);
    /* Bitfield operations.  */
    INSN(bfext_mem, e9c0, fdc0, BITFIELD);  /* bfextu & bfexts */
    INSN(bfext_reg, e9c0, fdf8, BITFIELD);
    INSN(bfins_mem, efc0, ffc0, BITFIELD);
    INSN(bfins_reg, efc0, fff8, BITFIELD);
    INSN(bfop_mem, eac0, ffc0, BITFIELD);   /* bfchg */
    INSN(bfop_reg, eac0, fff8, BITFIELD);   /* bfchg */
    INSN(bfop_mem, ecc0, ffc0, BITFIELD);   /* bfclr */
    INSN(bfop_reg, ecc0, fff8, BITFIELD);   /* bfclr */
    INSN(bfop_mem, edc0, ffc0, BITFIELD);   /* bfffo */
    INSN(bfop_reg, edc0, fff8, BITFIELD);   /* bfffo */
    INSN(bfop_mem, eec0, ffc0, BITFIELD);   /* bfset */
    INSN(bfop_reg, eec0, fff8, BITFIELD);   /* bfset */
    INSN(bfop_mem, e8c0, ffc0, BITFIELD);   /* bftst */
    INSN(bfop_reg, e8c0, fff8, BITFIELD);   /* bftst */
    /* Floating point and other coprocessor insns (line F).  */
    BASE(undef_fpu, f000, f000);
    INSN(fpu,       f200, ffc0, CF_FPU);
    INSN(fbcc,      f280, ffc0, CF_FPU);
    INSN(fpu,       f200, ffc0, FPU);
    INSN(fscc,      f240, ffc0, FPU);
    INSN(fbcc,      f280, ff80, FPU);
#if defined(CONFIG_SOFTMMU)
    INSN(frestore,  f340, ffc0, CF_FPU);
    INSN(fsave,     f300, ffc0, CF_FPU);
    INSN(frestore,  f340, ffc0, FPU);
    INSN(fsave,     f300, ffc0, FPU);
    INSN(intouch,   f340, ffc0, CF_ISA_A);
    INSN(cpushl,    f428, ff38, CF_ISA_A);
    INSN(cpush,     f420, ff20, M68040);
    INSN(cinv,      f400, ff20, M68040);
    INSN(pflush,    f500, ffe0, M68040);
    INSN(ptest,     f548, ffd8, M68040);
    INSN(wddata,    fb00, ff00, CF_ISA_A);
    INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
#endif
    INSN(move16_mem, f600, ffe0, M68040);
    INSN(move16_reg, f620, fff8, M68040);
#undef INSN
}
6120 
6121 static void m68k_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
6122 {
6123     DisasContext *dc = container_of(dcbase, DisasContext, base);
6124     CPUM68KState *env = cpu->env_ptr;
6125 
6126     dc->env = env;
6127     dc->pc = dc->base.pc_first;
6128     dc->cc_op = CC_OP_DYNAMIC;
6129     dc->cc_op_synced = 1;
6130     dc->done_mac = 0;
6131     dc->writeback_mask = 0;
6132     init_release_array(dc);
6133 }
6134 
/*
 * TranslatorOps tb_start hook: intentionally empty for m68k; all per-TB
 * setup is done in m68k_tr_init_disas_context().
 */
static void m68k_tr_tb_start(DisasContextBase *dcbase, CPUState *cpu)
{
}
6138 
6139 static void m68k_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
6140 {
6141     DisasContext *dc = container_of(dcbase, DisasContext, base);
6142     tcg_gen_insn_start(dc->base.pc_next, dc->cc_op);
6143 }
6144 
6145 static bool m68k_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cpu,
6146                                      const CPUBreakpoint *bp)
6147 {
6148     DisasContext *dc = container_of(dcbase, DisasContext, base);
6149 
6150     gen_exception(dc, dc->base.pc_next, EXCP_DEBUG);
6151     /*
6152      * The address covered by the breakpoint must be included in
6153      * [tb->pc, tb->pc + tb->size) in order to for it to be
6154      * properly cleared -- thus we increment the PC here so that
6155      * the logic setting tb->size below does the right thing.
6156      */
6157     dc->base.pc_next += 2;
6158 
6159     return true;
6160 }
6161 
6162 static void m68k_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
6163 {
6164     DisasContext *dc = container_of(dcbase, DisasContext, base);
6165     CPUM68KState *env = cpu->env_ptr;
6166     uint16_t insn = read_im16(env, dc);
6167 
6168     opcode_table[insn](env, dc, insn);
6169     do_writebacks(dc);
6170     do_release(dc);
6171 
6172     dc->base.pc_next = dc->pc;
6173 
6174     if (dc->base.is_jmp == DISAS_NEXT) {
6175         /*
6176          * Stop translation when the next insn might touch a new page.
6177          * This ensures that prefetch aborts at the right place.
6178          *
6179          * We cannot determine the size of the next insn without
6180          * completely decoding it.  However, the maximum insn size
6181          * is 32 bytes, so end if we do not have that much remaining.
6182          * This may produce several small TBs at the end of each page,
6183          * but they will all be linked with goto_tb.
6184          *
6185          * ??? ColdFire maximum is 4 bytes; MC68000's maximum is also
6186          * smaller than MC68020's.
6187          */
6188         target_ulong start_page_offset
6189             = dc->pc - (dc->base.pc_first & TARGET_PAGE_MASK);
6190 
6191         if (start_page_offset >= TARGET_PAGE_SIZE - 32) {
6192             dc->base.is_jmp = DISAS_TOO_MANY;
6193         }
6194     }
6195 }
6196 
/*
 * TranslatorOps tb_stop hook: emit the code that ends the translation
 * block, depending on how translation stopped.
 */
static void m68k_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);

    /* The TB already ends in an exception or equivalent; nothing to emit. */
    if (dc->base.is_jmp == DISAS_NORETURN) {
        return;
    }
    if (dc->base.singlestep_enabled) {
        /*
         * NOTE(review): this raises EXCP_DEBUG without first calling
         * update_cc_op()/storing dc->pc as the DISAS_TOO_MANY case does
         * below -- presumably the exception path recovers both from the
         * insn_start data; confirm against restore_state_to_opc().
         */
        gen_helper_raise_exception(cpu_env, tcg_const_i32(EXCP_DEBUG));
        return;
    }

    switch (dc->base.is_jmp) {
    case DISAS_TOO_MANY:
        /* Fell off the end of the TB: sync cc_op and chain to dc->pc.  */
        update_cc_op(dc);
        gen_jmp_tb(dc, 0, dc->pc);
        break;
    case DISAS_JUMP:
        /* We updated CC_OP and PC in gen_jmp/gen_jmp_im.  */
        tcg_gen_lookup_and_goto_ptr();
        break;
    case DISAS_EXIT:
        /*
         * We updated CC_OP and PC in gen_exit_tb, but also modified
         * other state that may require returning to the main loop.
         */
        tcg_gen_exit_tb(NULL, 0);
        break;
    default:
        g_assert_not_reached();
    }
}
6229 
6230 static void m68k_tr_disas_log(const DisasContextBase *dcbase, CPUState *cpu)
6231 {
6232     qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
6233     log_target_disas(cpu, dcbase->pc_first, dcbase->tb->size);
6234 }
6235 
/* Hook table wiring the m68k front end into the generic translator loop. */
static const TranslatorOps m68k_tr_ops = {
    .init_disas_context = m68k_tr_init_disas_context,
    .tb_start           = m68k_tr_tb_start,
    .insn_start         = m68k_tr_insn_start,
    .breakpoint_check   = m68k_tr_breakpoint_check,
    .translate_insn     = m68k_tr_translate_insn,
    .tb_stop            = m68k_tr_tb_stop,
    .disas_log          = m68k_tr_disas_log,
};
6245 
6246 void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int max_insns)
6247 {
6248     DisasContext dc;
6249     translator_loop(&m68k_tr_ops, &dc.base, cpu, tb, max_insns);
6250 }
6251 
6252 static double floatx80_to_double(CPUM68KState *env, uint16_t high, uint64_t low)
6253 {
6254     floatx80 a = { .high = high, .low = low };
6255     union {
6256         float64 f64;
6257         double d;
6258     } u;
6259 
6260     u.f64 = floatx80_to_float64(a, &env->fp_status);
6261     return u.d;
6262 }
6263 
/*
 * Dump the CPU register state to @f for "info registers" and debug
 * logging: data/address/FP registers, PC, SR with decoded flags, FPSR,
 * FPCR precision/rounding mode, and (softmmu only) the supervisor
 * stack pointers and MMU registers.
 */
void m68k_cpu_dump_state(CPUState *cs, FILE *f, int flags)
{
    M68kCPU *cpu = M68K_CPU(cs);
    CPUM68KState *env = &cpu->env;
    int i;
    uint16_t sr;
    for (i = 0; i < 8; i++) {
        /* FP registers are stored split; also show a readable double.  */
        qemu_fprintf(f, "D%d = %08x   A%d = %08x   "
                     "F%d = %04x %016"PRIx64"  (%12g)\n",
                     i, env->dregs[i], i, env->aregs[i],
                     i, env->fregs[i].l.upper, env->fregs[i].l.lower,
                     floatx80_to_double(env, env->fregs[i].l.upper,
                                        env->fregs[i].l.lower));
    }
    qemu_fprintf(f, "PC = %08x   ", env->pc);
    /* env->sr holds only the system byte; merge in the live CCR bits.  */
    sr = env->sr | cpu_m68k_get_ccr(env);
    qemu_fprintf(f, "SR = %04x T:%x I:%x %c%c %c%c%c%c%c\n",
                 sr, (sr & SR_T) >> SR_T_SHIFT, (sr & SR_I) >> SR_I_SHIFT,
                 (sr & SR_S) ? 'S' : 'U', (sr & SR_M) ? '%' : 'I',
                 (sr & CCF_X) ? 'X' : '-', (sr & CCF_N) ? 'N' : '-',
                 (sr & CCF_Z) ? 'Z' : '-', (sr & CCF_V) ? 'V' : '-',
                 (sr & CCF_C) ? 'C' : '-');
    qemu_fprintf(f, "FPSR = %08x %c%c%c%c ", env->fpsr,
                 (env->fpsr & FPSR_CC_A) ? 'A' : '-',
                 (env->fpsr & FPSR_CC_I) ? 'I' : '-',
                 (env->fpsr & FPSR_CC_Z) ? 'Z' : '-',
                 (env->fpsr & FPSR_CC_N) ? 'N' : '-');
    qemu_fprintf(f, "\n                                "
                 "FPCR =     %04x ", env->fpcr);
    /* Decode the FPCR precision field (no letter for other encodings).  */
    switch (env->fpcr & FPCR_PREC_MASK) {
    case FPCR_PREC_X:
        qemu_fprintf(f, "X ");
        break;
    case FPCR_PREC_S:
        qemu_fprintf(f, "S ");
        break;
    case FPCR_PREC_D:
        qemu_fprintf(f, "D ");
        break;
    }
    /* Decode the FPCR rounding-mode field.  */
    switch (env->fpcr & FPCR_RND_MASK) {
    case FPCR_RND_N:
        qemu_fprintf(f, "RN ");
        break;
    case FPCR_RND_Z:
        qemu_fprintf(f, "RZ ");
        break;
    case FPCR_RND_M:
        qemu_fprintf(f, "RM ");
        break;
    case FPCR_RND_P:
        qemu_fprintf(f, "RP ");
        break;
    }
    qemu_fprintf(f, "\n");
#ifdef CONFIG_SOFTMMU
    /* "->" marks whichever A7 (MSP/USP/ISP) is currently active.  */
    qemu_fprintf(f, "%sA7(MSP) = %08x %sA7(USP) = %08x %sA7(ISP) = %08x\n",
                 env->current_sp == M68K_SSP ? "->" : "  ", env->sp[M68K_SSP],
                 env->current_sp == M68K_USP ? "->" : "  ", env->sp[M68K_USP],
                 env->current_sp == M68K_ISP ? "->" : "  ", env->sp[M68K_ISP]);
    qemu_fprintf(f, "VBR = 0x%08x\n", env->vbr);
    qemu_fprintf(f, "SFC = %x DFC %x\n", env->sfc, env->dfc);
    qemu_fprintf(f, "SSW %08x TCR %08x URP %08x SRP %08x\n",
                 env->mmu.ssw, env->mmu.tcr, env->mmu.urp, env->mmu.srp);
    qemu_fprintf(f, "DTTR0/1: %08x/%08x ITTR0/1: %08x/%08x\n",
                 env->mmu.ttr[M68K_DTTR0], env->mmu.ttr[M68K_DTTR1],
                 env->mmu.ttr[M68K_ITTR0], env->mmu.ttr[M68K_ITTR1]);
    qemu_fprintf(f, "MMUSR %08x, fault at %08x\n",
                 env->mmu.mmusr, env->mmu.ar);
#endif
}
6335 
6336 void restore_state_to_opc(CPUM68KState *env, TranslationBlock *tb,
6337                           target_ulong *data)
6338 {
6339     int cc_op = data[1];
6340     env->pc = data[0];
6341     if (cc_op != CC_OP_DYNAMIC) {
6342         env->cc_op = cc_op;
6343     }
6344 }
6345