1 /*
2  *  m68k translation
3  *
4  *  Copyright (c) 2005-2007 CodeSourcery
5  *  Written by Paul Brook
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "disas/disas.h"
24 #include "exec/exec-all.h"
25 #include "tcg-op.h"
26 #include "qemu/log.h"
27 #include "exec/cpu_ldst.h"
28 #include "exec/translator.h"
29 
30 #include "exec/helper-proto.h"
31 #include "exec/helper-gen.h"
32 
33 #include "trace-tcg.h"
34 #include "exec/log.h"
35 
36 //#define DEBUG_DISPATCH 1
37 
38 #define DEFO32(name, offset) static TCGv QREG_##name;
39 #define DEFO64(name, offset) static TCGv_i64 QREG_##name;
40 #include "qregs.def"
41 #undef DEFO32
42 #undef DEFO64
43 
44 static TCGv_i32 cpu_halted;
45 static TCGv_i32 cpu_exception_index;
46 
47 static char cpu_reg_names[2 * 8 * 3 + 5 * 4];
48 static TCGv cpu_dregs[8];
49 static TCGv cpu_aregs[8];
50 static TCGv_i64 cpu_macc[4];
51 
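/* Convenience accessors for instruction register fields: REG() extracts a
 * 3-bit register number starting at bit POS; DREG/AREG map it to the
 * corresponding data/address register TCG value.
 */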
52 #define REG(insn, pos)  (((insn) >> (pos)) & 7)
53 #define DREG(insn, pos) cpu_dregs[REG(insn, pos)]
54 #define AREG(insn, pos) get_areg(s, REG(insn, pos))
55 #define MACREG(acc)     cpu_macc[acc]
56 #define QREG_SP         get_areg(s, 7)
57 
58 static TCGv NULL_QREG;
59 #define IS_NULL_QREG(t) (t == NULL_QREG)
60 /* Used to distinguish stores from bad addressing modes.  */
61 static TCGv store_dummy;
62 
63 #include "exec/gen-icount.h"
64 
65 void m68k_tcg_init(void)
66 {
67     char *p;
68     int i;
69 
70 #define DEFO32(name, offset) \
71     QREG_##name = tcg_global_mem_new_i32(cpu_env, \
72         offsetof(CPUM68KState, offset), #name);
73 #define DEFO64(name, offset) \
74     QREG_##name = tcg_global_mem_new_i64(cpu_env, \
75         offsetof(CPUM68KState, offset), #name);
76 #include "qregs.def"
77 #undef DEFO32
78 #undef DEFO64
79 
80     cpu_halted = tcg_global_mem_new_i32(cpu_env,
81                                         -offsetof(M68kCPU, env) +
82                                         offsetof(CPUState, halted), "HALTED");
83     cpu_exception_index = tcg_global_mem_new_i32(cpu_env,
84                                                  -offsetof(M68kCPU, env) +
85                                                  offsetof(CPUState, exception_index),
86                                                  "EXCEPTION");
87 
88     p = cpu_reg_names;
89     for (i = 0; i < 8; i++) {
90         sprintf(p, "D%d", i);
91         cpu_dregs[i] = tcg_global_mem_new(cpu_env,
92                                           offsetof(CPUM68KState, dregs[i]), p);
93         p += 3;
94         sprintf(p, "A%d", i);
95         cpu_aregs[i] = tcg_global_mem_new(cpu_env,
96                                           offsetof(CPUM68KState, aregs[i]), p);
97         p += 3;
98     }
99     for (i = 0; i < 4; i++) {
100         sprintf(p, "ACC%d", i);
101         cpu_macc[i] = tcg_global_mem_new_i64(cpu_env,
102                                          offsetof(CPUM68KState, macc[i]), p);
103         p += 5;
104     }
105 
106     NULL_QREG = tcg_global_mem_new(cpu_env, -4, "NULL");
107     store_dummy = tcg_global_mem_new(cpu_env, -8, "NULL");
108 }
109 
110 /* internal defines */
111 typedef struct DisasContext {
112     CPUM68KState *env;
113     target_ulong insn_pc; /* Start of the current instruction.  */
114     target_ulong pc;
115     int is_jmp;
116     CCOp cc_op; /* Current CC operation */
117     int cc_op_synced;
118     int user;
119     struct TranslationBlock *tb;
120     int singlestep_enabled;
121     TCGv_i64 mactmp;
122     int done_mac;
123     int writeback_mask;
124     TCGv writeback[8];
125 } DisasContext;
126 
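/* Updates to the address registers made by the addressing modes (such as
 * postincrement and predecrement) are buffered rather than applied
 * immediately: get_areg() returns any pending value, delay_set_areg()
 * records one, and do_writebacks() commits them to the architectural
 * registers.
 */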
127 static TCGv get_areg(DisasContext *s, unsigned regno)
128 {
129     if (s->writeback_mask & (1 << regno)) {
130         return s->writeback[regno];
131     } else {
132         return cpu_aregs[regno];
133     }
134 }
135 
136 static void delay_set_areg(DisasContext *s, unsigned regno,
137                            TCGv val, bool give_temp)
138 {
139     if (s->writeback_mask & (1 << regno)) {
140         if (give_temp) {
141             tcg_temp_free(s->writeback[regno]);
142             s->writeback[regno] = val;
143         } else {
144             tcg_gen_mov_i32(s->writeback[regno], val);
145         }
146     } else {
147         s->writeback_mask |= 1 << regno;
148         if (give_temp) {
149             s->writeback[regno] = val;
150         } else {
151             TCGv tmp = tcg_temp_new();
152             s->writeback[regno] = tmp;
153             tcg_gen_mov_i32(tmp, val);
154         }
155     }
156 }
157 
158 static void do_writebacks(DisasContext *s)
159 {
160     unsigned mask = s->writeback_mask;
161     if (mask) {
162         s->writeback_mask = 0;
163         do {
164             unsigned regno = ctz32(mask);
165             tcg_gen_mov_i32(cpu_aregs[regno], s->writeback[regno]);
166             tcg_temp_free(s->writeback[regno]);
167             mask &= mask - 1;
168         } while (mask);
169     }
170 }
171 
172 /* is_jmp field values */
173 #define DISAS_JUMP      DISAS_TARGET_0 /* only pc was modified dynamically */
174 #define DISAS_UPDATE    DISAS_TARGET_1 /* cpu state was modified dynamically */
175 #define DISAS_TB_JUMP   DISAS_TARGET_2 /* only pc was modified statically */
176 #define DISAS_JUMP_NEXT DISAS_TARGET_3
177 
178 #if defined(CONFIG_USER_ONLY)
179 #define IS_USER(s) 1
180 #else
181 #define IS_USER(s) s->user
182 #endif
183 
184 typedef void (*disas_proc)(CPUM68KState *env, DisasContext *s, uint16_t insn);
185 
186 #ifdef DEBUG_DISPATCH
187 #define DISAS_INSN(name)                                                \
188     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
189                                   uint16_t insn);                       \
190     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
191                              uint16_t insn)                             \
192     {                                                                   \
193         qemu_log("Dispatch " #name "\n");                               \
194         real_disas_##name(env, s, insn);                                \
195     }                                                                   \
196     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
197                                   uint16_t insn)
198 #else
199 #define DISAS_INSN(name)                                                \
200     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
201                              uint16_t insn)
202 #endif
203 
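/* For the deferred cc_op forms below, the condition codes are not computed
 * immediately: QREG_CC_N and QREG_CC_V record the operation's result and
 * source operand, and gen_flush_flags() reconstructs C, V and Z from them
 * on demand.  QREG_CC_X is always maintained eagerly.
 */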
204 static const uint8_t cc_op_live[CC_OP_NB] = {
205     [CC_OP_DYNAMIC] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
206     [CC_OP_FLAGS] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
207     [CC_OP_ADDB ... CC_OP_ADDL] = CCF_X | CCF_N | CCF_V,
208     [CC_OP_SUBB ... CC_OP_SUBL] = CCF_X | CCF_N | CCF_V,
209     [CC_OP_CMPB ... CC_OP_CMPL] = CCF_X | CCF_N | CCF_V,
210     [CC_OP_LOGIC] = CCF_X | CCF_N
211 };
212 
213 static void set_cc_op(DisasContext *s, CCOp op)
214 {
215     CCOp old_op = s->cc_op;
216     int dead;
217 
218     if (old_op == op) {
219         return;
220     }
221     s->cc_op = op;
222     s->cc_op_synced = 0;
223 
224     /* Discard CC computation that will no longer be used.
225        Note that X and N are never dead.  */
226     dead = cc_op_live[old_op] & ~cc_op_live[op];
227     if (dead & CCF_C) {
228         tcg_gen_discard_i32(QREG_CC_C);
229     }
230     if (dead & CCF_Z) {
231         tcg_gen_discard_i32(QREG_CC_Z);
232     }
233     if (dead & CCF_V) {
234         tcg_gen_discard_i32(QREG_CC_V);
235     }
236 }
237 
238 /* Update the CPU env CC_OP state.  */
239 static void update_cc_op(DisasContext *s)
240 {
241     if (!s->cc_op_synced) {
242         s->cc_op_synced = 1;
243         tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
244     }
245 }
246 
247 /* Generate a jump to an immediate address.  */
248 static void gen_jmp_im(DisasContext *s, uint32_t dest)
249 {
250     update_cc_op(s);
251     tcg_gen_movi_i32(QREG_PC, dest);
252     s->is_jmp = DISAS_JUMP;
253 }
254 
255 /* Generate a jump to the address in qreg DEST.  */
256 static void gen_jmp(DisasContext *s, TCGv dest)
257 {
258     update_cc_op(s);
259     tcg_gen_mov_i32(QREG_PC, dest);
260     s->is_jmp = DISAS_JUMP;
261 }
262 
263 static void gen_raise_exception(int nr)
264 {
265     TCGv_i32 tmp = tcg_const_i32(nr);
266 
267     gen_helper_raise_exception(cpu_env, tmp);
268     tcg_temp_free_i32(tmp);
269 }
270 
271 static void gen_exception(DisasContext *s, uint32_t where, int nr)
272 {
273     gen_jmp_im(s, where);
274     gen_raise_exception(nr);
275 }
276 
277 static inline void gen_addr_fault(DisasContext *s)
278 {
279     gen_exception(s, s->insn_pc, EXCP_ADDRESS);
280 }
281 
282 /* Generate a load from the specified address.  Narrow values are
283    sign or zero extended to full register width, according to SIGN.  */
284 static inline TCGv gen_load(DisasContext * s, int opsize, TCGv addr, int sign)
285 {
286     TCGv tmp;
287     int index = IS_USER(s);
288     tmp = tcg_temp_new_i32();
289     switch(opsize) {
290     case OS_BYTE:
291         if (sign)
292             tcg_gen_qemu_ld8s(tmp, addr, index);
293         else
294             tcg_gen_qemu_ld8u(tmp, addr, index);
295         break;
296     case OS_WORD:
297         if (sign)
298             tcg_gen_qemu_ld16s(tmp, addr, index);
299         else
300             tcg_gen_qemu_ld16u(tmp, addr, index);
301         break;
302     case OS_LONG:
303         tcg_gen_qemu_ld32u(tmp, addr, index);
304         break;
305     default:
306         g_assert_not_reached();
307     }
308     return tmp;
309 }
310 
311 /* Generate a store.  */
312 static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val)
313 {
314     int index = IS_USER(s);
315     switch(opsize) {
316     case OS_BYTE:
317         tcg_gen_qemu_st8(val, addr, index);
318         break;
319     case OS_WORD:
320         tcg_gen_qemu_st16(val, addr, index);
321         break;
322     case OS_LONG:
323         tcg_gen_qemu_st32(val, addr, index);
324         break;
325     default:
326         g_assert_not_reached();
327     }
328 }
329 
330 typedef enum {
331     EA_STORE,
332     EA_LOADU,
333     EA_LOADS
334 } ea_what;
335 
336 /* Generate an unsigned load if WHAT is EA_LOADU, a signed load if WHAT
337    is EA_LOADS, otherwise generate a store.  */
338 static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
339                      ea_what what)
340 {
341     if (what == EA_STORE) {
342         gen_store(s, opsize, addr, val);
343         return store_dummy;
344     } else {
345         return gen_load(s, opsize, addr, what == EA_LOADS);
346     }
347 }
348 
349 /* Read a 16-bit immediate constant */
350 static inline uint16_t read_im16(CPUM68KState *env, DisasContext *s)
351 {
352     uint16_t im;
353     im = cpu_lduw_code(env, s->pc);
354     s->pc += 2;
355     return im;
356 }
357 
358 /* Read an 8-bit immediate constant; it occupies the low byte of a 16-bit extension word.  */
359 static inline uint8_t read_im8(CPUM68KState *env, DisasContext *s)
360 {
361     return read_im16(env, s);
362 }
363 
364 /* Read a 32-bit immediate constant.  */
365 static inline uint32_t read_im32(CPUM68KState *env, DisasContext *s)
366 {
367     uint32_t im;
368     im = read_im16(env, s) << 16;
369     im |= 0xffff & read_im16(env, s);
370     return im;
371 }
372 
373 /* Read a 64-bit immediate constant.  */
374 static inline uint64_t read_im64(CPUM68KState *env, DisasContext *s)
375 {
376     uint64_t im;
377     im = (uint64_t)read_im32(env, s) << 32;
378     im |= (uint64_t)read_im32(env, s);
379     return im;
380 }
381 
382 /* Calculate an address index: the index register, optionally sign-extended from 16 bits and scaled by 1, 2, 4 or 8.  */
383 static TCGv gen_addr_index(DisasContext *s, uint16_t ext, TCGv tmp)
384 {
385     TCGv add;
386     int scale;
387 
388     add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
389     if ((ext & 0x800) == 0) {
390         tcg_gen_ext16s_i32(tmp, add);
391         add = tmp;
392     }
393     scale = (ext >> 9) & 3;
394     if (scale != 0) {
395         tcg_gen_shli_i32(tmp, add, scale);
396         add = tmp;
397     }
398     return add;
399 }
400 
401 /* Handle a base + index + displacement effective address.
402    A NULL_QREG base means pc-relative.  */
403 static TCGv gen_lea_indexed(CPUM68KState *env, DisasContext *s, TCGv base)
404 {
405     uint32_t offset;
406     uint16_t ext;
407     TCGv add;
408     TCGv tmp;
409     uint32_t bd, od;
410 
411     offset = s->pc;
412     ext = read_im16(env, s);
413 
414     if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
415         return NULL_QREG;
416 
417     if (m68k_feature(s->env, M68K_FEATURE_M68000) &&
418         !m68k_feature(s->env, M68K_FEATURE_SCALED_INDEX)) {
419         ext &= ~(3 << 9);
420     }
421 
422     if (ext & 0x100) {
423         /* full extension word format */
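        /* 68020+ full extension word: bit 15 selects an A or D index register
         * (number in bits 12-14), bit 11 word/long index size, bits 9-10 the
         * scale, bit 8 = 1 (full format), bit 7 base suppress, bit 6 index
         * suppress, bits 4-5 the base displacement size and bits 0-2 the
         * memory indirect / outer displacement mode.
         */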
424         if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
425             return NULL_QREG;
426 
427         if ((ext & 0x30) > 0x10) {
428             /* base displacement */
429             if ((ext & 0x30) == 0x20) {
430                 bd = (int16_t)read_im16(env, s);
431             } else {
432                 bd = read_im32(env, s);
433             }
434         } else {
435             bd = 0;
436         }
437         tmp = tcg_temp_new();
438         if ((ext & 0x44) == 0) {
439             /* pre-index */
440             add = gen_addr_index(s, ext, tmp);
441         } else {
442             add = NULL_QREG;
443         }
444         if ((ext & 0x80) == 0) {
445             /* base not suppressed */
446             if (IS_NULL_QREG(base)) {
447                 base = tcg_const_i32(offset + bd);
448                 bd = 0;
449             }
450             if (!IS_NULL_QREG(add)) {
451                 tcg_gen_add_i32(tmp, add, base);
452                 add = tmp;
453             } else {
454                 add = base;
455             }
456         }
457         if (!IS_NULL_QREG(add)) {
458             if (bd != 0) {
459                 tcg_gen_addi_i32(tmp, add, bd);
460                 add = tmp;
461             }
462         } else {
463             add = tcg_const_i32(bd);
464         }
465         if ((ext & 3) != 0) {
466             /* memory indirect */
467             base = gen_load(s, OS_LONG, add, 0);
468             if ((ext & 0x44) == 4) {
469                 add = gen_addr_index(s, ext, tmp);
470                 tcg_gen_add_i32(tmp, add, base);
471                 add = tmp;
472             } else {
473                 add = base;
474             }
475             if ((ext & 3) > 1) {
476                 /* outer displacement */
477                 if ((ext & 3) == 2) {
478                     od = (int16_t)read_im16(env, s);
479                 } else {
480                     od = read_im32(env, s);
481                 }
482             } else {
483                 od = 0;
484             }
485             if (od != 0) {
486                 tcg_gen_addi_i32(tmp, add, od);
487                 add = tmp;
488             }
489         }
490     } else {
491         /* brief extension word format */
492         tmp = tcg_temp_new();
493         add = gen_addr_index(s, ext, tmp);
494         if (!IS_NULL_QREG(base)) {
495             tcg_gen_add_i32(tmp, add, base);
496             if ((int8_t)ext)
497                 tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
498         } else {
499             tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
500         }
501         add = tmp;
502     }
503     return add;
504 }
505 
506 /* Sign or zero extend a value.  */
507 
508 static inline void gen_ext(TCGv res, TCGv val, int opsize, int sign)
509 {
510     switch (opsize) {
511     case OS_BYTE:
512         if (sign) {
513             tcg_gen_ext8s_i32(res, val);
514         } else {
515             tcg_gen_ext8u_i32(res, val);
516         }
517         break;
518     case OS_WORD:
519         if (sign) {
520             tcg_gen_ext16s_i32(res, val);
521         } else {
522             tcg_gen_ext16u_i32(res, val);
523         }
524         break;
525     case OS_LONG:
526         tcg_gen_mov_i32(res, val);
527         break;
528     default:
529         g_assert_not_reached();
530     }
531 }
532 
533 /* Evaluate all the CC flags.  */
534 
535 static void gen_flush_flags(DisasContext *s)
536 {
537     TCGv t0, t1;
538 
539     switch (s->cc_op) {
540     case CC_OP_FLAGS:
541         return;
542 
543     case CC_OP_ADDB:
544     case CC_OP_ADDW:
545     case CC_OP_ADDL:
546         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
547         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
548         /* Compute signed overflow for addition.  */
549         t0 = tcg_temp_new();
550         t1 = tcg_temp_new();
551         tcg_gen_sub_i32(t0, QREG_CC_N, QREG_CC_V);
552         gen_ext(t0, t0, s->cc_op - CC_OP_ADDB, 1);
553         tcg_gen_xor_i32(t1, QREG_CC_N, QREG_CC_V);
554         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
555         tcg_temp_free(t0);
556         tcg_gen_andc_i32(QREG_CC_V, t1, QREG_CC_V);
557         tcg_temp_free(t1);
558         break;
559 
560     case CC_OP_SUBB:
561     case CC_OP_SUBW:
562     case CC_OP_SUBL:
563         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
564         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
565         /* Compute signed overflow for subtraction.  */
566         t0 = tcg_temp_new();
567         t1 = tcg_temp_new();
568         tcg_gen_add_i32(t0, QREG_CC_N, QREG_CC_V);
569         gen_ext(t0, t0, s->cc_op - CC_OP_SUBB, 1);
570         tcg_gen_xor_i32(t1, QREG_CC_N, t0);
571         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
572         tcg_temp_free(t0);
573         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t1);
574         tcg_temp_free(t1);
575         break;
576 
577     case CC_OP_CMPB:
578     case CC_OP_CMPW:
579     case CC_OP_CMPL:
580         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_C, QREG_CC_N, QREG_CC_V);
581         tcg_gen_sub_i32(QREG_CC_Z, QREG_CC_N, QREG_CC_V);
582         gen_ext(QREG_CC_Z, QREG_CC_Z, s->cc_op - CC_OP_CMPB, 1);
583         /* Compute signed overflow for subtraction.  */
584         t0 = tcg_temp_new();
585         tcg_gen_xor_i32(t0, QREG_CC_Z, QREG_CC_N);
586         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, QREG_CC_N);
587         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t0);
588         tcg_temp_free(t0);
589         tcg_gen_mov_i32(QREG_CC_N, QREG_CC_Z);
590         break;
591 
592     case CC_OP_LOGIC:
593         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
594         tcg_gen_movi_i32(QREG_CC_C, 0);
595         tcg_gen_movi_i32(QREG_CC_V, 0);
596         break;
597 
598     case CC_OP_DYNAMIC:
599         gen_helper_flush_flags(cpu_env, QREG_CC_OP);
600         s->cc_op_synced = 1;
601         break;
602 
603     default:
604         t0 = tcg_const_i32(s->cc_op);
605         gen_helper_flush_flags(cpu_env, t0);
606         tcg_temp_free(t0);
607         s->cc_op_synced = 1;
608         break;
609     }
610 
611     /* Note that flush_flags also assigns to env->cc_op.  */
612     s->cc_op = CC_OP_FLAGS;
613 }
614 
615 static inline TCGv gen_extend(TCGv val, int opsize, int sign)
616 {
617     TCGv tmp;
618 
619     if (opsize == OS_LONG) {
620         tmp = val;
621     } else {
622         tmp = tcg_temp_new();
623         gen_ext(tmp, val, opsize, sign);
624     }
625 
626     return tmp;
627 }
628 
629 static void gen_logic_cc(DisasContext *s, TCGv val, int opsize)
630 {
631     gen_ext(QREG_CC_N, val, opsize, 1);
632     set_cc_op(s, CC_OP_LOGIC);
633 }
634 
635 static void gen_update_cc_cmp(DisasContext *s, TCGv dest, TCGv src, int opsize)
636 {
637     tcg_gen_mov_i32(QREG_CC_N, dest);
638     tcg_gen_mov_i32(QREG_CC_V, src);
639     set_cc_op(s, CC_OP_CMPB + opsize);
640 }
641 
642 static void gen_update_cc_add(TCGv dest, TCGv src, int opsize)
643 {
644     gen_ext(QREG_CC_N, dest, opsize, 1);
645     tcg_gen_mov_i32(QREG_CC_V, src);
646 }
647 
648 static inline int opsize_bytes(int opsize)
649 {
650     switch (opsize) {
651     case OS_BYTE: return 1;
652     case OS_WORD: return 2;
653     case OS_LONG: return 4;
654     case OS_SINGLE: return 4;
655     case OS_DOUBLE: return 8;
656     case OS_EXTENDED: return 12;
657     case OS_PACKED: return 12;
658     default:
659         g_assert_not_reached();
660     }
661 }
662 
663 static inline int insn_opsize(int insn)
664 {
665     switch ((insn >> 6) & 3) {
666     case 0: return OS_BYTE;
667     case 1: return OS_WORD;
668     case 2: return OS_LONG;
669     default:
670         g_assert_not_reached();
671     }
672 }
673 
674 static inline int ext_opsize(int ext, int pos)
675 {
676     switch ((ext >> pos) & 7) {
677     case 0: return OS_LONG;
678     case 1: return OS_SINGLE;
679     case 2: return OS_EXTENDED;
680     case 3: return OS_PACKED;
681     case 4: return OS_WORD;
682     case 5: return OS_DOUBLE;
683     case 6: return OS_BYTE;
684     default:
685         g_assert_not_reached();
686     }
687 }
688 
689 /* Assign value to a register.  If the width is less than the register width
690    only the low part of the register is set.  */
691 static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
692 {
693     TCGv tmp;
694     switch (opsize) {
695     case OS_BYTE:
696         tcg_gen_andi_i32(reg, reg, 0xffffff00);
697         tmp = tcg_temp_new();
698         tcg_gen_ext8u_i32(tmp, val);
699         tcg_gen_or_i32(reg, reg, tmp);
700         tcg_temp_free(tmp);
701         break;
702     case OS_WORD:
703         tcg_gen_andi_i32(reg, reg, 0xffff0000);
704         tmp = tcg_temp_new();
705         tcg_gen_ext16u_i32(tmp, val);
706         tcg_gen_or_i32(reg, reg, tmp);
707         tcg_temp_free(tmp);
708         break;
709     case OS_LONG:
710     case OS_SINGLE:
711         tcg_gen_mov_i32(reg, val);
712         break;
713     default:
714         g_assert_not_reached();
715     }
716 }
717 
718 /* Generate code for an "effective address".  Does not adjust the base
719    register for autoincrement addressing modes.  */
720 static TCGv gen_lea_mode(CPUM68KState *env, DisasContext *s,
721                          int mode, int reg0, int opsize)
722 {
723     TCGv reg;
724     TCGv tmp;
725     uint16_t ext;
726     uint32_t offset;
727 
728     switch (mode) {
729     case 0: /* Data register direct.  */
730     case 1: /* Address register direct.  */
731         return NULL_QREG;
732     case 3: /* Indirect postincrement.  */
733         if (opsize == OS_UNSIZED) {
734             return NULL_QREG;
735         }
736         /* fallthru */
737     case 2: /* Indirect register */
738         return get_areg(s, reg0);
739     case 4: /* Indirect predecrement.  */
740         if (opsize == OS_UNSIZED) {
741             return NULL_QREG;
742         }
743         reg = get_areg(s, reg0);
744         tmp = tcg_temp_new();
745         if (reg0 == 7 && opsize == OS_BYTE &&
746             m68k_feature(s->env, M68K_FEATURE_M68000)) {
747             tcg_gen_subi_i32(tmp, reg, 2);
748         } else {
749             tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
750         }
751         return tmp;
752     case 5: /* Indirect displacement.  */
753         reg = get_areg(s, reg0);
754         tmp = tcg_temp_new();
755         ext = read_im16(env, s);
756         tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
757         return tmp;
758     case 6: /* Indirect index + displacement.  */
759         reg = get_areg(s, reg0);
760         return gen_lea_indexed(env, s, reg);
761     case 7: /* Other */
762         switch (reg0) {
763         case 0: /* Absolute short.  */
764             offset = (int16_t)read_im16(env, s);
765             return tcg_const_i32(offset);
766         case 1: /* Absolute long.  */
767             offset = read_im32(env, s);
768             return tcg_const_i32(offset);
769         case 2: /* pc displacement  */
770             offset = s->pc;
771             offset += (int16_t)read_im16(env, s);
772             return tcg_const_i32(offset);
773         case 3: /* pc index+displacement.  */
774             return gen_lea_indexed(env, s, NULL_QREG);
775         case 4: /* Immediate.  */
776         default:
777             return NULL_QREG;
778         }
779     }
780     /* Should never happen.  */
781     return NULL_QREG;
782 }
783 
784 static TCGv gen_lea(CPUM68KState *env, DisasContext *s, uint16_t insn,
785                     int opsize)
786 {
787     int mode = extract32(insn, 3, 3);
788     int reg0 = REG(insn, 0);
789     return gen_lea_mode(env, s, mode, reg0, opsize);
790 }
791 
792 /* Generate code to load/store a value from/into an EA.  If WHAT is
793    EA_STORE this is a write, otherwise it is a read (EA_LOADS sign extends,
794    EA_LOADU zero extends).  ADDRP is non-null for read-write operands.  */
795 static TCGv gen_ea_mode(CPUM68KState *env, DisasContext *s, int mode, int reg0,
796                         int opsize, TCGv val, TCGv *addrp, ea_what what)
797 {
798     TCGv reg, tmp, result;
799     int32_t offset;
800 
801     switch (mode) {
802     case 0: /* Data register direct.  */
803         reg = cpu_dregs[reg0];
804         if (what == EA_STORE) {
805             gen_partset_reg(opsize, reg, val);
806             return store_dummy;
807         } else {
808             return gen_extend(reg, opsize, what == EA_LOADS);
809         }
810     case 1: /* Address register direct.  */
811         reg = get_areg(s, reg0);
812         if (what == EA_STORE) {
813             tcg_gen_mov_i32(reg, val);
814             return store_dummy;
815         } else {
816             return gen_extend(reg, opsize, what == EA_LOADS);
817         }
818     case 2: /* Indirect register */
819         reg = get_areg(s, reg0);
820         return gen_ldst(s, opsize, reg, val, what);
821     case 3: /* Indirect postincrement.  */
822         reg = get_areg(s, reg0);
823         result = gen_ldst(s, opsize, reg, val, what);
824         if (what == EA_STORE || !addrp) {
825             TCGv tmp = tcg_temp_new();
826             if (reg0 == 7 && opsize == OS_BYTE &&
827                 m68k_feature(s->env, M68K_FEATURE_M68000)) {
828                 tcg_gen_addi_i32(tmp, reg, 2);
829             } else {
830                 tcg_gen_addi_i32(tmp, reg, opsize_bytes(opsize));
831             }
832             delay_set_areg(s, reg0, tmp, true);
833         }
834         return result;
835     case 4: /* Indirect predecrement.  */
836         if (addrp && what == EA_STORE) {
837             tmp = *addrp;
838         } else {
839             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
840             if (IS_NULL_QREG(tmp)) {
841                 return tmp;
842             }
843             if (addrp) {
844                 *addrp = tmp;
845             }
846         }
847         result = gen_ldst(s, opsize, tmp, val, what);
848         if (what == EA_STORE || !addrp) {
849             delay_set_areg(s, reg0, tmp, false);
850         }
851         return result;
852     case 5: /* Indirect displacement.  */
853     case 6: /* Indirect index + displacement.  */
854     do_indirect:
855         if (addrp && what == EA_STORE) {
856             tmp = *addrp;
857         } else {
858             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
859             if (IS_NULL_QREG(tmp)) {
860                 return tmp;
861             }
862             if (addrp) {
863                 *addrp = tmp;
864             }
865         }
866         return gen_ldst(s, opsize, tmp, val, what);
867     case 7: /* Other */
868         switch (reg0) {
869         case 0: /* Absolute short.  */
870         case 1: /* Absolute long.  */
871         case 2: /* pc displacement  */
872         case 3: /* pc index+displacement.  */
873             goto do_indirect;
874         case 4: /* Immediate.  */
875             /* Sign extend values for consistency.  */
876             switch (opsize) {
877             case OS_BYTE:
878                 if (what == EA_LOADS) {
879                     offset = (int8_t)read_im8(env, s);
880                 } else {
881                     offset = read_im8(env, s);
882                 }
883                 break;
884             case OS_WORD:
885                 if (what == EA_LOADS) {
886                     offset = (int16_t)read_im16(env, s);
887                 } else {
888                     offset = read_im16(env, s);
889                 }
890                 break;
891             case OS_LONG:
892                 offset = read_im32(env, s);
893                 break;
894             default:
895                 g_assert_not_reached();
896             }
897             return tcg_const_i32(offset);
898         default:
899             return NULL_QREG;
900         }
901     }
902     /* Should never happen.  */
903     return NULL_QREG;
904 }
905 
906 static TCGv gen_ea(CPUM68KState *env, DisasContext *s, uint16_t insn,
907                    int opsize, TCGv val, TCGv *addrp, ea_what what)
908 {
909     int mode = extract32(insn, 3, 3);
910     int reg0 = REG(insn, 0);
911     return gen_ea_mode(env, s, mode, reg0, opsize, val, addrp, what);
912 }
913 
914 static TCGv_ptr gen_fp_ptr(int freg)
915 {
916     TCGv_ptr fp = tcg_temp_new_ptr();
917     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fregs[freg]));
918     return fp;
919 }
920 
921 static TCGv_ptr gen_fp_result_ptr(void)
922 {
923     TCGv_ptr fp = tcg_temp_new_ptr();
924     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fp_result));
925     return fp;
926 }
927 
928 static void gen_fp_move(TCGv_ptr dest, TCGv_ptr src)
929 {
930     TCGv t32;
931     TCGv_i64 t64;
932 
933     t32 = tcg_temp_new();
934     tcg_gen_ld16u_i32(t32, src, offsetof(FPReg, l.upper));
935     tcg_gen_st16_i32(t32, dest, offsetof(FPReg, l.upper));
936     tcg_temp_free(t32);
937 
938     t64 = tcg_temp_new_i64();
939     tcg_gen_ld_i64(t64, src, offsetof(FPReg, l.lower));
940     tcg_gen_st_i64(t64, dest, offsetof(FPReg, l.lower));
941     tcg_temp_free_i64(t64);
942 }
943 
944 static void gen_load_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp)
945 {
946     TCGv tmp;
947     TCGv_i64 t64;
948     int index = IS_USER(s);
949 
950     t64 = tcg_temp_new_i64();
951     tmp = tcg_temp_new();
952     switch (opsize) {
953     case OS_BYTE:
954         tcg_gen_qemu_ld8s(tmp, addr, index);
955         gen_helper_exts32(cpu_env, fp, tmp);
956         break;
957     case OS_WORD:
958         tcg_gen_qemu_ld16s(tmp, addr, index);
959         gen_helper_exts32(cpu_env, fp, tmp);
960         break;
961     case OS_LONG:
962         tcg_gen_qemu_ld32u(tmp, addr, index);
963         gen_helper_exts32(cpu_env, fp, tmp);
964         break;
965     case OS_SINGLE:
966         tcg_gen_qemu_ld32u(tmp, addr, index);
967         gen_helper_extf32(cpu_env, fp, tmp);
968         break;
969     case OS_DOUBLE:
970         tcg_gen_qemu_ld64(t64, addr, index);
971         gen_helper_extf64(cpu_env, fp, t64);
972         break;
973     case OS_EXTENDED:
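        /* 96-bit in-memory format: sign and exponent in the upper half of the
         * first longword (the lower half is unused), followed by the 64-bit
         * mantissa.
         */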
974         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
975             gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
976             break;
977         }
978         tcg_gen_qemu_ld32u(tmp, addr, index);
979         tcg_gen_shri_i32(tmp, tmp, 16);
980         tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
981         tcg_gen_addi_i32(tmp, addr, 4);
982         tcg_gen_qemu_ld64(t64, tmp, index);
983         tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
984         break;
985     case OS_PACKED:
986         /* unimplemented data type on 68040/ColdFire
987          * FIXME if needed for another FPU
988          */
989         gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
990         break;
991     default:
992         g_assert_not_reached();
993     }
994     tcg_temp_free(tmp);
995     tcg_temp_free_i64(t64);
996 }
997 
998 static void gen_store_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp)
999 {
1000     TCGv tmp;
1001     TCGv_i64 t64;
1002     int index = IS_USER(s);
1003 
1004     t64 = tcg_temp_new_i64();
1005     tmp = tcg_temp_new();
1006     switch (opsize) {
1007     case OS_BYTE:
1008         gen_helper_reds32(tmp, cpu_env, fp);
1009         tcg_gen_qemu_st8(tmp, addr, index);
1010         break;
1011     case OS_WORD:
1012         gen_helper_reds32(tmp, cpu_env, fp);
1013         tcg_gen_qemu_st16(tmp, addr, index);
1014         break;
1015     case OS_LONG:
1016         gen_helper_reds32(tmp, cpu_env, fp);
1017         tcg_gen_qemu_st32(tmp, addr, index);
1018         break;
1019     case OS_SINGLE:
1020         gen_helper_redf32(tmp, cpu_env, fp);
1021         tcg_gen_qemu_st32(tmp, addr, index);
1022         break;
1023     case OS_DOUBLE:
1024         gen_helper_redf64(t64, cpu_env, fp);
1025         tcg_gen_qemu_st64(t64, addr, index);
1026         break;
1027     case OS_EXTENDED:
1028         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1029             gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1030             break;
1031         }
1032         tcg_gen_ld16u_i32(tmp, fp, offsetof(FPReg, l.upper));
1033         tcg_gen_shli_i32(tmp, tmp, 16);
1034         tcg_gen_qemu_st32(tmp, addr, index);
1035         tcg_gen_addi_i32(tmp, addr, 4);
1036         tcg_gen_ld_i64(t64, fp, offsetof(FPReg, l.lower));
1037         tcg_gen_qemu_st64(t64, tmp, index);
1038         break;
1039     case OS_PACKED:
1040         /* unimplemented data type on 68040/ColdFire
1041          * FIXME if needed for another FPU
1042          */
1043         gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1044         break;
1045     default:
1046         g_assert_not_reached();
1047     }
1048     tcg_temp_free(tmp);
1049     tcg_temp_free_i64(t64);
1050 }
1051 
1052 static void gen_ldst_fp(DisasContext *s, int opsize, TCGv addr,
1053                         TCGv_ptr fp, ea_what what)
1054 {
1055     if (what == EA_STORE) {
1056         gen_store_fp(s, opsize, addr, fp);
1057     } else {
1058         gen_load_fp(s, opsize, addr, fp);
1059     }
1060 }
1061 
1062 static int gen_ea_mode_fp(CPUM68KState *env, DisasContext *s, int mode,
1063                           int reg0, int opsize, TCGv_ptr fp, ea_what what)
1064 {
1065     TCGv reg, addr, tmp;
1066     TCGv_i64 t64;
1067 
1068     switch (mode) {
1069     case 0: /* Data register direct.  */
1070         reg = cpu_dregs[reg0];
1071         if (what == EA_STORE) {
1072             switch (opsize) {
1073             case OS_BYTE:
1074             case OS_WORD:
1075             case OS_LONG:
1076                 gen_helper_reds32(reg, cpu_env, fp);
1077                 break;
1078             case OS_SINGLE:
1079                 gen_helper_redf32(reg, cpu_env, fp);
1080                 break;
1081             default:
1082                 g_assert_not_reached();
1083             }
1084         } else {
1085             tmp = tcg_temp_new();
1086             switch (opsize) {
1087             case OS_BYTE:
1088                 tcg_gen_ext8s_i32(tmp, reg);
1089                 gen_helper_exts32(cpu_env, fp, tmp);
1090                 break;
1091             case OS_WORD:
1092                 tcg_gen_ext16s_i32(tmp, reg);
1093                 gen_helper_exts32(cpu_env, fp, tmp);
1094                 break;
1095             case OS_LONG:
1096                 gen_helper_exts32(cpu_env, fp, reg);
1097                 break;
1098             case OS_SINGLE:
1099                 gen_helper_extf32(cpu_env, fp, reg);
1100                 break;
1101             default:
1102                 g_assert_not_reached();
1103             }
1104             tcg_temp_free(tmp);
1105         }
1106         return 0;
1107     case 1: /* Address register direct.  */
1108         return -1;
1109     case 2: /* Indirect register */
1110         addr = get_areg(s, reg0);
1111         gen_ldst_fp(s, opsize, addr, fp, what);
1112         return 0;
1113     case 3: /* Indirect postincrement.  */
1114         addr = cpu_aregs[reg0];
1115         gen_ldst_fp(s, opsize, addr, fp, what);
1116         tcg_gen_addi_i32(addr, addr, opsize_bytes(opsize));
1117         return 0;
1118     case 4: /* Indirect predecrement.  */
1119         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1120         if (IS_NULL_QREG(addr)) {
1121             return -1;
1122         }
1123         gen_ldst_fp(s, opsize, addr, fp, what);
1124         tcg_gen_mov_i32(cpu_aregs[reg0], addr);
1125         return 0;
1126     case 5: /* Indirect displacement.  */
1127     case 6: /* Indirect index + displacement.  */
1128     do_indirect:
1129         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1130         if (IS_NULL_QREG(addr)) {
1131             return -1;
1132         }
1133         gen_ldst_fp(s, opsize, addr, fp, what);
1134         return 0;
1135     case 7: /* Other */
1136         switch (reg0) {
1137         case 0: /* Absolute short.  */
1138         case 1: /* Absolute long.  */
1139         case 2: /* pc displacement  */
1140         case 3: /* pc index+displacement.  */
1141             goto do_indirect;
1142         case 4: /* Immediate.  */
1143             if (what == EA_STORE) {
1144                 return -1;
1145             }
1146             switch (opsize) {
1147             case OS_BYTE:
1148                 tmp = tcg_const_i32((int8_t)read_im8(env, s));
1149                 gen_helper_exts32(cpu_env, fp, tmp);
1150                 tcg_temp_free(tmp);
1151                 break;
1152             case OS_WORD:
1153                 tmp = tcg_const_i32((int16_t)read_im16(env, s));
1154                 gen_helper_exts32(cpu_env, fp, tmp);
1155                 tcg_temp_free(tmp);
1156                 break;
1157             case OS_LONG:
1158                 tmp = tcg_const_i32(read_im32(env, s));
1159                 gen_helper_exts32(cpu_env, fp, tmp);
1160                 tcg_temp_free(tmp);
1161                 break;
1162             case OS_SINGLE:
1163                 tmp = tcg_const_i32(read_im32(env, s));
1164                 gen_helper_extf32(cpu_env, fp, tmp);
1165                 tcg_temp_free(tmp);
1166                 break;
1167             case OS_DOUBLE:
1168                 t64 = tcg_const_i64(read_im64(env, s));
1169                 gen_helper_extf64(cpu_env, fp, t64);
1170                 tcg_temp_free_i64(t64);
1171                 break;
1172             case OS_EXTENDED:
1173                 if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1174                     gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1175                     break;
1176                 }
1177                 tmp = tcg_const_i32(read_im32(env, s) >> 16);
1178                 tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1179                 tcg_temp_free(tmp);
1180                 t64 = tcg_const_i64(read_im64(env, s));
1181                 tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1182                 tcg_temp_free_i64(t64);
1183                 break;
1184             case OS_PACKED:
1185                 /* unimplemented data type on 68040/ColdFire
1186                  * FIXME if needed for another FPU
1187                  */
1188                 gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1189                 break;
1190             default:
1191                 g_assert_not_reached();
1192             }
1193             return 0;
1194         default:
1195             return -1;
1196         }
1197     }
1198     return -1;
1199 }
1200 
1201 static int gen_ea_fp(CPUM68KState *env, DisasContext *s, uint16_t insn,
1202                        int opsize, TCGv_ptr fp, ea_what what)
1203 {
1204     int mode = extract32(insn, 3, 3);
1205     int reg0 = REG(insn, 0);
1206     return gen_ea_mode_fp(env, s, mode, reg0, opsize, fp, what);
1207 }
1208 
1209 typedef struct {
1210     TCGCond tcond;
1211     bool g1;
1212     bool g2;
1213     TCGv v1;
1214     TCGv v2;
1215 } DisasCompare;
1216 
1217 static void gen_cc_cond(DisasCompare *c, DisasContext *s, int cond)
1218 {
1219     TCGv tmp, tmp2;
1220     TCGCond tcond;
1221     CCOp op = s->cc_op;
1222 
1223     /* The CC_OP_CMP form can handle most normal comparisons directly.  */
1224     if (op == CC_OP_CMPB || op == CC_OP_CMPW || op == CC_OP_CMPL) {
1225         c->g1 = c->g2 = 1;
1226         c->v1 = QREG_CC_N;
1227         c->v2 = QREG_CC_V;
1228         switch (cond) {
1229         case 2: /* HI */
1230         case 3: /* LS */
1231             tcond = TCG_COND_LEU;
1232             goto done;
1233         case 4: /* CC */
1234         case 5: /* CS */
1235             tcond = TCG_COND_LTU;
1236             goto done;
1237         case 6: /* NE */
1238         case 7: /* EQ */
1239             tcond = TCG_COND_EQ;
1240             goto done;
1241         case 10: /* PL */
1242         case 11: /* MI */
1243             c->g1 = c->g2 = 0;
1244             c->v2 = tcg_const_i32(0);
1245             c->v1 = tmp = tcg_temp_new();
1246             tcg_gen_sub_i32(tmp, QREG_CC_N, QREG_CC_V);
1247             gen_ext(tmp, tmp, op - CC_OP_CMPB, 1);
1248             /* fallthru */
1249         case 12: /* GE */
1250         case 13: /* LT */
1251             tcond = TCG_COND_LT;
1252             goto done;
1253         case 14: /* GT */
1254         case 15: /* LE */
1255             tcond = TCG_COND_LE;
1256             goto done;
1257         }
1258     }
1259 
1260     c->g1 = 1;
1261     c->g2 = 0;
1262     c->v2 = tcg_const_i32(0);
1263 
1264     switch (cond) {
1265     case 0: /* T */
1266     case 1: /* F */
1267         c->v1 = c->v2;
1268         tcond = TCG_COND_NEVER;
1269         goto done;
1270     case 14: /* GT (!(Z || (N ^ V))) */
1271     case 15: /* LE (Z || (N ^ V)) */
1272         /* Logic operations clear V, which simplifies LE to (Z || N),
1273            and since Z and N are co-located, this becomes a normal
1274            comparison vs N.  */
1275         if (op == CC_OP_LOGIC) {
1276             c->v1 = QREG_CC_N;
1277             tcond = TCG_COND_LE;
1278             goto done;
1279         }
1280         break;
1281     case 12: /* GE (!(N ^ V)) */
1282     case 13: /* LT (N ^ V) */
1283         /* Logic operations clear V, which simplifies this to N.  */
1284         if (op != CC_OP_LOGIC) {
1285             break;
1286         }
1287         /* fallthru */
1288     case 10: /* PL (!N) */
1289     case 11: /* MI (N) */
1290         /* Several cases represent N normally.  */
1291         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1292             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1293             op == CC_OP_LOGIC) {
1294             c->v1 = QREG_CC_N;
1295             tcond = TCG_COND_LT;
1296             goto done;
1297         }
1298         break;
1299     case 6: /* NE (!Z) */
1300     case 7: /* EQ (Z) */
1301         /* Some cases fold Z into N.  */
1302         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1303             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1304             op == CC_OP_LOGIC) {
1305             tcond = TCG_COND_EQ;
1306             c->v1 = QREG_CC_N;
1307             goto done;
1308         }
1309         break;
1310     case 4: /* CC (!C) */
1311     case 5: /* CS (C) */
1312         /* Some cases fold C into X.  */
1313         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1314             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL) {
1315             tcond = TCG_COND_NE;
1316             c->v1 = QREG_CC_X;
1317             goto done;
1318         }
1319         /* fallthru */
1320     case 8: /* VC (!V) */
1321     case 9: /* VS (V) */
1322         /* Logic operations clear V and C.  */
1323         if (op == CC_OP_LOGIC) {
1324             tcond = TCG_COND_NEVER;
1325             c->v1 = c->v2;
1326             goto done;
1327         }
1328         break;
1329     }
1330 
1331     /* Otherwise, flush flag state to CC_OP_FLAGS.  */
1332     gen_flush_flags(s);
1333 
1334     switch (cond) {
1335     case 0: /* T */
1336     case 1: /* F */
1337     default:
1338         /* Invalid, or handled above.  */
1339         abort();
1340     case 2: /* HI (!C && !Z) -> !(C || Z) */
1341     case 3: /* LS (C || Z) */
1342         c->v1 = tmp = tcg_temp_new();
1343         c->g1 = 0;
1344         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1345         tcg_gen_or_i32(tmp, tmp, QREG_CC_C);
1346         tcond = TCG_COND_NE;
1347         break;
1348     case 4: /* CC (!C) */
1349     case 5: /* CS (C) */
1350         c->v1 = QREG_CC_C;
1351         tcond = TCG_COND_NE;
1352         break;
1353     case 6: /* NE (!Z) */
1354     case 7: /* EQ (Z) */
1355         c->v1 = QREG_CC_Z;
1356         tcond = TCG_COND_EQ;
1357         break;
1358     case 8: /* VC (!V) */
1359     case 9: /* VS (V) */
1360         c->v1 = QREG_CC_V;
1361         tcond = TCG_COND_LT;
1362         break;
1363     case 10: /* PL (!N) */
1364     case 11: /* MI (N) */
1365         c->v1 = QREG_CC_N;
1366         tcond = TCG_COND_LT;
1367         break;
1368     case 12: /* GE (!(N ^ V)) */
1369     case 13: /* LT (N ^ V) */
1370         c->v1 = tmp = tcg_temp_new();
1371         c->g1 = 0;
1372         tcg_gen_xor_i32(tmp, QREG_CC_N, QREG_CC_V);
1373         tcond = TCG_COND_LT;
1374         break;
1375     case 14: /* GT (!(Z || (N ^ V))) */
1376     case 15: /* LE (Z || (N ^ V)) */
1377         c->v1 = tmp = tcg_temp_new();
1378         c->g1 = 0;
1379         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1380         tcg_gen_neg_i32(tmp, tmp);
1381         tmp2 = tcg_temp_new();
1382         tcg_gen_xor_i32(tmp2, QREG_CC_N, QREG_CC_V);
1383         tcg_gen_or_i32(tmp, tmp, tmp2);
1384         tcg_temp_free(tmp2);
1385         tcond = TCG_COND_LT;
1386         break;
1387     }
1388 
1389  done:
1390     if ((cond & 1) == 0) {
1391         tcond = tcg_invert_cond(tcond);
1392     }
1393     c->tcond = tcond;
1394 }
1395 
1396 static void free_cond(DisasCompare *c)
1397 {
1398     if (!c->g1) {
1399         tcg_temp_free(c->v1);
1400     }
1401     if (!c->g2) {
1402         tcg_temp_free(c->v2);
1403     }
1404 }
1405 
1406 static void gen_jmpcc(DisasContext *s, int cond, TCGLabel *l1)
1407 {
1408   DisasCompare c;
1409 
1410   gen_cc_cond(&c, s, cond);
1411   update_cc_op(s);
1412   tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
1413   free_cond(&c);
1414 }
1415 
1416 /* Force a TB lookup after an instruction that changes the CPU state.  */
1417 static void gen_lookup_tb(DisasContext *s)
1418 {
1419     update_cc_op(s);
1420     tcg_gen_movi_i32(QREG_PC, s->pc);
1421     s->is_jmp = DISAS_UPDATE;
1422 }
1423 
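/* Evaluate a source or destination operand for the current instruction.
 * On an invalid addressing mode these macros raise an address error and
 * return from the calling DISAS_INSN handler.
 */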
1424 #define SRC_EA(env, result, opsize, op_sign, addrp) do {                \
1425         result = gen_ea(env, s, insn, opsize, NULL_QREG, addrp,         \
1426                         op_sign ? EA_LOADS : EA_LOADU);                 \
1427         if (IS_NULL_QREG(result)) {                                     \
1428             gen_addr_fault(s);                                          \
1429             return;                                                     \
1430         }                                                               \
1431     } while (0)
1432 
1433 #define DEST_EA(env, insn, opsize, val, addrp) do {                     \
1434         TCGv ea_result = gen_ea(env, s, insn, opsize, val, addrp, EA_STORE); \
1435         if (IS_NULL_QREG(ea_result)) {                                  \
1436             gen_addr_fault(s);                                          \
1437             return;                                                     \
1438         }                                                               \
1439     } while (0)
1440 
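/* Direct block chaining with goto_tb is only used when the destination lies
 * on the same guest page as the current TB (or the current instruction), so
 * that the chained blocks are invalidated together if the page changes;
 * otherwise an indirect jump through QREG_PC is generated.
 */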
1441 static inline bool use_goto_tb(DisasContext *s, uint32_t dest)
1442 {
1443 #ifndef CONFIG_USER_ONLY
1444     return (s->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) ||
1445            (s->insn_pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
1446 #else
1447     return true;
1448 #endif
1449 }
1450 
1451 /* Generate a jump to an immediate address.  */
1452 static void gen_jmp_tb(DisasContext *s, int n, uint32_t dest)
1453 {
1454     if (unlikely(s->singlestep_enabled)) {
1455         gen_exception(s, dest, EXCP_DEBUG);
1456     } else if (use_goto_tb(s, dest)) {
1457         tcg_gen_goto_tb(n);
1458         tcg_gen_movi_i32(QREG_PC, dest);
1459         tcg_gen_exit_tb((uintptr_t)s->tb + n);
1460     } else {
1461         gen_jmp_im(s, dest);
1462         tcg_gen_exit_tb(0);
1463     }
1464     s->is_jmp = DISAS_TB_JUMP;
1465 }
1466 
1467 DISAS_INSN(scc)
1468 {
1469     DisasCompare c;
1470     int cond;
1471     TCGv tmp;
1472 
1473     cond = (insn >> 8) & 0xf;
1474     gen_cc_cond(&c, s, cond);
1475 
1476     tmp = tcg_temp_new();
1477     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
1478     free_cond(&c);
1479 
1480     tcg_gen_neg_i32(tmp, tmp);
1481     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
1482     tcg_temp_free(tmp);
1483 }
1484 
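/* DBcc: if the condition is false, decrement the low word of Dn and branch
 * to the 16-bit displacement target unless the counter has become -1;
 * otherwise fall through to the next instruction.
 */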
1485 DISAS_INSN(dbcc)
1486 {
1487     TCGLabel *l1;
1488     TCGv reg;
1489     TCGv tmp;
1490     int16_t offset;
1491     uint32_t base;
1492 
1493     reg = DREG(insn, 0);
1494     base = s->pc;
1495     offset = (int16_t)read_im16(env, s);
1496     l1 = gen_new_label();
1497     gen_jmpcc(s, (insn >> 8) & 0xf, l1);
1498 
1499     tmp = tcg_temp_new();
1500     tcg_gen_ext16s_i32(tmp, reg);
1501     tcg_gen_addi_i32(tmp, tmp, -1);
1502     gen_partset_reg(OS_WORD, reg, tmp);
1503     tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, -1, l1);
1504     gen_jmp_tb(s, 1, base + offset);
1505     gen_set_label(l1);
1506     gen_jmp_tb(s, 0, s->pc);
1507 }
1508 
1509 DISAS_INSN(undef_mac)
1510 {
1511     gen_exception(s, s->insn_pc, EXCP_LINEA);
1512 }
1513 
1514 DISAS_INSN(undef_fpu)
1515 {
1516     gen_exception(s, s->insn_pc, EXCP_LINEF);
1517 }
1518 
1519 DISAS_INSN(undef)
1520 {
1521     /* ??? This covers both instructions that are as yet unimplemented
1522        for the 680x0 series, and those that are implemented
1523        but actually illegal for CPU32 or pre-68020.  */
1524     qemu_log_mask(LOG_UNIMP, "Illegal instruction: %04x @ %08x\n",
1525                   insn, s->insn_pc);
1526     gen_exception(s, s->insn_pc, EXCP_UNSUPPORTED);
1527 }
1528 
1529 DISAS_INSN(mulw)
1530 {
1531     TCGv reg;
1532     TCGv tmp;
1533     TCGv src;
1534     int sign;
1535 
1536     sign = (insn & 0x100) != 0;
1537     reg = DREG(insn, 9);
1538     tmp = tcg_temp_new();
1539     if (sign)
1540         tcg_gen_ext16s_i32(tmp, reg);
1541     else
1542         tcg_gen_ext16u_i32(tmp, reg);
1543     SRC_EA(env, src, OS_WORD, sign, NULL);
1544     tcg_gen_mul_i32(tmp, tmp, src);
1545     tcg_gen_mov_i32(reg, tmp);
1546     gen_logic_cc(s, tmp, OS_LONG);
1547     tcg_temp_free(tmp);
1548 }
1549 
1550 DISAS_INSN(divw)
1551 {
1552     int sign;
1553     TCGv src;
1554     TCGv destr;
1555 
1556     /* divX.w <EA>,Dn    32/16 -> 16r:16q */
1557 
1558     sign = (insn & 0x100) != 0;
1559 
1560     /* dest.l / src.w */
1561 
1562     SRC_EA(env, src, OS_WORD, sign, NULL);
1563     destr = tcg_const_i32(REG(insn, 9));
1564     if (sign) {
1565         gen_helper_divsw(cpu_env, destr, src);
1566     } else {
1567         gen_helper_divuw(cpu_env, destr, src);
1568     }
1569     tcg_temp_free(destr);
1570 
1571     set_cc_op(s, CC_OP_FLAGS);
1572 }
1573 
1574 DISAS_INSN(divl)
1575 {
1576     TCGv num, reg, den;
1577     int sign;
1578     uint16_t ext;
1579 
1580     ext = read_im16(env, s);
1581 
1582     sign = (ext & 0x0800) != 0;
1583 
1584     if (ext & 0x400) {
1585         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
1586             gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
1587             return;
1588         }
1589 
1590         /* divX.l <EA>, Dr:Dq    64/32 -> 32r:32q */
1591 
1592         SRC_EA(env, den, OS_LONG, 0, NULL);
1593         num = tcg_const_i32(REG(ext, 12));
1594         reg = tcg_const_i32(REG(ext, 0));
1595         if (sign) {
1596             gen_helper_divsll(cpu_env, num, reg, den);
1597         } else {
1598             gen_helper_divull(cpu_env, num, reg, den);
1599         }
1600         tcg_temp_free(reg);
1601         tcg_temp_free(num);
1602         set_cc_op(s, CC_OP_FLAGS);
1603         return;
1604     }
1605 
1606     /* divX.l <EA>, Dq        32/32 -> 32q     */
1607     /* divXl.l <EA>, Dr:Dq    32/32 -> 32r:32q */
1608 
1609     SRC_EA(env, den, OS_LONG, 0, NULL);
1610     num = tcg_const_i32(REG(ext, 12));
1611     reg = tcg_const_i32(REG(ext, 0));
1612     if (sign) {
1613         gen_helper_divsl(cpu_env, num, reg, den);
1614     } else {
1615         gen_helper_divul(cpu_env, num, reg, den);
1616     }
1617     tcg_temp_free(reg);
1618     tcg_temp_free(num);
1619 
1620     set_cc_op(s, CC_OP_FLAGS);
1621 }
1622 
1623 static void bcd_add(TCGv dest, TCGv src)
1624 {
1625     TCGv t0, t1;
1626 
1627     /*  dest10 = dest10 + src10 + X
1628      *
1629      *        t1 = src
1630      *        t2 = t1 + 0x066
1631      *        t3 = t2 + dest + X
1632      *        t4 = t2 ^ dest
1633      *        t5 = t3 ^ t4
1634      *        t6 = ~t5 & 0x110
1635      *        t7 = (t6 >> 2) | (t6 >> 3)
1636      *        return t3 - t7
1637      */
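    /* Worked example (with X = 0): dest = 0x19, src = 0x03:
     *        t2 = 0x03 + 0x066 = 0x069
     *        t3 = 0x069 + 0x19 = 0x082
     *        t4 = 0x069 ^ 0x19 = 0x070,  t5 = 0x082 ^ 0x070 = 0x0f2
     *        t6 = ~0x0f2 & 0x110 = 0x100, t7 = (t6 >> 2) | (t6 >> 3) = 0x060
     *        result = 0x082 - 0x060 = 0x022, i.e. BCD 19 + 3 = 22
     */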
1638 
1639     /* t1 = (src + 0x066) + dest + X
1640      *    = result with a possible extra 0x6 in some digits
1641      */
1642 
1643     t0 = tcg_const_i32(0x066);
1644     tcg_gen_add_i32(t0, t0, src);
1645 
1646     t1 = tcg_temp_new();
1647     tcg_gen_add_i32(t1, t0, dest);
1648     tcg_gen_add_i32(t1, t1, QREG_CC_X);
1649 
1650     /* we will remove the extra 0x6 where there is no carry */
1651 
1652     /* t0 = (src + 0x0066) ^ dest
1653      *    = t1 without carries
1654      */
1655 
1656     tcg_gen_xor_i32(t0, t0, dest);
1657 
1658     /* extract the carries
1659      * t0 = t0 ^ t1
1660      *    = only the carries
1661      */
1662 
1663     tcg_gen_xor_i32(t0, t0, t1);
1664 
1665     /* for each digit that did not produce a carry, generate a 0x6
1666      * (carry bits shifted down, inverted, masked to 0x22, then tripled)
1667      */
1668 
1669     tcg_gen_shri_i32(t0, t0, 3);
1670     tcg_gen_not_i32(t0, t0);
1671     tcg_gen_andi_i32(t0, t0, 0x22);
1672     tcg_gen_add_i32(dest, t0, t0);
1673     tcg_gen_add_i32(dest, dest, t0);
1674     tcg_temp_free(t0);
1675 
1676     /* remove the extra 0x6
1677      * for digits that have not generated a carry
1678      */
1679 
1680     tcg_gen_sub_i32(dest, t1, dest);
1681     tcg_temp_free(t1);
1682 }
1683 
1684 static void bcd_sub(TCGv dest, TCGv src)
1685 {
1686     TCGv t0, t1, t2;
1687 
1688     /*  dest10 = dest10 - src10 - X
1689      *         = bcd_add(dest + 1 - X, 0x199 - src)
1690      */
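    /* Worked example (with X = 0): dest = 0x42, src = 0x17:
     *        t0 = 0x1ff - 0x17 = 0x1e8
     *        t1 = 0x1e8 + 0x42 + 1 = 0x22b
     *        t2 = 0x1e8 ^ 0x42 = 0x1aa,  t1 ^ t2 = 0x381
     *        correction = 3 * (~(0x381 >> 3) & 0x22) = 0x006
     *        result = 0x22b - 0x006 = 0x225 -> low byte 0x25, no borrow,
     *        i.e. BCD 42 - 17 = 25
     */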
1691 
1692     /* t0 = 0x066 + (0x199 - src) */
1693 
1694     t0 = tcg_temp_new();
1695     tcg_gen_subfi_i32(t0, 0x1ff, src);
1696 
1697     /* t1 = t0 + dest + 1 - X */
1698 
1699     t1 = tcg_temp_new();
1700     tcg_gen_add_i32(t1, t0, dest);
1701     tcg_gen_addi_i32(t1, t1, 1);
1702     tcg_gen_sub_i32(t1, t1, QREG_CC_X);
1703 
1704     /* t2 = t0 ^ dest */
1705 
1706     t2 = tcg_temp_new();
1707     tcg_gen_xor_i32(t2, t0, dest);
1708 
1709     /* t0 = t1 ^ t2 */
1710 
1711     tcg_gen_xor_i32(t0, t1, t2);
1712 
1713     /* t2 = ~t0 & 0x110
1714      * t0 = (t2 >> 2) | (t2 >> 3)
1715      *
1716      * to fit on 8bit operands, changed in:
1717      * to fit in 8-bit operands, this is rewritten as:
1718      * t2 = ~(t0 >> 3) & 0x22
1719      * t0 = t2 + t2
1720      * t0 = t0 + t2
1721      */
1722 
1723     tcg_gen_shri_i32(t2, t0, 3);
1724     tcg_gen_not_i32(t2, t2);
1725     tcg_gen_andi_i32(t2, t2, 0x22);
1726     tcg_gen_add_i32(t0, t2, t2);
1727     tcg_gen_add_i32(t0, t0, t2);
1728     tcg_temp_free(t2);
1729 
1730     /* return t1 - t0 */
1731 
1732     tcg_gen_sub_i32(dest, t1, t0);
1733     tcg_temp_free(t0);
1734     tcg_temp_free(t1);
1735 }
1736 
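/* Set the flags after a BCD operation: C and X are taken from the carry out
 * of the low byte (bit 8 of VAL), while Z is only ever cleared (when the
 * result byte is non-zero), never set, so that it is sticky across a
 * multi-byte BCD sequence.
 */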
1737 static void bcd_flags(TCGv val)
1738 {
1739     tcg_gen_andi_i32(QREG_CC_C, val, 0x0ff);
1740     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_C);
1741 
1742     tcg_gen_extract_i32(QREG_CC_C, val, 8, 1);
1743 
1744     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
1745 }
1746 
1747 DISAS_INSN(abcd_reg)
1748 {
1749     TCGv src;
1750     TCGv dest;
1751 
1752     gen_flush_flags(s); /* !Z is sticky */
1753 
1754     src = gen_extend(DREG(insn, 0), OS_BYTE, 0);
1755     dest = gen_extend(DREG(insn, 9), OS_BYTE, 0);
1756     bcd_add(dest, src);
1757     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1758 
1759     bcd_flags(dest);
1760 }
1761 
1762 DISAS_INSN(abcd_mem)
1763 {
1764     TCGv src, dest, addr;
1765 
1766     gen_flush_flags(s); /* !Z is sticky */
1767 
1768     /* Indirect pre-decrement load (mode 4) */
1769 
1770     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1771                       NULL_QREG, NULL, EA_LOADU);
1772     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1773                        NULL_QREG, &addr, EA_LOADU);
1774 
1775     bcd_add(dest, src);
1776 
1777     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr, EA_STORE);
1778 
1779     bcd_flags(dest);
1780 }
1781 
1782 DISAS_INSN(sbcd_reg)
1783 {
1784     TCGv src, dest;
1785 
1786     gen_flush_flags(s); /* !Z is sticky */
1787 
1788     src = gen_extend(DREG(insn, 0), OS_BYTE, 0);
1789     dest = gen_extend(DREG(insn, 9), OS_BYTE, 0);
1790 
1791     bcd_sub(dest, src);
1792 
1793     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1794 
1795     bcd_flags(dest);
1796 }
1797 
1798 DISAS_INSN(sbcd_mem)
1799 {
1800     TCGv src, dest, addr;
1801 
1802     gen_flush_flags(s); /* !Z is sticky */
1803 
1804     /* Indirect pre-decrement load (mode 4) */
1805 
1806     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1807                       NULL_QREG, NULL, EA_LOADU);
1808     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1809                        NULL_QREG, &addr, EA_LOADU);
1810 
1811     bcd_sub(dest, src);
1812 
1813     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr, EA_STORE);
1814 
1815     bcd_flags(dest);
1816 }
1817 
1818 DISAS_INSN(nbcd)
1819 {
1820     TCGv src, dest;
1821     TCGv addr;
1822 
1823     gen_flush_flags(s); /* !Z is sticky */
1824 
1825     SRC_EA(env, src, OS_BYTE, 0, &addr);
1826 
1827     dest = tcg_const_i32(0);
1828     bcd_sub(dest, src);
1829 
1830     DEST_EA(env, insn, OS_BYTE, dest, &addr);
1831 
1832     bcd_flags(dest);
1833 
1834     tcg_temp_free(dest);
1835 }
1836 
1837 DISAS_INSN(addsub)
1838 {
1839     TCGv reg;
1840     TCGv dest;
1841     TCGv src;
1842     TCGv tmp;
1843     TCGv addr;
1844     int add;
1845     int opsize;
1846 
1847     add = (insn & 0x4000) != 0;
1848     opsize = insn_opsize(insn);
1849     reg = gen_extend(DREG(insn, 9), opsize, 1);
1850     dest = tcg_temp_new();
1851     if (insn & 0x100) {
1852         SRC_EA(env, tmp, opsize, 1, &addr);
1853         src = reg;
1854     } else {
1855         tmp = reg;
1856         SRC_EA(env, src, opsize, 1, NULL);
1857     }
1858     if (add) {
1859         tcg_gen_add_i32(dest, tmp, src);
1860         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, src);
1861         set_cc_op(s, CC_OP_ADDB + opsize);
1862     } else {
1863         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, tmp, src);
1864         tcg_gen_sub_i32(dest, tmp, src);
1865         set_cc_op(s, CC_OP_SUBB + opsize);
1866     }
1867     gen_update_cc_add(dest, src, opsize);
1868     if (insn & 0x100) {
1869         DEST_EA(env, insn, opsize, dest, &addr);
1870     } else {
1871         gen_partset_reg(opsize, DREG(insn, 9), dest);
1872     }
1873     tcg_temp_free(dest);
1874 }
1875 
1876 /* Reverse the order of the bits in REG.  */
1877 DISAS_INSN(bitrev)
1878 {
1879     TCGv reg;
1880     reg = DREG(insn, 0);
1881     gen_helper_bitrev(reg, reg);
1882 }
1883 
1884 DISAS_INSN(bitop_reg)
1885 {
1886     int opsize;
1887     int op;
1888     TCGv src1;
1889     TCGv src2;
1890     TCGv tmp;
1891     TCGv addr;
1892     TCGv dest;
1893 
1894     if ((insn & 0x38) != 0)
1895         opsize = OS_BYTE;
1896     else
1897         opsize = OS_LONG;
1898     op = (insn >> 6) & 3;
1899     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
1900 
1901     gen_flush_flags(s);
1902     src2 = tcg_temp_new();
1903     if (opsize == OS_BYTE)
1904         tcg_gen_andi_i32(src2, DREG(insn, 9), 7);
1905     else
1906         tcg_gen_andi_i32(src2, DREG(insn, 9), 31);
1907 
1908     tmp = tcg_const_i32(1);
1909     tcg_gen_shl_i32(tmp, tmp, src2);
1910     tcg_temp_free(src2);
1911 
1912     tcg_gen_and_i32(QREG_CC_Z, src1, tmp);
1913 
1914     dest = tcg_temp_new();
1915     switch (op) {
1916     case 1: /* bchg */
1917         tcg_gen_xor_i32(dest, src1, tmp);
1918         break;
1919     case 2: /* bclr */
1920         tcg_gen_andc_i32(dest, src1, tmp);
1921         break;
1922     case 3: /* bset */
1923         tcg_gen_or_i32(dest, src1, tmp);
1924         break;
1925     default: /* btst */
1926         break;
1927     }
1928     tcg_temp_free(tmp);
1929     if (op) {
1930         DEST_EA(env, insn, opsize, dest, &addr);
1931     }
1932     tcg_temp_free(dest);
1933 }
1934 
1935 DISAS_INSN(sats)
1936 {
1937     TCGv reg;
1938     reg = DREG(insn, 0);
1939     gen_flush_flags(s);
1940     gen_helper_sats(reg, reg, QREG_CC_V);
1941     gen_logic_cc(s, reg, OS_LONG);
1942 }
1943 
1944 static void gen_push(DisasContext *s, TCGv val)
1945 {
1946     TCGv tmp;
1947 
1948     tmp = tcg_temp_new();
1949     tcg_gen_subi_i32(tmp, QREG_SP, 4);
1950     gen_store(s, OS_LONG, tmp, val);
1951     tcg_gen_mov_i32(QREG_SP, tmp);
1952     tcg_temp_free(tmp);
1953 }
1954 
1955 static TCGv mreg(int reg)
1956 {
1957     if (reg < 8) {
1958         /* Dx */
1959         return cpu_dregs[reg];
1960     }
1961     /* Ax */
1962     return cpu_aregs[reg & 7];
1963 }
1964 
1965 DISAS_INSN(movem)
1966 {
1967     TCGv addr, incr, tmp, r[16];
1968     int is_load = (insn & 0x0400) != 0;
1969     int opsize = (insn & 0x40) != 0 ? OS_LONG : OS_WORD;
1970     uint16_t mask = read_im16(env, s);
1971     int mode = extract32(insn, 3, 3);
1972     int reg0 = REG(insn, 0);
1973     int i;
1974 
1975     tmp = cpu_aregs[reg0];
1976 
1977     switch (mode) {
1978     case 0: /* data register direct */
1979     case 1: /* addr register direct */
1980     do_addr_fault:
1981         gen_addr_fault(s);
1982         return;
1983 
1984     case 2: /* indirect */
1985         break;
1986 
1987     case 3: /* indirect post-increment */
1988         if (!is_load) {
1989             /* post-increment is not allowed */
1990             goto do_addr_fault;
1991         }
1992         break;
1993 
1994     case 4: /* indirect pre-decrement */
1995         if (is_load) {
1996             /* pre-decrement is not allowed */
1997             goto do_addr_fault;
1998         }
1999         /* We want a bare copy of the address reg, without the pre-decrement
2000            adjustment that gen_lea would apply.  */
2001         break;
2002 
2003     default:
2004         tmp = gen_lea_mode(env, s, mode, reg0, opsize);
2005         if (IS_NULL_QREG(tmp)) {
2006             goto do_addr_fault;
2007         }
2008         break;
2009     }
2010 
2011     addr = tcg_temp_new();
2012     tcg_gen_mov_i32(addr, tmp);
2013     incr = tcg_const_i32(opsize_bytes(opsize));
2014 
2015     if (is_load) {
2016         /* memory to register */
2017         for (i = 0; i < 16; i++) {
2018             if (mask & (1 << i)) {
2019                 r[i] = gen_load(s, opsize, addr, 1);
2020                 tcg_gen_add_i32(addr, addr, incr);
2021             }
2022         }
2023         for (i = 0; i < 16; i++) {
2024             if (mask & (1 << i)) {
2025                 tcg_gen_mov_i32(mreg(i), r[i]);
2026                 tcg_temp_free(r[i]);
2027             }
2028         }
2029         if (mode == 3) {
2030             /* post-increment: movem (An)+,X */
2031             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2032         }
2033     } else {
2034         /* register to memory */
2035         if (mode == 4) {
2036             /* pre-decrement: movem X,-(An) */
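            /* In pre-decrement mode the register list mask is reversed:
             * bit 15 selects D0 and bit 0 selects A7, hence the
             * (mask << i) & 0x8000 test below.
             */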
2037             for (i = 15; i >= 0; i--) {
2038                 if ((mask << i) & 0x8000) {
2039                     tcg_gen_sub_i32(addr, addr, incr);
2040                     if (reg0 + 8 == i &&
2041                         m68k_feature(s->env, M68K_FEATURE_EXT_FULL)) {
2042                         /* M68020+: if the addressing register is the
2043                          * register moved to memory, the value written
2044                          * is the initial value decremented by the size of
2045                          * the operation, regardless of how many actual
2046                          * stores have been performed until this point.
2047                          * M68000/M68010: the value is the initial value.
2048                          */
2049                         tmp = tcg_temp_new();
2050                         tcg_gen_sub_i32(tmp, cpu_aregs[reg0], incr);
2051                         gen_store(s, opsize, addr, tmp);
2052                         tcg_temp_free(tmp);
2053                     } else {
2054                         gen_store(s, opsize, addr, mreg(i));
2055                     }
2056                 }
2057             }
2058             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2059         } else {
2060             for (i = 0; i < 16; i++) {
2061                 if (mask & (1 << i)) {
2062                     gen_store(s, opsize, addr, mreg(i));
2063                     tcg_gen_add_i32(addr, addr, incr);
2064                 }
2065             }
2066         }
2067     }
2068 
2069     tcg_temp_free(incr);
2070     tcg_temp_free(addr);
2071 }
2072 
2073 DISAS_INSN(bitop_im)
2074 {
2075     int opsize;
2076     int op;
2077     TCGv src1;
2078     uint32_t mask;
2079     int bitnum;
2080     TCGv tmp;
2081     TCGv addr;
2082 
2083     if ((insn & 0x38) != 0)
2084         opsize = OS_BYTE;
2085     else
2086         opsize = OS_LONG;
2087     op = (insn >> 6) & 3;
2088 
2089     bitnum = read_im16(env, s);
2090     if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
2091         if (bitnum & 0xfe00) {
2092             disas_undef(env, s, insn);
2093             return;
2094         }
2095     } else {
2096         if (bitnum & 0xff00) {
2097             disas_undef(env, s, insn);
2098             return;
2099         }
2100     }
2101 
2102     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
2103 
2104     gen_flush_flags(s);
2105     if (opsize == OS_BYTE)
2106         bitnum &= 7;
2107     else
2108         bitnum &= 31;
2109     mask = 1 << bitnum;
2110 
2111     tcg_gen_andi_i32(QREG_CC_Z, src1, mask);
2112 
2113     if (op) {
2114         tmp = tcg_temp_new();
2115         switch (op) {
2116         case 1: /* bchg */
2117             tcg_gen_xori_i32(tmp, src1, mask);
2118             break;
2119         case 2: /* bclr */
2120             tcg_gen_andi_i32(tmp, src1, ~mask);
2121             break;
2122         case 3: /* bset */
2123             tcg_gen_ori_i32(tmp, src1, mask);
2124             break;
2125         default: /* btst */
2126             break;
2127         }
2128         DEST_EA(env, insn, opsize, tmp, &addr);
2129         tcg_temp_free(tmp);
2130     }
2131 }
2132 
2133 static TCGv gen_get_ccr(DisasContext *s)
2134 {
2135     TCGv dest;
2136 
2137     update_cc_op(s);
2138     dest = tcg_temp_new();
2139     gen_helper_get_ccr(dest, cpu_env);
2140     return dest;
2141 }
2142 
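/* Build the full SR value: the upper (non-CCR) bits held in QREG_SR
 * (mask 0xffe0) or'd with the CCR recomputed from the lazily evaluated
 * condition code state.
 */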
2143 static TCGv gen_get_sr(DisasContext *s)
2144 {
2145     TCGv ccr;
2146     TCGv sr;
2147 
2148     ccr = gen_get_ccr(s);
2149     sr = tcg_temp_new();
2150     tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
2151     tcg_gen_or_i32(sr, sr, ccr);
2152     return sr;
2153 }
2154 
2155 static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
2156 {
2157     if (ccr_only) {
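        /* Internal flag encoding for CC_OP_FLAGS: C and X are 0 or 1,
         * N and V are negative when the flag is set, and Z is zero
         * exactly when the Z flag is set.
         */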
2158         tcg_gen_movi_i32(QREG_CC_C, val & CCF_C ? 1 : 0);
2159         tcg_gen_movi_i32(QREG_CC_V, val & CCF_V ? -1 : 0);
2160         tcg_gen_movi_i32(QREG_CC_Z, val & CCF_Z ? 0 : 1);
2161         tcg_gen_movi_i32(QREG_CC_N, val & CCF_N ? -1 : 0);
2162         tcg_gen_movi_i32(QREG_CC_X, val & CCF_X ? 1 : 0);
2163     } else {
2164         TCGv sr = tcg_const_i32(val);
2165         gen_helper_set_sr(cpu_env, sr);
2166         tcg_temp_free(sr);
2167     }
2168     set_cc_op(s, CC_OP_FLAGS);
2169 }
2170 
2171 static void gen_set_sr(DisasContext *s, TCGv val, int ccr_only)
2172 {
2173     if (ccr_only) {
2174         gen_helper_set_ccr(cpu_env, val);
2175     } else {
2176         gen_helper_set_sr(cpu_env, val);
2177     }
2178     set_cc_op(s, CC_OP_FLAGS);
2179 }
2180 
2181 static void gen_move_to_sr(CPUM68KState *env, DisasContext *s, uint16_t insn,
2182                            bool ccr_only)
2183 {
2184     if ((insn & 0x3f) == 0x3c) {
2185         uint16_t val;
2186         val = read_im16(env, s);
2187         gen_set_sr_im(s, val, ccr_only);
2188     } else {
2189         TCGv src;
2190         SRC_EA(env, src, OS_WORD, 0, NULL);
2191         gen_set_sr(s, src, ccr_only);
2192     }
2193 }
2194 
2195 DISAS_INSN(arith_im)
2196 {
2197     int op;
2198     TCGv im;
2199     TCGv src1;
2200     TCGv dest;
2201     TCGv addr;
2202     int opsize;
2203     bool with_SR = ((insn & 0x3f) == 0x3c);
2204 
2205     op = (insn >> 9) & 7;
2206     opsize = insn_opsize(insn);
2207     switch (opsize) {
2208     case OS_BYTE:
2209         im = tcg_const_i32((int8_t)read_im8(env, s));
2210         break;
2211     case OS_WORD:
2212         im = tcg_const_i32((int16_t)read_im16(env, s));
2213         break;
2214     case OS_LONG:
2215         im = tcg_const_i32(read_im32(env, s));
2216         break;
2217     default:
2218         abort();
2219     }
2220 
2221     if (with_SR) {
2222         /* SR/CCR can only be used with andi/eori/ori */
2223         if (op == 2 || op == 3 || op == 6) {
2224             disas_undef(env, s, insn);
2225             return;
2226         }
2227         switch (opsize) {
2228         case OS_BYTE:
2229             src1 = gen_get_ccr(s);
2230             break;
2231         case OS_WORD:
2232             if (IS_USER(s)) {
2233                 gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
2234                 return;
2235             }
2236             src1 = gen_get_sr(s);
2237             break;
2238         case OS_LONG:
2239             disas_undef(env, s, insn);
2240             return;
2241         }
2242     } else {
2243         SRC_EA(env, src1, opsize, 1, (op == 6) ? NULL : &addr);
2244     }
2245     dest = tcg_temp_new();
2246     switch (op) {
2247     case 0: /* ori */
2248         tcg_gen_or_i32(dest, src1, im);
2249         if (with_SR) {
2250             gen_set_sr(s, dest, opsize == OS_BYTE);
2251         } else {
2252             DEST_EA(env, insn, opsize, dest, &addr);
2253             gen_logic_cc(s, dest, opsize);
2254         }
2255         break;
2256     case 1: /* andi */
2257         tcg_gen_and_i32(dest, src1, im);
2258         if (with_SR) {
2259             gen_set_sr(s, dest, opsize == OS_BYTE);
2260         } else {
2261             DEST_EA(env, insn, opsize, dest, &addr);
2262             gen_logic_cc(s, dest, opsize);
2263         }
2264         break;
2265     case 2: /* subi */
2266         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, src1, im);
2267         tcg_gen_sub_i32(dest, src1, im);
2268         gen_update_cc_add(dest, im, opsize);
2269         set_cc_op(s, CC_OP_SUBB + opsize);
2270         DEST_EA(env, insn, opsize, dest, &addr);
2271         break;
2272     case 3: /* addi */
2273         tcg_gen_add_i32(dest, src1, im);
2274         gen_update_cc_add(dest, im, opsize);
2275         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, im);
2276         set_cc_op(s, CC_OP_ADDB + opsize);
2277         DEST_EA(env, insn, opsize, dest, &addr);
2278         break;
2279     case 5: /* eori */
2280         tcg_gen_xor_i32(dest, src1, im);
2281         if (with_SR) {
2282             gen_set_sr(s, dest, opsize == OS_BYTE);
2283         } else {
2284             DEST_EA(env, insn, opsize, dest, &addr);
2285             gen_logic_cc(s, dest, opsize);
2286         }
2287         break;
2288     case 6: /* cmpi */
2289         gen_update_cc_cmp(s, src1, im, opsize);
2290         break;
2291     default:
2292         abort();
2293     }
2294     tcg_temp_free(im);
2295     tcg_temp_free(dest);
2296 }
2297 
2298 DISAS_INSN(cas)
2299 {
2300     int opsize;
2301     TCGv addr;
2302     uint16_t ext;
2303     TCGv load;
2304     TCGv cmp;
2305     TCGMemOp opc;
2306 
2307     switch ((insn >> 9) & 3) {
2308     case 1:
2309         opsize = OS_BYTE;
2310         opc = MO_SB;
2311         break;
2312     case 2:
2313         opsize = OS_WORD;
2314         opc = MO_TESW;
2315         break;
2316     case 3:
2317         opsize = OS_LONG;
2318         opc = MO_TESL;
2319         break;
2320     default:
2321         g_assert_not_reached();
2322     }
2323 
2324     ext = read_im16(env, s);
2325 
2326     /* cas Dc,Du,<EA> */
2327 
2328     addr = gen_lea(env, s, insn, opsize);
2329     if (IS_NULL_QREG(addr)) {
2330         gen_addr_fault(s);
2331         return;
2332     }
2333 
2334     cmp = gen_extend(DREG(ext, 0), opsize, 1);
2335 
2336     /* if  <EA> == Dc then
2337      *     <EA> = Du
2338      *     Dc = <EA> (because <EA> == Dc)
2339      * else
2340      *     Dc = <EA>
2341      */
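    /* The atomic cmpxchg below returns the original memory operand in
     * 'load', so both the CMP-style flag update and the write-back to Dc
     * are correct whether or not the store actually happened.
     */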
2342 
2343     load = tcg_temp_new();
2344     tcg_gen_atomic_cmpxchg_i32(load, addr, cmp, DREG(ext, 6),
2345                                IS_USER(s), opc);
2346     /* update the flags before Dc is overwritten with the loaded value */
2347     gen_update_cc_cmp(s, load, cmp, opsize);
2348     gen_partset_reg(opsize, DREG(ext, 0), load);
2349 
2350     tcg_temp_free(load);
2351 
2352     switch (extract32(insn, 3, 3)) {
2353     case 3: /* Indirect postincrement.  */
2354         tcg_gen_addi_i32(AREG(insn, 0), addr, opsize_bytes(opsize));
2355         break;
2356     case 4: /* Indirect predecrement.  */
2357         tcg_gen_mov_i32(AREG(insn, 0), addr);
2358         break;
2359     }
2360 }
2361 
2362 DISAS_INSN(cas2w)
2363 {
2364     uint16_t ext1, ext2;
2365     TCGv addr1, addr2;
2366     TCGv regs;
2367 
2368     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2369 
2370     ext1 = read_im16(env, s);
2371 
2372     if (ext1 & 0x8000) {
2373         /* Address Register */
2374         addr1 = AREG(ext1, 12);
2375     } else {
2376         /* Data Register */
2377         addr1 = DREG(ext1, 12);
2378     }
2379 
2380     ext2 = read_im16(env, s);
2381     if (ext2 & 0x8000) {
2382         /* Address Register */
2383         addr2 = AREG(ext2, 12);
2384     } else {
2385         /* Data Register */
2386         addr2 = DREG(ext2, 12);
2387     }
2388 
2389     /* if (R1) == Dc1 && (R2) == Dc2 then
2390      *     (R1) = Du1
2391      *     (R2) = Du2
2392      * else
2393      *     Dc1 = (R1)
2394      *     Dc2 = (R2)
2395      */
2396 
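    /* Pack the four data register numbers, three bits each, for the
     * helper: Du2, Du1, Dc2, Dc1 from least to most significant (the
     * extension words encode Dc in bits 0-2 and Du in bits 6-8, as in
     * CAS above).
     */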
2397     regs = tcg_const_i32(REG(ext2, 6) |
2398                          (REG(ext1, 6) << 3) |
2399                          (REG(ext2, 0) << 6) |
2400                          (REG(ext1, 0) << 9));
2401     if (tb_cflags(s->tb) & CF_PARALLEL) {
2402         gen_helper_exit_atomic(cpu_env);
2403     } else {
2404         gen_helper_cas2w(cpu_env, regs, addr1, addr2);
2405     }
2406     tcg_temp_free(regs);
2407 
2408     /* Note that cas2w has also assigned to env->cc_op.  */
2409     s->cc_op = CC_OP_CMPW;
2410     s->cc_op_synced = 1;
2411 }
2412 
2413 DISAS_INSN(cas2l)
2414 {
2415     uint16_t ext1, ext2;
2416     TCGv addr1, addr2, regs;
2417 
2418     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2419 
2420     ext1 = read_im16(env, s);
2421 
2422     if (ext1 & 0x8000) {
2423         /* Address Register */
2424         addr1 = AREG(ext1, 12);
2425     } else {
2426         /* Data Register */
2427         addr1 = DREG(ext1, 12);
2428     }
2429 
2430     ext2 = read_im16(env, s);
2431     if (ext2 & 0x8000) {
2432         /* Address Register */
2433         addr2 = AREG(ext2, 12);
2434     } else {
2435         /* Data Register */
2436         addr2 = DREG(ext2, 12);
2437     }
2438 
2439     /* if (R1) == Dc1 && (R2) == Dc2 then
2440      *     (R1) = Du1
2441      *     (R2) = Du2
2442      * else
2443      *     Dc1 = (R1)
2444      *     Dc2 = (R2)
2445      */
2446 
2447     regs = tcg_const_i32(REG(ext2, 6) |
2448                          (REG(ext1, 6) << 3) |
2449                          (REG(ext2, 0) << 6) |
2450                          (REG(ext1, 0) << 9));
2451     if (tb_cflags(s->tb) & CF_PARALLEL) {
2452         gen_helper_cas2l_parallel(cpu_env, regs, addr1, addr2);
2453     } else {
2454         gen_helper_cas2l(cpu_env, regs, addr1, addr2);
2455     }
2456     tcg_temp_free(regs);
2457 
2458     /* Note that cas2l has also assigned to env->cc_op.  */
2459     s->cc_op = CC_OP_CMPL;
2460     s->cc_op_synced = 1;
2461 }
2462 
2463 DISAS_INSN(byterev)
2464 {
2465     TCGv reg;
2466 
2467     reg = DREG(insn, 0);
2468     tcg_gen_bswap32_i32(reg, reg);
2469 }
2470 
2471 DISAS_INSN(move)
2472 {
2473     TCGv src;
2474     TCGv dest;
2475     int op;
2476     int opsize;
2477 
2478     switch (insn >> 12) {
2479     case 1: /* move.b */
2480         opsize = OS_BYTE;
2481         break;
2482     case 2: /* move.l */
2483         opsize = OS_LONG;
2484         break;
2485     case 3: /* move.w */
2486         opsize = OS_WORD;
2487         break;
2488     default:
2489         abort();
2490     }
2491     SRC_EA(env, src, opsize, 1, NULL);
2492     op = (insn >> 6) & 7;
2493     if (op == 1) {
2494         /* movea */
2495         /* The value will already have been sign extended.  */
2496         dest = AREG(insn, 9);
2497         tcg_gen_mov_i32(dest, src);
2498     } else {
2499         /* normal move */
2500         uint16_t dest_ea;
2501         dest_ea = ((insn >> 9) & 7) | (op << 3);
2502         DEST_EA(env, dest_ea, opsize, src, NULL);
2503         /* This will be correct because loads sign extend.  */
2504         gen_logic_cc(s, src, opsize);
2505     }
2506 }
2507 
2508 DISAS_INSN(negx)
2509 {
2510     TCGv z;
2511     TCGv src;
2512     TCGv addr;
2513     int opsize;
2514 
2515     opsize = insn_opsize(insn);
2516     SRC_EA(env, src, opsize, 1, &addr);
2517 
2518     gen_flush_flags(s); /* compute old Z */
2519 
2520     /* Perform subtract with borrow.
2521      * (X, N) = -(src + X);
2522      */
2523 
2524     z = tcg_const_i32(0);
2525     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, z, QREG_CC_X, z);
2526     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, z, z, QREG_CC_N, QREG_CC_X);
2527     tcg_temp_free(z);
2528     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
2529 
2530     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
2531 
2532     /* Compute signed-overflow for negation.  The normal formula for
2533      * subtraction is (res ^ src) & (src ^ dest), but with dest==0
2534      * this simplifies to res & src.
2535      */
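    /* Overflow can only happen when negating the most negative value of
     * the operand size (e.g. 0x80 for OS_BYTE); in that case res == src
     * and the sign bit of res & src is set.
     */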
2536 
2537     tcg_gen_and_i32(QREG_CC_V, QREG_CC_N, src);
2538 
2539     /* Copy the rest of the results into place.  */
2540     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
2541     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
2542 
2543     set_cc_op(s, CC_OP_FLAGS);
2544 
2545     /* result is in QREG_CC_N */
2546 
2547     DEST_EA(env, insn, opsize, QREG_CC_N, &addr);
2548 }
2549 
2550 DISAS_INSN(lea)
2551 {
2552     TCGv reg;
2553     TCGv tmp;
2554 
2555     reg = AREG(insn, 9);
2556     tmp = gen_lea(env, s, insn, OS_LONG);
2557     if (IS_NULL_QREG(tmp)) {
2558         gen_addr_fault(s);
2559         return;
2560     }
2561     tcg_gen_mov_i32(reg, tmp);
2562 }
2563 
2564 DISAS_INSN(clr)
2565 {
2566     int opsize;
2567     TCGv zero;
2568 
2569     zero = tcg_const_i32(0);
2570 
2571     opsize = insn_opsize(insn);
2572     DEST_EA(env, insn, opsize, zero, NULL);
2573     gen_logic_cc(s, zero, opsize);
2574     tcg_temp_free(zero);
2575 }
2576 
2577 DISAS_INSN(move_from_ccr)
2578 {
2579     TCGv ccr;
2580 
2581     ccr = gen_get_ccr(s);
2582     DEST_EA(env, insn, OS_WORD, ccr, NULL);
2583 }
2584 
2585 DISAS_INSN(neg)
2586 {
2587     TCGv src1;
2588     TCGv dest;
2589     TCGv addr;
2590     int opsize;
2591 
2592     opsize = insn_opsize(insn);
2593     SRC_EA(env, src1, opsize, 1, &addr);
2594     dest = tcg_temp_new();
2595     tcg_gen_neg_i32(dest, src1);
2596     set_cc_op(s, CC_OP_SUBB + opsize);
2597     gen_update_cc_add(dest, src1, opsize);
2598     tcg_gen_setcondi_i32(TCG_COND_NE, QREG_CC_X, dest, 0);
2599     DEST_EA(env, insn, opsize, dest, &addr);
2600     tcg_temp_free(dest);
2601 }
2602 
2603 DISAS_INSN(move_to_ccr)
2604 {
2605     gen_move_to_sr(env, s, insn, true);
2606 }
2607 
2608 DISAS_INSN(not)
2609 {
2610     TCGv src1;
2611     TCGv dest;
2612     TCGv addr;
2613     int opsize;
2614 
2615     opsize = insn_opsize(insn);
2616     SRC_EA(env, src1, opsize, 1, &addr);
2617     dest = tcg_temp_new();
2618     tcg_gen_not_i32(dest, src1);
2619     DEST_EA(env, insn, opsize, dest, &addr);
2620     gen_logic_cc(s, dest, opsize);
2621 }
2622 
2623 DISAS_INSN(swap)
2624 {
2625     TCGv src1;
2626     TCGv src2;
2627     TCGv reg;
2628 
2629     src1 = tcg_temp_new();
2630     src2 = tcg_temp_new();
2631     reg = DREG(insn, 0);
2632     tcg_gen_shli_i32(src1, reg, 16);
2633     tcg_gen_shri_i32(src2, reg, 16);
2634     tcg_gen_or_i32(reg, src1, src2);
2635     tcg_temp_free(src2);
2636     tcg_temp_free(src1);
2637     gen_logic_cc(s, reg, OS_LONG);
2638 }
2639 
2640 DISAS_INSN(bkpt)
2641 {
2642     gen_exception(s, s->insn_pc, EXCP_DEBUG);
2643 }
2644 
2645 DISAS_INSN(pea)
2646 {
2647     TCGv tmp;
2648 
2649     tmp = gen_lea(env, s, insn, OS_LONG);
2650     if (IS_NULL_QREG(tmp)) {
2651         gen_addr_fault(s);
2652         return;
2653     }
2654     gen_push(s, tmp);
2655 }
2656 
2657 DISAS_INSN(ext)
2658 {
2659     int op;
2660     TCGv reg;
2661     TCGv tmp;
2662 
2663     reg = DREG(insn, 0);
2664     op = (insn >> 6) & 7;
2665     tmp = tcg_temp_new();
2666     if (op == 3)
2667         tcg_gen_ext16s_i32(tmp, reg);
2668     else
2669         tcg_gen_ext8s_i32(tmp, reg);
2670     if (op == 2)
2671         gen_partset_reg(OS_WORD, reg, tmp);
2672     else
2673         tcg_gen_mov_i32(reg, tmp);
2674     gen_logic_cc(s, tmp, OS_LONG);
2675     tcg_temp_free(tmp);
2676 }
2677 
2678 DISAS_INSN(tst)
2679 {
2680     int opsize;
2681     TCGv tmp;
2682 
2683     opsize = insn_opsize(insn);
2684     SRC_EA(env, tmp, opsize, 1, NULL);
2685     gen_logic_cc(s, tmp, opsize);
2686 }
2687 
2688 DISAS_INSN(pulse)
2689 {
2690     /* Implemented as a NOP.  */
2691 }
2692 
2693 DISAS_INSN(illegal)
2694 {
2695     gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
2696 }
2697 
2698 /* ??? This should be atomic.  */
2699 DISAS_INSN(tas)
2700 {
2701     TCGv dest;
2702     TCGv src1;
2703     TCGv addr;
2704 
2705     dest = tcg_temp_new();
2706     SRC_EA(env, src1, OS_BYTE, 1, &addr);
2707     gen_logic_cc(s, src1, OS_BYTE);
2708     tcg_gen_ori_i32(dest, src1, 0x80);
2709     DEST_EA(env, insn, OS_BYTE, dest, &addr);
2710     tcg_temp_free(dest);
2711 }
2712 
2713 DISAS_INSN(mull)
2714 {
2715     uint16_t ext;
2716     TCGv src1;
2717     int sign;
2718 
2719     ext = read_im16(env, s);
2720 
2721     sign = ext & 0x800;
2722 
2723     if (ext & 0x400) {
2724         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
2725             gen_exception(s, s->insn_pc, EXCP_UNSUPPORTED);
2726             return;
2727         }
2728 
2729         SRC_EA(env, src1, OS_LONG, 0, NULL);
2730 
2731         if (sign) {
2732             tcg_gen_muls2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2733         } else {
2734             tcg_gen_mulu2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2735         }
2736         /* if Dl == Dh, 68040 returns low word */
2737         tcg_gen_mov_i32(DREG(ext, 0), QREG_CC_N);
2738         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_Z);
2739         tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N);
2740 
2741         tcg_gen_movi_i32(QREG_CC_V, 0);
2742         tcg_gen_movi_i32(QREG_CC_C, 0);
2743 
2744         set_cc_op(s, CC_OP_FLAGS);
2745         return;
2746     }
2747     SRC_EA(env, src1, OS_LONG, 0, NULL);
2748     if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
2749         tcg_gen_movi_i32(QREG_CC_C, 0);
2750         if (sign) {
2751             tcg_gen_muls2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2752             /* QREG_CC_V is -(QREG_CC_V != (QREG_CC_N >> 31)) */
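            /* i.e. V is set iff the 64-bit product does not fit in 32
             * bits: the high half must equal the sign-extension of the
             * low half.
             */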
2753             tcg_gen_sari_i32(QREG_CC_Z, QREG_CC_N, 31);
2754             tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_Z);
2755         } else {
2756             tcg_gen_mulu2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2757             /* QREG_CC_V is -(QREG_CC_V != 0), use QREG_CC_C as 0 */
2758             tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_C);
2759         }
2760         tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
2761         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_N);
2762 
2763         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
2764 
2765         set_cc_op(s, CC_OP_FLAGS);
2766     } else {
2767         /* The upper 32 bits of the product are discarded, so
2768            muls.l and mulu.l are functionally equivalent.  */
2769         tcg_gen_mul_i32(DREG(ext, 12), src1, DREG(ext, 12));
2770         gen_logic_cc(s, DREG(ext, 12), OS_LONG);
2771     }
2772 }
2773 
2774 static void gen_link(DisasContext *s, uint16_t insn, int32_t offset)
2775 {
2776     TCGv reg;
2777     TCGv tmp;
2778 
2779     reg = AREG(insn, 0);
2780     tmp = tcg_temp_new();
2781     tcg_gen_subi_i32(tmp, QREG_SP, 4);
2782     gen_store(s, OS_LONG, tmp, reg);
2783     if ((insn & 7) != 7) {
2784         tcg_gen_mov_i32(reg, tmp);
2785     }
2786     tcg_gen_addi_i32(QREG_SP, tmp, offset);
2787     tcg_temp_free(tmp);
2788 }
2789 
2790 DISAS_INSN(link)
2791 {
2792     int16_t offset;
2793 
2794     offset = read_im16(env, s);
2795     gen_link(s, insn, offset);
2796 }
2797 
2798 DISAS_INSN(linkl)
2799 {
2800     int32_t offset;
2801 
2802     offset = read_im32(env, s);
2803     gen_link(s, insn, offset);
2804 }
2805 
2806 DISAS_INSN(unlk)
2807 {
2808     TCGv src;
2809     TCGv reg;
2810     TCGv tmp;
2811 
2812     src = tcg_temp_new();
2813     reg = AREG(insn, 0);
2814     tcg_gen_mov_i32(src, reg);
2815     tmp = gen_load(s, OS_LONG, src, 0);
2816     tcg_gen_mov_i32(reg, tmp);
2817     tcg_gen_addi_i32(QREG_SP, src, 4);
2818     tcg_temp_free(src);
2819 }
2820 
2821 #if defined(CONFIG_SOFTMMU)
2822 DISAS_INSN(reset)
2823 {
2824     if (IS_USER(s)) {
2825         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
2826         return;
2827     }
2828 
2829     gen_helper_reset(cpu_env);
2830 }
2831 #endif
2832 
2833 DISAS_INSN(nop)
2834 {
2835 }
2836 
2837 DISAS_INSN(rtd)
2838 {
2839     TCGv tmp;
2840     int16_t offset = read_im16(env, s);
2841 
2842     tmp = gen_load(s, OS_LONG, QREG_SP, 0);
2843     tcg_gen_addi_i32(QREG_SP, QREG_SP, offset + 4);
2844     gen_jmp(s, tmp);
2845 }
2846 
2847 DISAS_INSN(rts)
2848 {
2849     TCGv tmp;
2850 
2851     tmp = gen_load(s, OS_LONG, QREG_SP, 0);
2852     tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
2853     gen_jmp(s, tmp);
2854 }
2855 
2856 DISAS_INSN(jump)
2857 {
2858     TCGv tmp;
2859 
2860     /* Load the target address first to ensure correct exception
2861        behavior.  */
2862     tmp = gen_lea(env, s, insn, OS_LONG);
2863     if (IS_NULL_QREG(tmp)) {
2864         gen_addr_fault(s);
2865         return;
2866     }
2867     if ((insn & 0x40) == 0) {
2868         /* jsr */
2869         gen_push(s, tcg_const_i32(s->pc));
2870     }
2871     gen_jmp(s, tmp);
2872 }
2873 
2874 DISAS_INSN(addsubq)
2875 {
2876     TCGv src;
2877     TCGv dest;
2878     TCGv val;
2879     int imm;
2880     TCGv addr;
2881     int opsize;
2882 
2883     if ((insn & 070) == 010) {
2884         /* Operation on address register is always long.  */
2885         opsize = OS_LONG;
2886     } else {
2887         opsize = insn_opsize(insn);
2888     }
2889     SRC_EA(env, src, opsize, 1, &addr);
2890     imm = (insn >> 9) & 7;
2891     if (imm == 0) {
2892         imm = 8;
2893     }
2894     val = tcg_const_i32(imm);
2895     dest = tcg_temp_new();
2896     tcg_gen_mov_i32(dest, src);
2897     if ((insn & 0x38) == 0x08) {
2898         /* Don't update condition codes if the destination is an
2899            address register.  */
2900         if (insn & 0x0100) {
2901             tcg_gen_sub_i32(dest, dest, val);
2902         } else {
2903             tcg_gen_add_i32(dest, dest, val);
2904         }
2905     } else {
2906         if (insn & 0x0100) {
2907             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
2908             tcg_gen_sub_i32(dest, dest, val);
2909             set_cc_op(s, CC_OP_SUBB + opsize);
2910         } else {
2911             tcg_gen_add_i32(dest, dest, val);
2912             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
2913             set_cc_op(s, CC_OP_ADDB + opsize);
2914         }
2915         gen_update_cc_add(dest, val, opsize);
2916     }
2917     tcg_temp_free(val);
2918     DEST_EA(env, insn, opsize, dest, &addr);
2919     tcg_temp_free(dest);
2920 }
2921 
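/* TPF (ColdFire trapf) never traps; it is a no-op apart from consuming
 * the optional extension word(s) selected by the low bits of the opcode.
 */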
2922 DISAS_INSN(tpf)
2923 {
2924     switch (insn & 7) {
2925     case 2: /* One extension word.  */
2926         s->pc += 2;
2927         break;
2928     case 3: /* Two extension words.  */
2929         s->pc += 4;
2930         break;
2931     case 4: /* No extension words.  */
2932         break;
2933     default:
2934         disas_undef(env, s, insn);
2935     }
2936 }
2937 
2938 DISAS_INSN(branch)
2939 {
2940     int32_t offset;
2941     uint32_t base;
2942     int op;
2943     TCGLabel *l1;
2944 
2945     base = s->pc;
2946     op = (insn >> 8) & 0xf;
2947     offset = (int8_t)insn;
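    /* An 8-bit displacement of 0x00 means a 16-bit displacement word
     * follows; 0xff means a 32-bit displacement follows.
     */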
2948     if (offset == 0) {
2949         offset = (int16_t)read_im16(env, s);
2950     } else if (offset == -1) {
2951         offset = read_im32(env, s);
2952     }
2953     if (op == 1) {
2954         /* bsr */
2955         gen_push(s, tcg_const_i32(s->pc));
2956     }
2957     if (op > 1) {
2958         /* Bcc */
2959         l1 = gen_new_label();
2960         gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
2961         gen_jmp_tb(s, 1, base + offset);
2962         gen_set_label(l1);
2963         gen_jmp_tb(s, 0, s->pc);
2964     } else {
2965         /* Unconditional branch.  */
2966         update_cc_op(s);
2967         gen_jmp_tb(s, 0, base + offset);
2968     }
2969 }
2970 
2971 DISAS_INSN(moveq)
2972 {
2973     tcg_gen_movi_i32(DREG(insn, 9), (int8_t)insn);
2974     gen_logic_cc(s, DREG(insn, 9), OS_LONG);
2975 }
2976 
2977 DISAS_INSN(mvzs)
2978 {
2979     int opsize;
2980     TCGv src;
2981     TCGv reg;
2982 
2983     if (insn & 0x40)
2984         opsize = OS_WORD;
2985     else
2986         opsize = OS_BYTE;
2987     SRC_EA(env, src, opsize, (insn & 0x80) == 0, NULL);
2988     reg = DREG(insn, 9);
2989     tcg_gen_mov_i32(reg, src);
2990     gen_logic_cc(s, src, opsize);
2991 }
2992 
2993 DISAS_INSN(or)
2994 {
2995     TCGv reg;
2996     TCGv dest;
2997     TCGv src;
2998     TCGv addr;
2999     int opsize;
3000 
3001     opsize = insn_opsize(insn);
3002     reg = gen_extend(DREG(insn, 9), opsize, 0);
3003     dest = tcg_temp_new();
3004     if (insn & 0x100) {
3005         SRC_EA(env, src, opsize, 0, &addr);
3006         tcg_gen_or_i32(dest, src, reg);
3007         DEST_EA(env, insn, opsize, dest, &addr);
3008     } else {
3009         SRC_EA(env, src, opsize, 0, NULL);
3010         tcg_gen_or_i32(dest, src, reg);
3011         gen_partset_reg(opsize, DREG(insn, 9), dest);
3012     }
3013     gen_logic_cc(s, dest, opsize);
3014     tcg_temp_free(dest);
3015 }
3016 
3017 DISAS_INSN(suba)
3018 {
3019     TCGv src;
3020     TCGv reg;
3021 
3022     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3023     reg = AREG(insn, 9);
3024     tcg_gen_sub_i32(reg, reg, src);
3025 }
3026 
3027 static inline void gen_subx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3028 {
3029     TCGv tmp;
3030 
3031     gen_flush_flags(s); /* compute old Z */
3032 
3033     /* Perform subtract with borrow.
3034      * (X, N) = dest - (src + X);
3035      */
3036 
3037     tmp = tcg_const_i32(0);
3038     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, tmp, QREG_CC_X, tmp);
3039     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, dest, tmp, QREG_CC_N, QREG_CC_X);
3040     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3041     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
3042 
3043     /* Compute signed-overflow for subtraction.  */
3044 
3045     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, dest);
3046     tcg_gen_xor_i32(tmp, dest, src);
3047     tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, tmp);
3048     tcg_temp_free(tmp);
3049 
3050     /* Copy the rest of the results into place.  */
3051     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3052     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3053 
3054     set_cc_op(s, CC_OP_FLAGS);
3055 
3056     /* result is in QREG_CC_N */
3057 }
3058 
3059 DISAS_INSN(subx_reg)
3060 {
3061     TCGv dest;
3062     TCGv src;
3063     int opsize;
3064 
3065     opsize = insn_opsize(insn);
3066 
3067     src = gen_extend(DREG(insn, 0), opsize, 1);
3068     dest = gen_extend(DREG(insn, 9), opsize, 1);
3069 
3070     gen_subx(s, src, dest, opsize);
3071 
3072     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3073 }
3074 
3075 DISAS_INSN(subx_mem)
3076 {
3077     TCGv src;
3078     TCGv addr_src;
3079     TCGv dest;
3080     TCGv addr_dest;
3081     int opsize;
3082 
3083     opsize = insn_opsize(insn);
3084 
3085     addr_src = AREG(insn, 0);
3086     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3087     src = gen_load(s, opsize, addr_src, 1);
3088 
3089     addr_dest = AREG(insn, 9);
3090     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3091     dest = gen_load(s, opsize, addr_dest, 1);
3092 
3093     gen_subx(s, src, dest, opsize);
3094 
3095     gen_store(s, opsize, addr_dest, QREG_CC_N);
3096 }
3097 
3098 DISAS_INSN(mov3q)
3099 {
3100     TCGv src;
3101     int val;
3102 
3103     val = (insn >> 9) & 7;
3104     if (val == 0)
3105         val = -1;
3106     src = tcg_const_i32(val);
3107     gen_logic_cc(s, src, OS_LONG);
3108     DEST_EA(env, insn, OS_LONG, src, NULL);
3109     tcg_temp_free(src);
3110 }
3111 
3112 DISAS_INSN(cmp)
3113 {
3114     TCGv src;
3115     TCGv reg;
3116     int opsize;
3117 
3118     opsize = insn_opsize(insn);
3119     SRC_EA(env, src, opsize, 1, NULL);
3120     reg = gen_extend(DREG(insn, 9), opsize, 1);
3121     gen_update_cc_cmp(s, reg, src, opsize);
3122 }
3123 
3124 DISAS_INSN(cmpa)
3125 {
3126     int opsize;
3127     TCGv src;
3128     TCGv reg;
3129 
3130     if (insn & 0x100) {
3131         opsize = OS_LONG;
3132     } else {
3133         opsize = OS_WORD;
3134     }
3135     SRC_EA(env, src, opsize, 1, NULL);
3136     reg = AREG(insn, 9);
3137     gen_update_cc_cmp(s, reg, src, OS_LONG);
3138 }
3139 
3140 DISAS_INSN(cmpm)
3141 {
3142     int opsize = insn_opsize(insn);
3143     TCGv src, dst;
3144 
3145     /* Post-increment load (mode 3) from Ay.  */
3146     src = gen_ea_mode(env, s, 3, REG(insn, 0), opsize,
3147                       NULL_QREG, NULL, EA_LOADS);
3148     /* Post-increment load (mode 3) from Ax.  */
3149     dst = gen_ea_mode(env, s, 3, REG(insn, 9), opsize,
3150                       NULL_QREG, NULL, EA_LOADS);
3151 
3152     gen_update_cc_cmp(s, dst, src, opsize);
3153 }
3154 
3155 DISAS_INSN(eor)
3156 {
3157     TCGv src;
3158     TCGv dest;
3159     TCGv addr;
3160     int opsize;
3161 
3162     opsize = insn_opsize(insn);
3163 
3164     SRC_EA(env, src, opsize, 0, &addr);
3165     dest = tcg_temp_new();
3166     tcg_gen_xor_i32(dest, src, DREG(insn, 9));
3167     gen_logic_cc(s, dest, opsize);
3168     DEST_EA(env, insn, opsize, dest, &addr);
3169     tcg_temp_free(dest);
3170 }
3171 
3172 static void do_exg(TCGv reg1, TCGv reg2)
3173 {
3174     TCGv temp = tcg_temp_new();
3175     tcg_gen_mov_i32(temp, reg1);
3176     tcg_gen_mov_i32(reg1, reg2);
3177     tcg_gen_mov_i32(reg2, temp);
3178     tcg_temp_free(temp);
3179 }
3180 
3181 DISAS_INSN(exg_dd)
3182 {
3183     /* exchange Dx and Dy */
3184     do_exg(DREG(insn, 9), DREG(insn, 0));
3185 }
3186 
3187 DISAS_INSN(exg_aa)
3188 {
3189     /* exchange Ax and Ay */
3190     do_exg(AREG(insn, 9), AREG(insn, 0));
3191 }
3192 
3193 DISAS_INSN(exg_da)
3194 {
3195     /* exchange Dx and Ay */
3196     do_exg(DREG(insn, 9), AREG(insn, 0));
3197 }
3198 
3199 DISAS_INSN(and)
3200 {
3201     TCGv src;
3202     TCGv reg;
3203     TCGv dest;
3204     TCGv addr;
3205     int opsize;
3206 
3207     dest = tcg_temp_new();
3208 
3209     opsize = insn_opsize(insn);
3210     reg = DREG(insn, 9);
3211     if (insn & 0x100) {
3212         SRC_EA(env, src, opsize, 0, &addr);
3213         tcg_gen_and_i32(dest, src, reg);
3214         DEST_EA(env, insn, opsize, dest, &addr);
3215     } else {
3216         SRC_EA(env, src, opsize, 0, NULL);
3217         tcg_gen_and_i32(dest, src, reg);
3218         gen_partset_reg(opsize, reg, dest);
3219     }
3220     gen_logic_cc(s, dest, opsize);
3221     tcg_temp_free(dest);
3222 }
3223 
3224 DISAS_INSN(adda)
3225 {
3226     TCGv src;
3227     TCGv reg;
3228 
3229     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3230     reg = AREG(insn, 9);
3231     tcg_gen_add_i32(reg, reg, src);
3232 }
3233 
3234 static inline void gen_addx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3235 {
3236     TCGv tmp;
3237 
3238     gen_flush_flags(s); /* compute old Z */
3239 
3240     /* Perform addition with carry.
3241      * (X, N) = src + dest + X;
3242      */
3243 
3244     tmp = tcg_const_i32(0);
3245     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_X, tmp, dest, tmp);
3246     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_N, QREG_CC_X, src, tmp);
3247     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3248 
3249     /* Compute signed-overflow for addition.  */
3250 
3251     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3252     tcg_gen_xor_i32(tmp, dest, src);
3253     tcg_gen_andc_i32(QREG_CC_V, QREG_CC_V, tmp);
3254     tcg_temp_free(tmp);
3255 
3256     /* Copy the rest of the results into place.  */
3257     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3258     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3259 
3260     set_cc_op(s, CC_OP_FLAGS);
3261 
3262     /* result is in QREG_CC_N */
3263 }
3264 
3265 DISAS_INSN(addx_reg)
3266 {
3267     TCGv dest;
3268     TCGv src;
3269     int opsize;
3270 
3271     opsize = insn_opsize(insn);
3272 
3273     dest = gen_extend(DREG(insn, 9), opsize, 1);
3274     src = gen_extend(DREG(insn, 0), opsize, 1);
3275 
3276     gen_addx(s, src, dest, opsize);
3277 
3278     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3279 }
3280 
3281 DISAS_INSN(addx_mem)
3282 {
3283     TCGv src;
3284     TCGv addr_src;
3285     TCGv dest;
3286     TCGv addr_dest;
3287     int opsize;
3288 
3289     opsize = insn_opsize(insn);
3290 
3291     addr_src = AREG(insn, 0);
3292     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3293     src = gen_load(s, opsize, addr_src, 1);
3294 
3295     addr_dest = AREG(insn, 9);
3296     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3297     dest = gen_load(s, opsize, addr_dest, 1);
3298 
3299     gen_addx(s, src, dest, opsize);
3300 
3301     gen_store(s, opsize, addr_dest, QREG_CC_N);
3302 }
3303 
3304 static inline void shift_im(DisasContext *s, uint16_t insn, int opsize)
3305 {
3306     int count = (insn >> 9) & 7;
3307     int logical = insn & 8;
3308     int left = insn & 0x100;
3309     int bits = opsize_bytes(opsize) * 8;
3310     TCGv reg = gen_extend(DREG(insn, 0), opsize, !logical);
3311 
3312     if (count == 0) {
3313         count = 8;
3314     }
3315 
3316     tcg_gen_movi_i32(QREG_CC_V, 0);
3317     if (left) {
3318         tcg_gen_shri_i32(QREG_CC_C, reg, bits - count);
3319         tcg_gen_shli_i32(QREG_CC_N, reg, count);
3320 
3321         /* Note that ColdFire always clears V (done above),
3322            while M68000 sets it if the most significant bit changes at
3323            any time during the shift operation.  */
3324         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3325             /* if shift count >= bits, V is (reg != 0) */
3326             if (count >= bits) {
3327                 tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, reg, QREG_CC_V);
3328             } else {
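                /* V is set iff the top (count + 1) bits of reg are not
                 * all identical, i.e. iff the sign bit changes at some
                 * point during the shift.
                 */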
3329                 TCGv t0 = tcg_temp_new();
3330                 tcg_gen_sari_i32(QREG_CC_V, reg, bits - 1);
3331                 tcg_gen_sari_i32(t0, reg, bits - count - 1);
3332                 tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, t0);
3333                 tcg_temp_free(t0);
3334             }
3335             tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3336         }
3337     } else {
3338         tcg_gen_shri_i32(QREG_CC_C, reg, count - 1);
3339         if (logical) {
3340             tcg_gen_shri_i32(QREG_CC_N, reg, count);
3341         } else {
3342             tcg_gen_sari_i32(QREG_CC_N, reg, count);
3343         }
3344     }
3345 
3346     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3347     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3348     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3349     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3350 
3351     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3352     set_cc_op(s, CC_OP_FLAGS);
3353 }
3354 
3355 static inline void shift_reg(DisasContext *s, uint16_t insn, int opsize)
3356 {
3357     int logical = insn & 8;
3358     int left = insn & 0x100;
3359     int bits = opsize_bytes(opsize) * 8;
3360     TCGv reg = gen_extend(DREG(insn, 0), opsize, !logical);
3361     TCGv s32;
3362     TCGv_i64 t64, s64;
3363 
3364     t64 = tcg_temp_new_i64();
3365     s64 = tcg_temp_new_i64();
3366     s32 = tcg_temp_new();
3367 
3368     /* Note that m68k truncates the shift count modulo 64, not 32.
3369        In addition, a 64-bit shift makes it easy to find "the last
3370        bit shifted out", for the carry flag.  */
3371     tcg_gen_andi_i32(s32, DREG(insn, 9), 63);
3372     tcg_gen_extu_i32_i64(s64, s32);
3373     tcg_gen_extu_i32_i64(t64, reg);
3374 
3375     /* Optimistically set V=0.  Also used as a zero source below.  */
3376     tcg_gen_movi_i32(QREG_CC_V, 0);
3377     if (left) {
3378         tcg_gen_shl_i64(t64, t64, s64);
3379 
3380         if (opsize == OS_LONG) {
3381             tcg_gen_extr_i64_i32(QREG_CC_N, QREG_CC_C, t64);
3382             /* Note that C=0 if shift count is 0, and we get that for free.  */
3383         } else {
3384             TCGv zero = tcg_const_i32(0);
3385             tcg_gen_extrl_i64_i32(QREG_CC_N, t64);
3386             tcg_gen_shri_i32(QREG_CC_C, QREG_CC_N, bits);
3387             tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3388                                 s32, zero, zero, QREG_CC_C);
3389             tcg_temp_free(zero);
3390         }
3391         tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3392 
3393         /* X = C, but only if the shift count was non-zero.  */
3394         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3395                             QREG_CC_C, QREG_CC_X);
3396 
3397         /* M68000 sets V if the most significant bit is changed at
3398          * any time during the shift operation.  Do this via creating
3399          * an extension of the sign bit, comparing, and discarding
3400          * the bits below the sign bit.  I.e.
3401          *     int64_t s = (intN_t)reg;
3402          *     int64_t t = (int64_t)(intN_t)reg << count;
3403          *     V = ((s ^ t) & (-1 << (bits - 1))) != 0
3404          */
3405         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3406             TCGv_i64 tt = tcg_const_i64(32);
3407             /* if shift is greater than 32, use 32 */
3408             tcg_gen_movcond_i64(TCG_COND_GT, s64, s64, tt, tt, s64);
3409             tcg_temp_free_i64(tt);
3410             /* Sign extend the input to 64 bits; re-do the shift.  */
3411             tcg_gen_ext_i32_i64(t64, reg);
3412             tcg_gen_shl_i64(s64, t64, s64);
3413             /* Clear all bits that are unchanged.  */
3414             tcg_gen_xor_i64(t64, t64, s64);
3415             /* Ignore the bits below the sign bit.  */
3416             tcg_gen_andi_i64(t64, t64, -1ULL << (bits - 1));
3417             /* If any bits remain set, we have overflow.  */
3418             tcg_gen_setcondi_i64(TCG_COND_NE, t64, t64, 0);
3419             tcg_gen_extrl_i64_i32(QREG_CC_V, t64);
3420             tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3421         }
3422     } else {
3423         tcg_gen_shli_i64(t64, t64, 32);
3424         if (logical) {
3425             tcg_gen_shr_i64(t64, t64, s64);
3426         } else {
3427             tcg_gen_sar_i64(t64, t64, s64);
3428         }
3429         tcg_gen_extr_i64_i32(QREG_CC_C, QREG_CC_N, t64);
3430 
3431         /* Note that C=0 if shift count is 0, and we get that for free.  */
3432         tcg_gen_shri_i32(QREG_CC_C, QREG_CC_C, 31);
3433 
3434         /* X = C, but only if the shift count was non-zero.  */
3435         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3436                             QREG_CC_C, QREG_CC_X);
3437     }
3438     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3439     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3440 
3441     tcg_temp_free(s32);
3442     tcg_temp_free_i64(s64);
3443     tcg_temp_free_i64(t64);
3444 
3445     /* Write back the result.  */
3446     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3447     set_cc_op(s, CC_OP_FLAGS);
3448 }
3449 
3450 DISAS_INSN(shift8_im)
3451 {
3452     shift_im(s, insn, OS_BYTE);
3453 }
3454 
3455 DISAS_INSN(shift16_im)
3456 {
3457     shift_im(s, insn, OS_WORD);
3458 }
3459 
3460 DISAS_INSN(shift_im)
3461 {
3462     shift_im(s, insn, OS_LONG);
3463 }
3464 
3465 DISAS_INSN(shift8_reg)
3466 {
3467     shift_reg(s, insn, OS_BYTE);
3468 }
3469 
3470 DISAS_INSN(shift16_reg)
3471 {
3472     shift_reg(s, insn, OS_WORD);
3473 }
3474 
3475 DISAS_INSN(shift_reg)
3476 {
3477     shift_reg(s, insn, OS_LONG);
3478 }
3479 
3480 DISAS_INSN(shift_mem)
3481 {
3482     int logical = insn & 8;
3483     int left = insn & 0x100;
3484     TCGv src;
3485     TCGv addr;
3486 
3487     SRC_EA(env, src, OS_WORD, !logical, &addr);
3488     tcg_gen_movi_i32(QREG_CC_V, 0);
3489     if (left) {
3490         tcg_gen_shri_i32(QREG_CC_C, src, 15);
3491         tcg_gen_shli_i32(QREG_CC_N, src, 1);
3492 
3493         /* Note that ColdFire always clears V,
3494            while M68000 sets it if the most significant bit changes at
3495            any time during the shift operation.  */
3496         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3497             src = gen_extend(src, OS_WORD, 1);
3498             tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3499         }
3500     } else {
3501         tcg_gen_mov_i32(QREG_CC_C, src);
3502         if (logical) {
3503             tcg_gen_shri_i32(QREG_CC_N, src, 1);
3504         } else {
3505             tcg_gen_sari_i32(QREG_CC_N, src, 1);
3506         }
3507     }
3508 
3509     gen_ext(QREG_CC_N, QREG_CC_N, OS_WORD, 1);
3510     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3511     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3512     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3513 
3514     DEST_EA(env, insn, OS_WORD, QREG_CC_N, &addr);
3515     set_cc_op(s, CC_OP_FLAGS);
3516 }
3517 
3518 static void rotate(TCGv reg, TCGv shift, int left, int size)
3519 {
3520     switch (size) {
3521     case 8:
3522         /* Replicate the 8-bit input so that a 32-bit rotate works.  */
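        /* e.g. 0x000000ab * 0x01010101 == 0xabababab, so after any rotate
         * count each byte still holds the rotated 8-bit value.
         */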
3523         tcg_gen_ext8u_i32(reg, reg);
3524         tcg_gen_muli_i32(reg, reg, 0x01010101);
3525         goto do_long;
3526     case 16:
3527         /* Replicate the 16-bit input so that a 32-bit rotate works.  */
3528         tcg_gen_deposit_i32(reg, reg, reg, 16, 16);
3529         goto do_long;
3530     do_long:
3531     default:
3532         if (left) {
3533             tcg_gen_rotl_i32(reg, reg, shift);
3534         } else {
3535             tcg_gen_rotr_i32(reg, reg, shift);
3536         }
3537     }
3538 
3539     /* compute flags */
3540 
3541     switch (size) {
3542     case 8:
3543         tcg_gen_ext8s_i32(reg, reg);
3544         break;
3545     case 16:
3546         tcg_gen_ext16s_i32(reg, reg);
3547         break;
3548     default:
3549         break;
3550     }
3551 
3552     /* QREG_CC_X is not affected */
3553 
3554     tcg_gen_mov_i32(QREG_CC_N, reg);
3555     tcg_gen_mov_i32(QREG_CC_Z, reg);
3556 
3557     if (left) {
3558         tcg_gen_andi_i32(QREG_CC_C, reg, 1);
3559     } else {
3560         tcg_gen_shri_i32(QREG_CC_C, reg, 31);
3561     }
3562 
3563     tcg_gen_movi_i32(QREG_CC_V, 0); /* always cleared */
3564 }
3565 
3566 static void rotate_x_flags(TCGv reg, TCGv X, int size)
3567 {
3568     switch (size) {
3569     case 8:
3570         tcg_gen_ext8s_i32(reg, reg);
3571         break;
3572     case 16:
3573         tcg_gen_ext16s_i32(reg, reg);
3574         break;
3575     default:
3576         break;
3577     }
3578     tcg_gen_mov_i32(QREG_CC_N, reg);
3579     tcg_gen_mov_i32(QREG_CC_Z, reg);
3580     tcg_gen_mov_i32(QREG_CC_X, X);
3581     tcg_gen_mov_i32(QREG_CC_C, X);
3582     tcg_gen_movi_i32(QREG_CC_V, 0);
3583 }
3584 
3585 /* Result of rotate_x() is valid if 0 <= shift <= size */
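/* Rotate through the X bit: the operand and X form a (size + 1)-bit ring,
 * e.g. a left rotate by 1 of a byte moves bit 7 into X and the old X into
 * bit 0.
 */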
3586 static TCGv rotate_x(TCGv reg, TCGv shift, int left, int size)
3587 {
3588     TCGv X, shl, shr, shx, sz, zero;
3589 
3590     sz = tcg_const_i32(size);
3591 
3592     shr = tcg_temp_new();
3593     shl = tcg_temp_new();
3594     shx = tcg_temp_new();
3595     if (left) {
3596         tcg_gen_mov_i32(shl, shift);      /* shl = shift */
3597         tcg_gen_movi_i32(shr, size + 1);
3598         tcg_gen_sub_i32(shr, shr, shift); /* shr = size + 1 - shift */
3599         tcg_gen_subi_i32(shx, shift, 1);  /* shx = shift - 1 */
3600         /* shx = shx < 0 ? size : shx; */
3601         zero = tcg_const_i32(0);
3602         tcg_gen_movcond_i32(TCG_COND_LT, shx, shx, zero, sz, shx);
3603         tcg_temp_free(zero);
3604     } else {
3605         tcg_gen_mov_i32(shr, shift);      /* shr = shift */
3606         tcg_gen_movi_i32(shl, size + 1);
3607         tcg_gen_sub_i32(shl, shl, shift); /* shl = size + 1 - shift */
3608         tcg_gen_sub_i32(shx, sz, shift); /* shx = size - shift */
3609     }
3610 
3611     /* reg = (reg << shl) | (reg >> shr) | (x << shx); */
3612 
3613     tcg_gen_shl_i32(shl, reg, shl);
3614     tcg_gen_shr_i32(shr, reg, shr);
3615     tcg_gen_or_i32(reg, shl, shr);
3616     tcg_temp_free(shl);
3617     tcg_temp_free(shr);
3618     tcg_gen_shl_i32(shx, QREG_CC_X, shx);
3619     tcg_gen_or_i32(reg, reg, shx);
3620     tcg_temp_free(shx);
3621 
3622     /* X = (reg >> size) & 1 */
3623 
3624     X = tcg_temp_new();
3625     tcg_gen_shr_i32(X, reg, sz);
3626     tcg_gen_andi_i32(X, X, 1);
3627     tcg_temp_free(sz);
3628 
3629     return X;
3630 }
3631 
3632 /* Result of rotate32_x() is valid if 0 <= shift < 33 */
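/* The 32 data bits plus the X bit form a 33-bit ring, rotated here on a
 * 64-bit temporary; hence shift counts up to 32 are meaningful and
 * rotate_reg() reduces a register-specified count modulo 33.
 */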
3633 static TCGv rotate32_x(TCGv reg, TCGv shift, int left)
3634 {
3635     TCGv_i64 t0, shift64;
3636     TCGv X, lo, hi, zero;
3637 
3638     shift64 = tcg_temp_new_i64();
3639     tcg_gen_extu_i32_i64(shift64, shift);
3640 
3641     t0 = tcg_temp_new_i64();
3642 
3643     X = tcg_temp_new();
3644     lo = tcg_temp_new();
3645     hi = tcg_temp_new();
3646 
3647     if (left) {
3648         /* create [reg:X:..] */
3649 
3650         tcg_gen_shli_i32(lo, QREG_CC_X, 31);
3651         tcg_gen_concat_i32_i64(t0, lo, reg);
3652 
3653         /* rotate */
3654 
3655         tcg_gen_rotl_i64(t0, t0, shift64);
3656         tcg_temp_free_i64(shift64);
3657 
3658         /* result is [reg:..:reg:X] */
3659 
3660         tcg_gen_extr_i64_i32(lo, hi, t0);
3661         tcg_gen_andi_i32(X, lo, 1);
3662 
3663         tcg_gen_shri_i32(lo, lo, 1);
3664     } else {
3665         /* create [..:X:reg] */
3666 
3667         tcg_gen_concat_i32_i64(t0, reg, QREG_CC_X);
3668 
3669         tcg_gen_rotr_i64(t0, t0, shift64);
3670         tcg_temp_free_i64(shift64);
3671 
3672         /* result is value: [X:reg:..:reg] */
3673 
3674         tcg_gen_extr_i64_i32(lo, hi, t0);
3675 
3676         /* extract X */
3677 
3678         tcg_gen_shri_i32(X, hi, 31);
3679 
3680         /* extract result */
3681 
3682         tcg_gen_shli_i32(hi, hi, 1);
3683     }
3684     tcg_temp_free_i64(t0);
3685     tcg_gen_or_i32(lo, lo, hi);
3686     tcg_temp_free(hi);
3687 
3688     /* if shift == 0, register and X are not affected */
3689 
3690     zero = tcg_const_i32(0);
3691     tcg_gen_movcond_i32(TCG_COND_EQ, X, shift, zero, QREG_CC_X, X);
3692     tcg_gen_movcond_i32(TCG_COND_EQ, reg, shift, zero, reg, lo);
3693     tcg_temp_free(zero);
3694     tcg_temp_free(lo);
3695 
3696     return X;
3697 }
3698 
3699 DISAS_INSN(rotate_im)
3700 {
3701     TCGv shift;
3702     int tmp;
3703     int left = (insn & 0x100);
3704 
3705     tmp = (insn >> 9) & 7;
3706     if (tmp == 0) {
3707         tmp = 8;
3708     }
3709 
3710     shift = tcg_const_i32(tmp);
3711     if (insn & 8) {
3712         rotate(DREG(insn, 0), shift, left, 32);
3713     } else {
3714         TCGv X = rotate32_x(DREG(insn, 0), shift, left);
3715         rotate_x_flags(DREG(insn, 0), X, 32);
3716         tcg_temp_free(X);
3717     }
3718     tcg_temp_free(shift);
3719 
3720     set_cc_op(s, CC_OP_FLAGS);
3721 }
3722 
3723 DISAS_INSN(rotate8_im)
3724 {
3725     int left = (insn & 0x100);
3726     TCGv reg;
3727     TCGv shift;
3728     int tmp;
3729 
3730     reg = gen_extend(DREG(insn, 0), OS_BYTE, 0);
3731 
3732     tmp = (insn >> 9) & 7;
3733     if (tmp == 0) {
3734         tmp = 8;
3735     }
3736 
3737     shift = tcg_const_i32(tmp);
3738     if (insn & 8) {
3739         rotate(reg, shift, left, 8);
3740     } else {
3741         TCGv X = rotate_x(reg, shift, left, 8);
3742         rotate_x_flags(reg, X, 8);
3743         tcg_temp_free(X);
3744     }
3745     tcg_temp_free(shift);
3746     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3747     set_cc_op(s, CC_OP_FLAGS);
3748 }
3749 
3750 DISAS_INSN(rotate16_im)
3751 {
3752     int left = (insn & 0x100);
3753     TCGv reg;
3754     TCGv shift;
3755     int tmp;
3756 
3757     reg = gen_extend(DREG(insn, 0), OS_WORD, 0);
3758     tmp = (insn >> 9) & 7;
3759     if (tmp == 0) {
3760         tmp = 8;
3761     }
3762 
3763     shift = tcg_const_i32(tmp);
3764     if (insn & 8) {
3765         rotate(reg, shift, left, 16);
3766     } else {
3767         TCGv X = rotate_x(reg, shift, left, 16);
3768         rotate_x_flags(reg, X, 16);
3769         tcg_temp_free(X);
3770     }
3771     tcg_temp_free(shift);
3772     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3773     set_cc_op(s, CC_OP_FLAGS);
3774 }
3775 
3776 DISAS_INSN(rotate_reg)
3777 {
3778     TCGv reg;
3779     TCGv src;
3780     TCGv t0, t1;
3781     int left = (insn & 0x100);
3782 
3783     reg = DREG(insn, 0);
3784     src = DREG(insn, 9);
3785     /* shift in [0..63] */
3786     t0 = tcg_temp_new();
3787     tcg_gen_andi_i32(t0, src, 63);
3788     t1 = tcg_temp_new_i32();
3789     if (insn & 8) {
3790         tcg_gen_andi_i32(t1, src, 31);
3791         rotate(reg, t1, left, 32);
3792         /* if shift == 0, clear C */
3793         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3794                             t0, QREG_CC_V /* 0 */,
3795                             QREG_CC_V /* 0 */, QREG_CC_C);
3796     } else {
3797         TCGv X;
3798         /* modulo 33 */
3799         tcg_gen_movi_i32(t1, 33);
3800         tcg_gen_remu_i32(t1, t0, t1);
3801         X = rotate32_x(DREG(insn, 0), t1, left);
3802         rotate_x_flags(DREG(insn, 0), X, 32);
3803         tcg_temp_free(X);
3804     }
3805     tcg_temp_free(t1);
3806     tcg_temp_free(t0);
3807     set_cc_op(s, CC_OP_FLAGS);
3808 }
3809 
3810 DISAS_INSN(rotate8_reg)
3811 {
3812     TCGv reg;
3813     TCGv src;
3814     TCGv t0, t1;
3815     int left = (insn & 0x100);
3816 
3817     reg = gen_extend(DREG(insn, 0), OS_BYTE, 0);
3818     src = DREG(insn, 9);
3819     /* shift in [0..63] */
3820     t0 = tcg_temp_new_i32();
3821     tcg_gen_andi_i32(t0, src, 63);
3822     t1 = tcg_temp_new_i32();
3823     if (insn & 8) {
3824         tcg_gen_andi_i32(t1, src, 7);
3825         rotate(reg, t1, left, 8);
3826         /* if shift == 0, clear C */
3827         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3828                             t0, QREG_CC_V /* 0 */,
3829                             QREG_CC_V /* 0 */, QREG_CC_C);
3830     } else {
3831         TCGv X;
3832         /* modulo 9 */
3833         tcg_gen_movi_i32(t1, 9);
3834         tcg_gen_remu_i32(t1, t0, t1);
3835         X = rotate_x(reg, t1, left, 8);
3836         rotate_x_flags(reg, X, 8);
3837         tcg_temp_free(X);
3838     }
3839     tcg_temp_free(t1);
3840     tcg_temp_free(t0);
3841     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3842     set_cc_op(s, CC_OP_FLAGS);
3843 }
3844 
3845 DISAS_INSN(rotate16_reg)
3846 {
3847     TCGv reg;
3848     TCGv src;
3849     TCGv t0, t1;
3850     int left = (insn & 0x100);
3851 
3852     reg = gen_extend(DREG(insn, 0), OS_WORD, 0);
3853     src = DREG(insn, 9);
3854     /* shift in [0..63] */
3855     t0 = tcg_temp_new_i32();
3856     tcg_gen_andi_i32(t0, src, 63);
3857     t1 = tcg_temp_new_i32();
3858     if (insn & 8) {
3859         tcg_gen_andi_i32(t1, src, 15);
3860         rotate(reg, t1, left, 16);
3861         /* if shift == 0, clear C */
3862         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3863                             t0, QREG_CC_V /* 0 */,
3864                             QREG_CC_V /* 0 */, QREG_CC_C);
3865     } else {
3866         TCGv X;
3867         /* modulo 17 */
3868         tcg_gen_movi_i32(t1, 17);
3869         tcg_gen_remu_i32(t1, t0, t1);
3870         X = rotate_x(reg, t1, left, 16);
3871         rotate_x_flags(reg, X, 16);
3872         tcg_temp_free(X);
3873     }
3874     tcg_temp_free(t1);
3875     tcg_temp_free(t0);
3876     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3877     set_cc_op(s, CC_OP_FLAGS);
3878 }
3879 
3880 DISAS_INSN(rotate_mem)
3881 {
3882     TCGv src;
3883     TCGv addr;
3884     TCGv shift;
3885     int left = (insn & 0x100);
3886 
3887     SRC_EA(env, src, OS_WORD, 0, &addr);
3888 
3889     shift = tcg_const_i32(1);
3890     if (insn & 0x0200) {
3891         rotate(src, shift, left, 16);
3892     } else {
3893         TCGv X = rotate_x(src, shift, left, 16);
3894         rotate_x_flags(src, X, 16);
3895         tcg_temp_free(X);
3896     }
3897     tcg_temp_free(shift);
3898     DEST_EA(env, insn, OS_WORD, src, &addr);
3899     set_cc_op(s, CC_OP_FLAGS);
3900 }
3901 
3902 DISAS_INSN(bfext_reg)
3903 {
3904     int ext = read_im16(env, s);
3905     int is_sign = insn & 0x200;
3906     TCGv src = DREG(insn, 0);
3907     TCGv dst = DREG(ext, 12);
3908     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
3909     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
3910     int pos = 32 - ofs - len;        /* little bit-endian */
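         /* Bitfield offsets count from the most significant bit (bit 31),
            so 'pos' converts the architectural offset into the LSB-relative
            position expected by (s)extract below.  */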
3911     TCGv tmp = tcg_temp_new();
3912     TCGv shift;
3913 
3914     /* In general, we're going to rotate the field so that it's at the
3915        top of the word and then right-shift by the complement of the
3916        width to extend the field.  */
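         /* e.g. for a variable width w and offset o: the field is rotated
            left by o so that it occupies the top w bits, then shifted right
            by (32 - w) & 31: arithmetically for the sign-extended value
            kept in QREG_CC_N, logically for the zero-extended result.  */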
3917     if (ext & 0x20) {
3918         /* Variable width.  */
3919         if (ext & 0x800) {
3920             /* Variable offset.  */
3921             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3922             tcg_gen_rotl_i32(tmp, src, tmp);
3923         } else {
3924             tcg_gen_rotli_i32(tmp, src, ofs);
3925         }
3926 
3927         shift = tcg_temp_new();
3928         tcg_gen_neg_i32(shift, DREG(ext, 0));
3929         tcg_gen_andi_i32(shift, shift, 31);
3930         tcg_gen_sar_i32(QREG_CC_N, tmp, shift);
3931         if (is_sign) {
3932             tcg_gen_mov_i32(dst, QREG_CC_N);
3933         } else {
3934             tcg_gen_shr_i32(dst, tmp, shift);
3935         }
3936         tcg_temp_free(shift);
3937     } else {
3938         /* Immediate width.  */
3939         if (ext & 0x800) {
3940             /* Variable offset */
3941             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3942             tcg_gen_rotl_i32(tmp, src, tmp);
3943             src = tmp;
3944             pos = 32 - len;
3945         } else {
3946             /* Immediate offset.  If the field doesn't wrap around the
3947                end of the word, rely on (s)extract completely.  */
3948             if (pos < 0) {
3949                 tcg_gen_rotli_i32(tmp, src, ofs);
3950                 src = tmp;
3951                 pos = 32 - len;
3952             }
3953         }
3954 
3955         tcg_gen_sextract_i32(QREG_CC_N, src, pos, len);
3956         if (is_sign) {
3957             tcg_gen_mov_i32(dst, QREG_CC_N);
3958         } else {
3959             tcg_gen_extract_i32(dst, src, pos, len);
3960         }
3961     }
3962 
3963     tcg_temp_free(tmp);
3964     set_cc_op(s, CC_OP_LOGIC);
3965 }
3966 
3967 DISAS_INSN(bfext_mem)
3968 {
3969     int ext = read_im16(env, s);
3970     int is_sign = insn & 0x200;
3971     TCGv dest = DREG(ext, 12);
3972     TCGv addr, len, ofs;
3973 
3974     addr = gen_lea(env, s, insn, OS_UNSIZED);
3975     if (IS_NULL_QREG(addr)) {
3976         gen_addr_fault(s);
3977         return;
3978     }
3979 
3980     if (ext & 0x20) {
3981         len = DREG(ext, 0);
3982     } else {
3983         len = tcg_const_i32(extract32(ext, 0, 5));
3984     }
3985     if (ext & 0x800) {
3986         ofs = DREG(ext, 6);
3987     } else {
3988         ofs = tcg_const_i32(extract32(ext, 6, 5));
3989     }
3990 
3991     if (is_sign) {
3992         gen_helper_bfexts_mem(dest, cpu_env, addr, ofs, len);
3993         tcg_gen_mov_i32(QREG_CC_N, dest);
3994     } else {
3995         TCGv_i64 tmp = tcg_temp_new_i64();
3996         gen_helper_bfextu_mem(tmp, cpu_env, addr, ofs, len);
3997         tcg_gen_extr_i64_i32(dest, QREG_CC_N, tmp);
3998         tcg_temp_free_i64(tmp);
3999     }
4000     set_cc_op(s, CC_OP_LOGIC);
4001 
4002     if (!(ext & 0x20)) {
4003         tcg_temp_free(len);
4004     }
4005     if (!(ext & 0x800)) {
4006         tcg_temp_free(ofs);
4007     }
4008 }
4009 
4010 DISAS_INSN(bfop_reg)
4011 {
4012     int ext = read_im16(env, s);
4013     TCGv src = DREG(insn, 0);
4014     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4015     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4016     TCGv mask, tofs, tlen;
4017 
4018     tofs = NULL;
4019     tlen = NULL;
4020     if ((insn & 0x0f00) == 0x0d00) { /* bfffo */
4021         tofs = tcg_temp_new();
4022         tlen = tcg_temp_new();
4023     }
4024 
4025     if ((ext & 0x820) == 0) {
4026         /* Immediate width and offset.  */
4027         uint32_t maski = 0x7fffffffu >> (len - 1);
4028         if (ofs + len <= 32) {
4029             tcg_gen_shli_i32(QREG_CC_N, src, ofs);
4030         } else {
4031             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4032         }
4033         tcg_gen_andi_i32(QREG_CC_N, QREG_CC_N, ~maski);
4034         mask = tcg_const_i32(ror32(maski, ofs));
4035         if (tofs) {
4036             tcg_gen_movi_i32(tofs, ofs);
4037             tcg_gen_movi_i32(tlen, len);
4038         }
4039     } else {
4040         TCGv tmp = tcg_temp_new();
4041         if (ext & 0x20) {
4042             /* Variable width */
4043             tcg_gen_subi_i32(tmp, DREG(ext, 0), 1);
4044             tcg_gen_andi_i32(tmp, tmp, 31);
4045             mask = tcg_const_i32(0x7fffffffu);
4046             tcg_gen_shr_i32(mask, mask, tmp);
4047             if (tlen) {
4048                 tcg_gen_addi_i32(tlen, tmp, 1);
4049             }
4050         } else {
4051             /* Immediate width */
4052             mask = tcg_const_i32(0x7fffffffu >> (len - 1));
4053             if (tlen) {
4054                 tcg_gen_movi_i32(tlen, len);
4055             }
4056         }
4057         if (ext & 0x800) {
4058             /* Variable offset */
4059             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4060             tcg_gen_rotl_i32(QREG_CC_N, src, tmp);
4061             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4062             tcg_gen_rotr_i32(mask, mask, tmp);
4063             if (tofs) {
4064                 tcg_gen_mov_i32(tofs, tmp);
4065             }
4066         } else {
4067             /* Immediate offset (and variable width) */
4068             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4069             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4070             tcg_gen_rotri_i32(mask, mask, ofs);
4071             if (tofs) {
4072                 tcg_gen_movi_i32(tofs, ofs);
4073             }
4074         }
4075         tcg_temp_free(tmp);
4076     }
4077     set_cc_op(s, CC_OP_LOGIC);
4078 
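         /* At this point 'mask' holds the complement of the field: 0s in
            the selected bit positions and 1s everywhere else, so bfchg,
            bfclr and bfset below are implemented with eqv, and, and orc
            against it.  */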
4079     switch (insn & 0x0f00) {
4080     case 0x0a00: /* bfchg */
4081         tcg_gen_eqv_i32(src, src, mask);
4082         break;
4083     case 0x0c00: /* bfclr */
4084         tcg_gen_and_i32(src, src, mask);
4085         break;
4086     case 0x0d00: /* bfffo */
4087         gen_helper_bfffo_reg(DREG(ext, 12), QREG_CC_N, tofs, tlen);
4088         tcg_temp_free(tlen);
4089         tcg_temp_free(tofs);
4090         break;
4091     case 0x0e00: /* bfset */
4092         tcg_gen_orc_i32(src, src, mask);
4093         break;
4094     case 0x0800: /* bftst */
4095         /* flags already set; no other work to do.  */
4096         break;
4097     default:
4098         g_assert_not_reached();
4099     }
4100     tcg_temp_free(mask);
4101 }
4102 
4103 DISAS_INSN(bfop_mem)
4104 {
4105     int ext = read_im16(env, s);
4106     TCGv addr, len, ofs;
4107     TCGv_i64 t64;
4108 
4109     addr = gen_lea(env, s, insn, OS_UNSIZED);
4110     if (IS_NULL_QREG(addr)) {
4111         gen_addr_fault(s);
4112         return;
4113     }
4114 
4115     if (ext & 0x20) {
4116         len = DREG(ext, 0);
4117     } else {
4118         len = tcg_const_i32(extract32(ext, 0, 5));
4119     }
4120     if (ext & 0x800) {
4121         ofs = DREG(ext, 6);
4122     } else {
4123         ofs = tcg_const_i32(extract32(ext, 6, 5));
4124     }
4125 
4126     switch (insn & 0x0f00) {
4127     case 0x0a00: /* bfchg */
4128         gen_helper_bfchg_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4129         break;
4130     case 0x0c00: /* bfclr */
4131         gen_helper_bfclr_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4132         break;
4133     case 0x0d00: /* bfffo */
4134         t64 = tcg_temp_new_i64();
4135         gen_helper_bfffo_mem(t64, cpu_env, addr, ofs, len);
4136         tcg_gen_extr_i64_i32(DREG(ext, 12), QREG_CC_N, t64);
4137         tcg_temp_free_i64(t64);
4138         break;
4139     case 0x0e00: /* bfset */
4140         gen_helper_bfset_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4141         break;
4142     case 0x0800: /* bftst */
4143         gen_helper_bfexts_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4144         break;
4145     default:
4146         g_assert_not_reached();
4147     }
4148     set_cc_op(s, CC_OP_LOGIC);
4149 
4150     if (!(ext & 0x20)) {
4151         tcg_temp_free(len);
4152     }
4153     if (!(ext & 0x800)) {
4154         tcg_temp_free(ofs);
4155     }
4156 }
4157 
4158 DISAS_INSN(bfins_reg)
4159 {
4160     int ext = read_im16(env, s);
4161     TCGv dst = DREG(insn, 0);
4162     TCGv src = DREG(ext, 12);
4163     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4164     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4165     int pos = 32 - ofs - len;        /* little bit-endian */
4166     TCGv tmp;
4167 
4168     tmp = tcg_temp_new();
4169 
4170     if (ext & 0x20) {
4171         /* Variable width */
4172         tcg_gen_neg_i32(tmp, DREG(ext, 0));
4173         tcg_gen_andi_i32(tmp, tmp, 31);
4174         tcg_gen_shl_i32(QREG_CC_N, src, tmp);
4175     } else {
4176         /* Immediate width */
4177         tcg_gen_shli_i32(QREG_CC_N, src, 32 - len);
4178     }
4179     set_cc_op(s, CC_OP_LOGIC);
4180 
4181     /* Immediate width and offset */
4182     if ((ext & 0x820) == 0) {
4183         /* Check for suitability for deposit.  */
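             /* Deposit applies when the field does not wrap past bit 0
                (pos >= 0); otherwise the source is masked, rotated into
                place and merged by hand.  */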
4184         if (pos >= 0) {
4185             tcg_gen_deposit_i32(dst, dst, src, pos, len);
4186         } else {
4187             uint32_t maski = -2U << (len - 1);
4188             uint32_t roti = (ofs + len) & 31;
4189             tcg_gen_andi_i32(tmp, src, ~maski);
4190             tcg_gen_rotri_i32(tmp, tmp, roti);
4191             tcg_gen_andi_i32(dst, dst, ror32(maski, roti));
4192             tcg_gen_or_i32(dst, dst, tmp);
4193         }
4194     } else {
4195         TCGv mask = tcg_temp_new();
4196         TCGv rot = tcg_temp_new();
4197 
4198         if (ext & 0x20) {
4199             /* Variable width */
4200             tcg_gen_subi_i32(rot, DREG(ext, 0), 1);
4201             tcg_gen_andi_i32(rot, rot, 31);
4202             tcg_gen_movi_i32(mask, -2);
4203             tcg_gen_shl_i32(mask, mask, rot);
4204             tcg_gen_mov_i32(rot, DREG(ext, 0));
4205             tcg_gen_andc_i32(tmp, src, mask);
4206         } else {
4207             /* Immediate width (variable offset) */
4208             uint32_t maski = -2U << (len - 1);
4209             tcg_gen_andi_i32(tmp, src, ~maski);
4210             tcg_gen_movi_i32(mask, maski);
4211             tcg_gen_movi_i32(rot, len & 31);
4212         }
4213         if (ext & 0x800) {
4214             /* Variable offset */
4215             tcg_gen_add_i32(rot, rot, DREG(ext, 6));
4216         } else {
4217             /* Immediate offset (variable width) */
4218             tcg_gen_addi_i32(rot, rot, ofs);
4219         }
4220         tcg_gen_andi_i32(rot, rot, 31);
4221         tcg_gen_rotr_i32(mask, mask, rot);
4222         tcg_gen_rotr_i32(tmp, tmp, rot);
4223         tcg_gen_and_i32(dst, dst, mask);
4224         tcg_gen_or_i32(dst, dst, tmp);
4225 
4226         tcg_temp_free(rot);
4227         tcg_temp_free(mask);
4228     }
4229     tcg_temp_free(tmp);
4230 }
4231 
4232 DISAS_INSN(bfins_mem)
4233 {
4234     int ext = read_im16(env, s);
4235     TCGv src = DREG(ext, 12);
4236     TCGv addr, len, ofs;
4237 
4238     addr = gen_lea(env, s, insn, OS_UNSIZED);
4239     if (IS_NULL_QREG(addr)) {
4240         gen_addr_fault(s);
4241         return;
4242     }
4243 
4244     if (ext & 0x20) {
4245         len = DREG(ext, 0);
4246     } else {
4247         len = tcg_const_i32(extract32(ext, 0, 5));
4248     }
4249     if (ext & 0x800) {
4250         ofs = DREG(ext, 6);
4251     } else {
4252         ofs = tcg_const_i32(extract32(ext, 6, 5));
4253     }
4254 
4255     gen_helper_bfins_mem(QREG_CC_N, cpu_env, addr, src, ofs, len);
4256     set_cc_op(s, CC_OP_LOGIC);
4257 
4258     if (!(ext & 0x20)) {
4259         tcg_temp_free(len);
4260     }
4261     if (!(ext & 0x800)) {
4262         tcg_temp_free(ofs);
4263     }
4264 }
4265 
4266 DISAS_INSN(ff1)
4267 {
4268     TCGv reg;
4269     reg = DREG(insn, 0);
4270     gen_logic_cc(s, reg, OS_LONG);
4271     gen_helper_ff1(reg, reg);
4272 }
4273 
4274 DISAS_INSN(chk)
4275 {
4276     TCGv src, reg;
4277     int opsize;
4278 
4279     switch ((insn >> 7) & 3) {
4280     case 3:
4281         opsize = OS_WORD;
4282         break;
4283     case 2:
4284         if (m68k_feature(env, M68K_FEATURE_CHK2)) {
4285             opsize = OS_LONG;
4286             break;
4287         }
4288         /* fallthru */
4289     default:
4290         gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4291         return;
4292     }
4293     SRC_EA(env, src, opsize, 1, NULL);
4294     reg = gen_extend(DREG(insn, 9), opsize, 1);
4295 
4296     gen_flush_flags(s);
4297     gen_helper_chk(cpu_env, reg, src);
4298 }
4299 
4300 DISAS_INSN(chk2)
4301 {
4302     uint16_t ext;
4303     TCGv addr1, addr2, bound1, bound2, reg;
4304     int opsize;
4305 
4306     switch ((insn >> 9) & 3) {
4307     case 0:
4308         opsize = OS_BYTE;
4309         break;
4310     case 1:
4311         opsize = OS_WORD;
4312         break;
4313     case 2:
4314         opsize = OS_LONG;
4315         break;
4316     default:
4317         gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4318         return;
4319     }
4320 
4321     ext = read_im16(env, s);
4322     if ((ext & 0x0800) == 0) {
4323         gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4324         return;
4325     }
4326 
4327     addr1 = gen_lea(env, s, insn, OS_UNSIZED);
4328     addr2 = tcg_temp_new();
4329     tcg_gen_addi_i32(addr2, addr1, opsize_bytes(opsize));
4330 
4331     bound1 = gen_load(s, opsize, addr1, 1);
4332     tcg_temp_free(addr1);
4333     bound2 = gen_load(s, opsize, addr2, 1);
4334     tcg_temp_free(addr2);
4335 
4336     reg = tcg_temp_new();
4337     if (ext & 0x8000) {
4338         tcg_gen_mov_i32(reg, AREG(ext, 12));
4339     } else {
4340         gen_ext(reg, DREG(ext, 12), opsize, 1);
4341     }
4342 
4343     gen_flush_flags(s);
4344     gen_helper_chk2(cpu_env, reg, bound1, bound2);
4345     tcg_temp_free(reg);
4346 }
4347 
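     /* Copy one 16-byte-aligned line from 'src' to 'dst' for MOVE16: both
        addresses are masked down to a 16-byte boundary and the line is
        transferred with two 64-bit loads followed by two 64-bit stores.  */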
4348 static void m68k_copy_line(TCGv dst, TCGv src, int index)
4349 {
4350     TCGv addr;
4351     TCGv_i64 t0, t1;
4352 
4353     addr = tcg_temp_new();
4354 
4355     t0 = tcg_temp_new_i64();
4356     t1 = tcg_temp_new_i64();
4357 
4358     tcg_gen_andi_i32(addr, src, ~15);
4359     tcg_gen_qemu_ld64(t0, addr, index);
4360     tcg_gen_addi_i32(addr, addr, 8);
4361     tcg_gen_qemu_ld64(t1, addr, index);
4362 
4363     tcg_gen_andi_i32(addr, dst, ~15);
4364     tcg_gen_qemu_st64(t0, addr, index);
4365     tcg_gen_addi_i32(addr, addr, 8);
4366     tcg_gen_qemu_st64(t1, addr, index);
4367 
4368     tcg_temp_free_i64(t0);
4369     tcg_temp_free_i64(t1);
4370     tcg_temp_free(addr);
4371 }
4372 
4373 DISAS_INSN(move16_reg)
4374 {
4375     int index = IS_USER(s);
4376     TCGv tmp;
4377     uint16_t ext;
4378 
4379     ext = read_im16(env, s);
4380     if ((ext & (1 << 15)) == 0) {
4381         gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4382     }
4383 
4384     m68k_copy_line(AREG(ext, 12), AREG(insn, 0), index);
4385 
4386     /* Ax can be Ay, so save Ay before incrementing Ax */
4387     tmp = tcg_temp_new();
4388     tcg_gen_mov_i32(tmp, AREG(ext, 12));
4389     tcg_gen_addi_i32(AREG(insn, 0), AREG(insn, 0), 16);
4390     tcg_gen_addi_i32(AREG(ext, 12), tmp, 16);
4391     tcg_temp_free(tmp);
4392 }
4393 
4394 DISAS_INSN(move16_mem)
4395 {
4396     int index = IS_USER(s);
4397     TCGv reg, addr;
4398 
4399     reg = AREG(insn, 0);
4400     addr = tcg_const_i32(read_im32(env, s));
4401 
4402     if ((insn >> 3) & 1) {
4403         /* MOVE16 (xxx).L, (Ay) */
4404         m68k_copy_line(reg, addr, index);
4405     } else {
4406         /* MOVE16 (Ay), (xxx).L */
4407         m68k_copy_line(addr, reg, index);
4408     }
4409 
4410     tcg_temp_free(addr);
4411 
4412     if (((insn >> 3) & 2) == 0) {
4413         /* (Ay)+ */
4414         tcg_gen_addi_i32(reg, reg, 16);
4415     }
4416 }
4417 
4418 DISAS_INSN(strldsr)
4419 {
4420     uint16_t ext;
4421     uint32_t addr;
4422 
4423     addr = s->pc - 2;
4424     ext = read_im16(env, s);
4425     if (ext != 0x46FC) {
4426         gen_exception(s, addr, EXCP_UNSUPPORTED);
4427         return;
4428     }
4429     ext = read_im16(env, s);
4430     if (IS_USER(s) || (ext & SR_S) == 0) {
4431         gen_exception(s, addr, EXCP_PRIVILEGE);
4432         return;
4433     }
4434     gen_push(s, gen_get_sr(s));
4435     gen_set_sr_im(s, ext, 0);
4436 }
4437 
4438 DISAS_INSN(move_from_sr)
4439 {
4440     TCGv sr;
4441 
4442     if (IS_USER(s) && !m68k_feature(env, M68K_FEATURE_M68000)) {
4443         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4444         return;
4445     }
4446     sr = gen_get_sr(s);
4447     DEST_EA(env, insn, OS_WORD, sr, NULL);
4448 }
4449 
4450 #if defined(CONFIG_SOFTMMU)
4451 DISAS_INSN(move_to_sr)
4452 {
4453     if (IS_USER(s)) {
4454         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4455         return;
4456     }
4457     gen_move_to_sr(env, s, insn, false);
4458     gen_lookup_tb(s);
4459 }
4460 
4461 DISAS_INSN(move_from_usp)
4462 {
4463     if (IS_USER(s)) {
4464         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4465         return;
4466     }
4467     tcg_gen_ld_i32(AREG(insn, 0), cpu_env,
4468                    offsetof(CPUM68KState, sp[M68K_USP]));
4469 }
4470 
4471 DISAS_INSN(move_to_usp)
4472 {
4473     if (IS_USER(s)) {
4474         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4475         return;
4476     }
4477     tcg_gen_st_i32(AREG(insn, 0), cpu_env,
4478                    offsetof(CPUM68KState, sp[M68K_USP]));
4479 }
4480 
4481 DISAS_INSN(halt)
4482 {
4483     if (IS_USER(s)) {
4484         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4485         return;
4486     }
4487 
4488     gen_exception(s, s->pc, EXCP_HALT_INSN);
4489 }
4490 
4491 DISAS_INSN(stop)
4492 {
4493     uint16_t ext;
4494 
4495     if (IS_USER(s)) {
4496         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4497         return;
4498     }
4499 
4500     ext = read_im16(env, s);
4501 
4502     gen_set_sr_im(s, ext, 0);
4503     tcg_gen_movi_i32(cpu_halted, 1);
4504     gen_exception(s, s->pc, EXCP_HLT);
4505 }
4506 
4507 DISAS_INSN(rte)
4508 {
4509     if (IS_USER(s)) {
4510         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4511         return;
4512     }
4513     gen_exception(s, s->insn_pc, EXCP_RTE);
4514 }
4515 
4516 DISAS_INSN(cf_movec)
4517 {
4518     uint16_t ext;
4519     TCGv reg;
4520 
4521     if (IS_USER(s)) {
4522         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4523         return;
4524     }
4525 
4526     ext = read_im16(env, s);
4527 
4528     if (ext & 0x8000) {
4529         reg = AREG(ext, 12);
4530     } else {
4531         reg = DREG(ext, 12);
4532     }
4533     gen_helper_cf_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4534     gen_lookup_tb(s);
4535 }
4536 
4537 DISAS_INSN(m68k_movec)
4538 {
4539     uint16_t ext;
4540     TCGv reg;
4541 
4542     if (IS_USER(s)) {
4543         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4544         return;
4545     }
4546 
4547     ext = read_im16(env, s);
4548 
4549     if (ext & 0x8000) {
4550         reg = AREG(ext, 12);
4551     } else {
4552         reg = DREG(ext, 12);
4553     }
4554     if (insn & 1) {
4555         gen_helper_m68k_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4556     } else {
4557         gen_helper_m68k_movec_from(reg, cpu_env, tcg_const_i32(ext & 0xfff));
4558     }
4559     gen_lookup_tb(s);
4560 }
4561 
4562 DISAS_INSN(intouch)
4563 {
4564     if (IS_USER(s)) {
4565         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4566         return;
4567     }
4568     /* ICache fetch.  Implement as no-op.  */
4569 }
4570 
4571 DISAS_INSN(cpushl)
4572 {
4573     if (IS_USER(s)) {
4574         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4575         return;
4576     }
4577     /* Cache push/invalidate.  Implement as no-op.  */
4578 }
4579 
4580 DISAS_INSN(cpush)
4581 {
4582     if (IS_USER(s)) {
4583         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4584         return;
4585     }
4586     /* Cache push/invalidate.  Implement as no-op.  */
4587 }
4588 
4589 DISAS_INSN(cinv)
4590 {
4591     if (IS_USER(s)) {
4592         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4593         return;
4594     }
4595     /* Invalidate cache line.  Implement as no-op.  */
4596 }
4597 
4598 DISAS_INSN(wddata)
4599 {
4600     gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4601 }
4602 
4603 DISAS_INSN(wdebug)
4604 {
4605     M68kCPU *cpu = m68k_env_get_cpu(env);
4606 
4607     if (IS_USER(s)) {
4608         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
4609         return;
4610     }
4611     /* TODO: Implement wdebug.  */
4612     cpu_abort(CPU(cpu), "WDEBUG not implemented");
4613 }
4614 #endif
4615 
4616 DISAS_INSN(trap)
4617 {
4618     gen_exception(s, s->insn_pc, EXCP_TRAP0 + (insn & 0xf));
4619 }
4620 
4621 static void gen_load_fcr(DisasContext *s, TCGv res, int reg)
4622 {
4623     switch (reg) {
4624     case M68K_FPIAR:
4625         tcg_gen_movi_i32(res, 0);
4626         break;
4627     case M68K_FPSR:
4628         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpsr));
4629         break;
4630     case M68K_FPCR:
4631         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpcr));
4632         break;
4633     }
4634 }
4635 
4636 static void gen_store_fcr(DisasContext *s, TCGv val, int reg)
4637 {
4638     switch (reg) {
4639     case M68K_FPIAR:
4640         break;
4641     case M68K_FPSR:
4642         tcg_gen_st_i32(val, cpu_env, offsetof(CPUM68KState, fpsr));
4643         break;
4644     case M68K_FPCR:
4645         gen_helper_set_fpcr(cpu_env, val);
4646         break;
4647     }
4648 }
4649 
4650 static void gen_qemu_store_fcr(DisasContext *s, TCGv addr, int reg)
4651 {
4652     int index = IS_USER(s);
4653     TCGv tmp;
4654 
4655     tmp = tcg_temp_new();
4656     gen_load_fcr(s, tmp, reg);
4657     tcg_gen_qemu_st32(tmp, addr, index);
4658     tcg_temp_free(tmp);
4659 }
4660 
4661 static void gen_qemu_load_fcr(DisasContext *s, TCGv addr, int reg)
4662 {
4663     int index = IS_USER(s);
4664     TCGv tmp;
4665 
4666     tmp = tcg_temp_new();
4667     tcg_gen_qemu_ld32u(tmp, addr, index);
4668     gen_store_fcr(s, tmp, reg);
4669     tcg_temp_free(tmp);
4670 }
4671 
4672 
4673 static void gen_op_fmove_fcr(CPUM68KState *env, DisasContext *s,
4674                              uint32_t insn, uint32_t ext)
4675 {
4676     int mask = (ext >> 10) & 7;
4677     int is_write = (ext >> 13) & 1;
4678     int mode = extract32(insn, 3, 3);
4679     int i;
4680     TCGv addr, tmp;
4681 
4682     switch (mode) {
4683     case 0: /* Dn */
4684         if (mask != M68K_FPIAR && mask != M68K_FPSR && mask != M68K_FPCR) {
4685             gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4686             return;
4687         }
4688         if (is_write) {
4689             gen_load_fcr(s, DREG(insn, 0), mask);
4690         } else {
4691             gen_store_fcr(s, DREG(insn, 0), mask);
4692         }
4693         return;
4694     case 1: /* An, only with FPIAR */
4695         if (mask != M68K_FPIAR) {
4696             gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4697             return;
4698         }
4699         if (is_write) {
4700             gen_load_fcr(s, AREG(insn, 0), mask);
4701         } else {
4702             gen_store_fcr(s, AREG(insn, 0), mask);
4703         }
4704         return;
4705     default:
4706         break;
4707     }
4708 
4709     tmp = gen_lea(env, s, insn, OS_LONG);
4710     if (IS_NULL_QREG(tmp)) {
4711         gen_addr_fault(s);
4712         return;
4713     }
4714 
4715     addr = tcg_temp_new();
4716     tcg_gen_mov_i32(addr, tmp);
4717 
4718     /* mask:
4719      *
4720      * 0b100 Floating-Point Control Register
4721      * 0b010 Floating-Point Status Register
4722      * 0b001 Floating-Point Instruction Address Register
4723      *
4724      */
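         /* Mode 4 below is the -(An) predecrement addressing form, so the
            selected registers are written at descending addresses; mode 3
            is the (An)+ postincrement form.  */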
4725 
4726     if (is_write && mode == 4) {
4727         for (i = 2; i >= 0; i--, mask >>= 1) {
4728             if (mask & 1) {
4729                 gen_qemu_store_fcr(s, addr, 1 << i);
4730                 if (mask != 1) {
4731                     tcg_gen_subi_i32(addr, addr, opsize_bytes(OS_LONG));
4732                 }
4733             }
4734         }
4735         tcg_gen_mov_i32(AREG(insn, 0), addr);
4736     } else {
4737         for (i = 0; i < 3; i++, mask >>= 1) {
4738             if (mask & 1) {
4739                 if (is_write) {
4740                     gen_qemu_store_fcr(s, addr, 1 << i);
4741                 } else {
4742                     gen_qemu_load_fcr(s, addr, 1 << i);
4743                 }
4744                 if (mask != 1 || mode == 3) {
4745                     tcg_gen_addi_i32(addr, addr, opsize_bytes(OS_LONG));
4746                 }
4747             }
4748         }
4749         if (mode == 3) {
4750             tcg_gen_mov_i32(AREG(insn, 0), addr);
4751         }
4752     }
4753     tcg_temp_free_i32(addr);
4754 }
4755 
4756 static void gen_op_fmovem(CPUM68KState *env, DisasContext *s,
4757                           uint32_t insn, uint32_t ext)
4758 {
4759     int opsize;
4760     TCGv addr, tmp;
4761     int mode = (ext >> 11) & 0x3;
4762     int is_load = ((ext & 0x2000) == 0);
4763 
4764     if (m68k_feature(s->env, M68K_FEATURE_FPU)) {
4765         opsize = OS_EXTENDED;
4766     } else {
4767         opsize = OS_DOUBLE;  /* FIXME */
4768     }
4769 
4770     addr = gen_lea(env, s, insn, opsize);
4771     if (IS_NULL_QREG(addr)) {
4772         gen_addr_fault(s);
4773         return;
4774     }
4775 
4776     tmp = tcg_temp_new();
4777     if (mode & 0x1) {
4778         /* Dynamic register list */
4779         tcg_gen_ext8u_i32(tmp, DREG(ext, 4));
4780     } else {
4781         /* Static register list */
4782         tcg_gen_movi_i32(tmp, ext & 0xff);
4783     }
4784 
4785     if (!is_load && (mode & 2) == 0) {
4786         /* predecrement addressing mode,
4787          * only available when storing registers to memory
4788          */
4789         if (opsize == OS_EXTENDED) {
4790             gen_helper_fmovemx_st_predec(tmp, cpu_env, addr, tmp);
4791         } else {
4792             gen_helper_fmovemd_st_predec(tmp, cpu_env, addr, tmp);
4793         }
4794     } else {
4795         /* postincrement addressing mode */
4796         if (opsize == OS_EXTENDED) {
4797             if (is_load) {
4798                 gen_helper_fmovemx_ld_postinc(tmp, cpu_env, addr, tmp);
4799             } else {
4800                 gen_helper_fmovemx_st_postinc(tmp, cpu_env, addr, tmp);
4801             }
4802         } else {
4803             if (is_load) {
4804                 gen_helper_fmovemd_ld_postinc(tmp, cpu_env, addr, tmp);
4805             } else {
4806                 gen_helper_fmovemd_st_postinc(tmp, cpu_env, addr, tmp);
4807             }
4808         }
4809     }
4810     if ((insn & 070) == 030 || (insn & 070) == 040) {
4811         tcg_gen_mov_i32(AREG(insn, 0), tmp);
4812     }
4813     tcg_temp_free(tmp);
4814 }
4815 
4816 /* ??? FP exceptions are not implemented.  Most exceptions are deferred until
4817    immediately before the next FP instruction is executed.  */
4818 DISAS_INSN(fpu)
4819 {
4820     uint16_t ext;
4821     int opmode;
4822     int opsize;
4823     TCGv_ptr cpu_src, cpu_dest;
4824 
4825     ext = read_im16(env, s);
4826     opmode = ext & 0x7f;
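         /* Bits 15..13 of the extension word select the operation class:
            0 and 2 are the arithmetic forms (with fmovecr as a special
            case of 2), 3 is fmove to <EA>, 4 and 5 move to/from the
            control registers, and 6 and 7 are fmovem.  */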
4827     switch ((ext >> 13) & 7) {
4828     case 0:
4829         break;
4830     case 1:
4831         goto undef;
4832     case 2:
4833         if (insn == 0xf200 && (ext & 0xfc00) == 0x5c00) {
4834             /* fmovecr */
4835             TCGv rom_offset = tcg_const_i32(opmode);
4836             cpu_dest = gen_fp_ptr(REG(ext, 7));
4837             gen_helper_fconst(cpu_env, cpu_dest, rom_offset);
4838             tcg_temp_free_ptr(cpu_dest);
4839             tcg_temp_free(rom_offset);
4840             return;
4841         }
4842         break;
4843     case 3: /* fmove out */
4844         cpu_src = gen_fp_ptr(REG(ext, 7));
4845         opsize = ext_opsize(ext, 10);
4846         if (gen_ea_fp(env, s, insn, opsize, cpu_src, EA_STORE) == -1) {
4847             gen_addr_fault(s);
4848         }
4849         gen_helper_ftst(cpu_env, cpu_src);
4850         tcg_temp_free_ptr(cpu_src);
4851         return;
4852     case 4: /* fmove to control register.  */
4853     case 5: /* fmove from control register.  */
4854         gen_op_fmove_fcr(env, s, insn, ext);
4855         return;
4856     case 6: /* fmovem */
4857     case 7:
4858         if ((ext & 0x1000) == 0 && !m68k_feature(s->env, M68K_FEATURE_FPU)) {
4859             goto undef;
4860         }
4861         gen_op_fmovem(env, s, insn, ext);
4862         return;
4863     }
4864     if (ext & (1 << 14)) {
4865         /* Source effective address.  */
4866         opsize = ext_opsize(ext, 10);
4867         cpu_src = gen_fp_result_ptr();
4868         if (gen_ea_fp(env, s, insn, opsize, cpu_src, EA_LOADS) == -1) {
4869             gen_addr_fault(s);
4870             return;
4871         }
4872     } else {
4873         /* Source register.  */
4874         opsize = OS_EXTENDED;
4875         cpu_src = gen_fp_ptr(REG(ext, 10));
4876     }
4877     cpu_dest = gen_fp_ptr(REG(ext, 7));
4878     switch (opmode) {
4879     case 0: /* fmove */
4880         gen_fp_move(cpu_dest, cpu_src);
4881         break;
4882     case 0x40: /* fsmove */
4883         gen_helper_fsround(cpu_env, cpu_dest, cpu_src);
4884         break;
4885     case 0x44: /* fdmove */
4886         gen_helper_fdround(cpu_env, cpu_dest, cpu_src);
4887         break;
4888     case 1: /* fint */
4889         gen_helper_firound(cpu_env, cpu_dest, cpu_src);
4890         break;
4891     case 3: /* fintrz */
4892         gen_helper_fitrunc(cpu_env, cpu_dest, cpu_src);
4893         break;
4894     case 4: /* fsqrt */
4895         gen_helper_fsqrt(cpu_env, cpu_dest, cpu_src);
4896         break;
4897     case 0x41: /* fssqrt */
4898         gen_helper_fssqrt(cpu_env, cpu_dest, cpu_src);
4899         break;
4900     case 0x45: /* fdsqrt */
4901         gen_helper_fdsqrt(cpu_env, cpu_dest, cpu_src);
4902         break;
4903     case 0x18: /* fabs */
4904         gen_helper_fabs(cpu_env, cpu_dest, cpu_src);
4905         break;
4906     case 0x58: /* fsabs */
4907         gen_helper_fsabs(cpu_env, cpu_dest, cpu_src);
4908         break;
4909     case 0x5c: /* fdabs */
4910         gen_helper_fdabs(cpu_env, cpu_dest, cpu_src);
4911         break;
4912     case 0x1a: /* fneg */
4913         gen_helper_fneg(cpu_env, cpu_dest, cpu_src);
4914         break;
4915     case 0x5a: /* fsneg */
4916         gen_helper_fsneg(cpu_env, cpu_dest, cpu_src);
4917         break;
4918     case 0x5e: /* fdneg */
4919         gen_helper_fdneg(cpu_env, cpu_dest, cpu_src);
4920         break;
4921     case 0x20: /* fdiv */
4922         gen_helper_fdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
4923         break;
4924     case 0x60: /* fsdiv */
4925         gen_helper_fsdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
4926         break;
4927     case 0x64: /* fddiv */
4928         gen_helper_fddiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
4929         break;
4930     case 0x22: /* fadd */
4931         gen_helper_fadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
4932         break;
4933     case 0x62: /* fsadd */
4934         gen_helper_fsadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
4935         break;
4936     case 0x66: /* fdadd */
4937         gen_helper_fdadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
4938         break;
4939     case 0x23: /* fmul */
4940         gen_helper_fmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
4941         break;
4942     case 0x63: /* fsmul */
4943         gen_helper_fsmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
4944         break;
4945     case 0x67: /* fdmul */
4946         gen_helper_fdmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
4947         break;
4948     case 0x24: /* fsgldiv */
4949         gen_helper_fsgldiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
4950         break;
4951     case 0x27: /* fsglmul */
4952         gen_helper_fsglmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
4953         break;
4954     case 0x28: /* fsub */
4955         gen_helper_fsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
4956         break;
4957     case 0x68: /* fssub */
4958         gen_helper_fssub(cpu_env, cpu_dest, cpu_src, cpu_dest);
4959         break;
4960     case 0x6c: /* fdsub */
4961         gen_helper_fdsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
4962         break;
4963     case 0x38: /* fcmp */
4964         gen_helper_fcmp(cpu_env, cpu_src, cpu_dest);
4965         return;
4966     case 0x3a: /* ftst */
4967         gen_helper_ftst(cpu_env, cpu_src);
4968         return;
4969     default:
4970         goto undef;
4971     }
4972     tcg_temp_free_ptr(cpu_src);
4973     gen_helper_ftst(cpu_env, cpu_dest);
4974     tcg_temp_free_ptr(cpu_dest);
4975     return;
4976 undef:
4977     /* FIXME: Is this right for offset addressing modes?  */
4978     s->pc -= 2;
4979     disas_undef_fpu(env, s, insn);
4980 }
4981 
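     /* Build a DisasCompare for FPU condition 'cond'.  The predicates are
        evaluated from the FPSR condition codes, where (as in the case
        comments below) N is negative, Z is zero and A is the NAN bit, i.e.
        "A set" corresponds to an unordered result.  */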
4982 static void gen_fcc_cond(DisasCompare *c, DisasContext *s, int cond)
4983 {
4984     TCGv fpsr;
4985 
4986     c->g1 = 1;
4987     c->v2 = tcg_const_i32(0);
4988     c->g2 = 0;
4989     /* TODO: Raise BSUN exception.  */
4990     fpsr = tcg_temp_new();
4991     gen_load_fcr(s, fpsr, M68K_FPSR);
4992     switch (cond) {
4993     case 0:  /* False */
4994     case 16: /* Signaling False */
4995         c->v1 = c->v2;
4996         c->tcond = TCG_COND_NEVER;
4997         break;
4998     case 1:  /* EQual Z */
4999     case 17: /* Signaling EQual Z */
5000         c->v1 = tcg_temp_new();
5001         c->g1 = 0;
5002         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5003         c->tcond = TCG_COND_NE;
5004         break;
5005     case 2:  /* Ordered Greater Than !(A || Z || N) */
5006     case 18: /* Greater Than !(A || Z || N) */
5007         c->v1 = tcg_temp_new();
5008         c->g1 = 0;
5009         tcg_gen_andi_i32(c->v1, fpsr,
5010                          FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5011         c->tcond = TCG_COND_EQ;
5012         break;
5013     case 3:  /* Ordered Greater than or Equal Z || !(A || N) */
5014     case 19: /* Greater than or Equal Z || !(A || N) */
5015         c->v1 = tcg_temp_new();
5016         c->g1 = 0;
5017         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5018         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5019         tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_Z | FPSR_CC_N);
5020         tcg_gen_or_i32(c->v1, c->v1, fpsr);
5021         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5022         c->tcond = TCG_COND_NE;
5023         break;
5024     case 4:  /* Ordered Less Than !(!N || A || Z); */
5025     case 20: /* Less Than !(!N || A || Z); */
5026         c->v1 = tcg_temp_new();
5027         c->g1 = 0;
5028         tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
5029         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_N | FPSR_CC_A | FPSR_CC_Z);
5030         c->tcond = TCG_COND_EQ;
5031         break;
5032     case 5:  /* Ordered Less than or Equal Z || (N && !A) */
5033     case 21: /* Less than or Equal Z || (N && !A) */
5034         c->v1 = tcg_temp_new();
5035         c->g1 = 0;
5036         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5037         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5038         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5039         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_Z | FPSR_CC_N);
5040         c->tcond = TCG_COND_NE;
5041         break;
5042     case 6:  /* Ordered Greater or Less than !(A || Z) */
5043     case 22: /* Greater or Less than !(A || Z) */
5044         c->v1 = tcg_temp_new();
5045         c->g1 = 0;
5046         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5047         c->tcond = TCG_COND_EQ;
5048         break;
5049     case 7:  /* Ordered !A */
5050     case 23: /* Greater, Less or Equal !A */
5051         c->v1 = tcg_temp_new();
5052         c->g1 = 0;
5053         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5054         c->tcond = TCG_COND_EQ;
5055         break;
5056     case 8:  /* Unordered A */
5057     case 24: /* Not Greater, Less or Equal A */
5058         c->v1 = tcg_temp_new();
5059         c->g1 = 0;
5060         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5061         c->tcond = TCG_COND_NE;
5062         break;
5063     case 9:  /* Unordered or Equal A || Z */
5064     case 25: /* Not Greater or Less than A || Z */
5065         c->v1 = tcg_temp_new();
5066         c->g1 = 0;
5067         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5068         c->tcond = TCG_COND_NE;
5069         break;
5070     case 10: /* Unordered or Greater Than A || !(N || Z) */
5071     case 26: /* Not Less or Equal A || !(N || Z) */
5072         c->v1 = tcg_temp_new();
5073         c->g1 = 0;
5074         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5075         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5076         tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_A | FPSR_CC_N);
5077         tcg_gen_or_i32(c->v1, c->v1, fpsr);
5078         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5079         c->tcond = TCG_COND_NE;
5080         break;
5081     case 11: /* Unordered or Greater or Equal A || Z || !N */
5082     case 27: /* Not Less Than A || Z || !N */
5083         c->v1 = tcg_temp_new();
5084         c->g1 = 0;
5085         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5086         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5087         c->tcond = TCG_COND_NE;
5088         break;
5089     case 12: /* Unordered or Less Than A || (N && !Z) */
5090     case 28: /* Not Greater than or Equal A || (N && !Z) */
5091         c->v1 = tcg_temp_new();
5092         c->g1 = 0;
5093         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5094         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5095         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5096         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_A | FPSR_CC_N);
5097         c->tcond = TCG_COND_NE;
5098         break;
5099     case 13: /* Unordered or Less or Equal A || Z || N */
5100     case 29: /* Not Greater Than A || Z || N */
5101         c->v1 = tcg_temp_new();
5102         c->g1 = 0;
5103         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5104         c->tcond = TCG_COND_NE;
5105         break;
5106     case 14: /* Not Equal !Z */
5107     case 30: /* Signaling Not Equal !Z */
5108         c->v1 = tcg_temp_new();
5109         c->g1 = 0;
5110         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5111         c->tcond = TCG_COND_EQ;
5112         break;
5113     case 15: /* True */
5114     case 31: /* Signaling True */
5115         c->v1 = c->v2;
5116         c->tcond = TCG_COND_ALWAYS;
5117         break;
5118     }
5119     tcg_temp_free(fpsr);
5120 }
5121 
5122 static void gen_fjmpcc(DisasContext *s, int cond, TCGLabel *l1)
5123 {
5124     DisasCompare c;
5125 
5126     gen_fcc_cond(&c, s, cond);
5127     update_cc_op(s);
5128     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
5129     free_cond(&c);
5130 }
5131 
5132 DISAS_INSN(fbcc)
5133 {
5134     uint32_t offset;
5135     uint32_t base;
5136     TCGLabel *l1;
5137 
5138     base = s->pc;
5139     offset = (int16_t)read_im16(env, s);
5140     if (insn & (1 << 6)) {
5141         offset = (offset << 16) | read_im16(env, s);
5142     }
5143 
5144     l1 = gen_new_label();
5145     update_cc_op(s);
5146     gen_fjmpcc(s, insn & 0x3f, l1);
5147     gen_jmp_tb(s, 0, s->pc);
5148     gen_set_label(l1);
5149     gen_jmp_tb(s, 1, base + offset);
5150 }
5151 
5152 DISAS_INSN(fscc)
5153 {
5154     DisasCompare c;
5155     int cond;
5156     TCGv tmp;
5157     uint16_t ext;
5158 
5159     ext = read_im16(env, s);
5160     cond = ext & 0x3f;
5161     gen_fcc_cond(&c, s, cond);
5162 
5163     tmp = tcg_temp_new();
5164     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
5165     free_cond(&c);
5166 
5167     tcg_gen_neg_i32(tmp, tmp);
5168     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
5169     tcg_temp_free(tmp);
5170 }
5171 
5172 #if defined(CONFIG_SOFTMMU)
5173 DISAS_INSN(frestore)
5174 {
5175     TCGv addr;
5176 
5177     if (IS_USER(s)) {
5178         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
5179         return;
5180     }
5181     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5182         SRC_EA(env, addr, OS_LONG, 0, NULL);
5183         /* FIXME: check the state frame */
5184     } else {
5185         disas_undef(env, s, insn);
5186     }
5187 }
5188 
5189 DISAS_INSN(fsave)
5190 {
5191     if (IS_USER(s)) {
5192         gen_exception(s, s->insn_pc, EXCP_PRIVILEGE);
5193         return;
5194     }
5195 
5196     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5197         /* always write IDLE */
5198         TCGv idle = tcg_const_i32(0x41000000);
5199         DEST_EA(env, insn, OS_LONG, idle, NULL);
5200         tcg_temp_free(idle);
5201     } else {
5202         disas_undef(env, s, insn);
5203     }
5204 }
5205 #endif
5206 
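     /* Extract the upper or lower 16-bit half of a MAC operand.  The result
        depends on the MACSR mode: fractional (FI) keeps the half in the
        upper 16 bits, signed (SU) sign-extends it, and the default unsigned
        mode zero-extends it.  */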
5207 static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
5208 {
5209     TCGv tmp = tcg_temp_new();
5210     if (s->env->macsr & MACSR_FI) {
5211         if (upper)
5212             tcg_gen_andi_i32(tmp, val, 0xffff0000);
5213         else
5214             tcg_gen_shli_i32(tmp, val, 16);
5215     } else if (s->env->macsr & MACSR_SU) {
5216         if (upper)
5217             tcg_gen_sari_i32(tmp, val, 16);
5218         else
5219             tcg_gen_ext16s_i32(tmp, val);
5220     } else {
5221         if (upper)
5222             tcg_gen_shri_i32(tmp, val, 16);
5223         else
5224             tcg_gen_ext16u_i32(tmp, val);
5225     }
5226     return tmp;
5227 }
5228 
5229 static void gen_mac_clear_flags(void)
5230 {
5231     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
5232                      ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
5233 }
5234 
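     /* ColdFire MAC/EMAC multiply-accumulate.  This handles the plain form,
        the form with a parallel memory load (insn & 0x30), and, when the
        EMAC_B feature is present, the dual-accumulate variant selected by
        the low bits of the extension word.  */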
5235 DISAS_INSN(mac)
5236 {
5237     TCGv rx;
5238     TCGv ry;
5239     uint16_t ext;
5240     int acc;
5241     TCGv tmp;
5242     TCGv addr;
5243     TCGv loadval;
5244     int dual;
5245     TCGv saved_flags;
5246 
5247     if (!s->done_mac) {
5248         s->mactmp = tcg_temp_new_i64();
5249         s->done_mac = 1;
5250     }
5251 
5252     ext = read_im16(env, s);
5253 
5254     acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
5255     dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
5256     if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
5257         disas_undef(env, s, insn);
5258         return;
5259     }
5260     if (insn & 0x30) {
5261         /* MAC with load.  */
5262         tmp = gen_lea(env, s, insn, OS_LONG);
5263         addr = tcg_temp_new();
5264         tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
5265         /* Load the value now to ensure correct exception behavior.
5266            Perform writeback after reading the MAC inputs.  */
5267         loadval = gen_load(s, OS_LONG, addr, 0);
5268 
5269         acc ^= 1;
5270         rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
5271         ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
5272     } else {
5273         loadval = addr = NULL_QREG;
5274         rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5275         ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5276     }
5277 
5278     gen_mac_clear_flags();
5279 #if 0
5280     l1 = -1;
5281     /* Disabled because conditional branches clobber temporary vars.  */
5282     if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
5283         /* Skip the multiply if we know we will ignore it.  */
5284         l1 = gen_new_label();
5285         tmp = tcg_temp_new();
5286         tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
5287         gen_op_jmp_nz32(tmp, l1);
5288     }
5289 #endif
5290 
5291     if ((ext & 0x0800) == 0) {
5292         /* Word.  */
5293         rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
5294         ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
5295     }
5296     if (s->env->macsr & MACSR_FI) {
5297         gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
5298     } else {
5299         if (s->env->macsr & MACSR_SU)
5300             gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
5301         else
5302             gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
5303         switch ((ext >> 9) & 3) {
5304         case 1:
5305             tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
5306             break;
5307         case 3:
5308             tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
5309             break;
5310         }
5311     }
5312 
5313     if (dual) {
5314         /* Save the overflow flag from the multiply.  */
5315         saved_flags = tcg_temp_new();
5316         tcg_gen_mov_i32(saved_flags, QREG_MACSR);
5317     } else {
5318         saved_flags = NULL_QREG;
5319     }
5320 
5321 #if 0
5322     /* Disabled because conditional branches clobber temporary vars.  */
5323     if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
5324         /* Skip the accumulate if the value is already saturated.  */
5325         l1 = gen_new_label();
5326         tmp = tcg_temp_new();
5327         gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5328         gen_op_jmp_nz32(tmp, l1);
5329     }
5330 #endif
5331 
5332     if (insn & 0x100)
5333         tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5334     else
5335         tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5336 
5337     if (s->env->macsr & MACSR_FI)
5338         gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5339     else if (s->env->macsr & MACSR_SU)
5340         gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5341     else
5342         gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5343 
5344 #if 0
5345     /* Disabled because conditional branches clobber temporary vars.  */
5346     if (l1 != -1)
5347         gen_set_label(l1);
5348 #endif
5349 
5350     if (dual) {
5351         /* Dual accumulate variant.  */
5352         acc = (ext >> 2) & 3;
5353         /* Restore the overflow flag from the multiplier.  */
5354         tcg_gen_mov_i32(QREG_MACSR, saved_flags);
5355 #if 0
5356         /* Disabled because conditional branches clobber temporary vars.  */
5357         if ((s->env->macsr & MACSR_OMC) != 0) {
5358             /* Skip the accumulate if the value is already saturated.  */
5359             l1 = gen_new_label();
5360             tmp = tcg_temp_new();
5361             gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5362             gen_op_jmp_nz32(tmp, l1);
5363         }
5364 #endif
5365         if (ext & 2)
5366             tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5367         else
5368             tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5369         if (s->env->macsr & MACSR_FI)
5370             gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5371         else if (s->env->macsr & MACSR_SU)
5372             gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5373         else
5374             gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5375 #if 0
5376         /* Disabled because conditional branches clobber temporary vars.  */
5377         if (l1 != -1)
5378             gen_set_label(l1);
5379 #endif
5380     }
5381     gen_helper_mac_set_flags(cpu_env, tcg_const_i32(acc));
5382 
5383     if (insn & 0x30) {
5384         TCGv rw;
5385         rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5386         tcg_gen_mov_i32(rw, loadval);
5387         /* FIXME: Should address writeback happen with the masked or
5388            unmasked value?  */
5389         switch ((insn >> 3) & 7) {
5390         case 3: /* Post-increment.  */
5391             tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
5392             break;
5393         case 4: /* Pre-decrement.  */
5394             tcg_gen_mov_i32(AREG(insn, 0), addr);
5395         }
5396     }
5397 }
5398 
5399 DISAS_INSN(from_mac)
5400 {
5401     TCGv rx;
5402     TCGv_i64 acc;
5403     int accnum;
5404 
5405     rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5406     accnum = (insn >> 9) & 3;
5407     acc = MACREG(accnum);
5408     if (s->env->macsr & MACSR_FI) {
5409         gen_helper_get_macf(rx, cpu_env, acc);
5410     } else if ((s->env->macsr & MACSR_OMC) == 0) {
5411         tcg_gen_extrl_i64_i32(rx, acc);
5412     } else if (s->env->macsr & MACSR_SU) {
5413         gen_helper_get_macs(rx, acc);
5414     } else {
5415         gen_helper_get_macu(rx, acc);
5416     }
5417     if (insn & 0x40) {
5418         tcg_gen_movi_i64(acc, 0);
5419         tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5420     }
5421 }
5422 
5423 DISAS_INSN(move_mac)
5424 {
5425     /* FIXME: This can be done without a helper.  */
5426     int src;
5427     TCGv dest;
5428     src = insn & 3;
5429     dest = tcg_const_i32((insn >> 9) & 3);
5430     gen_helper_mac_move(cpu_env, dest, tcg_const_i32(src));
5431     gen_mac_clear_flags();
5432     gen_helper_mac_set_flags(cpu_env, dest);
5433 }
5434 
5435 DISAS_INSN(from_macsr)
5436 {
5437     TCGv reg;
5438 
5439     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5440     tcg_gen_mov_i32(reg, QREG_MACSR);
5441 }
5442 
5443 DISAS_INSN(from_mask)
5444 {
5445     TCGv reg;
5446     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5447     tcg_gen_mov_i32(reg, QREG_MAC_MASK);
5448 }
5449 
5450 DISAS_INSN(from_mext)
5451 {
5452     TCGv reg;
5453     TCGv acc;
5454     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5455     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5456     if (s->env->macsr & MACSR_FI)
5457         gen_helper_get_mac_extf(reg, cpu_env, acc);
5458     else
5459         gen_helper_get_mac_exti(reg, cpu_env, acc);
5460 }
5461 
5462 DISAS_INSN(macsr_to_ccr)
5463 {
5464     TCGv tmp = tcg_temp_new();
5465     tcg_gen_andi_i32(tmp, QREG_MACSR, 0xf);
5466     gen_helper_set_sr(cpu_env, tmp);
5467     tcg_temp_free(tmp);
5468     set_cc_op(s, CC_OP_FLAGS);
5469 }
5470 
5471 DISAS_INSN(to_mac)
5472 {
5473     TCGv_i64 acc;
5474     TCGv val;
5475     int accnum;
5476     accnum = (insn >> 9) & 3;
5477     acc = MACREG(accnum);
5478     SRC_EA(env, val, OS_LONG, 0, NULL);
5479     if (s->env->macsr & MACSR_FI) {
5480         tcg_gen_ext_i32_i64(acc, val);
5481         tcg_gen_shli_i64(acc, acc, 8);
5482     } else if (s->env->macsr & MACSR_SU) {
5483         tcg_gen_ext_i32_i64(acc, val);
5484     } else {
5485         tcg_gen_extu_i32_i64(acc, val);
5486     }
5487     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5488     gen_mac_clear_flags();
5489     gen_helper_mac_set_flags(cpu_env, tcg_const_i32(accnum));
5490 }
5491 
5492 DISAS_INSN(to_macsr)
5493 {
5494     TCGv val;
5495     SRC_EA(env, val, OS_LONG, 0, NULL);
5496     gen_helper_set_macsr(cpu_env, val);
5497     gen_lookup_tb(s);
5498 }
5499 
5500 DISAS_INSN(to_mask)
5501 {
5502     TCGv val;
5503     SRC_EA(env, val, OS_LONG, 0, NULL);
5504     tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
5505 }
5506 
5507 DISAS_INSN(to_mext)
5508 {
5509     TCGv val;
5510     TCGv acc;
5511     SRC_EA(env, val, OS_LONG, 0, NULL);
5512     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5513     if (s->env->macsr & MACSR_FI)
5514         gen_helper_set_mac_extf(cpu_env, val, acc);
5515     else if (s->env->macsr & MACSR_SU)
5516         gen_helper_set_mac_exts(cpu_env, val, acc);
5517     else
5518         gen_helper_set_mac_extu(cpu_env, val, acc);
5519 }
5520 
5521 static disas_proc opcode_table[65536];
5522 
5523 static void
5524 register_opcode(disas_proc proc, uint16_t opcode, uint16_t mask)
5525 {
5526   int i;
5527   int from;
5528   int to;
5529 
5530   /* Sanity check.  All set bits must be included in the mask.  */
5531   if (opcode & ~mask) {
5532       fprintf(stderr,
5533               "qemu internal error: bogus opcode definition %04x/%04x\n",
5534               opcode, mask);
5535       abort();
5536   }
5537   /* This could probably be cleverer.  For now just optimize the case where
5538      the top bits are known.  */
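       /* E.g. for dbcc (opcode 0x50c8, mask 0xf0f8) the first clear mask bit
          is 0x0800, so the scan below covers the 0x1000-entry span
          0x5000..0x5fff and installs the handler at every index whose
          masked bits equal 0x50c8.  */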
5539   /* Find the first zero bit in the mask.  */
5540   i = 0x8000;
5541   while ((i & mask) != 0)
5542       i >>= 1;
5543   /* Iterate over all combinations of this and lower bits.  */
5544   if (i == 0)
5545       i = 1;
5546   else
5547       i <<= 1;
5548   from = opcode & ~(i - 1);
5549   to = from + i;
5550   for (i = from; i < to; i++) {
5551       if ((i & mask) == opcode)
5552           opcode_table[i] = proc;
5553   }
5554 }
5555 
5556 /* Register m68k opcode handlers.  Order is important.
5557    Later insns override earlier ones.  */
5558 void register_m68k_insns(CPUM68KState *env)
5559 {
5560     /* Build the opcode table only once to avoid
5561        multithreading issues. */
5562     if (opcode_table[0] != NULL) {
5563         return;
5564     }
5565 
5566     /* Use BASE() for instructions available on both CF_ISA_A and
5567      * M68000; INSN() additionally gates the handler on a CPU feature.
5568      */
5569 #define BASE(name, opcode, mask) \
5570     register_opcode(disas_##name, 0x##opcode, 0x##mask)
5571 #define INSN(name, opcode, mask, feature) do { \
5572     if (m68k_feature(env, M68K_FEATURE_##feature)) \
5573         BASE(name, opcode, mask); \
5574     } while(0)
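         /* So, for example, INSN(tpf, 51f8, fff8, CF_ISA_A) installs
            disas_tpf at table entries 0x51f8..0x51ff, but only when the CPU
            advertises CF_ISA_A; BASE() entries are registered
            unconditionally.  */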
5575     BASE(undef,     0000, 0000);
5576     INSN(arith_im,  0080, fff8, CF_ISA_A);
5577     INSN(arith_im,  0000, ff00, M68000);
5578     INSN(chk2,      00c0, f9c0, CHK2);
5579     INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
5580     BASE(bitop_reg, 0100, f1c0);
5581     BASE(bitop_reg, 0140, f1c0);
5582     BASE(bitop_reg, 0180, f1c0);
5583     BASE(bitop_reg, 01c0, f1c0);
5584     INSN(arith_im,  0280, fff8, CF_ISA_A);
5585     INSN(arith_im,  0200, ff00, M68000);
5586     INSN(undef,     02c0, ffc0, M68000);
5587     INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
5588     INSN(arith_im,  0480, fff8, CF_ISA_A);
5589     INSN(arith_im,  0400, ff00, M68000);
5590     INSN(undef,     04c0, ffc0, M68000);
5591     INSN(arith_im,  0600, ff00, M68000);
5592     INSN(undef,     06c0, ffc0, M68000);
5593     INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
5594     INSN(arith_im,  0680, fff8, CF_ISA_A);
5595     INSN(arith_im,  0c00, ff38, CF_ISA_A);
5596     INSN(arith_im,  0c00, ff00, M68000);
5597     BASE(bitop_im,  0800, ffc0);
5598     BASE(bitop_im,  0840, ffc0);
5599     BASE(bitop_im,  0880, ffc0);
5600     BASE(bitop_im,  08c0, ffc0);
5601     INSN(arith_im,  0a80, fff8, CF_ISA_A);
5602     INSN(arith_im,  0a00, ff00, M68000);
5603     INSN(cas,       0ac0, ffc0, CAS);
5604     INSN(cas,       0cc0, ffc0, CAS);
5605     INSN(cas,       0ec0, ffc0, CAS);
5606     INSN(cas2w,     0cfc, ffff, CAS);
5607     INSN(cas2l,     0efc, ffff, CAS);
5608     BASE(move,      1000, f000);
5609     BASE(move,      2000, f000);
5610     BASE(move,      3000, f000);
5611     INSN(chk,       4000, f040, M68000);
5612     INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
5613     INSN(negx,      4080, fff8, CF_ISA_A);
5614     INSN(negx,      4000, ff00, M68000);
5615     INSN(undef,     40c0, ffc0, M68000);
5616     INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
5617     INSN(move_from_sr, 40c0, ffc0, M68000);
5618     BASE(lea,       41c0, f1c0);
5619     BASE(clr,       4200, ff00);
5620     BASE(undef,     42c0, ffc0);
5621     INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
5622     INSN(move_from_ccr, 42c0, ffc0, M68000);
5623     INSN(neg,       4480, fff8, CF_ISA_A);
5624     INSN(neg,       4400, ff00, M68000);
5625     INSN(undef,     44c0, ffc0, M68000);
5626     BASE(move_to_ccr, 44c0, ffc0);
5627     INSN(not,       4680, fff8, CF_ISA_A);
5628     INSN(not,       4600, ff00, M68000);
5629 #if defined(CONFIG_SOFTMMU)
5630     BASE(move_to_sr, 46c0, ffc0);
5631 #endif
5632     INSN(nbcd,      4800, ffc0, M68000);
5633     INSN(linkl,     4808, fff8, M68000);
5634     BASE(pea,       4840, ffc0);
5635     BASE(swap,      4840, fff8);
5636     INSN(bkpt,      4848, fff8, BKPT);
5637     INSN(movem,     48d0, fbf8, CF_ISA_A);
5638     INSN(movem,     48e8, fbf8, CF_ISA_A);
5639     INSN(movem,     4880, fb80, M68000);
5640     BASE(ext,       4880, fff8);
5641     BASE(ext,       48c0, fff8);
5642     BASE(ext,       49c0, fff8);
5643     BASE(tst,       4a00, ff00);
5644     INSN(tas,       4ac0, ffc0, CF_ISA_B);
5645     INSN(tas,       4ac0, ffc0, M68000);
5646 #if defined(CONFIG_SOFTMMU)
5647     INSN(halt,      4ac8, ffff, CF_ISA_A);
5648 #endif
5649     INSN(pulse,     4acc, ffff, CF_ISA_A);
5650     BASE(illegal,   4afc, ffff);
5651     INSN(mull,      4c00, ffc0, CF_ISA_A);
5652     INSN(mull,      4c00, ffc0, LONG_MULDIV);
5653     INSN(divl,      4c40, ffc0, CF_ISA_A);
5654     INSN(divl,      4c40, ffc0, LONG_MULDIV);
5655     INSN(sats,      4c80, fff8, CF_ISA_B);
5656     BASE(trap,      4e40, fff0);
5657     BASE(link,      4e50, fff8);
5658     BASE(unlk,      4e58, fff8);
5659 #if defined(CONFIG_SOFTMMU)
5660     INSN(move_to_usp, 4e60, fff8, USP);
5661     INSN(move_from_usp, 4e68, fff8, USP);
5662     INSN(reset,     4e70, ffff, M68000);
5663     BASE(stop,      4e72, ffff);
5664     BASE(rte,       4e73, ffff);
5665     INSN(cf_movec,  4e7b, ffff, CF_ISA_A);
5666     INSN(m68k_movec, 4e7a, fffe, M68000);
5667 #endif
5668     BASE(nop,       4e71, ffff);
5669     INSN(rtd,       4e74, ffff, RTD);
5670     BASE(rts,       4e75, ffff);
5671     BASE(jump,      4e80, ffc0);
5672     BASE(jump,      4ec0, ffc0);
5673     INSN(addsubq,   5000, f080, M68000);
5674     BASE(addsubq,   5080, f0c0);
5675     INSN(scc,       50c0, f0f8, CF_ISA_A); /* Scc.B Dx   */
5676     INSN(scc,       50c0, f0c0, M68000);   /* Scc.B <EA> */
5677     INSN(dbcc,      50c8, f0f8, M68000);
5678     INSN(tpf,       51f8, fff8, CF_ISA_A);
5679 
5680     /* Branch instructions.  */
5681     BASE(branch,    6000, f000);
5682     /* Disable long branch instructions, then add back the ones we want.  */
5683     BASE(undef,     60ff, f0ff); /* All long branches.  */
5684     INSN(branch,    60ff, f0ff, CF_ISA_B);
5685     INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
5686     INSN(branch,    60ff, ffff, BRAL);
5687     INSN(branch,    60ff, f0ff, BCCL);
5688 
5689     BASE(moveq,     7000, f100);
5690     INSN(mvzs,      7100, f100, CF_ISA_B);
5691     BASE(or,        8000, f000);
5692     BASE(divw,      80c0, f0c0);
5693     INSN(sbcd_reg,  8100, f1f8, M68000);
5694     INSN(sbcd_mem,  8108, f1f8, M68000);
5695     BASE(addsub,    9000, f000);
5696     INSN(undef,     90c0, f0c0, CF_ISA_A);
5697     INSN(subx_reg,  9180, f1f8, CF_ISA_A);
5698     INSN(subx_reg,  9100, f138, M68000);
5699     INSN(subx_mem,  9108, f138, M68000);
5700     INSN(suba,      91c0, f1c0, CF_ISA_A);
5701     INSN(suba,      90c0, f0c0, M68000);
5702 
5703     BASE(undef_mac, a000, f000);
5704     INSN(mac,       a000, f100, CF_EMAC);
5705     INSN(from_mac,  a180, f9b0, CF_EMAC);
5706     INSN(move_mac,  a110, f9fc, CF_EMAC);
5707     INSN(from_macsr, a980, f9f0, CF_EMAC);
5708     INSN(from_mask, ad80, fff0, CF_EMAC);
5709     INSN(from_mext, ab80, fbf0, CF_EMAC);
5710     INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
5711     INSN(to_mac,    a100, f9c0, CF_EMAC);
5712     INSN(to_macsr,  a900, ffc0, CF_EMAC);
5713     INSN(to_mext,   ab00, fbc0, CF_EMAC);
5714     INSN(to_mask,   ad00, ffc0, CF_EMAC);
5715 
5716     INSN(mov3q,     a140, f1c0, CF_ISA_B);
5717     INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
5718     INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
5719     INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
5720     INSN(cmp,       b080, f1c0, CF_ISA_A);
5721     INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
5722     INSN(cmp,       b000, f100, M68000);
5723     INSN(eor,       b100, f100, M68000);
5724     INSN(cmpm,      b108, f138, M68000);
5725     INSN(cmpa,      b0c0, f0c0, M68000);
5726     INSN(eor,       b180, f1c0, CF_ISA_A);
5727     BASE(and,       c000, f000);
5728     INSN(exg_dd,    c140, f1f8, M68000);
5729     INSN(exg_aa,    c148, f1f8, M68000);
5730     INSN(exg_da,    c188, f1f8, M68000);
5731     BASE(mulw,      c0c0, f0c0);
5732     INSN(abcd_reg,  c100, f1f8, M68000);
5733     INSN(abcd_mem,  c108, f1f8, M68000);
5734     BASE(addsub,    d000, f000);
5735     INSN(undef,     d0c0, f0c0, CF_ISA_A);
5736     INSN(addx_reg,  d180, f1f8, CF_ISA_A);
5737     INSN(addx_reg,  d100, f138, M68000);
5738     INSN(addx_mem,  d108, f138, M68000);
5739     INSN(adda,      d1c0, f1c0, CF_ISA_A);
5740     INSN(adda,      d0c0, f0c0, M68000);
5741     INSN(shift_im,  e080, f0f0, CF_ISA_A);
5742     INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
5743     INSN(shift8_im, e000, f0f0, M68000);
5744     INSN(shift16_im, e040, f0f0, M68000);
5745     INSN(shift_im,  e080, f0f0, M68000);
5746     INSN(shift8_reg, e020, f0f0, M68000);
5747     INSN(shift16_reg, e060, f0f0, M68000);
5748     INSN(shift_reg, e0a0, f0f0, M68000);
5749     INSN(shift_mem, e0c0, fcc0, M68000);
5750     INSN(rotate_im, e090, f0f0, M68000);
5751     INSN(rotate8_im, e010, f0f0, M68000);
5752     INSN(rotate16_im, e050, f0f0, M68000);
5753     INSN(rotate_reg, e0b0, f0f0, M68000);
5754     INSN(rotate8_reg, e030, f0f0, M68000);
5755     INSN(rotate16_reg, e070, f0f0, M68000);
5756     INSN(rotate_mem, e4c0, fcc0, M68000);
5757     INSN(bfext_mem, e9c0, fdc0, BITFIELD);  /* bfextu & bfexts */
5758     INSN(bfext_reg, e9c0, fdf8, BITFIELD);
5759     INSN(bfins_mem, efc0, ffc0, BITFIELD);
5760     INSN(bfins_reg, efc0, fff8, BITFIELD);
5761     INSN(bfop_mem, eac0, ffc0, BITFIELD);   /* bfchg */
5762     INSN(bfop_reg, eac0, fff8, BITFIELD);   /* bfchg */
5763     INSN(bfop_mem, ecc0, ffc0, BITFIELD);   /* bfclr */
5764     INSN(bfop_reg, ecc0, fff8, BITFIELD);   /* bfclr */
5765     INSN(bfop_mem, edc0, ffc0, BITFIELD);   /* bfffo */
5766     INSN(bfop_reg, edc0, fff8, BITFIELD);   /* bfffo */
5767     INSN(bfop_mem, eec0, ffc0, BITFIELD);   /* bfset */
5768     INSN(bfop_reg, eec0, fff8, BITFIELD);   /* bfset */
5769     INSN(bfop_mem, e8c0, ffc0, BITFIELD);   /* bftst */
5770     INSN(bfop_reg, e8c0, fff8, BITFIELD);   /* bftst */
5771     BASE(undef_fpu, f000, f000);
5772     INSN(fpu,       f200, ffc0, CF_FPU);
5773     INSN(fbcc,      f280, ffc0, CF_FPU);
5774     INSN(fpu,       f200, ffc0, FPU);
5775     INSN(fscc,      f240, ffc0, FPU);
5776     INSN(fbcc,      f280, ff80, FPU);
5777 #if defined(CONFIG_SOFTMMU)
5778     INSN(frestore,  f340, ffc0, CF_FPU);
5779     INSN(fsave,     f300, ffc0, CF_FPU);
5780     INSN(frestore,  f340, ffc0, FPU);
5781     INSN(fsave,     f300, ffc0, FPU);
5782     INSN(intouch,   f340, ffc0, CF_ISA_A);
5783     INSN(cpushl,    f428, ff38, CF_ISA_A);
5784     INSN(cpush,     f420, ff20, M68040);
5785     INSN(cinv,      f400, ff20, M68040);
5786     INSN(wddata,    fb00, ff00, CF_ISA_A);
5787     INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
5788 #endif
5789     INSN(move16_mem, f600, ffe0, M68040);
5790     INSN(move16_reg, f620, fff8, M68040);
5791 #undef INSN
5792 }
5793 
5794 /* ??? Some of this implementation is not exception safe.  We should always
5795    write back the result to memory before setting the condition codes.  */
5796 static void disas_m68k_insn(CPUM68KState *env, DisasContext *s)
5797 {
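         /* Every table slot was filled in register_m68k_insns() (undef is
            registered with mask 0000), so the indexed call is always valid;
            do_writebacks() then commits any address-register writebacks the
            effective-address code deferred.  */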
5798     uint16_t insn = read_im16(env, s);
5799     opcode_table[insn](env, s, insn);
5800     do_writebacks(s);
5801 }
5802 
5803 /* generate intermediate code for basic block 'tb'.  */
5804 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb)
5805 {
5806     CPUM68KState *env = cs->env_ptr;
5807     DisasContext dc1, *dc = &dc1;
5808     target_ulong pc_start;
5809     int pc_offset;
5810     int num_insns;
5811     int max_insns;
5812 
5813     /* generate intermediate code */
5814     pc_start = tb->pc;
5815 
5816     dc->tb = tb;
5817 
5818     dc->env = env;
5819     dc->is_jmp = DISAS_NEXT;
5820     dc->pc = pc_start;
5821     dc->cc_op = CC_OP_DYNAMIC;
5822     dc->cc_op_synced = 1;
5823     dc->singlestep_enabled = cs->singlestep_enabled;
5824     dc->user = (env->sr & SR_S) == 0;
5825     dc->done_mac = 0;
5826     dc->writeback_mask = 0;
5827     num_insns = 0;
5828     max_insns = tb_cflags(tb) & CF_COUNT_MASK;
5829     if (max_insns == 0) {
5830         max_insns = CF_COUNT_MASK;
5831     }
5832     if (max_insns > TCG_MAX_INSNS) {
5833         max_insns = TCG_MAX_INSNS;
5834     }
5835 
5836     gen_tb_start(tb);
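         /* Translate one insn per iteration; the loop ends when a branch or
            exception closes the block, the TCG op buffer is nearly full,
            single-stepping is in effect, or the block approaches the end of
            the page.  */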
5837     do {
5838         pc_offset = dc->pc - pc_start;
5839         tcg_gen_insn_start(dc->pc, dc->cc_op);
5840         num_insns++;
5841 
5842         if (unlikely(cpu_breakpoint_test(cs, dc->pc, BP_ANY))) {
5843             gen_exception(dc, dc->pc, EXCP_DEBUG);
5844             dc->is_jmp = DISAS_JUMP;
5845             /* The address covered by the breakpoint must be included in
5846                [tb->pc, tb->pc + tb->size) in order for it to be properly
5847                cleared -- thus we increment the PC here so that the logic
5848                setting tb->size below does the right thing.  */
5849             dc->pc += 2;
5850             break;
5851         }
5852 
5853         if (num_insns == max_insns && (tb_cflags(tb) & CF_LAST_IO)) {
5854             gen_io_start();
5855         }
5856 
5857         dc->insn_pc = dc->pc;
5858         disas_m68k_insn(env, dc);
5859     } while (!dc->is_jmp && !tcg_op_buf_full() &&
5860              !cs->singlestep_enabled &&
5861              !singlestep &&
5862              (pc_offset) < (TARGET_PAGE_SIZE - 32) &&
5863              num_insns < max_insns);
5864 
5865     if (tb_cflags(tb) & CF_LAST_IO)
5866         gen_io_end();
5867     if (unlikely(cs->singlestep_enabled)) {
5868         /* Make sure the pc is updated, and raise a debug exception.  */
5869         if (!dc->is_jmp) {
5870             update_cc_op(dc);
5871             tcg_gen_movi_i32(QREG_PC, dc->pc);
5872         }
5873         gen_helper_raise_exception(cpu_env, tcg_const_i32(EXCP_DEBUG));
5874     } else {
5875         switch (dc->is_jmp) {
5876         case DISAS_NEXT:
5877             update_cc_op(dc);
5878             gen_jmp_tb(dc, 0, dc->pc);
5879             break;
5880         default:
5881         case DISAS_JUMP:
5882         case DISAS_UPDATE:
5883             update_cc_op(dc);
5884             /* indicate that the hash table must be used to find the next TB */
5885             tcg_gen_exit_tb(0);
5886             break;
5887         case DISAS_TB_JUMP:
5888             /* nothing more to generate */
5889             break;
5890         }
5891     }
5892     gen_tb_end(tb, num_insns);
5893 
5894 #ifdef DEBUG_DISAS
5895     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
5896         && qemu_log_in_addr_range(pc_start)) {
5897         qemu_log_lock();
5898         qemu_log("----------------\n");
5899         qemu_log("IN: %s\n", lookup_symbol(pc_start));
5900         log_target_disas(cs, pc_start, dc->pc - pc_start);
5901         qemu_log("\n");
5902         qemu_log_unlock();
5903     }
5904 #endif
5905     tb->size = dc->pc - pc_start;
5906     tb->icount = num_insns;
5907 }
5908 
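     /* Convert an 80-bit FP register image to a host double; only used for
        the "%12g" formatting in m68k_cpu_dump_state() below.  */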
5909 static double floatx80_to_double(CPUM68KState *env, uint16_t high, uint64_t low)
5910 {
5911     floatx80 a = { .high = high, .low = low };
5912     union {
5913         float64 f64;
5914         double d;
5915     } u;
5916 
5917     u.f64 = floatx80_to_float64(a, &env->fp_status);
5918     return u.d;
5919 }
5920 
5921 void m68k_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
5922                          int flags)
5923 {
5924     M68kCPU *cpu = M68K_CPU(cs);
5925     CPUM68KState *env = &cpu->env;
5926     int i;
5927     uint16_t sr;
5928     for (i = 0; i < 8; i++) {
5929         cpu_fprintf(f, "D%d = %08x   A%d = %08x   "
5930                     "F%d = %04x %016"PRIx64"  (%12g)\n",
5931                     i, env->dregs[i], i, env->aregs[i],
5932                     i, env->fregs[i].l.upper, env->fregs[i].l.lower,
5933                     floatx80_to_double(env, env->fregs[i].l.upper,
5934                                        env->fregs[i].l.lower));
5935     }
5936     cpu_fprintf(f, "PC = %08x   ", env->pc);
5937     sr = env->sr | cpu_m68k_get_ccr(env);
5938     cpu_fprintf(f, "SR = %04x T:%x I:%x %c%c %c%c%c%c%c\n",
5939                 sr, (sr & SR_T) >> SR_T_SHIFT, (sr & SR_I) >> SR_I_SHIFT,
5940                 (sr & SR_S) ? 'S' : 'U', (sr & SR_M) ? '%' : 'I',
5941                 (sr & CCF_X) ? 'X' : '-', (sr & CCF_N) ? 'N' : '-',
5942                 (sr & CCF_Z) ? 'Z' : '-', (sr & CCF_V) ? 'V' : '-',
5943                 (sr & CCF_C) ? 'C' : '-');
5944     cpu_fprintf(f, "FPSR = %08x %c%c%c%c ", env->fpsr,
5945                 (env->fpsr & FPSR_CC_A) ? 'A' : '-',
5946                 (env->fpsr & FPSR_CC_I) ? 'I' : '-',
5947                 (env->fpsr & FPSR_CC_Z) ? 'Z' : '-',
5948                 (env->fpsr & FPSR_CC_N) ? 'N' : '-');
5949     cpu_fprintf(f, "\n                                "
5950                    "FPCR =     %04x ", env->fpcr);
5951     switch (env->fpcr & FPCR_PREC_MASK) {
5952     case FPCR_PREC_X:
5953         cpu_fprintf(f, "X ");
5954         break;
5955     case FPCR_PREC_S:
5956         cpu_fprintf(f, "S ");
5957         break;
5958     case FPCR_PREC_D:
5959         cpu_fprintf(f, "D ");
5960         break;
5961     }
5962     switch (env->fpcr & FPCR_RND_MASK) {
5963     case FPCR_RND_N:
5964         cpu_fprintf(f, "RN ");
5965         break;
5966     case FPCR_RND_Z:
5967         cpu_fprintf(f, "RZ ");
5968         break;
5969     case FPCR_RND_M:
5970         cpu_fprintf(f, "RM ");
5971         break;
5972     case FPCR_RND_P:
5973         cpu_fprintf(f, "RP ");
5974         break;
5975     }
5976     cpu_fprintf(f, "\n");
5977 #ifdef CONFIG_SOFTMMU
5978     cpu_fprintf(f, "%sA7(MSP) = %08x %sA7(USP) = %08x %sA7(ISP) = %08x\n",
5979                env->current_sp == M68K_SSP ? "->" : "  ", env->sp[M68K_SSP],
5980                env->current_sp == M68K_USP ? "->" : "  ", env->sp[M68K_USP],
5981                env->current_sp == M68K_ISP ? "->" : "  ", env->sp[M68K_ISP]);
5982     cpu_fprintf(f, "VBR = 0x%08x\n", env->vbr);
5983 #endif
5984 }
5985 
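     /* data[] holds the values recorded by tcg_gen_insn_start(): the insn's
        PC and its cc_op.  A cc_op of CC_OP_DYNAMIC is not written back,
        since in that case env->cc_op already holds the live value.  */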
5986 void restore_state_to_opc(CPUM68KState *env, TranslationBlock *tb,
5987                           target_ulong *data)
5988 {
5989     int cc_op = data[1];
5990     env->pc = data[0];
5991     if (cc_op != CC_OP_DYNAMIC) {
5992         env->cc_op = cc_op;
5993     }
5994 }
5995