xref: /openbmc/qemu/target/m68k/translate.c (revision d80d761d)
1 /*
2  *  m68k translation
3  *
4  *  Copyright (c) 2005-2007 CodeSourcery
5  *  Written by Paul Brook
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "disas/disas.h"
24 #include "exec/exec-all.h"
25 #include "tcg/tcg-op.h"
26 #include "qemu/log.h"
27 #include "qemu/qemu-print.h"
28 #include "exec/cpu_ldst.h"
29 #include "exec/translator.h"
30 
31 #include "exec/helper-proto.h"
32 #include "exec/helper-gen.h"
33 
34 #include "exec/log.h"
35 #include "fpu/softfloat.h"
36 
37 
38 //#define DEBUG_DISPATCH 1
39 
40 #define DEFO32(name, offset) static TCGv QREG_##name;
41 #define DEFO64(name, offset) static TCGv_i64 QREG_##name;
42 #include "qregs.h.inc"
43 #undef DEFO32
44 #undef DEFO64
45 
46 static TCGv_i32 cpu_halted;
47 static TCGv_i32 cpu_exception_index;
48 
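/* Names "D0".."D7", "A0".."A7" (3 bytes each) and "ACC0".."ACC3" (5 bytes each). */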
49 static char cpu_reg_names[2 * 8 * 3 + 5 * 4];
50 static TCGv cpu_dregs[8];
51 static TCGv cpu_aregs[8];
52 static TCGv_i64 cpu_macc[4];
53 
54 #define REG(insn, pos)  (((insn) >> (pos)) & 7)
55 #define DREG(insn, pos) cpu_dregs[REG(insn, pos)]
56 #define AREG(insn, pos) get_areg(s, REG(insn, pos))
57 #define MACREG(acc)     cpu_macc[acc]
58 #define QREG_SP         get_areg(s, 7)
59 
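/*
 * Dummy TCG value returned by gen_lea()/gen_ea() and friends when the
 * requested addressing mode is invalid; callers test for it with
 * IS_NULL_QREG() and typically raise an address error via gen_addr_fault().
 */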
60 static TCGv NULL_QREG;
61 #define IS_NULL_QREG(t) (t == NULL_QREG)
62 /* Used to distinguish stores from bad addressing modes.  */
63 static TCGv store_dummy;
64 
65 #include "exec/gen-icount.h"
66 
67 void m68k_tcg_init(void)
68 {
69     char *p;
70     int i;
71 
72 #define DEFO32(name, offset) \
73     QREG_##name = tcg_global_mem_new_i32(cpu_env, \
74         offsetof(CPUM68KState, offset), #name);
75 #define DEFO64(name, offset) \
76     QREG_##name = tcg_global_mem_new_i64(cpu_env, \
77         offsetof(CPUM68KState, offset), #name);
78 #include "qregs.h.inc"
79 #undef DEFO32
80 #undef DEFO64
81 
82     cpu_halted = tcg_global_mem_new_i32(cpu_env,
83                                         -offsetof(M68kCPU, env) +
84                                         offsetof(CPUState, halted), "HALTED");
85     cpu_exception_index = tcg_global_mem_new_i32(cpu_env,
86                                                  -offsetof(M68kCPU, env) +
87                                                  offsetof(CPUState, exception_index),
88                                                  "EXCEPTION");
89 
90     p = cpu_reg_names;
91     for (i = 0; i < 8; i++) {
92         sprintf(p, "D%d", i);
93         cpu_dregs[i] = tcg_global_mem_new(cpu_env,
94                                           offsetof(CPUM68KState, dregs[i]), p);
95         p += 3;
96         sprintf(p, "A%d", i);
97         cpu_aregs[i] = tcg_global_mem_new(cpu_env,
98                                           offsetof(CPUM68KState, aregs[i]), p);
99         p += 3;
100     }
101     for (i = 0; i < 4; i++) {
102         sprintf(p, "ACC%d", i);
103         cpu_macc[i] = tcg_global_mem_new_i64(cpu_env,
104                                          offsetof(CPUM68KState, macc[i]), p);
105         p += 5;
106     }
107 
108     NULL_QREG = tcg_global_mem_new(cpu_env, -4, "NULL");
109     store_dummy = tcg_global_mem_new(cpu_env, -8, "NULL");
110 }
111 
112 /* internal defines */
113 typedef struct DisasContext {
114     DisasContextBase base;
115     CPUM68KState *env;
116     target_ulong pc;
117     target_ulong pc_prev;
118     CCOp cc_op; /* Current CC operation */
119     int cc_op_synced;
120     TCGv_i64 mactmp;
121     int done_mac;
122     int writeback_mask;
123     TCGv writeback[8];
124     bool ss_active;
125 } DisasContext;
126 
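/*
 * Address register updates for the postincrement and predecrement
 * addressing modes are not committed to cpu_aregs[] immediately: they are
 * staged in writeback[]/writeback_mask by delay_set_areg() and flushed by
 * do_writebacks() at the end of the instruction, so an instruction that
 * raises an exception part-way through leaves the architectural address
 * registers unchanged.
 */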
127 static TCGv get_areg(DisasContext *s, unsigned regno)
128 {
129     if (s->writeback_mask & (1 << regno)) {
130         return s->writeback[regno];
131     } else {
132         return cpu_aregs[regno];
133     }
134 }
135 
136 static void delay_set_areg(DisasContext *s, unsigned regno,
137                            TCGv val, bool give_temp)
138 {
139     if (s->writeback_mask & (1 << regno)) {
140         if (give_temp) {
141             s->writeback[regno] = val;
142         } else {
143             tcg_gen_mov_i32(s->writeback[regno], val);
144         }
145     } else {
146         s->writeback_mask |= 1 << regno;
147         if (give_temp) {
148             s->writeback[regno] = val;
149         } else {
150             TCGv tmp = tcg_temp_new();
151             s->writeback[regno] = tmp;
152             tcg_gen_mov_i32(tmp, val);
153         }
154     }
155 }
156 
157 static void do_writebacks(DisasContext *s)
158 {
159     unsigned mask = s->writeback_mask;
160     if (mask) {
161         s->writeback_mask = 0;
162         do {
163             unsigned regno = ctz32(mask);
164             tcg_gen_mov_i32(cpu_aregs[regno], s->writeback[regno]);
165             mask &= mask - 1;
166         } while (mask);
167     }
168 }
169 
170 /* is_jmp field values */
171 #define DISAS_JUMP      DISAS_TARGET_0 /* only pc was modified dynamically */
172 #define DISAS_EXIT      DISAS_TARGET_1 /* cpu state was modified dynamically */
173 
174 #if defined(CONFIG_USER_ONLY)
175 #define IS_USER(s) 1
176 #else
177 #define IS_USER(s)   (!(s->base.tb->flags & TB_FLAGS_MSR_S))
178 #define SFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_SFC_S) ? \
179                       MMU_KERNEL_IDX : MMU_USER_IDX)
180 #define DFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_DFC_S) ? \
181                       MMU_KERNEL_IDX : MMU_USER_IDX)
182 #endif
183 
184 typedef void (*disas_proc)(CPUM68KState *env, DisasContext *s, uint16_t insn);
185 
186 #ifdef DEBUG_DISPATCH
187 #define DISAS_INSN(name)                                                \
188     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
189                                   uint16_t insn);                       \
190     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
191                              uint16_t insn)                             \
192     {                                                                   \
193         qemu_log("Dispatch " #name "\n");                               \
194         real_disas_##name(env, s, insn);                                \
195     }                                                                   \
196     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
197                                   uint16_t insn)
198 #else
199 #define DISAS_INSN(name)                                                \
200     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
201                              uint16_t insn)
202 #endif
203 
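/*
 * Condition codes are evaluated lazily: between instructions the flag
 * state lives in QREG_CC_{X,N,Z,V,C} in an encoding selected by s->cc_op.
 * For the ADD/SUB forms N holds the (size-extended) result and V the
 * second operand; for the CMP forms N and V hold the two operands; for
 * CC_OP_LOGIC N holds the sign-extended result.  gen_flush_flags()
 * converts any encoding back to the canonical CC_OP_FLAGS form.  The
 * table below records which flags each encoding keeps live, so that
 * set_cc_op() can discard the others.
 */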
204 static const uint8_t cc_op_live[CC_OP_NB] = {
205     [CC_OP_DYNAMIC] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
206     [CC_OP_FLAGS] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
207     [CC_OP_ADDB ... CC_OP_ADDL] = CCF_X | CCF_N | CCF_V,
208     [CC_OP_SUBB ... CC_OP_SUBL] = CCF_X | CCF_N | CCF_V,
209     [CC_OP_CMPB ... CC_OP_CMPL] = CCF_X | CCF_N | CCF_V,
210     [CC_OP_LOGIC] = CCF_X | CCF_N
211 };
212 
213 static void set_cc_op(DisasContext *s, CCOp op)
214 {
215     CCOp old_op = s->cc_op;
216     int dead;
217 
218     if (old_op == op) {
219         return;
220     }
221     s->cc_op = op;
222     s->cc_op_synced = 0;
223 
224     /*
225      * Discard CC computation that will no longer be used.
226      * Note that X and N are never dead.
227      */
228     dead = cc_op_live[old_op] & ~cc_op_live[op];
229     if (dead & CCF_C) {
230         tcg_gen_discard_i32(QREG_CC_C);
231     }
232     if (dead & CCF_Z) {
233         tcg_gen_discard_i32(QREG_CC_Z);
234     }
235     if (dead & CCF_V) {
236         tcg_gen_discard_i32(QREG_CC_V);
237     }
238 }
239 
240 /* Update the CPU env CC_OP state.  */
241 static void update_cc_op(DisasContext *s)
242 {
243     if (!s->cc_op_synced) {
244         s->cc_op_synced = 1;
245         tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
246     }
247 }
248 
249 /* Generate a jump to an immediate address.  */
250 static void gen_jmp_im(DisasContext *s, uint32_t dest)
251 {
252     update_cc_op(s);
253     tcg_gen_movi_i32(QREG_PC, dest);
254     s->base.is_jmp = DISAS_JUMP;
255 }
256 
257 /* Generate a jump to the address in qreg DEST.  */
258 static void gen_jmp(DisasContext *s, TCGv dest)
259 {
260     update_cc_op(s);
261     tcg_gen_mov_i32(QREG_PC, dest);
262     s->base.is_jmp = DISAS_JUMP;
263 }
264 
265 static void gen_raise_exception(int nr)
266 {
267     gen_helper_raise_exception(cpu_env, tcg_constant_i32(nr));
268 }
269 
270 static void gen_raise_exception_format2(DisasContext *s, int nr,
271                                         target_ulong this_pc)
272 {
273     /*
274      * Pass the address of the insn to the exception handler,
275      * for recording in the Format $2 (6-word) stack frame.
276      * Re-use mmu.ar for the purpose, since that's only valid
277      * after tlb_fill.
278      */
279     tcg_gen_st_i32(tcg_constant_i32(this_pc), cpu_env,
280                    offsetof(CPUM68KState, mmu.ar));
281     gen_raise_exception(nr);
282     s->base.is_jmp = DISAS_NORETURN;
283 }
284 
285 static void gen_exception(DisasContext *s, uint32_t dest, int nr)
286 {
287     update_cc_op(s);
288     tcg_gen_movi_i32(QREG_PC, dest);
289 
290     gen_raise_exception(nr);
291 
292     s->base.is_jmp = DISAS_NORETURN;
293 }
294 
295 static inline void gen_addr_fault(DisasContext *s)
296 {
297     gen_exception(s, s->base.pc_next, EXCP_ADDRESS);
298 }
299 
300 /*
301  * Generate a load from the specified address.  Narrow values are
302  * sign- or zero-extended to full register width, according to SIGN.
303  */
304 static inline TCGv gen_load(DisasContext *s, int opsize, TCGv addr,
305                             int sign, int index)
306 {
307     TCGv tmp;
308     tmp = tcg_temp_new_i32();
309     switch (opsize) {
310     case OS_BYTE:
311         if (sign)
312             tcg_gen_qemu_ld8s(tmp, addr, index);
313         else
314             tcg_gen_qemu_ld8u(tmp, addr, index);
315         break;
316     case OS_WORD:
317         if (sign)
318             tcg_gen_qemu_ld16s(tmp, addr, index);
319         else
320             tcg_gen_qemu_ld16u(tmp, addr, index);
321         break;
322     case OS_LONG:
323         tcg_gen_qemu_ld32u(tmp, addr, index);
324         break;
325     default:
326         g_assert_not_reached();
327     }
328     return tmp;
329 }
330 
331 /* Generate a store.  */
332 static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val,
333                              int index)
334 {
335     switch (opsize) {
336     case OS_BYTE:
337         tcg_gen_qemu_st8(val, addr, index);
338         break;
339     case OS_WORD:
340         tcg_gen_qemu_st16(val, addr, index);
341         break;
342     case OS_LONG:
343         tcg_gen_qemu_st32(val, addr, index);
344         break;
345     default:
346         g_assert_not_reached();
347     }
348 }
349 
350 typedef enum {
351     EA_STORE,
352     EA_LOADU,
353     EA_LOADS
354 } ea_what;
355 
356 /*
357  * Generate a store if WHAT is EA_STORE; otherwise generate a load that
358  * is sign-extended for EA_LOADS and zero-extended for EA_LOADU.
359  */
360 static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
361                      ea_what what, int index)
362 {
363     if (what == EA_STORE) {
364         gen_store(s, opsize, addr, val, index);
365         return store_dummy;
366     } else {
367         return gen_load(s, opsize, addr, what == EA_LOADS, index);
368     }
369 }
370 
371 /* Read a 16-bit immediate constant */
372 static inline uint16_t read_im16(CPUM68KState *env, DisasContext *s)
373 {
374     uint16_t im;
375     im = translator_lduw(env, &s->base, s->pc);
376     s->pc += 2;
377     return im;
378 }
379 
380 /* Read an 8-bit immediate constant */
381 static inline uint8_t read_im8(CPUM68KState *env, DisasContext *s)
382 {
383     return read_im16(env, s);
384 }
385 
386 /* Read a 32-bit immediate constant.  */
387 static inline uint32_t read_im32(CPUM68KState *env, DisasContext *s)
388 {
389     uint32_t im;
390     im = read_im16(env, s) << 16;
391     im |= 0xffff & read_im16(env, s);
392     return im;
393 }
394 
395 /* Read a 64-bit immediate constant.  */
396 static inline uint64_t read_im64(CPUM68KState *env, DisasContext *s)
397 {
398     uint64_t im;
399     im = (uint64_t)read_im32(env, s) << 32;
400     im |= (uint64_t)read_im32(env, s);
401     return im;
402 }
403 
404 /* Calculate an address index.  */
405 static TCGv gen_addr_index(DisasContext *s, uint16_t ext, TCGv tmp)
406 {
407     TCGv add;
408     int scale;
409 
410     add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
411     if ((ext & 0x800) == 0) {
412         tcg_gen_ext16s_i32(tmp, add);
413         add = tmp;
414     }
415     scale = (ext >> 9) & 3;
416     if (scale != 0) {
417         tcg_gen_shli_i32(tmp, add, scale);
418         add = tmp;
419     }
420     return add;
421 }
422 
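/*
 * Index extension word layout (M68000 family), as decoded here and in
 * gen_addr_index(); the feature checks below reject the parts that a
 * given CPU does not implement:
 *   bit  15     index register type: 0 = Dn, 1 = An
 *   bits 14-12  index register number
 *   bit  11     index size: 0 = sign-extended word, 1 = long
 *   bits 10-9   index scale (shift count 0..3)
 *   bit  8      0 = brief format: bits 7-0 hold a signed 8-bit displacement
 *               1 = full format:
 *   bit  7      BS, suppress base register
 *   bit  6      IS, suppress index register
 *   bits 5-4    base displacement size: 01 null, 10 word, 11 long
 *   bits 2-0    I/IS, memory indirection and outer displacement size
 */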
423 /*
424  * Handle a base + index + displacement effective address.
425  * A NULL_QREG base means pc-relative.
426  */
427 static TCGv gen_lea_indexed(CPUM68KState *env, DisasContext *s, TCGv base)
428 {
429     uint32_t offset;
430     uint16_t ext;
431     TCGv add;
432     TCGv tmp;
433     uint32_t bd, od;
434 
435     offset = s->pc;
436     ext = read_im16(env, s);
437 
438     if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
439         return NULL_QREG;
440 
441     if (m68k_feature(s->env, M68K_FEATURE_M68K) &&
442         !m68k_feature(s->env, M68K_FEATURE_SCALED_INDEX)) {
443         ext &= ~(3 << 9);
444     }
445 
446     if (ext & 0x100) {
447         /* full extension word format */
448         if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
449             return NULL_QREG;
450 
451         if ((ext & 0x30) > 0x10) {
452             /* base displacement */
453             if ((ext & 0x30) == 0x20) {
454                 bd = (int16_t)read_im16(env, s);
455             } else {
456                 bd = read_im32(env, s);
457             }
458         } else {
459             bd = 0;
460         }
461         tmp = tcg_temp_new();
462         if ((ext & 0x44) == 0) {
463             /* pre-index */
464             add = gen_addr_index(s, ext, tmp);
465         } else {
466             add = NULL_QREG;
467         }
468         if ((ext & 0x80) == 0) {
469             /* base not suppressed */
470             if (IS_NULL_QREG(base)) {
471                 base = tcg_constant_i32(offset + bd);
472                 bd = 0;
473             }
474             if (!IS_NULL_QREG(add)) {
475                 tcg_gen_add_i32(tmp, add, base);
476                 add = tmp;
477             } else {
478                 add = base;
479             }
480         }
481         if (!IS_NULL_QREG(add)) {
482             if (bd != 0) {
483                 tcg_gen_addi_i32(tmp, add, bd);
484                 add = tmp;
485             }
486         } else {
487             add = tcg_constant_i32(bd);
488         }
489         if ((ext & 3) != 0) {
490             /* memory indirect */
491             base = gen_load(s, OS_LONG, add, 0, IS_USER(s));
492             if ((ext & 0x44) == 4) {
493                 add = gen_addr_index(s, ext, tmp);
494                 tcg_gen_add_i32(tmp, add, base);
495                 add = tmp;
496             } else {
497                 add = base;
498             }
499             if ((ext & 3) > 1) {
500                 /* outer displacement */
501                 if ((ext & 3) == 2) {
502                     od = (int16_t)read_im16(env, s);
503                 } else {
504                     od = read_im32(env, s);
505                 }
506             } else {
507                 od = 0;
508             }
509             if (od != 0) {
510                 tcg_gen_addi_i32(tmp, add, od);
511                 add = tmp;
512             }
513         }
514     } else {
515         /* brief extension word format */
516         tmp = tcg_temp_new();
517         add = gen_addr_index(s, ext, tmp);
518         if (!IS_NULL_QREG(base)) {
519             tcg_gen_add_i32(tmp, add, base);
520             if ((int8_t)ext)
521                 tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
522         } else {
523             tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
524         }
525         add = tmp;
526     }
527     return add;
528 }
529 
530 /* Sign or zero extend a value.  */
531 
532 static inline void gen_ext(TCGv res, TCGv val, int opsize, int sign)
533 {
534     switch (opsize) {
535     case OS_BYTE:
536         if (sign) {
537             tcg_gen_ext8s_i32(res, val);
538         } else {
539             tcg_gen_ext8u_i32(res, val);
540         }
541         break;
542     case OS_WORD:
543         if (sign) {
544             tcg_gen_ext16s_i32(res, val);
545         } else {
546             tcg_gen_ext16u_i32(res, val);
547         }
548         break;
549     case OS_LONG:
550         tcg_gen_mov_i32(res, val);
551         break;
552     default:
553         g_assert_not_reached();
554     }
555 }
556 
557 /* Evaluate all the CC flags.  */
558 
559 static void gen_flush_flags(DisasContext *s)
560 {
561     TCGv t0, t1;
562 
563     switch (s->cc_op) {
564     case CC_OP_FLAGS:
565         return;
566 
567     case CC_OP_ADDB:
568     case CC_OP_ADDW:
569     case CC_OP_ADDL:
570         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
571         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
572         /* Compute signed overflow for addition.  */
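        /*
         * N holds the result and V the second operand (see
         * gen_update_cc_add), so the first operand is recovered as
         * t0 = N - V and the overflow condition is (N ^ V) & ~(t0 ^ V):
         * set when the operands have the same sign and the result's
         * sign differs.
         */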
573         t0 = tcg_temp_new();
574         t1 = tcg_temp_new();
575         tcg_gen_sub_i32(t0, QREG_CC_N, QREG_CC_V);
576         gen_ext(t0, t0, s->cc_op - CC_OP_ADDB, 1);
577         tcg_gen_xor_i32(t1, QREG_CC_N, QREG_CC_V);
578         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
579         tcg_gen_andc_i32(QREG_CC_V, t1, QREG_CC_V);
580         break;
581 
582     case CC_OP_SUBB:
583     case CC_OP_SUBW:
584     case CC_OP_SUBL:
585         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
586         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
587         /* Compute signed overflow for subtraction.  */
588         t0 = tcg_temp_new();
589         t1 = tcg_temp_new();
590         tcg_gen_add_i32(t0, QREG_CC_N, QREG_CC_V);
591         gen_ext(t0, t0, s->cc_op - CC_OP_SUBB, 1);
592         tcg_gen_xor_i32(t1, QREG_CC_N, t0);
593         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
594         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t1);
595         break;
596 
597     case CC_OP_CMPB:
598     case CC_OP_CMPW:
599     case CC_OP_CMPL:
600         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_C, QREG_CC_N, QREG_CC_V);
601         tcg_gen_sub_i32(QREG_CC_Z, QREG_CC_N, QREG_CC_V);
602         gen_ext(QREG_CC_Z, QREG_CC_Z, s->cc_op - CC_OP_CMPB, 1);
603         /* Compute signed overflow for subtraction.  */
604         t0 = tcg_temp_new();
605         tcg_gen_xor_i32(t0, QREG_CC_Z, QREG_CC_N);
606         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, QREG_CC_N);
607         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t0);
608         tcg_gen_mov_i32(QREG_CC_N, QREG_CC_Z);
609         break;
610 
611     case CC_OP_LOGIC:
612         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
613         tcg_gen_movi_i32(QREG_CC_C, 0);
614         tcg_gen_movi_i32(QREG_CC_V, 0);
615         break;
616 
617     case CC_OP_DYNAMIC:
618         gen_helper_flush_flags(cpu_env, QREG_CC_OP);
619         s->cc_op_synced = 1;
620         break;
621 
622     default:
623         gen_helper_flush_flags(cpu_env, tcg_constant_i32(s->cc_op));
624         s->cc_op_synced = 1;
625         break;
626     }
627 
628     /* Note that flush_flags also assigns to env->cc_op.  */
629     s->cc_op = CC_OP_FLAGS;
630 }
631 
632 static inline TCGv gen_extend(DisasContext *s, TCGv val, int opsize, int sign)
633 {
634     TCGv tmp;
635 
636     if (opsize == OS_LONG) {
637         tmp = val;
638     } else {
639         tmp = tcg_temp_new();
640         gen_ext(tmp, val, opsize, sign);
641     }
642 
643     return tmp;
644 }
645 
646 static void gen_logic_cc(DisasContext *s, TCGv val, int opsize)
647 {
648     gen_ext(QREG_CC_N, val, opsize, 1);
649     set_cc_op(s, CC_OP_LOGIC);
650 }
651 
652 static void gen_update_cc_cmp(DisasContext *s, TCGv dest, TCGv src, int opsize)
653 {
654     tcg_gen_mov_i32(QREG_CC_N, dest);
655     tcg_gen_mov_i32(QREG_CC_V, src);
656     set_cc_op(s, CC_OP_CMPB + opsize);
657 }
658 
659 static void gen_update_cc_add(TCGv dest, TCGv src, int opsize)
660 {
661     gen_ext(QREG_CC_N, dest, opsize, 1);
662     tcg_gen_mov_i32(QREG_CC_V, src);
663 }
664 
665 static inline int opsize_bytes(int opsize)
666 {
667     switch (opsize) {
668     case OS_BYTE: return 1;
669     case OS_WORD: return 2;
670     case OS_LONG: return 4;
671     case OS_SINGLE: return 4;
672     case OS_DOUBLE: return 8;
673     case OS_EXTENDED: return 12;
674     case OS_PACKED: return 12;
675     default:
676         g_assert_not_reached();
677     }
678 }
679 
680 static inline int insn_opsize(int insn)
681 {
682     switch ((insn >> 6) & 3) {
683     case 0: return OS_BYTE;
684     case 1: return OS_WORD;
685     case 2: return OS_LONG;
686     default:
687         g_assert_not_reached();
688     }
689 }
690 
691 static inline int ext_opsize(int ext, int pos)
692 {
693     switch ((ext >> pos) & 7) {
694     case 0: return OS_LONG;
695     case 1: return OS_SINGLE;
696     case 2: return OS_EXTENDED;
697     case 3: return OS_PACKED;
698     case 4: return OS_WORD;
699     case 5: return OS_DOUBLE;
700     case 6: return OS_BYTE;
701     default:
702         g_assert_not_reached();
703     }
704 }
705 
706 /*
707  * Assign a value to a register.  If the width is less than the register
708  * width, only the low part of the register is set.
709  */
710 static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
711 {
712     TCGv tmp;
713     switch (opsize) {
714     case OS_BYTE:
715         tcg_gen_andi_i32(reg, reg, 0xffffff00);
716         tmp = tcg_temp_new();
717         tcg_gen_ext8u_i32(tmp, val);
718         tcg_gen_or_i32(reg, reg, tmp);
719         break;
720     case OS_WORD:
721         tcg_gen_andi_i32(reg, reg, 0xffff0000);
722         tmp = tcg_temp_new();
723         tcg_gen_ext16u_i32(tmp, val);
724         tcg_gen_or_i32(reg, reg, tmp);
725         break;
726     case OS_LONG:
727     case OS_SINGLE:
728         tcg_gen_mov_i32(reg, val);
729         break;
730     default:
731         g_assert_not_reached();
732     }
733 }
734 
735 /*
736  * Generate code for an "effective address".  Does not adjust the base
737  * register for autoincrement addressing modes.
738  */
739 static TCGv gen_lea_mode(CPUM68KState *env, DisasContext *s,
740                          int mode, int reg0, int opsize)
741 {
742     TCGv reg;
743     TCGv tmp;
744     uint16_t ext;
745     uint32_t offset;
746 
747     switch (mode) {
748     case 0: /* Data register direct.  */
749     case 1: /* Address register direct.  */
750         return NULL_QREG;
751     case 3: /* Indirect postincrement.  */
752         if (opsize == OS_UNSIZED) {
753             return NULL_QREG;
754         }
755         /* fallthru */
756     case 2: /* Indirect register */
757         return get_areg(s, reg0);
758     case 4: /* Indirect predecrement.  */
759         if (opsize == OS_UNSIZED) {
760             return NULL_QREG;
761         }
762         reg = get_areg(s, reg0);
763         tmp = tcg_temp_new();
764         if (reg0 == 7 && opsize == OS_BYTE &&
765             m68k_feature(s->env, M68K_FEATURE_M68K)) {
766             tcg_gen_subi_i32(tmp, reg, 2);
767         } else {
768             tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
769         }
770         return tmp;
771     case 5: /* Indirect displacement.  */
772         reg = get_areg(s, reg0);
773         tmp = tcg_temp_new();
774         ext = read_im16(env, s);
775         tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
776         return tmp;
777     case 6: /* Indirect index + displacement.  */
778         reg = get_areg(s, reg0);
779         return gen_lea_indexed(env, s, reg);
780     case 7: /* Other */
781         switch (reg0) {
782         case 0: /* Absolute short.  */
783             offset = (int16_t)read_im16(env, s);
784             return tcg_constant_i32(offset);
785         case 1: /* Absolute long.  */
786             offset = read_im32(env, s);
787             return tcg_constant_i32(offset);
788         case 2: /* pc displacement  */
789             offset = s->pc;
790             offset += (int16_t)read_im16(env, s);
791             return tcg_constant_i32(offset);
792         case 3: /* pc index+displacement.  */
793             return gen_lea_indexed(env, s, NULL_QREG);
794         case 4: /* Immediate.  */
795         default:
796             return NULL_QREG;
797         }
798     }
799     /* Should never happen.  */
800     return NULL_QREG;
801 }
802 
803 static TCGv gen_lea(CPUM68KState *env, DisasContext *s, uint16_t insn,
804                     int opsize)
805 {
806     int mode = extract32(insn, 3, 3);
807     int reg0 = REG(insn, 0);
808     return gen_lea_mode(env, s, mode, reg0, opsize);
809 }
810 
811 /*
812  * Generate code to load/store a value from/into an EA.  WHAT selects a
813  * store (EA_STORE) or a sign/zero-extending load (EA_LOADS/EA_LOADU).
814  * ADDRP is non-null for readwrite operands.
815  */
816 static TCGv gen_ea_mode(CPUM68KState *env, DisasContext *s, int mode, int reg0,
817                         int opsize, TCGv val, TCGv *addrp, ea_what what,
818                         int index)
819 {
820     TCGv reg, tmp, result;
821     int32_t offset;
822 
823     switch (mode) {
824     case 0: /* Data register direct.  */
825         reg = cpu_dregs[reg0];
826         if (what == EA_STORE) {
827             gen_partset_reg(opsize, reg, val);
828             return store_dummy;
829         } else {
830             return gen_extend(s, reg, opsize, what == EA_LOADS);
831         }
832     case 1: /* Address register direct.  */
833         reg = get_areg(s, reg0);
834         if (what == EA_STORE) {
835             tcg_gen_mov_i32(reg, val);
836             return store_dummy;
837         } else {
838             return gen_extend(s, reg, opsize, what == EA_LOADS);
839         }
840     case 2: /* Indirect register */
841         reg = get_areg(s, reg0);
842         return gen_ldst(s, opsize, reg, val, what, index);
843     case 3: /* Indirect postincrement.  */
844         reg = get_areg(s, reg0);
845         result = gen_ldst(s, opsize, reg, val, what, index);
846         if (what == EA_STORE || !addrp) {
847             TCGv tmp = tcg_temp_new();
848             if (reg0 == 7 && opsize == OS_BYTE &&
849                 m68k_feature(s->env, M68K_FEATURE_M68K)) {
850                 tcg_gen_addi_i32(tmp, reg, 2);
851             } else {
852                 tcg_gen_addi_i32(tmp, reg, opsize_bytes(opsize));
853             }
854             delay_set_areg(s, reg0, tmp, true);
855         }
856         return result;
857     case 4: /* Indirect predecrement.  */
858         if (addrp && what == EA_STORE) {
859             tmp = *addrp;
860         } else {
861             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
862             if (IS_NULL_QREG(tmp)) {
863                 return tmp;
864             }
865             if (addrp) {
866                 *addrp = tmp;
867             }
868         }
869         result = gen_ldst(s, opsize, tmp, val, what, index);
870         if (what == EA_STORE || !addrp) {
871             delay_set_areg(s, reg0, tmp, false);
872         }
873         return result;
874     case 5: /* Indirect displacement.  */
875     case 6: /* Indirect index + displacement.  */
876     do_indirect:
877         if (addrp && what == EA_STORE) {
878             tmp = *addrp;
879         } else {
880             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
881             if (IS_NULL_QREG(tmp)) {
882                 return tmp;
883             }
884             if (addrp) {
885                 *addrp = tmp;
886             }
887         }
888         return gen_ldst(s, opsize, tmp, val, what, index);
889     case 7: /* Other */
890         switch (reg0) {
891         case 0: /* Absolute short.  */
892         case 1: /* Absolute long.  */
893         case 2: /* pc displacement  */
894         case 3: /* pc index+displacement.  */
895             goto do_indirect;
896         case 4: /* Immediate.  */
897             /* Sign extend values for consistency.  */
898             switch (opsize) {
899             case OS_BYTE:
900                 if (what == EA_LOADS) {
901                     offset = (int8_t)read_im8(env, s);
902                 } else {
903                     offset = read_im8(env, s);
904                 }
905                 break;
906             case OS_WORD:
907                 if (what == EA_LOADS) {
908                     offset = (int16_t)read_im16(env, s);
909                 } else {
910                     offset = read_im16(env, s);
911                 }
912                 break;
913             case OS_LONG:
914                 offset = read_im32(env, s);
915                 break;
916             default:
917                 g_assert_not_reached();
918             }
919             return tcg_constant_i32(offset);
920         default:
921             return NULL_QREG;
922         }
923     }
924     /* Should never happen.  */
925     return NULL_QREG;
926 }
927 
928 static TCGv gen_ea(CPUM68KState *env, DisasContext *s, uint16_t insn,
929                    int opsize, TCGv val, TCGv *addrp, ea_what what, int index)
930 {
931     int mode = extract32(insn, 3, 3);
932     int reg0 = REG(insn, 0);
933     return gen_ea_mode(env, s, mode, reg0, opsize, val, addrp, what, index);
934 }
935 
936 static TCGv_ptr gen_fp_ptr(int freg)
937 {
938     TCGv_ptr fp = tcg_temp_new_ptr();
939     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fregs[freg]));
940     return fp;
941 }
942 
943 static TCGv_ptr gen_fp_result_ptr(void)
944 {
945     TCGv_ptr fp = tcg_temp_new_ptr();
946     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fp_result));
947     return fp;
948 }
949 
950 static void gen_fp_move(TCGv_ptr dest, TCGv_ptr src)
951 {
952     TCGv t32;
953     TCGv_i64 t64;
954 
955     t32 = tcg_temp_new();
956     tcg_gen_ld16u_i32(t32, src, offsetof(FPReg, l.upper));
957     tcg_gen_st16_i32(t32, dest, offsetof(FPReg, l.upper));
958 
959     t64 = tcg_temp_new_i64();
960     tcg_gen_ld_i64(t64, src, offsetof(FPReg, l.lower));
961     tcg_gen_st_i64(t64, dest, offsetof(FPReg, l.lower));
962 }
963 
964 static void gen_load_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
965                         int index)
966 {
967     TCGv tmp;
968     TCGv_i64 t64;
969 
970     t64 = tcg_temp_new_i64();
971     tmp = tcg_temp_new();
972     switch (opsize) {
973     case OS_BYTE:
974         tcg_gen_qemu_ld8s(tmp, addr, index);
975         gen_helper_exts32(cpu_env, fp, tmp);
976         break;
977     case OS_WORD:
978         tcg_gen_qemu_ld16s(tmp, addr, index);
979         gen_helper_exts32(cpu_env, fp, tmp);
980         break;
981     case OS_LONG:
982         tcg_gen_qemu_ld32u(tmp, addr, index);
983         gen_helper_exts32(cpu_env, fp, tmp);
984         break;
985     case OS_SINGLE:
986         tcg_gen_qemu_ld32u(tmp, addr, index);
987         gen_helper_extf32(cpu_env, fp, tmp);
988         break;
989     case OS_DOUBLE:
990         tcg_gen_qemu_ld64(t64, addr, index);
991         gen_helper_extf64(cpu_env, fp, t64);
992         break;
993     case OS_EXTENDED:
994         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
995             gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
996             break;
997         }
998         tcg_gen_qemu_ld32u(tmp, addr, index);
999         tcg_gen_shri_i32(tmp, tmp, 16);
1000         tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1001         tcg_gen_addi_i32(tmp, addr, 4);
1002         tcg_gen_qemu_ld64(t64, tmp, index);
1003         tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1004         break;
1005     case OS_PACKED:
1006         /*
1007          * unimplemented data type on 68040/ColdFire
1008          * FIXME if needed for another FPU
1009          */
1010         gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1011         break;
1012     default:
1013         g_assert_not_reached();
1014     }
1015 }
1016 
1017 static void gen_store_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
1018                          int index)
1019 {
1020     TCGv tmp;
1021     TCGv_i64 t64;
1022 
1023     t64 = tcg_temp_new_i64();
1024     tmp = tcg_temp_new();
1025     switch (opsize) {
1026     case OS_BYTE:
1027         gen_helper_reds32(tmp, cpu_env, fp);
1028         tcg_gen_qemu_st8(tmp, addr, index);
1029         break;
1030     case OS_WORD:
1031         gen_helper_reds32(tmp, cpu_env, fp);
1032         tcg_gen_qemu_st16(tmp, addr, index);
1033         break;
1034     case OS_LONG:
1035         gen_helper_reds32(tmp, cpu_env, fp);
1036         tcg_gen_qemu_st32(tmp, addr, index);
1037         break;
1038     case OS_SINGLE:
1039         gen_helper_redf32(tmp, cpu_env, fp);
1040         tcg_gen_qemu_st32(tmp, addr, index);
1041         break;
1042     case OS_DOUBLE:
1043         gen_helper_redf64(t64, cpu_env, fp);
1044         tcg_gen_qemu_st64(t64, addr, index);
1045         break;
1046     case OS_EXTENDED:
1047         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1048             gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1049             break;
1050         }
1051         tcg_gen_ld16u_i32(tmp, fp, offsetof(FPReg, l.upper));
1052         tcg_gen_shli_i32(tmp, tmp, 16);
1053         tcg_gen_qemu_st32(tmp, addr, index);
1054         tcg_gen_addi_i32(tmp, addr, 4);
1055         tcg_gen_ld_i64(t64, fp, offsetof(FPReg, l.lower));
1056         tcg_gen_qemu_st64(t64, tmp, index);
1057         break;
1058     case OS_PACKED:
1059         /*
1060          * unimplemented data type on 68040/ColdFire
1061          * FIXME if needed for another FPU
1062          */
1063         gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1064         break;
1065     default:
1066         g_assert_not_reached();
1067     }
1068 }
1069 
1070 static void gen_ldst_fp(DisasContext *s, int opsize, TCGv addr,
1071                         TCGv_ptr fp, ea_what what, int index)
1072 {
1073     if (what == EA_STORE) {
1074         gen_store_fp(s, opsize, addr, fp, index);
1075     } else {
1076         gen_load_fp(s, opsize, addr, fp, index);
1077     }
1078 }
1079 
1080 static int gen_ea_mode_fp(CPUM68KState *env, DisasContext *s, int mode,
1081                           int reg0, int opsize, TCGv_ptr fp, ea_what what,
1082                           int index)
1083 {
1084     TCGv reg, addr, tmp;
1085     TCGv_i64 t64;
1086 
1087     switch (mode) {
1088     case 0: /* Data register direct.  */
1089         reg = cpu_dregs[reg0];
1090         if (what == EA_STORE) {
1091             switch (opsize) {
1092             case OS_BYTE:
1093             case OS_WORD:
1094             case OS_LONG:
1095                 gen_helper_reds32(reg, cpu_env, fp);
1096                 break;
1097             case OS_SINGLE:
1098                 gen_helper_redf32(reg, cpu_env, fp);
1099                 break;
1100             default:
1101                 g_assert_not_reached();
1102             }
1103         } else {
1104             tmp = tcg_temp_new();
1105             switch (opsize) {
1106             case OS_BYTE:
1107                 tcg_gen_ext8s_i32(tmp, reg);
1108                 gen_helper_exts32(cpu_env, fp, tmp);
1109                 break;
1110             case OS_WORD:
1111                 tcg_gen_ext16s_i32(tmp, reg);
1112                 gen_helper_exts32(cpu_env, fp, tmp);
1113                 break;
1114             case OS_LONG:
1115                 gen_helper_exts32(cpu_env, fp, reg);
1116                 break;
1117             case OS_SINGLE:
1118                 gen_helper_extf32(cpu_env, fp, reg);
1119                 break;
1120             default:
1121                 g_assert_not_reached();
1122             }
1123         }
1124         return 0;
1125     case 1: /* Address register direct.  */
1126         return -1;
1127     case 2: /* Indirect register */
1128         addr = get_areg(s, reg0);
1129         gen_ldst_fp(s, opsize, addr, fp, what, index);
1130         return 0;
1131     case 3: /* Indirect postincrement.  */
1132         addr = cpu_aregs[reg0];
1133         gen_ldst_fp(s, opsize, addr, fp, what, index);
1134         tcg_gen_addi_i32(addr, addr, opsize_bytes(opsize));
1135         return 0;
1136     case 4: /* Indirect predecrement.  */
1137         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1138         if (IS_NULL_QREG(addr)) {
1139             return -1;
1140         }
1141         gen_ldst_fp(s, opsize, addr, fp, what, index);
1142         tcg_gen_mov_i32(cpu_aregs[reg0], addr);
1143         return 0;
1144     case 5: /* Indirect displacement.  */
1145     case 6: /* Indirect index + displacement.  */
1146     do_indirect:
1147         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1148         if (IS_NULL_QREG(addr)) {
1149             return -1;
1150         }
1151         gen_ldst_fp(s, opsize, addr, fp, what, index);
1152         return 0;
1153     case 7: /* Other */
1154         switch (reg0) {
1155         case 0: /* Absolute short.  */
1156         case 1: /* Absolute long.  */
1157         case 2: /* pc displacement  */
1158         case 3: /* pc index+displacement.  */
1159             goto do_indirect;
1160         case 4: /* Immediate.  */
1161             if (what == EA_STORE) {
1162                 return -1;
1163             }
1164             switch (opsize) {
1165             case OS_BYTE:
1166                 tmp = tcg_constant_i32((int8_t)read_im8(env, s));
1167                 gen_helper_exts32(cpu_env, fp, tmp);
1168                 break;
1169             case OS_WORD:
1170                 tmp = tcg_constant_i32((int16_t)read_im16(env, s));
1171                 gen_helper_exts32(cpu_env, fp, tmp);
1172                 break;
1173             case OS_LONG:
1174                 tmp = tcg_constant_i32(read_im32(env, s));
1175                 gen_helper_exts32(cpu_env, fp, tmp);
1176                 break;
1177             case OS_SINGLE:
1178                 tmp = tcg_constant_i32(read_im32(env, s));
1179                 gen_helper_extf32(cpu_env, fp, tmp);
1180                 break;
1181             case OS_DOUBLE:
1182                 t64 = tcg_constant_i64(read_im64(env, s));
1183                 gen_helper_extf64(cpu_env, fp, t64);
1184                 break;
1185             case OS_EXTENDED:
1186                 if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1187                     gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1188                     break;
1189                 }
1190                 tmp = tcg_constant_i32(read_im32(env, s) >> 16);
1191                 tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1192                 t64 = tcg_constant_i64(read_im64(env, s));
1193                 tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1194                 break;
1195             case OS_PACKED:
1196                 /*
1197                  * unimplemented data type on 68040/ColdFire
1198                  * FIXME if needed for another FPU
1199                  */
1200                 gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1201                 break;
1202             default:
1203                 g_assert_not_reached();
1204             }
1205             return 0;
1206         default:
1207             return -1;
1208         }
1209     }
1210     return -1;
1211 }
1212 
1213 static int gen_ea_fp(CPUM68KState *env, DisasContext *s, uint16_t insn,
1214                        int opsize, TCGv_ptr fp, ea_what what, int index)
1215 {
1216     int mode = extract32(insn, 3, 3);
1217     int reg0 = REG(insn, 0);
1218     return gen_ea_mode_fp(env, s, mode, reg0, opsize, fp, what, index);
1219 }
1220 
1221 typedef struct {
1222     TCGCond tcond;
1223     TCGv v1;
1224     TCGv v2;
1225 } DisasCompare;
1226 
1227 static void gen_cc_cond(DisasCompare *c, DisasContext *s, int cond)
1228 {
1229     TCGv tmp, tmp2;
1230     TCGCond tcond;
1231     CCOp op = s->cc_op;
1232 
1233     /* The CC_OP_CMP form can handle most normal comparisons directly.  */
1234     if (op == CC_OP_CMPB || op == CC_OP_CMPW || op == CC_OP_CMPL) {
1235         c->v1 = QREG_CC_N;
1236         c->v2 = QREG_CC_V;
1237         switch (cond) {
1238         case 2: /* HI */
1239         case 3: /* LS */
1240             tcond = TCG_COND_LEU;
1241             goto done;
1242         case 4: /* CC */
1243         case 5: /* CS */
1244             tcond = TCG_COND_LTU;
1245             goto done;
1246         case 6: /* NE */
1247         case 7: /* EQ */
1248             tcond = TCG_COND_EQ;
1249             goto done;
1250         case 10: /* PL */
1251         case 11: /* MI */
1252             c->v2 = tcg_constant_i32(0);
1253             c->v1 = tmp = tcg_temp_new();
1254             tcg_gen_sub_i32(tmp, QREG_CC_N, QREG_CC_V);
1255             gen_ext(tmp, tmp, op - CC_OP_CMPB, 1);
1256             /* fallthru */
1257         case 12: /* GE */
1258         case 13: /* LT */
1259             tcond = TCG_COND_LT;
1260             goto done;
1261         case 14: /* GT */
1262         case 15: /* LE */
1263             tcond = TCG_COND_LE;
1264             goto done;
1265         }
1266     }
1267 
1268     c->v2 = tcg_constant_i32(0);
1269 
1270     switch (cond) {
1271     case 0: /* T */
1272     case 1: /* F */
1273         c->v1 = c->v2;
1274         tcond = TCG_COND_NEVER;
1275         goto done;
1276     case 14: /* GT (!(Z || (N ^ V))) */
1277     case 15: /* LE (Z || (N ^ V)) */
1278         /*
1279          * Logic operations clear V, which simplifies LE to (Z || N),
1280          * and since Z and N are co-located, this becomes a normal
1281          * comparison vs N.
1282          */
1283         if (op == CC_OP_LOGIC) {
1284             c->v1 = QREG_CC_N;
1285             tcond = TCG_COND_LE;
1286             goto done;
1287         }
1288         break;
1289     case 12: /* GE (!(N ^ V)) */
1290     case 13: /* LT (N ^ V) */
1291         /* Logic operations clear V, which simplifies this to N.  */
1292         if (op != CC_OP_LOGIC) {
1293             break;
1294         }
1295         /* fallthru */
1296     case 10: /* PL (!N) */
1297     case 11: /* MI (N) */
1298         /* Several cases represent N normally.  */
1299         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1300             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1301             op == CC_OP_LOGIC) {
1302             c->v1 = QREG_CC_N;
1303             tcond = TCG_COND_LT;
1304             goto done;
1305         }
1306         break;
1307     case 6: /* NE (!Z) */
1308     case 7: /* EQ (Z) */
1309         /* Some cases fold Z into N.  */
1310         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1311             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1312             op == CC_OP_LOGIC) {
1313             tcond = TCG_COND_EQ;
1314             c->v1 = QREG_CC_N;
1315             goto done;
1316         }
1317         break;
1318     case 4: /* CC (!C) */
1319     case 5: /* CS (C) */
1320         /* Some cases fold C into X.  */
1321         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1322             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL) {
1323             tcond = TCG_COND_NE;
1324             c->v1 = QREG_CC_X;
1325             goto done;
1326         }
1327         /* fallthru */
1328     case 8: /* VC (!V) */
1329     case 9: /* VS (V) */
1330         /* Logic operations clear V and C.  */
1331         if (op == CC_OP_LOGIC) {
1332             tcond = TCG_COND_NEVER;
1333             c->v1 = c->v2;
1334             goto done;
1335         }
1336         break;
1337     }
1338 
1339     /* Otherwise, flush flag state to CC_OP_FLAGS.  */
1340     gen_flush_flags(s);
1341 
1342     switch (cond) {
1343     case 0: /* T */
1344     case 1: /* F */
1345     default:
1346         /* Invalid, or handled above.  */
1347         abort();
1348     case 2: /* HI (!C && !Z) -> !(C || Z)*/
1349     case 3: /* LS (C || Z) */
1350         c->v1 = tmp = tcg_temp_new();
1351         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1352         tcg_gen_or_i32(tmp, tmp, QREG_CC_C);
1353         tcond = TCG_COND_NE;
1354         break;
1355     case 4: /* CC (!C) */
1356     case 5: /* CS (C) */
1357         c->v1 = QREG_CC_C;
1358         tcond = TCG_COND_NE;
1359         break;
1360     case 6: /* NE (!Z) */
1361     case 7: /* EQ (Z) */
1362         c->v1 = QREG_CC_Z;
1363         tcond = TCG_COND_EQ;
1364         break;
1365     case 8: /* VC (!V) */
1366     case 9: /* VS (V) */
1367         c->v1 = QREG_CC_V;
1368         tcond = TCG_COND_LT;
1369         break;
1370     case 10: /* PL (!N) */
1371     case 11: /* MI (N) */
1372         c->v1 = QREG_CC_N;
1373         tcond = TCG_COND_LT;
1374         break;
1375     case 12: /* GE (!(N ^ V)) */
1376     case 13: /* LT (N ^ V) */
1377         c->v1 = tmp = tcg_temp_new();
1378         tcg_gen_xor_i32(tmp, QREG_CC_N, QREG_CC_V);
1379         tcond = TCG_COND_LT;
1380         break;
1381     case 14: /* GT (!(Z || (N ^ V))) */
1382     case 15: /* LE (Z || (N ^ V)) */
1383         c->v1 = tmp = tcg_temp_new();
1384         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1385         tcg_gen_neg_i32(tmp, tmp);
1386         tmp2 = tcg_temp_new();
1387         tcg_gen_xor_i32(tmp2, QREG_CC_N, QREG_CC_V);
1388         tcg_gen_or_i32(tmp, tmp, tmp2);
1389         tcond = TCG_COND_LT;
1390         break;
1391     }
1392 
1393  done:
1394     if ((cond & 1) == 0) {
1395         tcond = tcg_invert_cond(tcond);
1396     }
1397     c->tcond = tcond;
1398 }
1399 
1400 static void gen_jmpcc(DisasContext *s, int cond, TCGLabel *l1)
1401 {
1402     DisasCompare c;
1403 
1404     gen_cc_cond(&c, s, cond);
1405     update_cc_op(s);
1406     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
1407 }
1408 
1409 /* Force a TB lookup after an instruction that changes the CPU state.  */
1410 static void gen_exit_tb(DisasContext *s)
1411 {
1412     update_cc_op(s);
1413     tcg_gen_movi_i32(QREG_PC, s->pc);
1414     s->base.is_jmp = DISAS_EXIT;
1415 }
1416 
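/*
 * Both macros below rely on the surrounding DISAS_INSN body: 's' (and,
 * for SRC_EA, 'insn') must be in scope, and on an invalid effective
 * address they emit an address error and return from the caller.
 */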
1417 #define SRC_EA(env, result, opsize, op_sign, addrp) do {                \
1418         result = gen_ea(env, s, insn, opsize, NULL_QREG, addrp,         \
1419                         op_sign ? EA_LOADS : EA_LOADU, IS_USER(s));     \
1420         if (IS_NULL_QREG(result)) {                                     \
1421             gen_addr_fault(s);                                          \
1422             return;                                                     \
1423         }                                                               \
1424     } while (0)
1425 
1426 #define DEST_EA(env, insn, opsize, val, addrp) do {                     \
1427         TCGv ea_result = gen_ea(env, s, insn, opsize, val, addrp,       \
1428                                 EA_STORE, IS_USER(s));                  \
1429         if (IS_NULL_QREG(ea_result)) {                                  \
1430             gen_addr_fault(s);                                          \
1431             return;                                                     \
1432         }                                                               \
1433     } while (0)
1434 
1435 /* Generate a direct jump to an immediate address, chaining TBs when possible.  */
1436 static void gen_jmp_tb(DisasContext *s, int n, target_ulong dest,
1437                        target_ulong src)
1438 {
1439     if (unlikely(s->ss_active)) {
1440         update_cc_op(s);
1441         tcg_gen_movi_i32(QREG_PC, dest);
1442         gen_raise_exception_format2(s, EXCP_TRACE, src);
1443     } else if (translator_use_goto_tb(&s->base, dest)) {
1444         tcg_gen_goto_tb(n);
1445         tcg_gen_movi_i32(QREG_PC, dest);
1446         tcg_gen_exit_tb(s->base.tb, n);
1447     } else {
1448         gen_jmp_im(s, dest);
1449         tcg_gen_exit_tb(NULL, 0);
1450     }
1451     s->base.is_jmp = DISAS_NORETURN;
1452 }
1453 
1454 DISAS_INSN(scc)
1455 {
1456     DisasCompare c;
1457     int cond;
1458     TCGv tmp;
1459 
1460     cond = (insn >> 8) & 0xf;
1461     gen_cc_cond(&c, s, cond);
1462 
1463     tmp = tcg_temp_new();
1464     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
1465 
1466     tcg_gen_neg_i32(tmp, tmp);
1467     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
1468 }
1469 
1470 DISAS_INSN(dbcc)
1471 {
1472     TCGLabel *l1;
1473     TCGv reg;
1474     TCGv tmp;
1475     int16_t offset;
1476     uint32_t base;
1477 
1478     reg = DREG(insn, 0);
1479     base = s->pc;
1480     offset = (int16_t)read_im16(env, s);
1481     l1 = gen_new_label();
1482     gen_jmpcc(s, (insn >> 8) & 0xf, l1);
1483 
1484     tmp = tcg_temp_new();
1485     tcg_gen_ext16s_i32(tmp, reg);
1486     tcg_gen_addi_i32(tmp, tmp, -1);
1487     gen_partset_reg(OS_WORD, reg, tmp);
1488     tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, -1, l1);
1489     gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
1490     gen_set_label(l1);
1491     gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
1492 }
1493 
1494 DISAS_INSN(undef_mac)
1495 {
1496     gen_exception(s, s->base.pc_next, EXCP_LINEA);
1497 }
1498 
1499 DISAS_INSN(undef_fpu)
1500 {
1501     gen_exception(s, s->base.pc_next, EXCP_LINEF);
1502 }
1503 
1504 DISAS_INSN(undef)
1505 {
1506     /*
1507      * ??? This covers both instructions that are as yet unimplemented
1508      * for the 680x0 series and those that are implemented but are
1509      * actually illegal for CPU32 or pre-68020.
1510      */
1511     qemu_log_mask(LOG_UNIMP, "Illegal instruction: %04x @ %08x\n",
1512                   insn, s->base.pc_next);
1513     gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
1514 }
1515 
1516 DISAS_INSN(mulw)
1517 {
1518     TCGv reg;
1519     TCGv tmp;
1520     TCGv src;
1521     int sign;
1522 
1523     sign = (insn & 0x100) != 0;
1524     reg = DREG(insn, 9);
1525     tmp = tcg_temp_new();
1526     if (sign)
1527         tcg_gen_ext16s_i32(tmp, reg);
1528     else
1529         tcg_gen_ext16u_i32(tmp, reg);
1530     SRC_EA(env, src, OS_WORD, sign, NULL);
1531     tcg_gen_mul_i32(tmp, tmp, src);
1532     tcg_gen_mov_i32(reg, tmp);
1533     gen_logic_cc(s, tmp, OS_LONG);
1534 }
1535 
1536 DISAS_INSN(divw)
1537 {
1538     int sign;
1539     TCGv src;
1540     TCGv destr;
1541     TCGv ilen;
1542 
1543     /* divX.w <EA>,Dn    32/16 -> 16r:16q */
1544 
1545     sign = (insn & 0x100) != 0;
1546 
1547     /* dest.l / src.w */
1548 
1549     SRC_EA(env, src, OS_WORD, sign, NULL);
1550     destr = tcg_constant_i32(REG(insn, 9));
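    /*
     * Length of this insn, passed to the helper for use on the
     * divide-by-zero exception path.
     */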
1551     ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1552     if (sign) {
1553         gen_helper_divsw(cpu_env, destr, src, ilen);
1554     } else {
1555         gen_helper_divuw(cpu_env, destr, src, ilen);
1556     }
1557 
1558     set_cc_op(s, CC_OP_FLAGS);
1559 }
1560 
1561 DISAS_INSN(divl)
1562 {
1563     TCGv num, reg, den, ilen;
1564     int sign;
1565     uint16_t ext;
1566 
1567     ext = read_im16(env, s);
1568 
1569     sign = (ext & 0x0800) != 0;
1570 
1571     if (ext & 0x400) {
1572         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
1573             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
1574             return;
1575         }
1576 
1577         /* divX.l <EA>, Dr:Dq    64/32 -> 32r:32q */
1578 
1579         SRC_EA(env, den, OS_LONG, 0, NULL);
1580         num = tcg_constant_i32(REG(ext, 12));
1581         reg = tcg_constant_i32(REG(ext, 0));
1582         ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1583         if (sign) {
1584             gen_helper_divsll(cpu_env, num, reg, den, ilen);
1585         } else {
1586             gen_helper_divull(cpu_env, num, reg, den, ilen);
1587         }
1588         set_cc_op(s, CC_OP_FLAGS);
1589         return;
1590     }
1591 
1592     /* divX.l <EA>, Dq        32/32 -> 32q     */
1593     /* divXl.l <EA>, Dr:Dq    32/32 -> 32r:32q */
1594 
1595     SRC_EA(env, den, OS_LONG, 0, NULL);
1596     num = tcg_constant_i32(REG(ext, 12));
1597     reg = tcg_constant_i32(REG(ext, 0));
1598     ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1599     if (sign) {
1600         gen_helper_divsl(cpu_env, num, reg, den, ilen);
1601     } else {
1602         gen_helper_divul(cpu_env, num, reg, den, ilen);
1603     }
1604 
1605     set_cc_op(s, CC_OP_FLAGS);
1606 }
1607 
1608 static void bcd_add(TCGv dest, TCGv src)
1609 {
1610     TCGv t0, t1;
1611 
1612     /*
1613      * dest10 = dest10 + src10 + X
1614      *
1615      *        t1 = src
1616      *        t2 = t1 + 0x066
1617      *        t3 = t2 + dest + X
1618      *        t4 = t2 ^ dest
1619      *        t5 = t3 ^ t4
1620      *        t6 = ~t5 & 0x110
1621      *        t7 = (t6 >> 2) | (t6 >> 3)
1622      *        return t3 - t7
1623      */
1624 
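    /*
     * Worked example (values chosen for illustration): dest = 0x19,
     * src = 0x08, X = 0.  src + 0x066 = 0x6e and 0x6e + 0x19 = 0x87;
     * the low digit produced a carry and the high digit did not, so
     * only 0x60 is subtracted: 0x87 - 0x60 = 0x27, i.e. BCD 19 + 08 = 27.
     */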
1625     /*
1626      * t1 = (src + 0x066) + dest + X
1627      *    = result with some possible exceeding 0x6
1628      */
1629 
1630     t0 = tcg_temp_new();
1631     tcg_gen_addi_i32(t0, src, 0x066);
1632 
1633     t1 = tcg_temp_new();
1634     tcg_gen_add_i32(t1, t0, dest);
1635     tcg_gen_add_i32(t1, t1, QREG_CC_X);
1636 
1637     /* we will remove exceeding 0x6 where there is no carry */
1638 
1639     /*
1640      * t0 = (src + 0x0066) ^ dest
1641      *    = t1 without carries
1642      */
1643 
1644     tcg_gen_xor_i32(t0, t0, dest);
1645 
1646     /*
1647      * extract the carries
1648      * t0 = t0 ^ t1
1649      *    = only the carries
1650      */
1651 
1652     tcg_gen_xor_i32(t0, t0, t1);
1653 
1654     /*
1655      * generate a 1 (mask 0x22) for each digit that had no carry
1656      * and turn it into a 0x6 correction for that digit
1657      */
1658 
1659     tcg_gen_shri_i32(t0, t0, 3);
1660     tcg_gen_not_i32(t0, t0);
1661     tcg_gen_andi_i32(t0, t0, 0x22);
1662     tcg_gen_add_i32(dest, t0, t0);
1663     tcg_gen_add_i32(dest, dest, t0);
1664 
1665     /*
1666      * remove the exceeding 0x6
1667      * for digits that have not generated a carry
1668      */
1669 
1670     tcg_gen_sub_i32(dest, t1, dest);
1671 }
1672 
1673 static void bcd_sub(TCGv dest, TCGv src)
1674 {
1675     TCGv t0, t1, t2;
1676 
1677     /*
1678      *  dest10 = dest10 - src10 - X
1679      *         = bcd_add(dest + 1 - X, 0x199 - src)
1680      */
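    /*
     * Worked example (values chosen for illustration): dest = 0x42,
     * src = 0x17, X = 0.  t0 = 0x1ff - 0x17 = 0x1e8 and
     * t1 = 0x1e8 + 0x42 + 1 = 0x22b; only the low digit needs the 0x6
     * correction, giving 0x22b - 0x06 = 0x225: the low byte is
     * BCD 42 - 17 = 25 and bit 8, which becomes C/X below, is clear.
     */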
1681 
1682     /* t0 = 0x066 + (0x199 - src) */
1683 
1684     t0 = tcg_temp_new();
1685     tcg_gen_subfi_i32(t0, 0x1ff, src);
1686 
1687     /* t1 = t0 + dest + 1 - X*/
1688 
1689     t1 = tcg_temp_new();
1690     tcg_gen_add_i32(t1, t0, dest);
1691     tcg_gen_addi_i32(t1, t1, 1);
1692     tcg_gen_sub_i32(t1, t1, QREG_CC_X);
1693 
1694     /* t2 = t0 ^ dest */
1695 
1696     t2 = tcg_temp_new();
1697     tcg_gen_xor_i32(t2, t0, dest);
1698 
1699     /* t0 = t1 ^ t2 */
1700 
1701     tcg_gen_xor_i32(t0, t1, t2);
1702 
1703     /*
1704      * t2 = ~t0 & 0x110
1705      * t0 = (t2 >> 2) | (t2 >> 3)
1706      *
1707      * to fit on 8bit operands, changed in:
1708      *
1709      * t2 = ~(t0 >> 3) & 0x22
1710      * t0 = t2 + t2
1711      * t0 = t0 + t2
1712      */
1713 
1714     tcg_gen_shri_i32(t2, t0, 3);
1715     tcg_gen_not_i32(t2, t2);
1716     tcg_gen_andi_i32(t2, t2, 0x22);
1717     tcg_gen_add_i32(t0, t2, t2);
1718     tcg_gen_add_i32(t0, t0, t2);
1719 
1720     /* return t1 - t0 */
1721 
1722     tcg_gen_sub_i32(dest, t1, t0);
1723 }
1724 
1725 static void bcd_flags(TCGv val)
1726 {
1727     tcg_gen_andi_i32(QREG_CC_C, val, 0x0ff);
1728     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_C);
1729 
1730     tcg_gen_extract_i32(QREG_CC_C, val, 8, 1);
1731 
1732     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
1733 }
1734 
1735 DISAS_INSN(abcd_reg)
1736 {
1737     TCGv src;
1738     TCGv dest;
1739 
1740     gen_flush_flags(s); /* !Z is sticky */
1741 
1742     src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
1743     dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
1744     bcd_add(dest, src);
1745     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1746 
1747     bcd_flags(dest);
1748 }
1749 
1750 DISAS_INSN(abcd_mem)
1751 {
1752     TCGv src, dest, addr;
1753 
1754     gen_flush_flags(s); /* !Z is sticky */
1755 
1756     /* Indirect pre-decrement load (mode 4) */
1757 
1758     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1759                       NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1760     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1761                        NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1762 
1763     bcd_add(dest, src);
1764 
1765     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1766                 EA_STORE, IS_USER(s));
1767 
1768     bcd_flags(dest);
1769 }
1770 
1771 DISAS_INSN(sbcd_reg)
1772 {
1773     TCGv src, dest;
1774 
1775     gen_flush_flags(s); /* !Z is sticky */
1776 
1777     src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
1778     dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
1779 
1780     bcd_sub(dest, src);
1781 
1782     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1783 
1784     bcd_flags(dest);
1785 }
1786 
1787 DISAS_INSN(sbcd_mem)
1788 {
1789     TCGv src, dest, addr;
1790 
1791     gen_flush_flags(s); /* !Z is sticky */
1792 
1793     /* Indirect pre-decrement load (mode 4) */
1794 
1795     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1796                       NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1797     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1798                        NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1799 
1800     bcd_sub(dest, src);
1801 
1802     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1803                 EA_STORE, IS_USER(s));
1804 
1805     bcd_flags(dest);
1806 }
1807 
1808 DISAS_INSN(nbcd)
1809 {
1810     TCGv src, dest;
1811     TCGv addr;
1812 
1813     gen_flush_flags(s); /* !Z is sticky */
1814 
1815     SRC_EA(env, src, OS_BYTE, 0, &addr);
1816 
1817     dest = tcg_temp_new();
1818     tcg_gen_movi_i32(dest, 0);
1819     bcd_sub(dest, src);
1820 
1821     DEST_EA(env, insn, OS_BYTE, dest, &addr);
1822 
1823     bcd_flags(dest);
1824 }
1825 
1826 DISAS_INSN(addsub)
1827 {
1828     TCGv reg;
1829     TCGv dest;
1830     TCGv src;
1831     TCGv tmp;
1832     TCGv addr;
1833     int add;
1834     int opsize;
1835 
1836     add = (insn & 0x4000) != 0;
1837     opsize = insn_opsize(insn);
1838     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
1839     dest = tcg_temp_new();
1840     if (insn & 0x100) {
1841         SRC_EA(env, tmp, opsize, 1, &addr);
1842         src = reg;
1843     } else {
1844         tmp = reg;
1845         SRC_EA(env, src, opsize, 1, NULL);
1846     }
1847     if (add) {
1848         tcg_gen_add_i32(dest, tmp, src);
1849         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, src);
1850         set_cc_op(s, CC_OP_ADDB + opsize);
1851     } else {
1852         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, tmp, src);
1853         tcg_gen_sub_i32(dest, tmp, src);
1854         set_cc_op(s, CC_OP_SUBB + opsize);
1855     }
1856     gen_update_cc_add(dest, src, opsize);
1857     if (insn & 0x100) {
1858         DEST_EA(env, insn, opsize, dest, &addr);
1859     } else {
1860         gen_partset_reg(opsize, DREG(insn, 9), dest);
1861     }
1862 }
1863 
1864 /* Reverse the order of the bits in REG.  */
1865 DISAS_INSN(bitrev)
1866 {
1867     TCGv reg;
1868     reg = DREG(insn, 0);
1869     gen_helper_bitrev(reg, reg);
1870 }
1871 
1872 DISAS_INSN(bitop_reg)
1873 {
1874     int opsize;
1875     int op;
1876     TCGv src1;
1877     TCGv src2;
1878     TCGv tmp;
1879     TCGv addr;
1880     TCGv dest;
1881 
1882     if ((insn & 0x38) != 0)
1883         opsize = OS_BYTE;
1884     else
1885         opsize = OS_LONG;
1886     op = (insn >> 6) & 3;
1887     SRC_EA(env, src1, opsize, 0, op ? &addr : NULL);
1888 
1889     gen_flush_flags(s);
1890     src2 = tcg_temp_new();
1891     if (opsize == OS_BYTE)
1892         tcg_gen_andi_i32(src2, DREG(insn, 9), 7);
1893     else
1894         tcg_gen_andi_i32(src2, DREG(insn, 9), 31);
1895 
1896     tmp = tcg_temp_new();
1897     tcg_gen_shl_i32(tmp, tcg_constant_i32(1), src2);
1898 
1899     tcg_gen_and_i32(QREG_CC_Z, src1, tmp);
1900 
1901     dest = tcg_temp_new();
1902     switch (op) {
1903     case 1: /* bchg */
1904         tcg_gen_xor_i32(dest, src1, tmp);
1905         break;
1906     case 2: /* bclr */
1907         tcg_gen_andc_i32(dest, src1, tmp);
1908         break;
1909     case 3: /* bset */
1910         tcg_gen_or_i32(dest, src1, tmp);
1911         break;
1912     default: /* btst */
1913         break;
1914     }
1915     if (op) {
1916         DEST_EA(env, insn, opsize, dest, &addr);
1917     }
1918 }
1919 
1920 DISAS_INSN(sats)
1921 {
1922     TCGv reg;
1923     reg = DREG(insn, 0);
1924     gen_flush_flags(s);
1925     gen_helper_sats(reg, reg, QREG_CC_V);
1926     gen_logic_cc(s, reg, OS_LONG);
1927 }
1928 
1929 static void gen_push(DisasContext *s, TCGv val)
1930 {
1931     TCGv tmp;
1932 
1933     tmp = tcg_temp_new();
1934     tcg_gen_subi_i32(tmp, QREG_SP, 4);
1935     gen_store(s, OS_LONG, tmp, val, IS_USER(s));
1936     tcg_gen_mov_i32(QREG_SP, tmp);
1937 }
1938 
1939 static TCGv mreg(int reg)
1940 {
1941     if (reg < 8) {
1942         /* Dx */
1943         return cpu_dregs[reg];
1944     }
1945     /* Ax */
1946     return cpu_aregs[reg & 7];
1947 }
1948 
1949 DISAS_INSN(movem)
1950 {
1951     TCGv addr, incr, tmp, r[16];
1952     int is_load = (insn & 0x0400) != 0;
1953     int opsize = (insn & 0x40) != 0 ? OS_LONG : OS_WORD;
1954     uint16_t mask = read_im16(env, s);
1955     int mode = extract32(insn, 3, 3);
1956     int reg0 = REG(insn, 0);
1957     int i;
1958 
1959     tmp = cpu_aregs[reg0];
1960 
1961     switch (mode) {
1962     case 0: /* data register direct */
1963     case 1: /* addr register direct */
1964     do_addr_fault:
1965         gen_addr_fault(s);
1966         return;
1967 
1968     case 2: /* indirect */
1969         break;
1970 
1971     case 3: /* indirect post-increment */
1972         if (!is_load) {
1973             /* post-increment is not allowed */
1974             goto do_addr_fault;
1975         }
1976         break;
1977 
1978     case 4: /* indirect pre-decrement */
1979         if (is_load) {
1980             /* pre-decrement is not allowed */
1981             goto do_addr_fault;
1982         }
1983         /*
1984          * We want a bare copy of the address register, without the
1985          * pre-decrement adjustment that gen_lea would apply.
1986          */
1987         break;
1988 
1989     default:
1990         tmp = gen_lea_mode(env, s, mode, reg0, opsize);
1991         if (IS_NULL_QREG(tmp)) {
1992             goto do_addr_fault;
1993         }
1994         break;
1995     }
1996 
1997     addr = tcg_temp_new();
1998     tcg_gen_mov_i32(addr, tmp);
1999     incr = tcg_constant_i32(opsize_bytes(opsize));
2000 
2001     if (is_load) {
2002         /* memory to register */
2003         for (i = 0; i < 16; i++) {
2004             if (mask & (1 << i)) {
2005                 r[i] = gen_load(s, opsize, addr, 1, IS_USER(s));
2006                 tcg_gen_add_i32(addr, addr, incr);
2007             }
2008         }
2009         for (i = 0; i < 16; i++) {
2010             if (mask & (1 << i)) {
2011                 tcg_gen_mov_i32(mreg(i), r[i]);
2012             }
2013         }
2014         if (mode == 3) {
2015             /* post-increment: movem (An)+,X */
2016             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2017         }
2018     } else {
2019         /* register to memory */
2020         if (mode == 4) {
2021             /* pre-decrement: movem X,-(An) */
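                 /*
                  * For -(An) the register mask is bit-reversed (bit 0 names
                  * A7, bit 15 names D0), hence the (mask << i) & 0x8000 test
                  * below while the register itself is still mreg(i).
                  */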
2022             for (i = 15; i >= 0; i--) {
2023                 if ((mask << i) & 0x8000) {
2024                     tcg_gen_sub_i32(addr, addr, incr);
2025                     if (reg0 + 8 == i &&
2026                         m68k_feature(s->env, M68K_FEATURE_EXT_FULL)) {
2027                         /*
2028                          * M68020+: if the addressing register is the
2029                          * register moved to memory, the value written
2030                          * is the initial value decremented by the size of
2031                          * the operation, regardless of how many actual
2032                          * stores have been performed up to this point.
2033                          * M68000/M68010: the value is the initial value.
2034                          */
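                             /*
                              * For instance, "movem.w a2,-(a2)" with an
                              * initial A2 of 0x100 writes 0x00fe on 68020+
                              * but 0x0100 on 68000/68010; A2 ends up as
                              * 0xfe either way.
                              */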
2035                         tmp = tcg_temp_new();
2036                         tcg_gen_sub_i32(tmp, cpu_aregs[reg0], incr);
2037                         gen_store(s, opsize, addr, tmp, IS_USER(s));
2038                     } else {
2039                         gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2040                     }
2041                 }
2042             }
2043             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2044         } else {
2045             for (i = 0; i < 16; i++) {
2046                 if (mask & (1 << i)) {
2047                     gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2048                     tcg_gen_add_i32(addr, addr, incr);
2049                 }
2050             }
2051         }
2052     }
2053 }
2054 
2055 DISAS_INSN(movep)
2056 {
2057     uint8_t i;
2058     int16_t displ;
2059     TCGv reg;
2060     TCGv addr;
2061     TCGv abuf;
2062     TCGv dbuf;
2063 
2064     displ = read_im16(env, s);
2065 
2066     addr = AREG(insn, 0);
2067     reg = DREG(insn, 9);
2068 
2069     abuf = tcg_temp_new();
2070     tcg_gen_addi_i32(abuf, addr, displ);
2071     dbuf = tcg_temp_new();
2072 
2073     if (insn & 0x40) {
2074         i = 4;
2075     } else {
2076         i = 2;
2077     }
2078 
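         /*
          * movep transfers the two or four bytes of Dx to or from every
          * other byte of memory starting at (d16,Ay), most significant
          * byte first.
          */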
2079     if (insn & 0x80) {
2080         for ( ; i > 0 ; i--) {
2081             tcg_gen_shri_i32(dbuf, reg, (i - 1) * 8);
2082             tcg_gen_qemu_st8(dbuf, abuf, IS_USER(s));
2083             if (i > 1) {
2084                 tcg_gen_addi_i32(abuf, abuf, 2);
2085             }
2086         }
2087     } else {
2088         for ( ; i > 0 ; i--) {
2089             tcg_gen_qemu_ld8u(dbuf, abuf, IS_USER(s));
2090             tcg_gen_deposit_i32(reg, reg, dbuf, (i - 1) * 8, 8);
2091             if (i > 1) {
2092                 tcg_gen_addi_i32(abuf, abuf, 2);
2093             }
2094         }
2095     }
2096 }
2097 
2098 DISAS_INSN(bitop_im)
2099 {
2100     int opsize;
2101     int op;
2102     TCGv src1;
2103     uint32_t mask;
2104     int bitnum;
2105     TCGv tmp;
2106     TCGv addr;
2107 
2108     if ((insn & 0x38) != 0)
2109         opsize = OS_BYTE;
2110     else
2111         opsize = OS_LONG;
2112     op = (insn >> 6) & 3;
2113 
2114     bitnum = read_im16(env, s);
2115     if (m68k_feature(s->env, M68K_FEATURE_M68K)) {
2116         if (bitnum & 0xfe00) {
2117             disas_undef(env, s, insn);
2118             return;
2119         }
2120     } else {
2121         if (bitnum & 0xff00) {
2122             disas_undef(env, s, insn);
2123             return;
2124         }
2125     }
2126 
2127     SRC_EA(env, src1, opsize, 0, op ? &addr : NULL);
2128 
2129     gen_flush_flags(s);
2130     if (opsize == OS_BYTE)
2131         bitnum &= 7;
2132     else
2133         bitnum &= 31;
2134     mask = 1 << bitnum;
2135 
2136     tcg_gen_andi_i32(QREG_CC_Z, src1, mask);
2137 
2138     if (op) {
2139         tmp = tcg_temp_new();
2140         switch (op) {
2141         case 1: /* bchg */
2142             tcg_gen_xori_i32(tmp, src1, mask);
2143             break;
2144         case 2: /* bclr */
2145             tcg_gen_andi_i32(tmp, src1, ~mask);
2146             break;
2147         case 3: /* bset */
2148             tcg_gen_ori_i32(tmp, src1, mask);
2149             break;
2150         default: /* btst */
2151             break;
2152         }
2153         DEST_EA(env, insn, opsize, tmp, &addr);
2154     }
2155 }
2156 
2157 static TCGv gen_get_ccr(DisasContext *s)
2158 {
2159     TCGv dest;
2160 
2161     update_cc_op(s);
2162     dest = tcg_temp_new();
2163     gen_helper_get_ccr(dest, cpu_env);
2164     return dest;
2165 }
2166 
2167 static TCGv gen_get_sr(DisasContext *s)
2168 {
2169     TCGv ccr;
2170     TCGv sr;
2171 
2172     ccr = gen_get_ccr(s);
2173     sr = tcg_temp_new();
2174     tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
2175     tcg_gen_or_i32(sr, sr, ccr);
2176     return sr;
2177 }
2178 
2179 static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
2180 {
2181     if (ccr_only) {
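             /*
              * With CC_OP_FLAGS, N and V are kept in the sign bit of their
              * holders, C and X in bit 0, and Z is considered set when
              * QREG_CC_Z is zero, hence the inverted value stored for Z.
              */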
2182         tcg_gen_movi_i32(QREG_CC_C, val & CCF_C ? 1 : 0);
2183         tcg_gen_movi_i32(QREG_CC_V, val & CCF_V ? -1 : 0);
2184         tcg_gen_movi_i32(QREG_CC_Z, val & CCF_Z ? 0 : 1);
2185         tcg_gen_movi_i32(QREG_CC_N, val & CCF_N ? -1 : 0);
2186         tcg_gen_movi_i32(QREG_CC_X, val & CCF_X ? 1 : 0);
2187     } else {
2188         /* Must writeback before changing security state. */
2189         do_writebacks(s);
2190         gen_helper_set_sr(cpu_env, tcg_constant_i32(val));
2191     }
2192     set_cc_op(s, CC_OP_FLAGS);
2193 }
2194 
2195 static void gen_set_sr(DisasContext *s, TCGv val, int ccr_only)
2196 {
2197     if (ccr_only) {
2198         gen_helper_set_ccr(cpu_env, val);
2199     } else {
2200         /* Must writeback before changing security state. */
2201         do_writebacks(s);
2202         gen_helper_set_sr(cpu_env, val);
2203     }
2204     set_cc_op(s, CC_OP_FLAGS);
2205 }
2206 
2207 static void gen_move_to_sr(CPUM68KState *env, DisasContext *s, uint16_t insn,
2208                            bool ccr_only)
2209 {
2210     if ((insn & 0x3f) == 0x3c) {
2211         uint16_t val;
2212         val = read_im16(env, s);
2213         gen_set_sr_im(s, val, ccr_only);
2214     } else {
2215         TCGv src;
2216         SRC_EA(env, src, OS_WORD, 0, NULL);
2217         gen_set_sr(s, src, ccr_only);
2218     }
2219 }
2220 
2221 DISAS_INSN(arith_im)
2222 {
2223     int op;
2224     TCGv im;
2225     TCGv src1;
2226     TCGv dest;
2227     TCGv addr;
2228     int opsize;
2229     bool with_SR = ((insn & 0x3f) == 0x3c);
2230 
2231     op = (insn >> 9) & 7;
2232     opsize = insn_opsize(insn);
2233     switch (opsize) {
2234     case OS_BYTE:
2235         im = tcg_constant_i32((int8_t)read_im8(env, s));
2236         break;
2237     case OS_WORD:
2238         im = tcg_constant_i32((int16_t)read_im16(env, s));
2239         break;
2240     case OS_LONG:
2241         im = tcg_constant_i32(read_im32(env, s));
2242         break;
2243     default:
2244         g_assert_not_reached();
2245     }
2246 
2247     if (with_SR) {
2248         /* SR/CCR can only be used with andi/eori/ori */
2249         if (op == 2 || op == 3 || op == 6) {
2250             disas_undef(env, s, insn);
2251             return;
2252         }
2253         switch (opsize) {
2254         case OS_BYTE:
2255             src1 = gen_get_ccr(s);
2256             break;
2257         case OS_WORD:
2258             if (IS_USER(s)) {
2259                 gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2260                 return;
2261             }
2262             src1 = gen_get_sr(s);
2263             break;
2264         default:
2265             /* OS_LONG; others already g_assert_not_reached.  */
2266             disas_undef(env, s, insn);
2267             return;
2268         }
2269     } else {
2270         SRC_EA(env, src1, opsize, 1, (op == 6) ? NULL : &addr);
2271     }
2272     dest = tcg_temp_new();
2273     switch (op) {
2274     case 0: /* ori */
2275         tcg_gen_or_i32(dest, src1, im);
2276         if (with_SR) {
2277             gen_set_sr(s, dest, opsize == OS_BYTE);
2278             gen_exit_tb(s);
2279         } else {
2280             DEST_EA(env, insn, opsize, dest, &addr);
2281             gen_logic_cc(s, dest, opsize);
2282         }
2283         break;
2284     case 1: /* andi */
2285         tcg_gen_and_i32(dest, src1, im);
2286         if (with_SR) {
2287             gen_set_sr(s, dest, opsize == OS_BYTE);
2288             gen_exit_tb(s);
2289         } else {
2290             DEST_EA(env, insn, opsize, dest, &addr);
2291             gen_logic_cc(s, dest, opsize);
2292         }
2293         break;
2294     case 2: /* subi */
2295         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, src1, im);
2296         tcg_gen_sub_i32(dest, src1, im);
2297         gen_update_cc_add(dest, im, opsize);
2298         set_cc_op(s, CC_OP_SUBB + opsize);
2299         DEST_EA(env, insn, opsize, dest, &addr);
2300         break;
2301     case 3: /* addi */
2302         tcg_gen_add_i32(dest, src1, im);
2303         gen_update_cc_add(dest, im, opsize);
2304         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, im);
2305         set_cc_op(s, CC_OP_ADDB + opsize);
2306         DEST_EA(env, insn, opsize, dest, &addr);
2307         break;
2308     case 5: /* eori */
2309         tcg_gen_xor_i32(dest, src1, im);
2310         if (with_SR) {
2311             gen_set_sr(s, dest, opsize == OS_BYTE);
2312             gen_exit_tb(s);
2313         } else {
2314             DEST_EA(env, insn, opsize, dest, &addr);
2315             gen_logic_cc(s, dest, opsize);
2316         }
2317         break;
2318     case 6: /* cmpi */
2319         gen_update_cc_cmp(s, src1, im, opsize);
2320         break;
2321     default:
2322         abort();
2323     }
2324 }
2325 
2326 DISAS_INSN(cas)
2327 {
2328     int opsize;
2329     TCGv addr;
2330     uint16_t ext;
2331     TCGv load;
2332     TCGv cmp;
2333     MemOp opc;
2334 
2335     switch ((insn >> 9) & 3) {
2336     case 1:
2337         opsize = OS_BYTE;
2338         opc = MO_SB;
2339         break;
2340     case 2:
2341         opsize = OS_WORD;
2342         opc = MO_TESW;
2343         break;
2344     case 3:
2345         opsize = OS_LONG;
2346         opc = MO_TESL;
2347         break;
2348     default:
2349         g_assert_not_reached();
2350     }
2351 
2352     ext = read_im16(env, s);
2353 
2354     /* cas Dc,Du,<EA> */
2355 
2356     addr = gen_lea(env, s, insn, opsize);
2357     if (IS_NULL_QREG(addr)) {
2358         gen_addr_fault(s);
2359         return;
2360     }
2361 
2362     cmp = gen_extend(s, DREG(ext, 0), opsize, 1);
2363 
2364     /*
2365      * if  <EA> == Dc then
2366      *     <EA> = Du
2367      *     Dc = <EA> (because <EA> == Dc)
2368      * else
2369      *     Dc = <EA>
2370      */
2371 
2372     load = tcg_temp_new();
2373     tcg_gen_atomic_cmpxchg_i32(load, addr, cmp, DREG(ext, 6),
2374                                IS_USER(s), opc);
2375     /* update flags before Dc is overwritten with the loaded value */
2376     gen_update_cc_cmp(s, load, cmp, opsize);
2377     gen_partset_reg(opsize, DREG(ext, 0), load);
2378 
2379     switch (extract32(insn, 3, 3)) {
2380     case 3: /* Indirect postincrement.  */
2381         tcg_gen_addi_i32(AREG(insn, 0), addr, opsize_bytes(opsize));
2382         break;
2383     case 4: /* Indirect predecrement.  */
2384         tcg_gen_mov_i32(AREG(insn, 0), addr);
2385         break;
2386     }
2387 }
2388 
2389 DISAS_INSN(cas2w)
2390 {
2391     uint16_t ext1, ext2;
2392     TCGv addr1, addr2;
2393 
2394     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2395 
2396     ext1 = read_im16(env, s);
2397 
2398     if (ext1 & 0x8000) {
2399         /* Address Register */
2400         addr1 = AREG(ext1, 12);
2401     } else {
2402         /* Data Register */
2403         addr1 = DREG(ext1, 12);
2404     }
2405 
2406     ext2 = read_im16(env, s);
2407     if (ext2 & 0x8000) {
2408         /* Address Register */
2409         addr2 = AREG(ext2, 12);
2410     } else {
2411         /* Data Register */
2412         addr2 = DREG(ext2, 12);
2413     }
2414 
2415     /*
2416      * if (R1) == Dc1 && (R2) == Dc2 then
2417      *     (R1) = Du1
2418      *     (R2) = Du2
2419      * else
2420      *     Dc1 = (R1)
2421      *     Dc2 = (R2)
2422      */
2423 
2424     if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2425         gen_helper_exit_atomic(cpu_env);
2426     } else {
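             /*
              * Pack the four register numbers into a single operand for the
              * helper: Du2 in bits 2:0, Du1 in bits 5:3, Dc2 in bits 8:6 and
              * Dc1 in bits 11:9 (as in cas above, field 6 of each extension
              * word is the update register and field 0 the compare register).
              */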
2427         TCGv regs = tcg_constant_i32(REG(ext2, 6) |
2428                                      (REG(ext1, 6) << 3) |
2429                                      (REG(ext2, 0) << 6) |
2430                                      (REG(ext1, 0) << 9));
2431         gen_helper_cas2w(cpu_env, regs, addr1, addr2);
2432     }
2433 
2434     /* Note that cas2w also assigns to env->cc_op.  */
2435     s->cc_op = CC_OP_CMPW;
2436     s->cc_op_synced = 1;
2437 }
2438 
2439 DISAS_INSN(cas2l)
2440 {
2441     uint16_t ext1, ext2;
2442     TCGv addr1, addr2, regs;
2443 
2444     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2445 
2446     ext1 = read_im16(env, s);
2447 
2448     if (ext1 & 0x8000) {
2449         /* Address Register */
2450         addr1 = AREG(ext1, 12);
2451     } else {
2452         /* Data Register */
2453         addr1 = DREG(ext1, 12);
2454     }
2455 
2456     ext2 = read_im16(env, s);
2457     if (ext2 & 0x8000) {
2458         /* Address Register */
2459         addr2 = AREG(ext2, 12);
2460     } else {
2461         /* Data Register */
2462         addr2 = DREG(ext2, 12);
2463     }
2464 
2465     /*
2466      * if (R1) == Dc1 && (R2) == Dc2 then
2467      *     (R1) = Du1
2468      *     (R2) = Du2
2469      * else
2470      *     Dc1 = (R1)
2471      *     Dc2 = (R2)
2472      */
2473 
2474     regs = tcg_constant_i32(REG(ext2, 6) |
2475                             (REG(ext1, 6) << 3) |
2476                             (REG(ext2, 0) << 6) |
2477                             (REG(ext1, 0) << 9));
2478     if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2479         gen_helper_cas2l_parallel(cpu_env, regs, addr1, addr2);
2480     } else {
2481         gen_helper_cas2l(cpu_env, regs, addr1, addr2);
2482     }
2483 
2484     /* Note that cas2l also assigns to env->cc_op.  */
2485     s->cc_op = CC_OP_CMPL;
2486     s->cc_op_synced = 1;
2487 }
2488 
2489 DISAS_INSN(byterev)
2490 {
2491     TCGv reg;
2492 
2493     reg = DREG(insn, 0);
2494     tcg_gen_bswap32_i32(reg, reg);
2495 }
2496 
2497 DISAS_INSN(move)
2498 {
2499     TCGv src;
2500     TCGv dest;
2501     int op;
2502     int opsize;
2503 
2504     switch (insn >> 12) {
2505     case 1: /* move.b */
2506         opsize = OS_BYTE;
2507         break;
2508     case 2: /* move.l */
2509         opsize = OS_LONG;
2510         break;
2511     case 3: /* move.w */
2512         opsize = OS_WORD;
2513         break;
2514     default:
2515         abort();
2516     }
2517     SRC_EA(env, src, opsize, 1, NULL);
2518     op = (insn >> 6) & 7;
2519     if (op == 1) {
2520         /* movea */
2521         /* The value will already have been sign extended.  */
2522         dest = AREG(insn, 9);
2523         tcg_gen_mov_i32(dest, src);
2524     } else {
2525         /* normal move */
2526         uint16_t dest_ea;
2527         dest_ea = ((insn >> 9) & 7) | (op << 3);
2528         DEST_EA(env, dest_ea, opsize, src, NULL);
2529         /* This will be correct because loads sign extend.  */
2530         gen_logic_cc(s, src, opsize);
2531     }
2532 }
2533 
2534 DISAS_INSN(negx)
2535 {
2536     TCGv z;
2537     TCGv src;
2538     TCGv addr;
2539     int opsize;
2540 
2541     opsize = insn_opsize(insn);
2542     SRC_EA(env, src, opsize, 1, &addr);
2543 
2544     gen_flush_flags(s); /* compute old Z */
2545 
2546     /*
2547      * Perform subtract with borrow.
2548      * (X, N) =  -(src + X);
2549      */
2550 
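         /*
          * add2/sub2 compute a 64-bit value in two 32-bit halves.  With
          * the high inputs zeroed, the high half of the result is the
          * carry out of bit 31 (0 or -1 after the subtraction, according
          * to the borrow); the andi below keeps only its bit 0 for X.
          */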
2551     z = tcg_constant_i32(0);
2552     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, z, QREG_CC_X, z);
2553     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, z, z, QREG_CC_N, QREG_CC_X);
2554     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
2555 
2556     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
2557 
2558     /*
2559      * Compute signed-overflow for negation.  The normal formula for
2560      * subtraction is (res ^ dest) & (src ^ dest), but with dest==0
2561      * this simplifies to res & src.
2562      */
2563 
2564     tcg_gen_and_i32(QREG_CC_V, QREG_CC_N, src);
2565 
2566     /* Copy the rest of the results into place.  */
2567     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
2568     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
2569 
2570     set_cc_op(s, CC_OP_FLAGS);
2571 
2572     /* result is in QREG_CC_N */
2573 
2574     DEST_EA(env, insn, opsize, QREG_CC_N, &addr);
2575 }
2576 
2577 DISAS_INSN(lea)
2578 {
2579     TCGv reg;
2580     TCGv tmp;
2581 
2582     reg = AREG(insn, 9);
2583     tmp = gen_lea(env, s, insn, OS_LONG);
2584     if (IS_NULL_QREG(tmp)) {
2585         gen_addr_fault(s);
2586         return;
2587     }
2588     tcg_gen_mov_i32(reg, tmp);
2589 }
2590 
2591 DISAS_INSN(clr)
2592 {
2593     int opsize;
2594     TCGv zero;
2595 
2596     zero = tcg_constant_i32(0);
2597     opsize = insn_opsize(insn);
2598     DEST_EA(env, insn, opsize, zero, NULL);
2599     gen_logic_cc(s, zero, opsize);
2600 }
2601 
2602 DISAS_INSN(move_from_ccr)
2603 {
2604     TCGv ccr;
2605 
2606     ccr = gen_get_ccr(s);
2607     DEST_EA(env, insn, OS_WORD, ccr, NULL);
2608 }
2609 
2610 DISAS_INSN(neg)
2611 {
2612     TCGv src1;
2613     TCGv dest;
2614     TCGv addr;
2615     int opsize;
2616 
2617     opsize = insn_opsize(insn);
2618     SRC_EA(env, src1, opsize, 1, &addr);
2619     dest = tcg_temp_new();
2620     tcg_gen_neg_i32(dest, src1);
2621     set_cc_op(s, CC_OP_SUBB + opsize);
2622     gen_update_cc_add(dest, src1, opsize);
2623     tcg_gen_setcondi_i32(TCG_COND_NE, QREG_CC_X, dest, 0);
2624     DEST_EA(env, insn, opsize, dest, &addr);
2625 }
2626 
2627 DISAS_INSN(move_to_ccr)
2628 {
2629     gen_move_to_sr(env, s, insn, true);
2630 }
2631 
2632 DISAS_INSN(not)
2633 {
2634     TCGv src1;
2635     TCGv dest;
2636     TCGv addr;
2637     int opsize;
2638 
2639     opsize = insn_opsize(insn);
2640     SRC_EA(env, src1, opsize, 1, &addr);
2641     dest = tcg_temp_new();
2642     tcg_gen_not_i32(dest, src1);
2643     DEST_EA(env, insn, opsize, dest, &addr);
2644     gen_logic_cc(s, dest, opsize);
2645 }
2646 
2647 DISAS_INSN(swap)
2648 {
2649     TCGv src1;
2650     TCGv src2;
2651     TCGv reg;
2652 
2653     src1 = tcg_temp_new();
2654     src2 = tcg_temp_new();
2655     reg = DREG(insn, 0);
2656     tcg_gen_shli_i32(src1, reg, 16);
2657     tcg_gen_shri_i32(src2, reg, 16);
2658     tcg_gen_or_i32(reg, src1, src2);
2659     gen_logic_cc(s, reg, OS_LONG);
2660 }
2661 
2662 DISAS_INSN(bkpt)
2663 {
2664 #if defined(CONFIG_SOFTMMU)
2665     gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2666 #else
2667     gen_exception(s, s->base.pc_next, EXCP_DEBUG);
2668 #endif
2669 }
2670 
2671 DISAS_INSN(pea)
2672 {
2673     TCGv tmp;
2674 
2675     tmp = gen_lea(env, s, insn, OS_LONG);
2676     if (IS_NULL_QREG(tmp)) {
2677         gen_addr_fault(s);
2678         return;
2679     }
2680     gen_push(s, tmp);
2681 }
2682 
2683 DISAS_INSN(ext)
2684 {
2685     int op;
2686     TCGv reg;
2687     TCGv tmp;
2688 
2689     reg = DREG(insn, 0);
2690     op = (insn >> 6) & 7;
2691     tmp = tcg_temp_new();
2692     if (op == 3)
2693         tcg_gen_ext16s_i32(tmp, reg);
2694     else
2695         tcg_gen_ext8s_i32(tmp, reg);
2696     if (op == 2)
2697         gen_partset_reg(OS_WORD, reg, tmp);
2698     else
2699         tcg_gen_mov_i32(reg, tmp);
2700     gen_logic_cc(s, tmp, OS_LONG);
2701 }
2702 
2703 DISAS_INSN(tst)
2704 {
2705     int opsize;
2706     TCGv tmp;
2707 
2708     opsize = insn_opsize(insn);
2709     SRC_EA(env, tmp, opsize, 1, NULL);
2710     gen_logic_cc(s, tmp, opsize);
2711 }
2712 
2713 DISAS_INSN(pulse)
2714 {
2715     /* Implemented as a NOP.  */
2716 }
2717 
2718 DISAS_INSN(illegal)
2719 {
2720     gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2721 }
2722 
2723 DISAS_INSN(tas)
2724 {
2725     int mode = extract32(insn, 3, 3);
2726     int reg0 = REG(insn, 0);
2727 
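         /*
          * tas sets N and Z from the operand and then sets its bit 7.
          * The memory form is a read-modify-write cycle, modelled here
          * with an atomic fetch-or.
          */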
2728     if (mode == 0) {
2729         /* data register direct */
2730         TCGv dest = cpu_dregs[reg0];
2731         gen_logic_cc(s, dest, OS_BYTE);
2732         tcg_gen_ori_tl(dest, dest, 0x80);
2733     } else {
2734         TCGv src1, addr;
2735 
2736         addr = gen_lea_mode(env, s, mode, reg0, OS_BYTE);
2737         if (IS_NULL_QREG(addr)) {
2738             gen_addr_fault(s);
2739             return;
2740         }
2741         src1 = tcg_temp_new();
2742         tcg_gen_atomic_fetch_or_tl(src1, addr, tcg_constant_tl(0x80),
2743                                    IS_USER(s), MO_SB);
2744         gen_logic_cc(s, src1, OS_BYTE);
2745 
2746         switch (mode) {
2747         case 3: /* Indirect postincrement.  */
2748             tcg_gen_addi_i32(AREG(insn, 0), addr, 1);
2749             break;
2750         case 4: /* Indirect predecrement.  */
2751             tcg_gen_mov_i32(AREG(insn, 0), addr);
2752             break;
2753         }
2754     }
2755 }
2756 
2757 DISAS_INSN(mull)
2758 {
2759     uint16_t ext;
2760     TCGv src1;
2761     int sign;
2762 
2763     ext = read_im16(env, s);
2764 
2765     sign = ext & 0x800;
2766 
2767     if (ext & 0x400) {
2768         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
2769             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2770             return;
2771         }
2772 
2773         SRC_EA(env, src1, OS_LONG, 0, NULL);
2774 
2775         if (sign) {
2776             tcg_gen_muls2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2777         } else {
2778             tcg_gen_mulu2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2779         }
2780         /* if Dl == Dh, 68040 returns low word */
2781         tcg_gen_mov_i32(DREG(ext, 0), QREG_CC_N);
2782         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_Z);
2783         tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N);
2784 
2785         tcg_gen_movi_i32(QREG_CC_V, 0);
2786         tcg_gen_movi_i32(QREG_CC_C, 0);
2787 
2788         set_cc_op(s, CC_OP_FLAGS);
2789         return;
2790     }
2791     SRC_EA(env, src1, OS_LONG, 0, NULL);
2792     if (m68k_feature(s->env, M68K_FEATURE_M68K)) {
2793         tcg_gen_movi_i32(QREG_CC_C, 0);
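             /*
              * V is set when the 64-bit product does not fit in 32 bits:
              * for muls.l the high half must equal the sign extension of
              * the low half, for mulu.l it must be zero.
              */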
2794         if (sign) {
2795             tcg_gen_muls2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2796             /* QREG_CC_V is -(QREG_CC_V != (QREG_CC_N >> 31)) */
2797             tcg_gen_sari_i32(QREG_CC_Z, QREG_CC_N, 31);
2798             tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_Z);
2799         } else {
2800             tcg_gen_mulu2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2801             /* QREG_CC_V is -(QREG_CC_V != 0), use QREG_CC_C as 0 */
2802             tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_C);
2803         }
2804         tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
2805         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_N);
2806 
2807         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
2808 
2809         set_cc_op(s, CC_OP_FLAGS);
2810     } else {
2811         /*
2812          * The upper 32 bits of the product are discarded, so
2813          * muls.l and mulu.l are functionally equivalent.
2814          */
2815         tcg_gen_mul_i32(DREG(ext, 12), src1, DREG(ext, 12));
2816         gen_logic_cc(s, DREG(ext, 12), OS_LONG);
2817     }
2818 }
2819 
2820 static void gen_link(DisasContext *s, uint16_t insn, int32_t offset)
2821 {
2822     TCGv reg;
2823     TCGv tmp;
2824 
2825     reg = AREG(insn, 0);
2826     tmp = tcg_temp_new();
2827     tcg_gen_subi_i32(tmp, QREG_SP, 4);
2828     gen_store(s, OS_LONG, tmp, reg, IS_USER(s));
2829     if ((insn & 7) != 7) {
2830         tcg_gen_mov_i32(reg, tmp);
2831     }
2832     tcg_gen_addi_i32(QREG_SP, tmp, offset);
2833 }
2834 
2835 DISAS_INSN(link)
2836 {
2837     int16_t offset;
2838 
2839     offset = read_im16(env, s);
2840     gen_link(s, insn, offset);
2841 }
2842 
2843 DISAS_INSN(linkl)
2844 {
2845     int32_t offset;
2846 
2847     offset = read_im32(env, s);
2848     gen_link(s, insn, offset);
2849 }
2850 
2851 DISAS_INSN(unlk)
2852 {
2853     TCGv src;
2854     TCGv reg;
2855     TCGv tmp;
2856 
2857     src = tcg_temp_new();
2858     reg = AREG(insn, 0);
2859     tcg_gen_mov_i32(src, reg);
2860     tmp = gen_load(s, OS_LONG, src, 0, IS_USER(s));
2861     tcg_gen_mov_i32(reg, tmp);
2862     tcg_gen_addi_i32(QREG_SP, src, 4);
2863 }
2864 
2865 #if defined(CONFIG_SOFTMMU)
2866 DISAS_INSN(reset)
2867 {
2868     if (IS_USER(s)) {
2869         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2870         return;
2871     }
2872 
2873     gen_helper_reset(cpu_env);
2874 }
2875 #endif
2876 
2877 DISAS_INSN(nop)
2878 {
2879 }
2880 
2881 DISAS_INSN(rtd)
2882 {
2883     TCGv tmp;
2884     int16_t offset = read_im16(env, s);
2885 
2886     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2887     tcg_gen_addi_i32(QREG_SP, QREG_SP, offset + 4);
2888     gen_jmp(s, tmp);
2889 }
2890 
2891 DISAS_INSN(rtr)
2892 {
2893     TCGv tmp;
2894     TCGv ccr;
2895     TCGv sp;
2896 
2897     sp = tcg_temp_new();
2898     ccr = gen_load(s, OS_WORD, QREG_SP, 0, IS_USER(s));
2899     tcg_gen_addi_i32(sp, QREG_SP, 2);
2900     tmp = gen_load(s, OS_LONG, sp, 0, IS_USER(s));
2901     tcg_gen_addi_i32(QREG_SP, sp, 4);
2902 
2903     gen_set_sr(s, ccr, true);
2904 
2905     gen_jmp(s, tmp);
2906 }
2907 
2908 DISAS_INSN(rts)
2909 {
2910     TCGv tmp;
2911 
2912     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2913     tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
2914     gen_jmp(s, tmp);
2915 }
2916 
2917 DISAS_INSN(jump)
2918 {
2919     TCGv tmp;
2920 
2921     /*
2922      * Load the target address first to ensure correct exception
2923      * behavior.
2924      */
2925     tmp = gen_lea(env, s, insn, OS_LONG);
2926     if (IS_NULL_QREG(tmp)) {
2927         gen_addr_fault(s);
2928         return;
2929     }
2930     if ((insn & 0x40) == 0) {
2931         /* jsr */
2932         gen_push(s, tcg_constant_i32(s->pc));
2933     }
2934     gen_jmp(s, tmp);
2935 }
2936 
2937 DISAS_INSN(addsubq)
2938 {
2939     TCGv src;
2940     TCGv dest;
2941     TCGv val;
2942     int imm;
2943     TCGv addr;
2944     int opsize;
2945 
2946     if ((insn & 0x38) == 0x08) {
2947         /* Operation on address register is always long.  */
2948         opsize = OS_LONG;
2949     } else {
2950         opsize = insn_opsize(insn);
2951     }
2952     SRC_EA(env, src, opsize, 1, &addr);
2953     imm = (insn >> 9) & 7;
2954     if (imm == 0) {
2955         imm = 8;
2956     }
2957     val = tcg_constant_i32(imm);
2958     dest = tcg_temp_new();
2959     tcg_gen_mov_i32(dest, src);
2960     if ((insn & 0x38) == 0x08) {
2961         /*
2962          * Don't update condition codes if the destination is an
2963          * address register.
2964          */
2965         if (insn & 0x0100) {
2966             tcg_gen_sub_i32(dest, dest, val);
2967         } else {
2968             tcg_gen_add_i32(dest, dest, val);
2969         }
2970     } else {
2971         if (insn & 0x0100) {
2972             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
2973             tcg_gen_sub_i32(dest, dest, val);
2974             set_cc_op(s, CC_OP_SUBB + opsize);
2975         } else {
2976             tcg_gen_add_i32(dest, dest, val);
2977             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
2978             set_cc_op(s, CC_OP_ADDB + opsize);
2979         }
2980         gen_update_cc_add(dest, val, opsize);
2981     }
2982     DEST_EA(env, insn, opsize, dest, &addr);
2983 }
2984 
2985 DISAS_INSN(branch)
2986 {
2987     int32_t offset;
2988     uint32_t base;
2989     int op;
2990 
2991     base = s->pc;
2992     op = (insn >> 8) & 0xf;
2993     offset = (int8_t)insn;
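         /*
          * In-word displacements of 0x00 and 0xff are escape values: a
          * 16-bit or 32-bit displacement word follows, respectively.
          */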
2994     if (offset == 0) {
2995         offset = (int16_t)read_im16(env, s);
2996     } else if (offset == -1) {
2997         offset = read_im32(env, s);
2998     }
2999     if (op == 1) {
3000         /* bsr */
3001         gen_push(s, tcg_constant_i32(s->pc));
3002     }
3003     if (op > 1) {
3004         /* Bcc */
3005         TCGLabel *l1 = gen_new_label();
3006         gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
3007         gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
3008         gen_set_label(l1);
3009         gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
3010     } else {
3011         /* Unconditional branch.  */
3012         update_cc_op(s);
3013         gen_jmp_tb(s, 0, base + offset, s->base.pc_next);
3014     }
3015 }
3016 
3017 DISAS_INSN(moveq)
3018 {
3019     tcg_gen_movi_i32(DREG(insn, 9), (int8_t)insn);
3020     gen_logic_cc(s, DREG(insn, 9), OS_LONG);
3021 }
3022 
3023 DISAS_INSN(mvzs)
3024 {
3025     int opsize;
3026     TCGv src;
3027     TCGv reg;
3028 
3029     if (insn & 0x40)
3030         opsize = OS_WORD;
3031     else
3032         opsize = OS_BYTE;
3033     SRC_EA(env, src, opsize, (insn & 0x80) == 0, NULL);
3034     reg = DREG(insn, 9);
3035     tcg_gen_mov_i32(reg, src);
3036     gen_logic_cc(s, src, opsize);
3037 }
3038 
3039 DISAS_INSN(or)
3040 {
3041     TCGv reg;
3042     TCGv dest;
3043     TCGv src;
3044     TCGv addr;
3045     int opsize;
3046 
3047     opsize = insn_opsize(insn);
3048     reg = gen_extend(s, DREG(insn, 9), opsize, 0);
3049     dest = tcg_temp_new();
3050     if (insn & 0x100) {
3051         SRC_EA(env, src, opsize, 0, &addr);
3052         tcg_gen_or_i32(dest, src, reg);
3053         DEST_EA(env, insn, opsize, dest, &addr);
3054     } else {
3055         SRC_EA(env, src, opsize, 0, NULL);
3056         tcg_gen_or_i32(dest, src, reg);
3057         gen_partset_reg(opsize, DREG(insn, 9), dest);
3058     }
3059     gen_logic_cc(s, dest, opsize);
3060 }
3061 
3062 DISAS_INSN(suba)
3063 {
3064     TCGv src;
3065     TCGv reg;
3066 
3067     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3068     reg = AREG(insn, 9);
3069     tcg_gen_sub_i32(reg, reg, src);
3070 }
3071 
3072 static inline void gen_subx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3073 {
3074     TCGv tmp, zero;
3075 
3076     gen_flush_flags(s); /* compute old Z */
3077 
3078     /*
3079      * Perform subtract with borrow.
3080      * (X, N) = dest - (src + X);
3081      */
3082 
3083     zero = tcg_constant_i32(0);
3084     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, zero, QREG_CC_X, zero);
3085     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, dest, zero, QREG_CC_N, QREG_CC_X);
3086     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3087     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
3088 
3089     /* Compute signed-overflow for subtract.  */
3090 
3091     tmp = tcg_temp_new();
3092     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, dest);
3093     tcg_gen_xor_i32(tmp, dest, src);
3094     tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, tmp);
3095 
3096     /* Copy the rest of the results into place.  */
3097     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3098     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3099 
3100     set_cc_op(s, CC_OP_FLAGS);
3101 
3102     /* result is in QREG_CC_N */
3103 }
3104 
3105 DISAS_INSN(subx_reg)
3106 {
3107     TCGv dest;
3108     TCGv src;
3109     int opsize;
3110 
3111     opsize = insn_opsize(insn);
3112 
3113     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3114     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3115 
3116     gen_subx(s, src, dest, opsize);
3117 
3118     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3119 }
3120 
3121 DISAS_INSN(subx_mem)
3122 {
3123     TCGv src;
3124     TCGv addr_src;
3125     TCGv dest;
3126     TCGv addr_dest;
3127     int opsize;
3128 
3129     opsize = insn_opsize(insn);
3130 
3131     addr_src = AREG(insn, 0);
3132     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3133     src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3134 
3135     addr_dest = AREG(insn, 9);
3136     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3137     dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3138 
3139     gen_subx(s, src, dest, opsize);
3140 
3141     gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3142 }
3143 
3144 DISAS_INSN(mov3q)
3145 {
3146     TCGv src;
3147     int val;
3148 
3149     val = (insn >> 9) & 7;
3150     if (val == 0) {
3151         val = -1;
3152     }
3153     src = tcg_constant_i32(val);
3154     gen_logic_cc(s, src, OS_LONG);
3155     DEST_EA(env, insn, OS_LONG, src, NULL);
3156 }
3157 
3158 DISAS_INSN(cmp)
3159 {
3160     TCGv src;
3161     TCGv reg;
3162     int opsize;
3163 
3164     opsize = insn_opsize(insn);
3165     SRC_EA(env, src, opsize, 1, NULL);
3166     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
3167     gen_update_cc_cmp(s, reg, src, opsize);
3168 }
3169 
3170 DISAS_INSN(cmpa)
3171 {
3172     int opsize;
3173     TCGv src;
3174     TCGv reg;
3175 
3176     if (insn & 0x100) {
3177         opsize = OS_LONG;
3178     } else {
3179         opsize = OS_WORD;
3180     }
3181     SRC_EA(env, src, opsize, 1, NULL);
3182     reg = AREG(insn, 9);
3183     gen_update_cc_cmp(s, reg, src, OS_LONG);
3184 }
3185 
3186 DISAS_INSN(cmpm)
3187 {
3188     int opsize = insn_opsize(insn);
3189     TCGv src, dst;
3190 
3191     /* Post-increment load (mode 3) from Ay.  */
3192     src = gen_ea_mode(env, s, 3, REG(insn, 0), opsize,
3193                       NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3194     /* Post-increment load (mode 3) from Ax.  */
3195     dst = gen_ea_mode(env, s, 3, REG(insn, 9), opsize,
3196                       NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3197 
3198     gen_update_cc_cmp(s, dst, src, opsize);
3199 }
3200 
3201 DISAS_INSN(eor)
3202 {
3203     TCGv src;
3204     TCGv dest;
3205     TCGv addr;
3206     int opsize;
3207 
3208     opsize = insn_opsize(insn);
3209 
3210     SRC_EA(env, src, opsize, 0, &addr);
3211     dest = tcg_temp_new();
3212     tcg_gen_xor_i32(dest, src, DREG(insn, 9));
3213     gen_logic_cc(s, dest, opsize);
3214     DEST_EA(env, insn, opsize, dest, &addr);
3215 }
3216 
3217 static void do_exg(TCGv reg1, TCGv reg2)
3218 {
3219     TCGv temp = tcg_temp_new();
3220     tcg_gen_mov_i32(temp, reg1);
3221     tcg_gen_mov_i32(reg1, reg2);
3222     tcg_gen_mov_i32(reg2, temp);
3223 }
3224 
3225 DISAS_INSN(exg_dd)
3226 {
3227     /* exchange Dx and Dy */
3228     do_exg(DREG(insn, 9), DREG(insn, 0));
3229 }
3230 
3231 DISAS_INSN(exg_aa)
3232 {
3233     /* exchange Ax and Ay */
3234     do_exg(AREG(insn, 9), AREG(insn, 0));
3235 }
3236 
3237 DISAS_INSN(exg_da)
3238 {
3239     /* exchange Dx and Ay */
3240     do_exg(DREG(insn, 9), AREG(insn, 0));
3241 }
3242 
3243 DISAS_INSN(and)
3244 {
3245     TCGv src;
3246     TCGv reg;
3247     TCGv dest;
3248     TCGv addr;
3249     int opsize;
3250 
3251     dest = tcg_temp_new();
3252 
3253     opsize = insn_opsize(insn);
3254     reg = DREG(insn, 9);
3255     if (insn & 0x100) {
3256         SRC_EA(env, src, opsize, 0, &addr);
3257         tcg_gen_and_i32(dest, src, reg);
3258         DEST_EA(env, insn, opsize, dest, &addr);
3259     } else {
3260         SRC_EA(env, src, opsize, 0, NULL);
3261         tcg_gen_and_i32(dest, src, reg);
3262         gen_partset_reg(opsize, reg, dest);
3263     }
3264     gen_logic_cc(s, dest, opsize);
3265 }
3266 
3267 DISAS_INSN(adda)
3268 {
3269     TCGv src;
3270     TCGv reg;
3271 
3272     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3273     reg = AREG(insn, 9);
3274     tcg_gen_add_i32(reg, reg, src);
3275 }
3276 
3277 static inline void gen_addx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3278 {
3279     TCGv tmp, zero;
3280 
3281     gen_flush_flags(s); /* compute old Z */
3282 
3283     /*
3284      * Perform addition with carry.
3285      * (X, N) = src + dest + X;
3286      */
3287 
3288     zero = tcg_constant_i32(0);
3289     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_X, zero, dest, zero);
3290     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_N, QREG_CC_X, src, zero);
3291     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3292 
3293     /* Compute signed-overflow for addition.  */
3294 
3295     tmp = tcg_temp_new();
3296     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3297     tcg_gen_xor_i32(tmp, dest, src);
3298     tcg_gen_andc_i32(QREG_CC_V, QREG_CC_V, tmp);
3299 
3300     /* Copy the rest of the results into place.  */
3301     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3302     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3303 
3304     set_cc_op(s, CC_OP_FLAGS);
3305 
3306     /* result is in QREG_CC_N */
3307 }
3308 
3309 DISAS_INSN(addx_reg)
3310 {
3311     TCGv dest;
3312     TCGv src;
3313     int opsize;
3314 
3315     opsize = insn_opsize(insn);
3316 
3317     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3318     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3319 
3320     gen_addx(s, src, dest, opsize);
3321 
3322     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3323 }
3324 
3325 DISAS_INSN(addx_mem)
3326 {
3327     TCGv src;
3328     TCGv addr_src;
3329     TCGv dest;
3330     TCGv addr_dest;
3331     int opsize;
3332 
3333     opsize = insn_opsize(insn);
3334 
3335     addr_src = AREG(insn, 0);
3336     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3337     src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3338 
3339     addr_dest = AREG(insn, 9);
3340     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3341     dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3342 
3343     gen_addx(s, src, dest, opsize);
3344 
3345     gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3346 }
3347 
3348 static inline void shift_im(DisasContext *s, uint16_t insn, int opsize)
3349 {
3350     int count = (insn >> 9) & 7;
3351     int logical = insn & 8;
3352     int left = insn & 0x100;
3353     int bits = opsize_bytes(opsize) * 8;
3354     TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
3355 
3356     if (count == 0) {
3357         count = 8;
3358     }
3359 
3360     tcg_gen_movi_i32(QREG_CC_V, 0);
3361     if (left) {
3362         tcg_gen_shri_i32(QREG_CC_C, reg, bits - count);
3363         tcg_gen_shli_i32(QREG_CC_N, reg, count);
3364 
3365         /*
3366          * Note that ColdFire always clears V (done above),
3367          * while M68000 sets V if the most significant bit is changed at
3368          * any time during the shift operation.
3369          */
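             /*
              * For 0 < count < bits, the check below is equivalent to
              *     V = (reg >> (bits - 1)) != (reg >> (bits - count - 1))
              * with arithmetic shifts: V is set iff the top count + 1 bits
              * of reg are not all copies of the sign bit.
              */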
3370         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
3371             /* if shift count >= bits, V is (reg != 0) */
3372             if (count >= bits) {
3373                 tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, reg, QREG_CC_V);
3374             } else {
3375                 TCGv t0 = tcg_temp_new();
3376                 tcg_gen_sari_i32(QREG_CC_V, reg, bits - 1);
3377                 tcg_gen_sari_i32(t0, reg, bits - count - 1);
3378                 tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, t0);
3379             }
3380             tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3381         }
3382     } else {
3383         tcg_gen_shri_i32(QREG_CC_C, reg, count - 1);
3384         if (logical) {
3385             tcg_gen_shri_i32(QREG_CC_N, reg, count);
3386         } else {
3387             tcg_gen_sari_i32(QREG_CC_N, reg, count);
3388         }
3389     }
3390 
3391     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3392     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3393     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3394     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3395 
3396     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3397     set_cc_op(s, CC_OP_FLAGS);
3398 }
3399 
3400 static inline void shift_reg(DisasContext *s, uint16_t insn, int opsize)
3401 {
3402     int logical = insn & 8;
3403     int left = insn & 0x100;
3404     int bits = opsize_bytes(opsize) * 8;
3405     TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
3406     TCGv s32;
3407     TCGv_i64 t64, s64;
3408 
3409     t64 = tcg_temp_new_i64();
3410     s64 = tcg_temp_new_i64();
3411     s32 = tcg_temp_new();
3412 
3413     /*
3414      * Note that m68k truncates the shift count modulo 64, not 32.
3415      * In addition, a 64-bit shift makes it easy to find "the last
3416      * bit shifted out", for the carry flag.
3417      */
3418     tcg_gen_andi_i32(s32, DREG(insn, 9), 63);
3419     tcg_gen_extu_i32_i64(s64, s32);
3420     tcg_gen_extu_i32_i64(t64, reg);
3421 
3422     /* Optimistically set V=0.  Also used as a zero source below.  */
3423     tcg_gen_movi_i32(QREG_CC_V, 0);
3424     if (left) {
3425         tcg_gen_shl_i64(t64, t64, s64);
3426 
3427         if (opsize == OS_LONG) {
3428             tcg_gen_extr_i64_i32(QREG_CC_N, QREG_CC_C, t64);
3429             /* Note that C=0 if shift count is 0, and we get that for free.  */
3430         } else {
3431             TCGv zero = tcg_constant_i32(0);
3432             tcg_gen_extrl_i64_i32(QREG_CC_N, t64);
3433             tcg_gen_shri_i32(QREG_CC_C, QREG_CC_N, bits);
3434             tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3435                                 s32, zero, zero, QREG_CC_C);
3436         }
3437         tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3438 
3439         /* X = C, but only if the shift count was non-zero.  */
3440         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3441                             QREG_CC_C, QREG_CC_X);
3442 
3443         /*
3444          * M68000 sets V if the most significant bit is changed at
3445          * any time during the shift operation.  Do this via creating
3446          * an extension of the sign bit, comparing, and discarding
3447          * the bits below the sign bit.  I.e.
3448          *     int64_t s = (intN_t)reg;
3449          *     int64_t t = (int64_t)(intN_t)reg << count;
3450          *     V = ((s ^ t) & (-1 << (bits - 1))) != 0
3451          */
3452         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
3453             TCGv_i64 tt = tcg_constant_i64(32);
3454             /* if shift is greater than 32, use 32 */
3455             tcg_gen_movcond_i64(TCG_COND_GT, s64, s64, tt, tt, s64);
3456             /* Sign extend the input to 64 bits; re-do the shift.  */
3457             tcg_gen_ext_i32_i64(t64, reg);
3458             tcg_gen_shl_i64(s64, t64, s64);
3459             /* Clear all bits that are unchanged.  */
3460             tcg_gen_xor_i64(t64, t64, s64);
3461             /* Ignore the bits below the sign bit.  */
3462             tcg_gen_andi_i64(t64, t64, -1ULL << (bits - 1));
3463             /* If any bits remain set, we have overflow.  */
3464             tcg_gen_setcondi_i64(TCG_COND_NE, t64, t64, 0);
3465             tcg_gen_extrl_i64_i32(QREG_CC_V, t64);
3466             tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3467         }
3468     } else {
3469         tcg_gen_shli_i64(t64, t64, 32);
3470         if (logical) {
3471             tcg_gen_shr_i64(t64, t64, s64);
3472         } else {
3473             tcg_gen_sar_i64(t64, t64, s64);
3474         }
3475         tcg_gen_extr_i64_i32(QREG_CC_C, QREG_CC_N, t64);
3476 
3477         /* Note that C=0 if shift count is 0, and we get that for free.  */
3478         tcg_gen_shri_i32(QREG_CC_C, QREG_CC_C, 31);
3479 
3480         /* X = C, but only if the shift count was non-zero.  */
3481         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3482                             QREG_CC_C, QREG_CC_X);
3483     }
3484     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3485     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3486 
3487     /* Write back the result.  */
3488     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3489     set_cc_op(s, CC_OP_FLAGS);
3490 }
3491 
3492 DISAS_INSN(shift8_im)
3493 {
3494     shift_im(s, insn, OS_BYTE);
3495 }
3496 
3497 DISAS_INSN(shift16_im)
3498 {
3499     shift_im(s, insn, OS_WORD);
3500 }
3501 
3502 DISAS_INSN(shift_im)
3503 {
3504     shift_im(s, insn, OS_LONG);
3505 }
3506 
3507 DISAS_INSN(shift8_reg)
3508 {
3509     shift_reg(s, insn, OS_BYTE);
3510 }
3511 
3512 DISAS_INSN(shift16_reg)
3513 {
3514     shift_reg(s, insn, OS_WORD);
3515 }
3516 
3517 DISAS_INSN(shift_reg)
3518 {
3519     shift_reg(s, insn, OS_LONG);
3520 }
3521 
3522 DISAS_INSN(shift_mem)
3523 {
3524     int logical = insn & 8;
3525     int left = insn & 0x100;
3526     TCGv src;
3527     TCGv addr;
3528 
3529     SRC_EA(env, src, OS_WORD, !logical, &addr);
3530     tcg_gen_movi_i32(QREG_CC_V, 0);
3531     if (left) {
3532         tcg_gen_shri_i32(QREG_CC_C, src, 15);
3533         tcg_gen_shli_i32(QREG_CC_N, src, 1);
3534 
3535         /*
3536          * Note that ColdFire always clears V,
3537          * while M68000 sets V if the most significant bit is changed at
3538          * any time during the shift operation.
3539          */
3540         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
3541             src = gen_extend(s, src, OS_WORD, 1);
3542             tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3543         }
3544     } else {
3545         tcg_gen_mov_i32(QREG_CC_C, src);
3546         if (logical) {
3547             tcg_gen_shri_i32(QREG_CC_N, src, 1);
3548         } else {
3549             tcg_gen_sari_i32(QREG_CC_N, src, 1);
3550         }
3551     }
3552 
3553     gen_ext(QREG_CC_N, QREG_CC_N, OS_WORD, 1);
3554     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3555     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3556     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3557 
3558     DEST_EA(env, insn, OS_WORD, QREG_CC_N, &addr);
3559     set_cc_op(s, CC_OP_FLAGS);
3560 }
3561 
3562 static void rotate(TCGv reg, TCGv shift, int left, int size)
3563 {
3564     switch (size) {
3565     case 8:
3566         /* Replicate the 8-bit input so that a 32-bit rotate works.  */
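             /*
              * e.g. 0x000000ab becomes 0xabababab; the pattern repeats
              * every 8 bits, so a 32-bit rotation by any count is
              * congruent to an 8-bit rotation by the same count modulo 8.
              */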
3567         tcg_gen_ext8u_i32(reg, reg);
3568         tcg_gen_muli_i32(reg, reg, 0x01010101);
3569         goto do_long;
3570     case 16:
3571         /* Replicate the 16-bit input so that a 32-bit rotate works.  */
3572         tcg_gen_deposit_i32(reg, reg, reg, 16, 16);
3573         goto do_long;
3574     do_long:
3575     default:
3576         if (left) {
3577             tcg_gen_rotl_i32(reg, reg, shift);
3578         } else {
3579             tcg_gen_rotr_i32(reg, reg, shift);
3580         }
3581     }
3582 
3583     /* compute flags */
3584 
3585     switch (size) {
3586     case 8:
3587         tcg_gen_ext8s_i32(reg, reg);
3588         break;
3589     case 16:
3590         tcg_gen_ext16s_i32(reg, reg);
3591         break;
3592     default:
3593         break;
3594     }
3595 
3596     /* QREG_CC_X is not affected */
3597 
3598     tcg_gen_mov_i32(QREG_CC_N, reg);
3599     tcg_gen_mov_i32(QREG_CC_Z, reg);
3600 
3601     if (left) {
3602         tcg_gen_andi_i32(QREG_CC_C, reg, 1);
3603     } else {
3604         tcg_gen_shri_i32(QREG_CC_C, reg, 31);
3605     }
3606 
3607     tcg_gen_movi_i32(QREG_CC_V, 0); /* always cleared */
3608 }
3609 
3610 static void rotate_x_flags(TCGv reg, TCGv X, int size)
3611 {
3612     switch (size) {
3613     case 8:
3614         tcg_gen_ext8s_i32(reg, reg);
3615         break;
3616     case 16:
3617         tcg_gen_ext16s_i32(reg, reg);
3618         break;
3619     default:
3620         break;
3621     }
3622     tcg_gen_mov_i32(QREG_CC_N, reg);
3623     tcg_gen_mov_i32(QREG_CC_Z, reg);
3624     tcg_gen_mov_i32(QREG_CC_X, X);
3625     tcg_gen_mov_i32(QREG_CC_C, X);
3626     tcg_gen_movi_i32(QREG_CC_V, 0);
3627 }
3628 
3629 /* Result of rotate_x() is valid if 0 <= shift <= size */
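     /*
      * Rotate through X is a rotation of the (size + 1)-bit value [X:reg]:
      * the three shifted terms below place reg and X at the positions they
      * occupy after such a rotation, and the new X is then read back from
      * bit "size" of the composite value.
      */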
3630 static TCGv rotate_x(TCGv reg, TCGv shift, int left, int size)
3631 {
3632     TCGv X, shl, shr, shx, sz, zero;
3633 
3634     sz = tcg_constant_i32(size);
3635 
3636     shr = tcg_temp_new();
3637     shl = tcg_temp_new();
3638     shx = tcg_temp_new();
3639     if (left) {
3640         tcg_gen_mov_i32(shl, shift);      /* shl = shift */
3641         tcg_gen_movi_i32(shr, size + 1);
3642         tcg_gen_sub_i32(shr, shr, shift); /* shr = size + 1 - shift */
3643         tcg_gen_subi_i32(shx, shift, 1);  /* shx = shift - 1 */
3644         /* shx = shx < 0 ? size : shx; */
3645         zero = tcg_constant_i32(0);
3646         tcg_gen_movcond_i32(TCG_COND_LT, shx, shx, zero, sz, shx);
3647     } else {
3648         tcg_gen_mov_i32(shr, shift);      /* shr = shift */
3649         tcg_gen_movi_i32(shl, size + 1);
3650         tcg_gen_sub_i32(shl, shl, shift); /* shl = size + 1 - shift */
3651         tcg_gen_sub_i32(shx, sz, shift); /* shx = size - shift */
3652     }
3653 
3654     /* reg = (reg << shl) | (reg >> shr) | (x << shx); */
3655 
3656     tcg_gen_shl_i32(shl, reg, shl);
3657     tcg_gen_shr_i32(shr, reg, shr);
3658     tcg_gen_or_i32(reg, shl, shr);
3659     tcg_gen_shl_i32(shx, QREG_CC_X, shx);
3660     tcg_gen_or_i32(reg, reg, shx);
3661 
3662     /* X = (reg >> size) & 1 */
3663 
3664     X = tcg_temp_new();
3665     tcg_gen_extract_i32(X, reg, size, 1);
3666 
3667     return X;
3668 }
3669 
3670 /* Result of rotate32_x() is valid if 0 <= shift < 33 */
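     /*
      * Same idea for the 33-bit value [X:reg], but carried out in a 64-bit
      * temporary: the rotation spills the interesting bits across the two
      * 32-bit halves, from which the new X and the rotated register value
      * are re-extracted; a final movcond leaves reg and X untouched when
      * the shift count is zero.
      */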
3671 static TCGv rotate32_x(TCGv reg, TCGv shift, int left)
3672 {
3673     TCGv_i64 t0, shift64;
3674     TCGv X, lo, hi, zero;
3675 
3676     shift64 = tcg_temp_new_i64();
3677     tcg_gen_extu_i32_i64(shift64, shift);
3678 
3679     t0 = tcg_temp_new_i64();
3680 
3681     X = tcg_temp_new();
3682     lo = tcg_temp_new();
3683     hi = tcg_temp_new();
3684 
3685     if (left) {
3686         /* create [reg:X:..] */
3687 
3688         tcg_gen_shli_i32(lo, QREG_CC_X, 31);
3689         tcg_gen_concat_i32_i64(t0, lo, reg);
3690 
3691         /* rotate */
3692 
3693         tcg_gen_rotl_i64(t0, t0, shift64);
3694 
3695         /* result is [reg:..:reg:X] */
3696 
3697         tcg_gen_extr_i64_i32(lo, hi, t0);
3698         tcg_gen_andi_i32(X, lo, 1);
3699 
3700         tcg_gen_shri_i32(lo, lo, 1);
3701     } else {
3702         /* create [..:X:reg] */
3703 
3704         tcg_gen_concat_i32_i64(t0, reg, QREG_CC_X);
3705 
3706         tcg_gen_rotr_i64(t0, t0, shift64);
3707 
3708         /* result is value: [X:reg:..:reg] */
3709 
3710         tcg_gen_extr_i64_i32(lo, hi, t0);
3711 
3712         /* extract X */
3713 
3714         tcg_gen_shri_i32(X, hi, 31);
3715 
3716         /* extract result */
3717 
3718         tcg_gen_shli_i32(hi, hi, 1);
3719     }
3720     tcg_gen_or_i32(lo, lo, hi);
3721 
3722     /* if shift == 0, register and X are not affected */
3723 
3724     zero = tcg_constant_i32(0);
3725     tcg_gen_movcond_i32(TCG_COND_EQ, X, shift, zero, QREG_CC_X, X);
3726     tcg_gen_movcond_i32(TCG_COND_EQ, reg, shift, zero, reg, lo);
3727 
3728     return X;
3729 }
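
/*
 * rotate32_x() handles the 32-bit case by concatenating X and the operand
 * into one 64-bit value, doing a single 64-bit rotate, then splitting the
 * result back into the rotated register and the new X; the two movconds at
 * the end keep both unchanged when the shift count is zero.
 */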
3730 
3731 DISAS_INSN(rotate_im)
3732 {
3733     TCGv shift;
3734     int tmp;
3735     int left = (insn & 0x100);
3736 
3737     tmp = (insn >> 9) & 7;
3738     if (tmp == 0) {
3739         tmp = 8;
3740     }
3741 
3742     shift = tcg_constant_i32(tmp);
3743     if (insn & 8) {
3744         rotate(DREG(insn, 0), shift, left, 32);
3745     } else {
3746         TCGv X = rotate32_x(DREG(insn, 0), shift, left);
3747         rotate_x_flags(DREG(insn, 0), X, 32);
3748     }
3749 
3750     set_cc_op(s, CC_OP_FLAGS);
3751 }
3752 
3753 DISAS_INSN(rotate8_im)
3754 {
3755     int left = (insn & 0x100);
3756     TCGv reg;
3757     TCGv shift;
3758     int tmp;
3759 
3760     reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
3761 
3762     tmp = (insn >> 9) & 7;
3763     if (tmp == 0) {
3764         tmp = 8;
3765     }
3766 
3767     shift = tcg_constant_i32(tmp);
3768     if (insn & 8) {
3769         rotate(reg, shift, left, 8);
3770     } else {
3771         TCGv X = rotate_x(reg, shift, left, 8);
3772         rotate_x_flags(reg, X, 8);
3773     }
3774     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3775     set_cc_op(s, CC_OP_FLAGS);
3776 }
3777 
3778 DISAS_INSN(rotate16_im)
3779 {
3780     int left = (insn & 0x100);
3781     TCGv reg;
3782     TCGv shift;
3783     int tmp;
3784 
3785     reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
3786     tmp = (insn >> 9) & 7;
3787     if (tmp == 0) {
3788         tmp = 8;
3789     }
3790 
3791     shift = tcg_constant_i32(tmp);
3792     if (insn & 8) {
3793         rotate(reg, shift, left, 16);
3794     } else {
3795         TCGv X = rotate_x(reg, shift, left, 16);
3796         rotate_x_flags(reg, X, 16);
3797     }
3798     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3799     set_cc_op(s, CC_OP_FLAGS);
3800 }
3801 
3802 DISAS_INSN(rotate_reg)
3803 {
3804     TCGv reg;
3805     TCGv src;
3806     TCGv t0, t1;
3807     int left = (insn & 0x100);
3808 
3809     reg = DREG(insn, 0);
3810     src = DREG(insn, 9);
3811     /* shift in [0..63] */
3812     t0 = tcg_temp_new();
3813     tcg_gen_andi_i32(t0, src, 63);
3814     t1 = tcg_temp_new_i32();
3815     if (insn & 8) {
3816         tcg_gen_andi_i32(t1, src, 31);
3817         rotate(reg, t1, left, 32);
3818         /* if shift == 0, clear C */
3819         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3820                             t0, QREG_CC_V /* 0 */,
3821                             QREG_CC_V /* 0 */, QREG_CC_C);
3822     } else {
3823         TCGv X;
3824         /* rotate through X is a 33-bit rotation: reduce modulo 33 */
3825         tcg_gen_movi_i32(t1, 33);
3826         tcg_gen_remu_i32(t1, t0, t1);
3827         X = rotate32_x(DREG(insn, 0), t1, left);
3828         rotate_x_flags(DREG(insn, 0), X, 32);
3829     }
3830     set_cc_op(s, CC_OP_FLAGS);
3831 }
3832 
3833 DISAS_INSN(rotate8_reg)
3834 {
3835     TCGv reg;
3836     TCGv src;
3837     TCGv t0, t1;
3838     int left = (insn & 0x100);
3839 
3840     reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
3841     src = DREG(insn, 9);
3842     /* shift in [0..63] */
3843     t0 = tcg_temp_new_i32();
3844     tcg_gen_andi_i32(t0, src, 63);
3845     t1 = tcg_temp_new_i32();
3846     if (insn & 8) {
3847         tcg_gen_andi_i32(t1, src, 7);
3848         rotate(reg, t1, left, 8);
3849         /* if shift == 0, clear C */
3850         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3851                             t0, QREG_CC_V /* 0 */,
3852                             QREG_CC_V /* 0 */, QREG_CC_C);
3853     } else {
3854         TCGv X;
3855         /* rotating a byte through X is a 9-bit rotation: reduce modulo 9 */
3856         tcg_gen_movi_i32(t1, 9);
3857         tcg_gen_remu_i32(t1, t0, t1);
3858         X = rotate_x(reg, t1, left, 8);
3859         rotate_x_flags(reg, X, 8);
3860     }
3861     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3862     set_cc_op(s, CC_OP_FLAGS);
3863 }
3864 
3865 DISAS_INSN(rotate16_reg)
3866 {
3867     TCGv reg;
3868     TCGv src;
3869     TCGv t0, t1;
3870     int left = (insn & 0x100);
3871 
3872     reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
3873     src = DREG(insn, 9);
3874     /* shift in [0..63] */
3875     t0 = tcg_temp_new_i32();
3876     tcg_gen_andi_i32(t0, src, 63);
3877     t1 = tcg_temp_new_i32();
3878     if (insn & 8) {
3879         tcg_gen_andi_i32(t1, src, 15);
3880         rotate(reg, t1, left, 16);
3881         /* if shift == 0, clear C */
3882         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3883                             t0, QREG_CC_V /* 0 */,
3884                             QREG_CC_V /* 0 */, QREG_CC_C);
3885     } else {
3886         TCGv X;
3887         /* rotating a word through X is a 17-bit rotation: reduce modulo 17 */
3888         tcg_gen_movi_i32(t1, 17);
3889         tcg_gen_remu_i32(t1, t0, t1);
3890         X = rotate_x(reg, t1, left, 16);
3891         rotate_x_flags(reg, X, 16);
3892     }
3893     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3894     set_cc_op(s, CC_OP_FLAGS);
3895 }
3896 
3897 DISAS_INSN(rotate_mem)
3898 {
3899     TCGv src;
3900     TCGv addr;
3901     TCGv shift;
3902     int left = (insn & 0x100);
3903 
3904     SRC_EA(env, src, OS_WORD, 0, &addr);
3905 
3906     shift = tcg_constant_i32(1);
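    /* Memory rotates operate on a word and always rotate by one bit. */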
3907     if (insn & 0x0200) {
3908         rotate(src, shift, left, 16);
3909     } else {
3910         TCGv X = rotate_x(src, shift, left, 16);
3911         rotate_x_flags(src, X, 16);
3912     }
3913     DEST_EA(env, insn, OS_WORD, src, &addr);
3914     set_cc_op(s, CC_OP_FLAGS);
3915 }
3916 
3917 DISAS_INSN(bfext_reg)
3918 {
3919     int ext = read_im16(env, s);
3920     int is_sign = insn & 0x200;
3921     TCGv src = DREG(insn, 0);
3922     TCGv dst = DREG(ext, 12);
3923     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
3924     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
3925     int pos = 32 - ofs - len;        /* little bit-endian */
3926     TCGv tmp = tcg_temp_new();
3927     TCGv shift;
3928 
3929     /*
3930      * In general, we're going to rotate the field so that it's at the
3931      * top of the word and then right-shift by the complement of the
3932      * width to extend the field.
3933      */
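    /*
     * Bit-field offsets count from the MSB, so e.g. ofs = 4, len = 8 names
     * bits 27..20 in conventional numbering and pos = 32 - 4 - 8 = 20; when
     * pos would be negative the field wraps around bit 0 and is rotated to
     * the top of the word first.
     */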
3934     if (ext & 0x20) {
3935         /* Variable width.  */
3936         if (ext & 0x800) {
3937             /* Variable offset.  */
3938             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3939             tcg_gen_rotl_i32(tmp, src, tmp);
3940         } else {
3941             tcg_gen_rotli_i32(tmp, src, ofs);
3942         }
3943 
3944         shift = tcg_temp_new();
3945         tcg_gen_neg_i32(shift, DREG(ext, 0));
3946         tcg_gen_andi_i32(shift, shift, 31);
3947         tcg_gen_sar_i32(QREG_CC_N, tmp, shift);
3948         if (is_sign) {
3949             tcg_gen_mov_i32(dst, QREG_CC_N);
3950         } else {
3951             tcg_gen_shr_i32(dst, tmp, shift);
3952         }
3953     } else {
3954         /* Immediate width.  */
3955         if (ext & 0x800) {
3956             /* Variable offset */
3957             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3958             tcg_gen_rotl_i32(tmp, src, tmp);
3959             src = tmp;
3960             pos = 32 - len;
3961         } else {
3962             /*
3963              * Immediate offset.  If the field doesn't wrap around the
3964              * end of the word, rely on (s)extract completely.
3965              */
3966             if (pos < 0) {
3967                 tcg_gen_rotli_i32(tmp, src, ofs);
3968                 src = tmp;
3969                 pos = 32 - len;
3970             }
3971         }
3972 
3973         tcg_gen_sextract_i32(QREG_CC_N, src, pos, len);
3974         if (is_sign) {
3975             tcg_gen_mov_i32(dst, QREG_CC_N);
3976         } else {
3977             tcg_gen_extract_i32(dst, src, pos, len);
3978         }
3979     }
3980 
3981     set_cc_op(s, CC_OP_LOGIC);
3982 }
3983 
3984 DISAS_INSN(bfext_mem)
3985 {
3986     int ext = read_im16(env, s);
3987     int is_sign = insn & 0x200;
3988     TCGv dest = DREG(ext, 12);
3989     TCGv addr, len, ofs;
3990 
3991     addr = gen_lea(env, s, insn, OS_UNSIZED);
3992     if (IS_NULL_QREG(addr)) {
3993         gen_addr_fault(s);
3994         return;
3995     }
3996 
3997     if (ext & 0x20) {
3998         len = DREG(ext, 0);
3999     } else {
4000         len = tcg_constant_i32(extract32(ext, 0, 5));
4001     }
4002     if (ext & 0x800) {
4003         ofs = DREG(ext, 6);
4004     } else {
4005         ofs = tcg_constant_i32(extract32(ext, 6, 5));
4006     }
4007 
4008     if (is_sign) {
4009         gen_helper_bfexts_mem(dest, cpu_env, addr, ofs, len);
4010         tcg_gen_mov_i32(QREG_CC_N, dest);
4011     } else {
4012         TCGv_i64 tmp = tcg_temp_new_i64();
4013         gen_helper_bfextu_mem(tmp, cpu_env, addr, ofs, len);
4014         tcg_gen_extr_i64_i32(dest, QREG_CC_N, tmp);
4015     }
4016     set_cc_op(s, CC_OP_LOGIC);
4017 }
4018 
4019 DISAS_INSN(bfop_reg)
4020 {
4021     int ext = read_im16(env, s);
4022     TCGv src = DREG(insn, 0);
4023     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4024     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4025     TCGv mask, tofs = NULL, tlen = NULL;
4026     bool is_bfffo = (insn & 0x0f00) == 0x0d00;
4027 
4028     if ((ext & 0x820) == 0) {
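    /*
     * In each path below, QREG_CC_N ends up holding the field rotated to
     * the top of the word (for the N/Z flags and the bfffo helper), while
     * 'mask' holds a mask with the field bits clear, so the operations
     * reduce to AND (bfclr), OR-with-complement (bfset) and XNOR (bfchg)
     * against that mask.
     */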
4029         /* Immediate width and offset.  */
4030         uint32_t maski = 0x7fffffffu >> (len - 1);
4031         if (ofs + len <= 32) {
4032             tcg_gen_shli_i32(QREG_CC_N, src, ofs);
4033         } else {
4034             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4035         }
4036         tcg_gen_andi_i32(QREG_CC_N, QREG_CC_N, ~maski);
4037 
4038         mask = tcg_constant_i32(ror32(maski, ofs));
4039         if (is_bfffo) {
4040             tofs = tcg_constant_i32(ofs);
4041             tlen = tcg_constant_i32(len);
4042         }
4043     } else {
4044         TCGv tmp = tcg_temp_new();
4045 
4046         mask = tcg_temp_new();
4047         if (ext & 0x20) {
4048             /* Variable width */
4049             tcg_gen_subi_i32(tmp, DREG(ext, 0), 1);
4050             tcg_gen_andi_i32(tmp, tmp, 31);
4051             tcg_gen_shr_i32(mask, tcg_constant_i32(0x7fffffffu), tmp);
4052             if (is_bfffo) {
4053                 tlen = tcg_temp_new();
4054                 tcg_gen_addi_i32(tlen, tmp, 1);
4055             }
4056         } else {
4057             /* Immediate width */
4058             tcg_gen_movi_i32(mask, 0x7fffffffu >> (len - 1));
4059             if (is_bfffo) {
4060                 tlen = tcg_constant_i32(len);
4061             }
4062         }
4063 
4064         if (ext & 0x800) {
4065             /* Variable offset */
4066             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4067             tcg_gen_rotl_i32(QREG_CC_N, src, tmp);
4068             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4069             tcg_gen_rotr_i32(mask, mask, tmp);
4070             if (is_bfffo) {
4071                 tofs = tmp;
4072             }
4073         } else {
4074             /* Immediate offset (and variable width) */
4075             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4076             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4077             tcg_gen_rotri_i32(mask, mask, ofs);
4078             if (is_bfffo) {
4079                 tofs = tcg_constant_i32(ofs);
4080             }
4081         }
4082     }
4083     set_cc_op(s, CC_OP_LOGIC);
4084 
4085     switch (insn & 0x0f00) {
4086     case 0x0a00: /* bfchg */
4087         tcg_gen_eqv_i32(src, src, mask);
4088         break;
4089     case 0x0c00: /* bfclr */
4090         tcg_gen_and_i32(src, src, mask);
4091         break;
4092     case 0x0d00: /* bfffo */
4093         gen_helper_bfffo_reg(DREG(ext, 12), QREG_CC_N, tofs, tlen);
4094         break;
4095     case 0x0e00: /* bfset */
4096         tcg_gen_orc_i32(src, src, mask);
4097         break;
4098     case 0x0800: /* bftst */
4099         /* flags already set; no other work to do.  */
4100         break;
4101     default:
4102         g_assert_not_reached();
4103     }
4104 }
4105 
4106 DISAS_INSN(bfop_mem)
4107 {
4108     int ext = read_im16(env, s);
4109     TCGv addr, len, ofs;
4110     TCGv_i64 t64;
4111 
4112     addr = gen_lea(env, s, insn, OS_UNSIZED);
4113     if (IS_NULL_QREG(addr)) {
4114         gen_addr_fault(s);
4115         return;
4116     }
4117 
4118     if (ext & 0x20) {
4119         len = DREG(ext, 0);
4120     } else {
4121         len = tcg_constant_i32(extract32(ext, 0, 5));
4122     }
4123     if (ext & 0x800) {
4124         ofs = DREG(ext, 6);
4125     } else {
4126         ofs = tcg_constant_i32(extract32(ext, 6, 5));
4127     }
4128 
4129     switch (insn & 0x0f00) {
4130     case 0x0a00: /* bfchg */
4131         gen_helper_bfchg_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4132         break;
4133     case 0x0c00: /* bfclr */
4134         gen_helper_bfclr_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4135         break;
4136     case 0x0d00: /* bfffo */
4137         t64 = tcg_temp_new_i64();
4138         gen_helper_bfffo_mem(t64, cpu_env, addr, ofs, len);
4139         tcg_gen_extr_i64_i32(DREG(ext, 12), QREG_CC_N, t64);
4140         break;
4141     case 0x0e00: /* bfset */
4142         gen_helper_bfset_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4143         break;
4144     case 0x0800: /* bftst */
4145         gen_helper_bfexts_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4146         break;
4147     default:
4148         g_assert_not_reached();
4149     }
4150     set_cc_op(s, CC_OP_LOGIC);
4151 }
4152 
4153 DISAS_INSN(bfins_reg)
4154 {
4155     int ext = read_im16(env, s);
4156     TCGv dst = DREG(insn, 0);
4157     TCGv src = DREG(ext, 12);
4158     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4159     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4160     int pos = 32 - ofs - len;        /* little bit-endian */
4161     TCGv tmp;
4162 
4163     tmp = tcg_temp_new();
4164 
4165     if (ext & 0x20) {
4166         /* Variable width */
4167         tcg_gen_neg_i32(tmp, DREG(ext, 0));
4168         tcg_gen_andi_i32(tmp, tmp, 31);
4169         tcg_gen_shl_i32(QREG_CC_N, src, tmp);
4170     } else {
4171         /* Immediate width */
4172         tcg_gen_shli_i32(QREG_CC_N, src, 32 - len);
4173     }
4174     set_cc_op(s, CC_OP_LOGIC);
4175 
4176     /* Immediate width and offset */
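    /*
     * When the field does not wrap past bit 0 (pos >= 0) a single deposit
     * suffices; otherwise the low 'len' bits of the source are rotated into
     * position and merged with the destination under a correspondingly
     * rotated mask.  The general case below does the same with run-time
     * width and offset values.
     */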
4177     if ((ext & 0x820) == 0) {
4178         /* Check for suitability for deposit.  */
4179         if (pos >= 0) {
4180             tcg_gen_deposit_i32(dst, dst, src, pos, len);
4181         } else {
4182             uint32_t maski = -2U << (len - 1);
4183             uint32_t roti = (ofs + len) & 31;
4184             tcg_gen_andi_i32(tmp, src, ~maski);
4185             tcg_gen_rotri_i32(tmp, tmp, roti);
4186             tcg_gen_andi_i32(dst, dst, ror32(maski, roti));
4187             tcg_gen_or_i32(dst, dst, tmp);
4188         }
4189     } else {
4190         TCGv mask = tcg_temp_new();
4191         TCGv rot = tcg_temp_new();
4192 
4193         if (ext & 0x20) {
4194             /* Variable width */
4195             tcg_gen_subi_i32(rot, DREG(ext, 0), 1);
4196             tcg_gen_andi_i32(rot, rot, 31);
4197             tcg_gen_movi_i32(mask, -2);
4198             tcg_gen_shl_i32(mask, mask, rot);
4199             tcg_gen_mov_i32(rot, DREG(ext, 0));
4200             tcg_gen_andc_i32(tmp, src, mask);
4201         } else {
4202             /* Immediate width (variable offset) */
4203             uint32_t maski = -2U << (len - 1);
4204             tcg_gen_andi_i32(tmp, src, ~maski);
4205             tcg_gen_movi_i32(mask, maski);
4206             tcg_gen_movi_i32(rot, len & 31);
4207         }
4208         if (ext & 0x800) {
4209             /* Variable offset */
4210             tcg_gen_add_i32(rot, rot, DREG(ext, 6));
4211         } else {
4212             /* Immediate offset (variable width) */
4213             tcg_gen_addi_i32(rot, rot, ofs);
4214         }
4215         tcg_gen_andi_i32(rot, rot, 31);
4216         tcg_gen_rotr_i32(mask, mask, rot);
4217         tcg_gen_rotr_i32(tmp, tmp, rot);
4218         tcg_gen_and_i32(dst, dst, mask);
4219         tcg_gen_or_i32(dst, dst, tmp);
4220     }
4221 }
4222 
4223 DISAS_INSN(bfins_mem)
4224 {
4225     int ext = read_im16(env, s);
4226     TCGv src = DREG(ext, 12);
4227     TCGv addr, len, ofs;
4228 
4229     addr = gen_lea(env, s, insn, OS_UNSIZED);
4230     if (IS_NULL_QREG(addr)) {
4231         gen_addr_fault(s);
4232         return;
4233     }
4234 
4235     if (ext & 0x20) {
4236         len = DREG(ext, 0);
4237     } else {
4238         len = tcg_constant_i32(extract32(ext, 0, 5));
4239     }
4240     if (ext & 0x800) {
4241         ofs = DREG(ext, 6);
4242     } else {
4243         ofs = tcg_constant_i32(extract32(ext, 6, 5));
4244     }
4245 
4246     gen_helper_bfins_mem(QREG_CC_N, cpu_env, addr, src, ofs, len);
4247     set_cc_op(s, CC_OP_LOGIC);
4248 }
4249 
4250 DISAS_INSN(ff1)
4251 {
4252     TCGv reg;
4253     reg = DREG(insn, 0);
4254     gen_logic_cc(s, reg, OS_LONG);
4255     gen_helper_ff1(reg, reg);
4256 }
4257 
4258 DISAS_INSN(chk)
4259 {
4260     TCGv src, reg;
4261     int opsize;
4262 
4263     switch ((insn >> 7) & 3) {
4264     case 3:
4265         opsize = OS_WORD;
4266         break;
4267     case 2:
4268         if (m68k_feature(env, M68K_FEATURE_CHK2)) {
4269             opsize = OS_LONG;
4270             break;
4271         }
4272         /* fallthru */
4273     default:
4274         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4275         return;
4276     }
4277     SRC_EA(env, src, opsize, 1, NULL);
4278     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
4279 
4280     gen_flush_flags(s);
4281     gen_helper_chk(cpu_env, reg, src);
4282 }
4283 
4284 DISAS_INSN(chk2)
4285 {
4286     uint16_t ext;
4287     TCGv addr1, addr2, bound1, bound2, reg;
4288     int opsize;
4289 
4290     switch ((insn >> 9) & 3) {
4291     case 0:
4292         opsize = OS_BYTE;
4293         break;
4294     case 1:
4295         opsize = OS_WORD;
4296         break;
4297     case 2:
4298         opsize = OS_LONG;
4299         break;
4300     default:
4301         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4302         return;
4303     }
4304 
4305     ext = read_im16(env, s);
4306     if ((ext & 0x0800) == 0) {
4307         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4308         return;
4309     }
4310 
4311     addr1 = gen_lea(env, s, insn, OS_UNSIZED);
4312     addr2 = tcg_temp_new();
4313     tcg_gen_addi_i32(addr2, addr1, opsize_bytes(opsize));
4314 
4315     bound1 = gen_load(s, opsize, addr1, 1, IS_USER(s));
4316     bound2 = gen_load(s, opsize, addr2, 1, IS_USER(s));
4317 
4318     reg = tcg_temp_new();
4319     if (ext & 0x8000) {
4320         tcg_gen_mov_i32(reg, AREG(ext, 12));
4321     } else {
4322         gen_ext(reg, DREG(ext, 12), opsize, 1);
4323     }
4324 
4325     gen_flush_flags(s);
4326     gen_helper_chk2(cpu_env, reg, bound1, bound2);
4327 }
4328 
4329 static void m68k_copy_line(TCGv dst, TCGv src, int index)
4330 {
4331     TCGv addr;
4332     TCGv_i64 t0, t1;
4333 
4334     addr = tcg_temp_new();
4335 
4336     t0 = tcg_temp_new_i64();
4337     t1 = tcg_temp_new_i64();
4338 
4339     tcg_gen_andi_i32(addr, src, ~15);
4340     tcg_gen_qemu_ld64(t0, addr, index);
4341     tcg_gen_addi_i32(addr, addr, 8);
4342     tcg_gen_qemu_ld64(t1, addr, index);
4343 
4344     tcg_gen_andi_i32(addr, dst, ~15);
4345     tcg_gen_qemu_st64(t0, addr, index);
4346     tcg_gen_addi_i32(addr, addr, 8);
4347     tcg_gen_qemu_st64(t1, addr, index);
4348 }
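
/*
 * MOVE16 transfers an aligned 16-byte line: both addresses are forced down
 * to 16-byte alignment and the copy is done as two 64-bit loads followed by
 * two 64-bit stores.
 */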
4349 
4350 DISAS_INSN(move16_reg)
4351 {
4352     int index = IS_USER(s);
4353     TCGv tmp;
4354     uint16_t ext;
4355 
4356     ext = read_im16(env, s);
4357     if ((ext & (1 << 15)) == 0) {
4358         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4359     }
4360 
4361     m68k_copy_line(AREG(ext, 12), AREG(insn, 0), index);
4362 
4363     /* Ax can be Ay, so save Ay before incrementing Ax */
4364     tmp = tcg_temp_new();
4365     tcg_gen_mov_i32(tmp, AREG(ext, 12));
4366     tcg_gen_addi_i32(AREG(insn, 0), AREG(insn, 0), 16);
4367     tcg_gen_addi_i32(AREG(ext, 12), tmp, 16);
4368 }
4369 
4370 DISAS_INSN(move16_mem)
4371 {
4372     int index = IS_USER(s);
4373     TCGv reg, addr;
4374 
4375     reg = AREG(insn, 0);
4376     addr = tcg_constant_i32(read_im32(env, s));
4377 
4378     if ((insn >> 3) & 1) {
4379         /* MOVE16 (xxx).L, (Ay) */
4380         m68k_copy_line(reg, addr, index);
4381     } else {
4382         /* MOVE16 (Ay), (xxx).L */
4383         m68k_copy_line(addr, reg, index);
4384     }
4385 
4386     if (((insn >> 3) & 2) == 0) {
4387         /* (Ay)+ */
4388         tcg_gen_addi_i32(reg, reg, 16);
4389     }
4390 }
4391 
4392 DISAS_INSN(strldsr)
4393 {
4394     uint16_t ext;
4395     uint32_t addr;
4396 
4397     addr = s->pc - 2;
4398     ext = read_im16(env, s);
4399     if (ext != 0x46FC) {
4400         gen_exception(s, addr, EXCP_ILLEGAL);
4401         return;
4402     }
4403     ext = read_im16(env, s);
4404     if (IS_USER(s) || (ext & SR_S) == 0) {
4405         gen_exception(s, addr, EXCP_PRIVILEGE);
4406         return;
4407     }
4408     gen_push(s, gen_get_sr(s));
4409     gen_set_sr_im(s, ext, 0);
4410     gen_exit_tb(s);
4411 }
4412 
4413 DISAS_INSN(move_from_sr)
4414 {
4415     TCGv sr;
4416 
4417     if (IS_USER(s) && m68k_feature(env, M68K_FEATURE_MOVEFROMSR_PRIV)) {
4418         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4419         return;
4420     }
4421     sr = gen_get_sr(s);
4422     DEST_EA(env, insn, OS_WORD, sr, NULL);
4423 }
4424 
4425 #if defined(CONFIG_SOFTMMU)
4426 DISAS_INSN(moves)
4427 {
4428     int opsize;
4429     uint16_t ext;
4430     TCGv reg;
4431     TCGv addr;
4432     int extend;
4433 
4434     if (IS_USER(s)) {
4435         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4436         return;
4437     }
4438 
4439     ext = read_im16(env, s);
4440 
4441     opsize = insn_opsize(insn);
4442 
4443     if (ext & 0x8000) {
4444         /* address register */
4445         reg = AREG(ext, 12);
4446         extend = 1;
4447     } else {
4448         /* data register */
4449         reg = DREG(ext, 12);
4450         extend = 0;
4451     }
4452 
4453     addr = gen_lea(env, s, insn, opsize);
4454     if (IS_NULL_QREG(addr)) {
4455         gen_addr_fault(s);
4456         return;
4457     }
4458 
4459     if (ext & 0x0800) {
4460         /* from reg to ea */
4461         gen_store(s, opsize, addr, reg, DFC_INDEX(s));
4462     } else {
4463         /* from ea to reg */
4464         TCGv tmp = gen_load(s, opsize, addr, 0, SFC_INDEX(s));
4465         if (extend) {
4466             gen_ext(reg, tmp, opsize, 1);
4467         } else {
4468             gen_partset_reg(opsize, reg, tmp);
4469         }
4470     }
4471     switch (extract32(insn, 3, 3)) {
4472     case 3: /* Indirect postincrement.  */
4473         tcg_gen_addi_i32(AREG(insn, 0), addr,
4474                          REG(insn, 0) == 7 && opsize == OS_BYTE
4475                          ? 2
4476                          : opsize_bytes(opsize));
4477         break;
4478     case 4: /* Indirect predecrement.  */
4479         tcg_gen_mov_i32(AREG(insn, 0), addr);
4480         break;
4481     }
4482 }
4483 
4484 DISAS_INSN(move_to_sr)
4485 {
4486     if (IS_USER(s)) {
4487         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4488         return;
4489     }
4490     gen_move_to_sr(env, s, insn, false);
4491     gen_exit_tb(s);
4492 }
4493 
4494 DISAS_INSN(move_from_usp)
4495 {
4496     if (IS_USER(s)) {
4497         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4498         return;
4499     }
4500     tcg_gen_ld_i32(AREG(insn, 0), cpu_env,
4501                    offsetof(CPUM68KState, sp[M68K_USP]));
4502 }
4503 
4504 DISAS_INSN(move_to_usp)
4505 {
4506     if (IS_USER(s)) {
4507         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4508         return;
4509     }
4510     tcg_gen_st_i32(AREG(insn, 0), cpu_env,
4511                    offsetof(CPUM68KState, sp[M68K_USP]));
4512 }
4513 
4514 DISAS_INSN(halt)
4515 {
4516     if (IS_USER(s)) {
4517         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4518         return;
4519     }
4520 
4521     gen_exception(s, s->pc, EXCP_HALT_INSN);
4522 }
4523 
4524 DISAS_INSN(stop)
4525 {
4526     uint16_t ext;
4527 
4528     if (IS_USER(s)) {
4529         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4530         return;
4531     }
4532 
4533     ext = read_im16(env, s);
4534 
4535     gen_set_sr_im(s, ext, 0);
4536     tcg_gen_movi_i32(cpu_halted, 1);
4537     gen_exception(s, s->pc, EXCP_HLT);
4538 }
4539 
4540 DISAS_INSN(rte)
4541 {
4542     if (IS_USER(s)) {
4543         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4544         return;
4545     }
4546     gen_exception(s, s->base.pc_next, EXCP_RTE);
4547 }
4548 
4549 DISAS_INSN(cf_movec)
4550 {
4551     uint16_t ext;
4552     TCGv reg;
4553 
4554     if (IS_USER(s)) {
4555         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4556         return;
4557     }
4558 
4559     ext = read_im16(env, s);
4560 
4561     if (ext & 0x8000) {
4562         reg = AREG(ext, 12);
4563     } else {
4564         reg = DREG(ext, 12);
4565     }
4566     gen_helper_cf_movec_to(cpu_env, tcg_constant_i32(ext & 0xfff), reg);
4567     gen_exit_tb(s);
4568 }
4569 
4570 DISAS_INSN(m68k_movec)
4571 {
4572     uint16_t ext;
4573     TCGv reg, creg;
4574 
4575     if (IS_USER(s)) {
4576         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4577         return;
4578     }
4579 
4580     ext = read_im16(env, s);
4581 
4582     if (ext & 0x8000) {
4583         reg = AREG(ext, 12);
4584     } else {
4585         reg = DREG(ext, 12);
4586     }
4587     creg = tcg_constant_i32(ext & 0xfff);
4588     if (insn & 1) {
4589         gen_helper_m68k_movec_to(cpu_env, creg, reg);
4590     } else {
4591         gen_helper_m68k_movec_from(reg, cpu_env, creg);
4592     }
4593     gen_exit_tb(s);
4594 }
4595 
4596 DISAS_INSN(intouch)
4597 {
4598     if (IS_USER(s)) {
4599         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4600         return;
4601     }
4602     /* ICache fetch.  Implement as no-op.  */
4603 }
4604 
4605 DISAS_INSN(cpushl)
4606 {
4607     if (IS_USER(s)) {
4608         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4609         return;
4610     }
4611     /* Cache push/invalidate.  Implement as no-op.  */
4612 }
4613 
4614 DISAS_INSN(cpush)
4615 {
4616     if (IS_USER(s)) {
4617         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4618         return;
4619     }
4620     /* Cache push/invalidate.  Implement as no-op.  */
4621 }
4622 
4623 DISAS_INSN(cinv)
4624 {
4625     if (IS_USER(s)) {
4626         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4627         return;
4628     }
4629     /* Invalidate cache line.  Implement as no-op.  */
4630 }
4631 
4632 #if defined(CONFIG_SOFTMMU)
4633 DISAS_INSN(pflush)
4634 {
4635     TCGv opmode;
4636 
4637     if (IS_USER(s)) {
4638         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4639         return;
4640     }
4641 
4642     opmode = tcg_constant_i32((insn >> 3) & 3);
4643     gen_helper_pflush(cpu_env, AREG(insn, 0), opmode);
4644 }
4645 
4646 DISAS_INSN(ptest)
4647 {
4648     TCGv is_read;
4649 
4650     if (IS_USER(s)) {
4651         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4652         return;
4653     }
4654     is_read = tcg_constant_i32((insn >> 5) & 1);
4655     gen_helper_ptest(cpu_env, AREG(insn, 0), is_read);
4656 }
4657 #endif
4658 
4659 DISAS_INSN(wddata)
4660 {
4661     gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4662 }
4663 
4664 DISAS_INSN(wdebug)
4665 {
4666     if (IS_USER(s)) {
4667         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4668         return;
4669     }
4670     /* TODO: Implement wdebug.  */
4671     cpu_abort(env_cpu(env), "WDEBUG not implemented");
4672 }
4673 #endif
4674 
4675 DISAS_INSN(trap)
4676 {
4677     gen_exception(s, s->pc, EXCP_TRAP0 + (insn & 0xf));
4678 }
4679 
4680 static void do_trapcc(DisasContext *s, DisasCompare *c)
4681 {
4682     if (c->tcond != TCG_COND_NEVER) {
4683         TCGLabel *over = NULL;
4684 
4685         update_cc_op(s);
4686 
4687         if (c->tcond != TCG_COND_ALWAYS) {
4688             /* Jump over if !c. */
4689             over = gen_new_label();
4690             tcg_gen_brcond_i32(tcg_invert_cond(c->tcond), c->v1, c->v2, over);
4691         }
4692 
4693         tcg_gen_movi_i32(QREG_PC, s->pc);
4694         gen_raise_exception_format2(s, EXCP_TRAPCC, s->base.pc_next);
4695 
4696         if (over != NULL) {
4697             gen_set_label(over);
4698             s->base.is_jmp = DISAS_NEXT;
4699         }
4700     }
4701 }
4702 
4703 DISAS_INSN(trapcc)
4704 {
4705     DisasCompare c;
4706 
4707     /* Consume and discard the immediate operand. */
4708     switch (extract32(insn, 0, 3)) {
4709     case 2: /* trapcc.w */
4710         (void)read_im16(env, s);
4711         break;
4712     case 3: /* trapcc.l */
4713         (void)read_im32(env, s);
4714         break;
4715     case 4: /* trapcc (no operand) */
4716         break;
4717     default:
4718         /* trapcc registered with only valid opmodes */
4719         g_assert_not_reached();
4720     }
4721 
4722     gen_cc_cond(&c, s, extract32(insn, 8, 4));
4723     do_trapcc(s, &c);
4724 }
4725 
4726 DISAS_INSN(trapv)
4727 {
4728     DisasCompare c;
4729 
4730     gen_cc_cond(&c, s, 9); /* V set */
4731     do_trapcc(s, &c);
4732 }
4733 
4734 static void gen_load_fcr(DisasContext *s, TCGv res, int reg)
4735 {
4736     switch (reg) {
4737     case M68K_FPIAR:
4738         tcg_gen_movi_i32(res, 0);
4739         break;
4740     case M68K_FPSR:
4741         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpsr));
4742         break;
4743     case M68K_FPCR:
4744         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpcr));
4745         break;
4746     }
4747 }
4748 
4749 static void gen_store_fcr(DisasContext *s, TCGv val, int reg)
4750 {
4751     switch (reg) {
4752     case M68K_FPIAR:
4753         break;
4754     case M68K_FPSR:
4755         tcg_gen_st_i32(val, cpu_env, offsetof(CPUM68KState, fpsr));
4756         break;
4757     case M68K_FPCR:
4758         gen_helper_set_fpcr(cpu_env, val);
4759         break;
4760     }
4761 }
4762 
4763 static void gen_qemu_store_fcr(DisasContext *s, TCGv addr, int reg)
4764 {
4765     int index = IS_USER(s);
4766     TCGv tmp;
4767 
4768     tmp = tcg_temp_new();
4769     gen_load_fcr(s, tmp, reg);
4770     tcg_gen_qemu_st32(tmp, addr, index);
4771 }
4772 
4773 static void gen_qemu_load_fcr(DisasContext *s, TCGv addr, int reg)
4774 {
4775     int index = IS_USER(s);
4776     TCGv tmp;
4777 
4778     tmp = tcg_temp_new();
4779     tcg_gen_qemu_ld32u(tmp, addr, index);
4780     gen_store_fcr(s, tmp, reg);
4781 }
4782 
4783 
4784 static void gen_op_fmove_fcr(CPUM68KState *env, DisasContext *s,
4785                              uint32_t insn, uint32_t ext)
4786 {
4787     int mask = (ext >> 10) & 7;
4788     int is_write = (ext >> 13) & 1;
4789     int mode = extract32(insn, 3, 3);
4790     int i;
4791     TCGv addr, tmp;
4792 
4793     switch (mode) {
4794     case 0: /* Dn */
4795         if (mask != M68K_FPIAR && mask != M68K_FPSR && mask != M68K_FPCR) {
4796             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4797             return;
4798         }
4799         if (is_write) {
4800             gen_load_fcr(s, DREG(insn, 0), mask);
4801         } else {
4802             gen_store_fcr(s, DREG(insn, 0), mask);
4803         }
4804         return;
4805     case 1: /* An, only with FPIAR */
4806         if (mask != M68K_FPIAR) {
4807             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4808             return;
4809         }
4810         if (is_write) {
4811             gen_load_fcr(s, AREG(insn, 0), mask);
4812         } else {
4813             gen_store_fcr(s, AREG(insn, 0), mask);
4814         }
4815         return;
4816     case 7: /* Immediate */
4817         if (REG(insn, 0) == 4) {
4818             if (is_write ||
4819                 (mask != M68K_FPIAR && mask != M68K_FPSR &&
4820                  mask != M68K_FPCR)) {
4821                 gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4822                 return;
4823             }
4824             tmp = tcg_constant_i32(read_im32(env, s));
4825             gen_store_fcr(s, tmp, mask);
4826             return;
4827         }
4828         break;
4829     default:
4830         break;
4831     }
4832 
4833     tmp = gen_lea(env, s, insn, OS_LONG);
4834     if (IS_NULL_QREG(tmp)) {
4835         gen_addr_fault(s);
4836         return;
4837     }
4838 
4839     addr = tcg_temp_new();
4840     tcg_gen_mov_i32(addr, tmp);
4841 
4842     /*
4843      * mask:
4844      *
4845      * 0b100 Floating-Point Control Register
4846      * 0b010 Floating-Point Status Register
4847      * 0b001 Floating-Point Instruction Address Register
4848      *
4849      */
4850 
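    /*
     * More than one of the three bits may be set, in which case each
     * selected register is transferred as a longword in turn; the -(An)
     * store and (An)+ forms write the final address back to An.
     */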
4851     if (is_write && mode == 4) {
4852         for (i = 2; i >= 0; i--, mask >>= 1) {
4853             if (mask & 1) {
4854                 gen_qemu_store_fcr(s, addr, 1 << i);
4855                 if (mask != 1) {
4856                     tcg_gen_subi_i32(addr, addr, opsize_bytes(OS_LONG));
4857                 }
4858             }
4859         }
4860         tcg_gen_mov_i32(AREG(insn, 0), addr);
4861     } else {
4862         for (i = 0; i < 3; i++, mask >>= 1) {
4863             if (mask & 1) {
4864                 if (is_write) {
4865                     gen_qemu_store_fcr(s, addr, 1 << i);
4866                 } else {
4867                     gen_qemu_load_fcr(s, addr, 1 << i);
4868                 }
4869                 if (mask != 1 || mode == 3) {
4870                     tcg_gen_addi_i32(addr, addr, opsize_bytes(OS_LONG));
4871                 }
4872             }
4873         }
4874         if (mode == 3) {
4875             tcg_gen_mov_i32(AREG(insn, 0), addr);
4876         }
4877     }
4878 }
4879 
4880 static void gen_op_fmovem(CPUM68KState *env, DisasContext *s,
4881                           uint32_t insn, uint32_t ext)
4882 {
4883     int opsize;
4884     TCGv addr, tmp;
4885     int mode = (ext >> 11) & 0x3;
4886     int is_load = ((ext & 0x2000) == 0);
4887 
4888     if (m68k_feature(s->env, M68K_FEATURE_FPU)) {
4889         opsize = OS_EXTENDED;
4890     } else {
4891         opsize = OS_DOUBLE;  /* FIXME */
4892     }
4893 
4894     addr = gen_lea(env, s, insn, opsize);
4895     if (IS_NULL_QREG(addr)) {
4896         gen_addr_fault(s);
4897         return;
4898     }
4899 
4900     tmp = tcg_temp_new();
4901     if (mode & 0x1) {
4902         /* Dynamic register list */
4903         tcg_gen_ext8u_i32(tmp, DREG(ext, 4));
4904     } else {
4905         /* Static register list */
4906         tcg_gen_movi_i32(tmp, ext & 0xff);
4907     }
4908 
4909     if (!is_load && (mode & 2) == 0) {
4910         /*
4911          * Predecrement addressing mode: only available when
4912          * storing registers to memory.
4913          */
4914         if (opsize == OS_EXTENDED) {
4915             gen_helper_fmovemx_st_predec(tmp, cpu_env, addr, tmp);
4916         } else {
4917             gen_helper_fmovemd_st_predec(tmp, cpu_env, addr, tmp);
4918         }
4919     } else {
4920         /* postincrement addressing mode */
4921         if (opsize == OS_EXTENDED) {
4922             if (is_load) {
4923                 gen_helper_fmovemx_ld_postinc(tmp, cpu_env, addr, tmp);
4924             } else {
4925                 gen_helper_fmovemx_st_postinc(tmp, cpu_env, addr, tmp);
4926             }
4927         } else {
4928             if (is_load) {
4929                 gen_helper_fmovemd_ld_postinc(tmp, cpu_env, addr, tmp);
4930             } else {
4931                 gen_helper_fmovemd_st_postinc(tmp, cpu_env, addr, tmp);
4932             }
4933         }
4934     }
4935     if ((insn & 070) == 030 || (insn & 070) == 040) {
4936         tcg_gen_mov_i32(AREG(insn, 0), tmp);
4937     }
4938 }
4939 
4940 /*
4941  * ??? FP exceptions are not implemented.  Most exceptions are deferred until
4942  * immediately before the next FP instruction is executed.
4943  */
4944 DISAS_INSN(fpu)
4945 {
4946     uint16_t ext;
4947     int opmode;
4948     int opsize;
4949     TCGv_ptr cpu_src, cpu_dest;
4950 
4951     ext = read_im16(env, s);
4952     opmode = ext & 0x7f;
4953     switch ((ext >> 13) & 7) {
4954     case 0:
4955         break;
4956     case 1:
4957         goto undef;
4958     case 2:
4959         if (insn == 0xf200 && (ext & 0xfc00) == 0x5c00) {
4960             /* fmovecr */
4961             TCGv rom_offset = tcg_constant_i32(opmode);
4962             cpu_dest = gen_fp_ptr(REG(ext, 7));
4963             gen_helper_fconst(cpu_env, cpu_dest, rom_offset);
4964             return;
4965         }
4966         break;
4967     case 3: /* fmove out */
4968         cpu_src = gen_fp_ptr(REG(ext, 7));
4969         opsize = ext_opsize(ext, 10);
4970         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
4971                       EA_STORE, IS_USER(s)) == -1) {
4972             gen_addr_fault(s);
4973         }
4974         gen_helper_ftst(cpu_env, cpu_src);
4975         return;
4976     case 4: /* fmove to control register.  */
4977     case 5: /* fmove from control register.  */
4978         gen_op_fmove_fcr(env, s, insn, ext);
4979         return;
4980     case 6: /* fmovem */
4981     case 7:
4982         if ((ext & 0x1000) == 0 && !m68k_feature(s->env, M68K_FEATURE_FPU)) {
4983             goto undef;
4984         }
4985         gen_op_fmovem(env, s, insn, ext);
4986         return;
4987     }
4988     if (ext & (1 << 14)) {
4989         /* Source effective address.  */
4990         opsize = ext_opsize(ext, 10);
4991         cpu_src = gen_fp_result_ptr();
4992         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
4993                       EA_LOADS, IS_USER(s)) == -1) {
4994             gen_addr_fault(s);
4995             return;
4996         }
4997     } else {
4998         /* Source register.  */
4999         opsize = OS_EXTENDED;
5000         cpu_src = gen_fp_ptr(REG(ext, 10));
5001     }
5002     cpu_dest = gen_fp_ptr(REG(ext, 7));
5003     switch (opmode) {
5004     case 0: /* fmove */
5005         gen_fp_move(cpu_dest, cpu_src);
5006         break;
5007     case 0x40: /* fsmove */
5008         gen_helper_fsround(cpu_env, cpu_dest, cpu_src);
5009         break;
5010     case 0x44: /* fdmove */
5011         gen_helper_fdround(cpu_env, cpu_dest, cpu_src);
5012         break;
5013     case 1: /* fint */
5014         gen_helper_firound(cpu_env, cpu_dest, cpu_src);
5015         break;
5016     case 2: /* fsinh */
5017         gen_helper_fsinh(cpu_env, cpu_dest, cpu_src);
5018         break;
5019     case 3: /* fintrz */
5020         gen_helper_fitrunc(cpu_env, cpu_dest, cpu_src);
5021         break;
5022     case 4: /* fsqrt */
5023         gen_helper_fsqrt(cpu_env, cpu_dest, cpu_src);
5024         break;
5025     case 0x41: /* fssqrt */
5026         gen_helper_fssqrt(cpu_env, cpu_dest, cpu_src);
5027         break;
5028     case 0x45: /* fdsqrt */
5029         gen_helper_fdsqrt(cpu_env, cpu_dest, cpu_src);
5030         break;
5031     case 0x06: /* flognp1 */
5032         gen_helper_flognp1(cpu_env, cpu_dest, cpu_src);
5033         break;
5034     case 0x08: /* fetoxm1 */
5035         gen_helper_fetoxm1(cpu_env, cpu_dest, cpu_src);
5036         break;
5037     case 0x09: /* ftanh */
5038         gen_helper_ftanh(cpu_env, cpu_dest, cpu_src);
5039         break;
5040     case 0x0a: /* fatan */
5041         gen_helper_fatan(cpu_env, cpu_dest, cpu_src);
5042         break;
5043     case 0x0c: /* fasin */
5044         gen_helper_fasin(cpu_env, cpu_dest, cpu_src);
5045         break;
5046     case 0x0d: /* fatanh */
5047         gen_helper_fatanh(cpu_env, cpu_dest, cpu_src);
5048         break;
5049     case 0x0e: /* fsin */
5050         gen_helper_fsin(cpu_env, cpu_dest, cpu_src);
5051         break;
5052     case 0x0f: /* ftan */
5053         gen_helper_ftan(cpu_env, cpu_dest, cpu_src);
5054         break;
5055     case 0x10: /* fetox */
5056         gen_helper_fetox(cpu_env, cpu_dest, cpu_src);
5057         break;
5058     case 0x11: /* ftwotox */
5059         gen_helper_ftwotox(cpu_env, cpu_dest, cpu_src);
5060         break;
5061     case 0x12: /* ftentox */
5062         gen_helper_ftentox(cpu_env, cpu_dest, cpu_src);
5063         break;
5064     case 0x14: /* flogn */
5065         gen_helper_flogn(cpu_env, cpu_dest, cpu_src);
5066         break;
5067     case 0x15: /* flog10 */
5068         gen_helper_flog10(cpu_env, cpu_dest, cpu_src);
5069         break;
5070     case 0x16: /* flog2 */
5071         gen_helper_flog2(cpu_env, cpu_dest, cpu_src);
5072         break;
5073     case 0x18: /* fabs */
5074         gen_helper_fabs(cpu_env, cpu_dest, cpu_src);
5075         break;
5076     case 0x58: /* fsabs */
5077         gen_helper_fsabs(cpu_env, cpu_dest, cpu_src);
5078         break;
5079     case 0x5c: /* fdabs */
5080         gen_helper_fdabs(cpu_env, cpu_dest, cpu_src);
5081         break;
5082     case 0x19: /* fcosh */
5083         gen_helper_fcosh(cpu_env, cpu_dest, cpu_src);
5084         break;
5085     case 0x1a: /* fneg */
5086         gen_helper_fneg(cpu_env, cpu_dest, cpu_src);
5087         break;
5088     case 0x5a: /* fsneg */
5089         gen_helper_fsneg(cpu_env, cpu_dest, cpu_src);
5090         break;
5091     case 0x5e: /* fdneg */
5092         gen_helper_fdneg(cpu_env, cpu_dest, cpu_src);
5093         break;
5094     case 0x1c: /* facos */
5095         gen_helper_facos(cpu_env, cpu_dest, cpu_src);
5096         break;
5097     case 0x1d: /* fcos */
5098         gen_helper_fcos(cpu_env, cpu_dest, cpu_src);
5099         break;
5100     case 0x1e: /* fgetexp */
5101         gen_helper_fgetexp(cpu_env, cpu_dest, cpu_src);
5102         break;
5103     case 0x1f: /* fgetman */
5104         gen_helper_fgetman(cpu_env, cpu_dest, cpu_src);
5105         break;
5106     case 0x20: /* fdiv */
5107         gen_helper_fdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5108         break;
5109     case 0x60: /* fsdiv */
5110         gen_helper_fsdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5111         break;
5112     case 0x64: /* fddiv */
5113         gen_helper_fddiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5114         break;
5115     case 0x21: /* fmod */
5116         gen_helper_fmod(cpu_env, cpu_dest, cpu_src, cpu_dest);
5117         break;
5118     case 0x22: /* fadd */
5119         gen_helper_fadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5120         break;
5121     case 0x62: /* fsadd */
5122         gen_helper_fsadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5123         break;
5124     case 0x66: /* fdadd */
5125         gen_helper_fdadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5126         break;
5127     case 0x23: /* fmul */
5128         gen_helper_fmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5129         break;
5130     case 0x63: /* fsmul */
5131         gen_helper_fsmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5132         break;
5133     case 0x67: /* fdmul */
5134         gen_helper_fdmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5135         break;
5136     case 0x24: /* fsgldiv */
5137         gen_helper_fsgldiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5138         break;
5139     case 0x25: /* frem */
5140         gen_helper_frem(cpu_env, cpu_dest, cpu_src, cpu_dest);
5141         break;
5142     case 0x26: /* fscale */
5143         gen_helper_fscale(cpu_env, cpu_dest, cpu_src, cpu_dest);
5144         break;
5145     case 0x27: /* fsglmul */
5146         gen_helper_fsglmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5147         break;
5148     case 0x28: /* fsub */
5149         gen_helper_fsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5150         break;
5151     case 0x68: /* fssub */
5152         gen_helper_fssub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5153         break;
5154     case 0x6c: /* fdsub */
5155         gen_helper_fdsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5156         break;
5157     case 0x30: case 0x31: case 0x32:
5158     case 0x33: case 0x34: case 0x35:
5159     case 0x36: case 0x37: {
5160             TCGv_ptr cpu_dest2 = gen_fp_ptr(REG(ext, 0));
5161             gen_helper_fsincos(cpu_env, cpu_dest, cpu_dest2, cpu_src);
5162         }
5163         break;
5164     case 0x38: /* fcmp */
5165         gen_helper_fcmp(cpu_env, cpu_src, cpu_dest);
5166         return;
5167     case 0x3a: /* ftst */
5168         gen_helper_ftst(cpu_env, cpu_src);
5169         return;
5170     default:
5171         goto undef;
5172     }
5173     gen_helper_ftst(cpu_env, cpu_dest);
5174     return;
5175 undef:
5176     /* FIXME: Is this right for offset addressing modes?  */
5177     s->pc -= 2;
5178     disas_undef_fpu(env, s, insn);
5179 }
5180 
5181 static void gen_fcc_cond(DisasCompare *c, DisasContext *s, int cond)
5182 {
5183     TCGv fpsr;
5184 
5185     c->v2 = tcg_constant_i32(0);
5186     /* TODO: Raise BSUN exception.  */
5187     fpsr = tcg_temp_new();
5188     gen_load_fcr(s, fpsr, M68K_FPSR);
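    /*
     * The case comments below use the FPSR condition-code bits: N
     * (negative), Z (zero) and A (the NAN bit).  Each predicate is reduced
     * to a mask and compare on those bits; the "signaling" variants
     * (16..31) differ only in the BSUN behaviour, which is not implemented
     * here (see the TODO above).
     */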
5189     switch (cond) {
5190     case 0:  /* False */
5191     case 16: /* Signaling False */
5192         c->v1 = c->v2;
5193         c->tcond = TCG_COND_NEVER;
5194         break;
5195     case 1:  /* EQual Z */
5196     case 17: /* Signaling EQual Z */
5197         c->v1 = tcg_temp_new();
5198         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5199         c->tcond = TCG_COND_NE;
5200         break;
5201     case 2:  /* Ordered Greater Than !(A || Z || N) */
5202     case 18: /* Greater Than !(A || Z || N) */
5203         c->v1 = tcg_temp_new();
5204         tcg_gen_andi_i32(c->v1, fpsr,
5205                          FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5206         c->tcond = TCG_COND_EQ;
5207         break;
5208     case 3:  /* Ordered Greater than or Equal Z || !(A || N) */
5209     case 19: /* Greater than or Equal Z || !(A || N) */
5210         c->v1 = tcg_temp_new();
5211         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5212         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5213         tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_Z | FPSR_CC_N);
5214         tcg_gen_or_i32(c->v1, c->v1, fpsr);
5215         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5216         c->tcond = TCG_COND_NE;
5217         break;
5218     case 4:  /* Ordered Less Than !(!N || A || Z) */
5219     case 20: /* Less Than !(!N || A || Z) */
5220         c->v1 = tcg_temp_new();
5221         tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
5222         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_N | FPSR_CC_A | FPSR_CC_Z);
5223         c->tcond = TCG_COND_EQ;
5224         break;
5225     case 5:  /* Ordered Less than or Equal Z || (N && !A) */
5226     case 21: /* Less than or Equal Z || (N && !A) */
5227         c->v1 = tcg_temp_new();
5228         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5229         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5230         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5231         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_Z | FPSR_CC_N);
5232         c->tcond = TCG_COND_NE;
5233         break;
5234     case 6:  /* Ordered Greater or Less than !(A || Z) */
5235     case 22: /* Greater or Less than !(A || Z) */
5236         c->v1 = tcg_temp_new();
5237         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5238         c->tcond = TCG_COND_EQ;
5239         break;
5240     case 7:  /* Ordered !A */
5241     case 23: /* Greater, Less or Equal !A */
5242         c->v1 = tcg_temp_new();
5243         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5244         c->tcond = TCG_COND_EQ;
5245         break;
5246     case 8:  /* Unordered A */
5247     case 24: /* Not Greater, Less or Equal A */
5248         c->v1 = tcg_temp_new();
5249         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5250         c->tcond = TCG_COND_NE;
5251         break;
5252     case 9:  /* Unordered or Equal A || Z */
5253     case 25: /* Not Greater or Less than A || Z */
5254         c->v1 = tcg_temp_new();
5255         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5256         c->tcond = TCG_COND_NE;
5257         break;
5258     case 10: /* Unordered or Greater Than A || !(N || Z) */
5259     case 26: /* Not Less or Equal A || !(N || Z) */
5260         c->v1 = tcg_temp_new();
5261         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5262         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5263         tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_A | FPSR_CC_N);
5264         tcg_gen_or_i32(c->v1, c->v1, fpsr);
5265         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5266         c->tcond = TCG_COND_NE;
5267         break;
5268     case 11: /* Unordered or Greater or Equal A || Z || !N */
5269     case 27: /* Not Less Than A || Z || !N */
5270         c->v1 = tcg_temp_new();
5271         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5272         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5273         c->tcond = TCG_COND_NE;
5274         break;
5275     case 12: /* Unordered or Less Than A || (N && !Z) */
5276     case 28: /* Not Greater than or Equal A || (N && !Z) */
5277         c->v1 = tcg_temp_new();
5278         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5279         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5280         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5281         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_A | FPSR_CC_N);
5282         c->tcond = TCG_COND_NE;
5283         break;
5284     case 13: /* Unordered or Less or Equal A || Z || N */
5285     case 29: /* Not Greater Than A || Z || N */
5286         c->v1 = tcg_temp_new();
5287         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5288         c->tcond = TCG_COND_NE;
5289         break;
5290     case 14: /* Not Equal !Z */
5291     case 30: /* Signaling Not Equal !Z */
5292         c->v1 = tcg_temp_new();
5293         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5294         c->tcond = TCG_COND_EQ;
5295         break;
5296     case 15: /* True */
5297     case 31: /* Signaling True */
5298         c->v1 = c->v2;
5299         c->tcond = TCG_COND_ALWAYS;
5300         break;
5301     }
5302 }
5303 
5304 static void gen_fjmpcc(DisasContext *s, int cond, TCGLabel *l1)
5305 {
5306     DisasCompare c;
5307 
5308     gen_fcc_cond(&c, s, cond);
5309     update_cc_op(s);
5310     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
5311 }
5312 
5313 DISAS_INSN(fbcc)
5314 {
5315     uint32_t offset;
5316     uint32_t base;
5317     TCGLabel *l1;
5318 
5319     base = s->pc;
5320     offset = (int16_t)read_im16(env, s);
5321     if (insn & (1 << 6)) {
5322         offset = (offset << 16) | read_im16(env, s);
5323     }
5324 
5325     l1 = gen_new_label();
5326     update_cc_op(s);
5327     gen_fjmpcc(s, insn & 0x3f, l1);
5328     gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
5329     gen_set_label(l1);
5330     gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
5331 }
5332 
5333 DISAS_INSN(fscc)
5334 {
5335     DisasCompare c;
5336     int cond;
5337     TCGv tmp;
5338     uint16_t ext;
5339 
5340     ext = read_im16(env, s);
5341     cond = ext & 0x3f;
5342     gen_fcc_cond(&c, s, cond);
5343 
5344     tmp = tcg_temp_new();
5345     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
5346 
5347     tcg_gen_neg_i32(tmp, tmp);
5348     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
5349 }
5350 
5351 DISAS_INSN(ftrapcc)
5352 {
5353     DisasCompare c;
5354     uint16_t ext;
5355     int cond;
5356 
5357     ext = read_im16(env, s);
5358     cond = ext & 0x3f;
5359 
5360     /* Consume and discard the immediate operand. */
5361     switch (extract32(insn, 0, 3)) {
5362     case 2: /* ftrapcc.w */
5363         (void)read_im16(env, s);
5364         break;
5365     case 3: /* ftrapcc.l */
5366         (void)read_im32(env, s);
5367         break;
5368     case 4: /* ftrapcc (no operand) */
5369         break;
5370     default:
5371         /* ftrapcc registered with only valid opmodes */
5372         g_assert_not_reached();
5373     }
5374 
5375     gen_fcc_cond(&c, s, cond);
5376     do_trapcc(s, &c);
5377 }
5378 
5379 #if defined(CONFIG_SOFTMMU)
5380 DISAS_INSN(frestore)
5381 {
5382     TCGv addr;
5383 
5384     if (IS_USER(s)) {
5385         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5386         return;
5387     }
5388     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5389         SRC_EA(env, addr, OS_LONG, 0, NULL);
5390         /* FIXME: check the state frame */
5391     } else {
5392         disas_undef(env, s, insn);
5393     }
5394 }
5395 
5396 DISAS_INSN(fsave)
5397 {
5398     if (IS_USER(s)) {
5399         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5400         return;
5401     }
5402 
5403     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5404         /* always write IDLE */
5405         TCGv idle = tcg_constant_i32(0x41000000);
5406         DEST_EA(env, insn, OS_LONG, idle, NULL);
5407     } else {
5408         disas_undef(env, s, insn);
5409     }
5410 }
5411 #endif
5412 
5413 static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
5414 {
5415     TCGv tmp = tcg_temp_new();
5416     if (s->env->macsr & MACSR_FI) {
5417         if (upper)
5418             tcg_gen_andi_i32(tmp, val, 0xffff0000);
5419         else
5420             tcg_gen_shli_i32(tmp, val, 16);
5421     } else if (s->env->macsr & MACSR_SU) {
5422         if (upper)
5423             tcg_gen_sari_i32(tmp, val, 16);
5424         else
5425             tcg_gen_ext16s_i32(tmp, val);
5426     } else {
5427         if (upper)
5428             tcg_gen_shri_i32(tmp, val, 16);
5429         else
5430             tcg_gen_ext16u_i32(tmp, val);
5431     }
5432     return tmp;
5433 }
5434 
5435 static void gen_mac_clear_flags(void)
5436 {
5437     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
5438                      ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
5439 }
5440 
5441 DISAS_INSN(mac)
5442 {
5443     TCGv rx;
5444     TCGv ry;
5445     uint16_t ext;
5446     int acc;
5447     TCGv tmp;
5448     TCGv addr;
5449     TCGv loadval;
5450     int dual;
5451     TCGv saved_flags;
5452 
5453     if (!s->done_mac) {
5454         s->mactmp = tcg_temp_new_i64();
5455         s->done_mac = 1;
5456     }
5457 
5458     ext = read_im16(env, s);
5459 
5460     acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
5461     dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
5462     if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
5463         disas_undef(env, s, insn);
5464         return;
5465     }
5466     if (insn & 0x30) {
5467         /* MAC with load.  */
5468         tmp = gen_lea(env, s, insn, OS_LONG);
5469         addr = tcg_temp_new();
5470         tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
5471         /*
5472          * Load the value now to ensure correct exception behavior.
5473          * Perform writeback after reading the MAC inputs.
5474          */
5475         loadval = gen_load(s, OS_LONG, addr, 0, IS_USER(s));
5476 
5477         acc ^= 1;
5478         rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
5479         ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
5480     } else {
5481         loadval = addr = NULL_QREG;
5482         rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5483         ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5484     }
5485 
5486     gen_mac_clear_flags();
5487 #if 0
5488     l1 = -1;
5489     /* Disabled because conditional branches clobber temporary vars.  */
5490     if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
5491         /* Skip the multiply if we know we will ignore it.  */
5492         l1 = gen_new_label();
5493         tmp = tcg_temp_new();
5494         tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
5495         gen_op_jmp_nz32(tmp, l1);
5496     }
5497 #endif
5498 
5499     if ((ext & 0x0800) == 0) {
5500         /* Word.  */
5501         rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
5502         ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
5503     }
5504     if (s->env->macsr & MACSR_FI) {
5505         gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
5506     } else {
5507         if (s->env->macsr & MACSR_SU)
5508             gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
5509         else
5510             gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
5511         switch ((ext >> 9) & 3) {
5512         case 1:
5513             tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
5514             break;
5515         case 3:
5516             tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
5517             break;
5518         }
5519     }
5520 
5521     if (dual) {
5522         /* Save the overflow flag from the multiply.  */
5523         saved_flags = tcg_temp_new();
5524         tcg_gen_mov_i32(saved_flags, QREG_MACSR);
5525     } else {
5526         saved_flags = NULL_QREG;
5527     }
5528 
5529 #if 0
5530     /* Disabled because conditional branches clobber temporary vars.  */
5531     if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
5532         /* Skip the accumulate if the value is already saturated.  */
5533         l1 = gen_new_label();
5534         tmp = tcg_temp_new();
5535         gen_op_and32(tmp, QREG_MACSR, tcg_constant_i32(MACSR_PAV0 << acc));
5536         gen_op_jmp_nz32(tmp, l1);
5537     }
5538 #endif
5539 
5540     if (insn & 0x100)
5541         tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5542     else
5543         tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5544 
5545     if (s->env->macsr & MACSR_FI)
5546         gen_helper_macsatf(cpu_env, tcg_constant_i32(acc));
5547     else if (s->env->macsr & MACSR_SU)
5548         gen_helper_macsats(cpu_env, tcg_constant_i32(acc));
5549     else
5550         gen_helper_macsatu(cpu_env, tcg_constant_i32(acc));
5551 
5552 #if 0
5553     /* Disabled because conditional branches clobber temporary vars.  */
5554     if (l1 != -1)
5555         gen_set_label(l1);
5556 #endif
5557 
5558     if (dual) {
5559         /* Dual accumulate variant.  */
5560         acc = (ext >> 2) & 3;
5561         /* Restore the overflow flag from the multiplier.  */
5562         tcg_gen_mov_i32(QREG_MACSR, saved_flags);
5563 #if 0
5564         /* Disabled because conditional branches clobber temporary vars.  */
5565         if ((s->env->macsr & MACSR_OMC) != 0) {
5566             /* Skip the accumulate if the value is already saturated.  */
5567             l1 = gen_new_label();
5568             tmp = tcg_temp_new();
5569             gen_op_and32(tmp, QREG_MACSR, tcg_constant_i32(MACSR_PAV0 << acc));
5570             gen_op_jmp_nz32(tmp, l1);
5571         }
5572 #endif
5573         if (ext & 2)
5574             tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5575         else
5576             tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5577         if (s->env->macsr & MACSR_FI)
5578             gen_helper_macsatf(cpu_env, tcg_constant_i32(acc));
5579         else if (s->env->macsr & MACSR_SU)
5580             gen_helper_macsats(cpu_env, tcg_constant_i32(acc));
5581         else
5582             gen_helper_macsatu(cpu_env, tcg_constant_i32(acc));
5583 #if 0
5584         /* Disabled because conditional branches clobber temporary vars.  */
5585         if (l1 != -1)
5586             gen_set_label(l1);
5587 #endif
5588     }
5589     gen_helper_mac_set_flags(cpu_env, tcg_constant_i32(acc));
5590 
5591     if (insn & 0x30) {
5592         TCGv rw;
5593         rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5594         tcg_gen_mov_i32(rw, loadval);
5595         /*
5596          * FIXME: Should address writeback happen with the masked or
5597          * unmasked value?
5598          */
5599         switch ((insn >> 3) & 7) {
5600         case 3: /* Post-increment.  */
5601             tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
5602             break;
5603         case 4: /* Pre-decrement.  */
5604             tcg_gen_mov_i32(AREG(insn, 0), addr);
5605         }
5606     }
5607 }
5608 
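/*
 * Move an EMAC accumulator to a data or address register, applying the
 * extraction/saturation behaviour selected by MACSR; if bit 6 of the
 * opcode is set, the accumulator and its MACSR PAV flag are cleared
 * afterwards.
 */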
5609 DISAS_INSN(from_mac)
5610 {
5611     TCGv rx;
5612     TCGv_i64 acc;
5613     int accnum;
5614 
5615     rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5616     accnum = (insn >> 9) & 3;
5617     acc = MACREG(accnum);
5618     if (s->env->macsr & MACSR_FI) {
5619         gen_helper_get_macf(rx, cpu_env, acc);
5620     } else if ((s->env->macsr & MACSR_OMC) == 0) {
5621         tcg_gen_extrl_i64_i32(rx, acc);
5622     } else if (s->env->macsr & MACSR_SU) {
5623         gen_helper_get_macs(rx, acc);
5624     } else {
5625         gen_helper_get_macu(rx, acc);
5626     }
5627     if (insn & 0x40) {
5628         tcg_gen_movi_i64(acc, 0);
5629         tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5630     }
5631 }
5632 
5633 DISAS_INSN(move_mac)
5634 {
5635     /* FIXME: This can be done without a helper.  */
5636     int src;
5637     TCGv dest;
5638     src = insn & 3;
5639     dest = tcg_constant_i32((insn >> 9) & 3);
5640     gen_helper_mac_move(cpu_env, dest, tcg_constant_i32(src));
5641     gen_mac_clear_flags();
5642     gen_helper_mac_set_flags(cpu_env, dest);
5643 }
5644 
5645 DISAS_INSN(from_macsr)
5646 {
5647     TCGv reg;
5648 
5649     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5650     tcg_gen_mov_i32(reg, QREG_MACSR);
5651 }
5652 
5653 DISAS_INSN(from_mask)
5654 {
5655     TCGv reg;
5656     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5657     tcg_gen_mov_i32(reg, QREG_MAC_MASK);
5658 }
5659 
5660 DISAS_INSN(from_mext)
5661 {
5662     TCGv reg;
5663     TCGv acc;
5664     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5665     acc = tcg_constant_i32((insn & 0x400) ? 2 : 0);
5666     if (s->env->macsr & MACSR_FI)
5667         gen_helper_get_mac_extf(reg, cpu_env, acc);
5668     else
5669         gen_helper_get_mac_exti(reg, cpu_env, acc);
5670 }
5671 
5672 DISAS_INSN(macsr_to_ccr)
5673 {
5674     TCGv tmp = tcg_temp_new();
5675 
5676     /* Note that X and C are always cleared. */
5677     tcg_gen_andi_i32(tmp, QREG_MACSR, CCF_N | CCF_Z | CCF_V);
5678     gen_helper_set_ccr(cpu_env, tmp);
5679     set_cc_op(s, CC_OP_FLAGS);
5680 }
5681 
5682 DISAS_INSN(to_mac)
5683 {
5684     TCGv_i64 acc;
5685     TCGv val;
5686     int accnum;
5687     accnum = (insn >> 9) & 3;
5688     acc = MACREG(accnum);
5689     SRC_EA(env, val, OS_LONG, 0, NULL);
5690     if (s->env->macsr & MACSR_FI) {
5691         tcg_gen_ext_i32_i64(acc, val);
5692         tcg_gen_shli_i64(acc, acc, 8);
5693     } else if (s->env->macsr & MACSR_SU) {
5694         tcg_gen_ext_i32_i64(acc, val);
5695     } else {
5696         tcg_gen_extu_i32_i64(acc, val);
5697     }
5698     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5699     gen_mac_clear_flags();
5700     gen_helper_mac_set_flags(cpu_env, tcg_constant_i32(accnum));
5701 }
5702 
5703 DISAS_INSN(to_macsr)
5704 {
5705     TCGv val;
5706     SRC_EA(env, val, OS_LONG, 0, NULL);
5707     gen_helper_set_macsr(cpu_env, val);
5708     gen_exit_tb(s);
5709 }
5710 
5711 DISAS_INSN(to_mask)
5712 {
5713     TCGv val;
5714     SRC_EA(env, val, OS_LONG, 0, NULL);
5715     tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
5716 }
5717 
5718 DISAS_INSN(to_mext)
5719 {
5720     TCGv val;
5721     TCGv acc;
5722     SRC_EA(env, val, OS_LONG, 0, NULL);
5723     acc = tcg_constant_i32((insn & 0x400) ? 2 : 0);
5724     if (s->env->macsr & MACSR_FI)
5725         gen_helper_set_mac_extf(cpu_env, val, acc);
5726     else if (s->env->macsr & MACSR_SU)
5727         gen_helper_set_mac_exts(cpu_env, val, acc);
5728     else
5729         gen_helper_set_mac_extu(cpu_env, val, acc);
5730 }
5731 
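/*
 * Dispatch table indexed by the full 16-bit opcode word.  It is filled in
 * by register_opcode() and consulted once per instruction in
 * m68k_tr_translate_insn().
 */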
5732 static disas_proc opcode_table[65536];
5733 
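/*
 * Each (opcode, mask) pair is expanded into every 16-bit opcode word it
 * matches.  For example, registering dbcc as (0x50c8, 0xf0f8) finds the
 * first clear mask bit (bit 11), so the loop below scans entries
 * 0x5000..0x5fff and installs the handler in the 128 slots that match
 * 0x50c8 under the mask.
 */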
5734 static void
5735 register_opcode(disas_proc proc, uint16_t opcode, uint16_t mask)
5736 {
5737     int i;
5738     int from;
5739     int to;
5740 
5741     /* Sanity check.  All set bits must be included in the mask.  */
5742     if (opcode & ~mask) {
5743         fprintf(stderr,
5744                 "qemu internal error: bogus opcode definition %04x/%04x\n",
5745                 opcode, mask);
5746         abort();
5747     }
5748     /*
5749      * This could probably be cleverer.  For now just optimize the case where
5750      * the top bits are known.
5751      */
5752     /* Find the first zero bit in the mask.  */
5753     i = 0x8000;
5754     while ((i & mask) != 0)
5755         i >>= 1;
5756     /* Iterate over all combinations of this and lower bits.  */
5757     if (i == 0)
5758         i = 1;
5759     else
5760         i <<= 1;
5761     from = opcode & ~(i - 1);
5762     to = from + i;
5763     for (i = from; i < to; i++) {
5764         if ((i & mask) == opcode)
5765             opcode_table[i] = proc;
5766     }
5767 }
5768 
5769 /*
5770  * Register m68k opcode handlers.  Order is important.
5771  * Later insns override earlier ones.
5772  */
5773 void register_m68k_insns(CPUM68KState *env)
5774 {
5775     /*
5776      * Build the opcode table only once to avoid
5777      * multithreading issues.
5778      */
5779     if (opcode_table[0] != NULL) {
5780         return;
5781     }
5782 
5783     /*
5784      * Use BASE() for instructions that are available
5785      * on both CF_ISA_A and M68000.
5786      */
5787 #define BASE(name, opcode, mask) \
5788     register_opcode(disas_##name, 0x##opcode, 0x##mask)
5789 #define INSN(name, opcode, mask, feature) do { \
5790     if (m68k_feature(env, M68K_FEATURE_##feature)) \
5791         BASE(name, opcode, mask); \
5792     } while (0)
5793     BASE(undef,     0000, 0000);
5794     INSN(arith_im,  0080, fff8, CF_ISA_A);
5795     INSN(arith_im,  0000, ff00, M68K);
5796     INSN(chk2,      00c0, f9c0, CHK2);
5797     INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
5798     BASE(bitop_reg, 0100, f1c0);
5799     BASE(bitop_reg, 0140, f1c0);
5800     BASE(bitop_reg, 0180, f1c0);
5801     BASE(bitop_reg, 01c0, f1c0);
5802     INSN(movep,     0108, f138, MOVEP);
5803     INSN(arith_im,  0280, fff8, CF_ISA_A);
5804     INSN(arith_im,  0200, ff00, M68K);
5805     INSN(undef,     02c0, ffc0, M68K);
5806     INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
5807     INSN(arith_im,  0480, fff8, CF_ISA_A);
5808     INSN(arith_im,  0400, ff00, M68K);
5809     INSN(undef,     04c0, ffc0, M68K);
5810     INSN(arith_im,  0600, ff00, M68K);
5811     INSN(undef,     06c0, ffc0, M68K);
5812     INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
5813     INSN(arith_im,  0680, fff8, CF_ISA_A);
5814     INSN(arith_im,  0c00, ff38, CF_ISA_A);
5815     INSN(arith_im,  0c00, ff00, M68K);
5816     BASE(bitop_im,  0800, ffc0);
5817     BASE(bitop_im,  0840, ffc0);
5818     BASE(bitop_im,  0880, ffc0);
5819     BASE(bitop_im,  08c0, ffc0);
5820     INSN(arith_im,  0a80, fff8, CF_ISA_A);
5821     INSN(arith_im,  0a00, ff00, M68K);
5822 #if defined(CONFIG_SOFTMMU)
5823     INSN(moves,     0e00, ff00, M68K);
5824 #endif
5825     INSN(cas,       0ac0, ffc0, CAS);
5826     INSN(cas,       0cc0, ffc0, CAS);
5827     INSN(cas,       0ec0, ffc0, CAS);
5828     INSN(cas2w,     0cfc, ffff, CAS);
5829     INSN(cas2l,     0efc, ffff, CAS);
5830     BASE(move,      1000, f000);
5831     BASE(move,      2000, f000);
5832     BASE(move,      3000, f000);
5833     INSN(chk,       4000, f040, M68K);
5834     INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
5835     INSN(negx,      4080, fff8, CF_ISA_A);
5836     INSN(negx,      4000, ff00, M68K);
5837     INSN(undef,     40c0, ffc0, M68K);
5838     INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
5839     INSN(move_from_sr, 40c0, ffc0, M68K);
5840     BASE(lea,       41c0, f1c0);
5841     BASE(clr,       4200, ff00);
5842     BASE(undef,     42c0, ffc0);
5843     INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
5844     INSN(move_from_ccr, 42c0, ffc0, M68K);
5845     INSN(neg,       4480, fff8, CF_ISA_A);
5846     INSN(neg,       4400, ff00, M68K);
5847     INSN(undef,     44c0, ffc0, M68K);
5848     BASE(move_to_ccr, 44c0, ffc0);
5849     INSN(not,       4680, fff8, CF_ISA_A);
5850     INSN(not,       4600, ff00, M68K);
5851 #if defined(CONFIG_SOFTMMU)
5852     BASE(move_to_sr, 46c0, ffc0);
5853 #endif
5854     INSN(nbcd,      4800, ffc0, M68K);
5855     INSN(linkl,     4808, fff8, M68K);
5856     BASE(pea,       4840, ffc0);
5857     BASE(swap,      4840, fff8);
5858     INSN(bkpt,      4848, fff8, BKPT);
5859     INSN(movem,     48d0, fbf8, CF_ISA_A);
5860     INSN(movem,     48e8, fbf8, CF_ISA_A);
5861     INSN(movem,     4880, fb80, M68K);
5862     BASE(ext,       4880, fff8);
5863     BASE(ext,       48c0, fff8);
5864     BASE(ext,       49c0, fff8);
5865     BASE(tst,       4a00, ff00);
5866     INSN(tas,       4ac0, ffc0, CF_ISA_B);
5867     INSN(tas,       4ac0, ffc0, M68K);
5868 #if defined(CONFIG_SOFTMMU)
5869     INSN(halt,      4ac8, ffff, CF_ISA_A);
5870     INSN(halt,      4ac8, ffff, M68K);
5871 #endif
5872     INSN(pulse,     4acc, ffff, CF_ISA_A);
5873     BASE(illegal,   4afc, ffff);
5874     INSN(mull,      4c00, ffc0, CF_ISA_A);
5875     INSN(mull,      4c00, ffc0, LONG_MULDIV);
5876     INSN(divl,      4c40, ffc0, CF_ISA_A);
5877     INSN(divl,      4c40, ffc0, LONG_MULDIV);
5878     INSN(sats,      4c80, fff8, CF_ISA_B);
5879     BASE(trap,      4e40, fff0);
5880     BASE(link,      4e50, fff8);
5881     BASE(unlk,      4e58, fff8);
5882 #if defined(CONFIG_SOFTMMU)
5883     INSN(move_to_usp, 4e60, fff8, USP);
5884     INSN(move_from_usp, 4e68, fff8, USP);
5885     INSN(reset,     4e70, ffff, M68K);
5886     BASE(stop,      4e72, ffff);
5887     BASE(rte,       4e73, ffff);
5888     INSN(cf_movec,  4e7b, ffff, CF_ISA_A);
5889     INSN(m68k_movec, 4e7a, fffe, MOVEC);
5890 #endif
5891     BASE(nop,       4e71, ffff);
5892     INSN(rtd,       4e74, ffff, RTD);
5893     BASE(rts,       4e75, ffff);
5894     INSN(trapv,     4e76, ffff, M68K);
5895     INSN(rtr,       4e77, ffff, M68K);
5896     BASE(jump,      4e80, ffc0);
5897     BASE(jump,      4ec0, ffc0);
5898     INSN(addsubq,   5000, f080, M68K);
5899     BASE(addsubq,   5080, f0c0);
5900     INSN(scc,       50c0, f0f8, CF_ISA_A); /* Scc.B Dx   */
5901     INSN(scc,       50c0, f0c0, M68K);     /* Scc.B <EA> */
5902     INSN(dbcc,      50c8, f0f8, M68K);
5903     INSN(trapcc,    50fa, f0fe, TRAPCC);   /* opmode 010, 011 */
5904     INSN(trapcc,    50fc, f0ff, TRAPCC);   /* opmode 100 */
5905     INSN(trapcc,    51fa, fffe, CF_ISA_A); /* TPF (trapf) opmode 010, 011 */
5906     INSN(trapcc,    51fc, ffff, CF_ISA_A); /* TPF (trapf) opmode 100 */
5907 
5908     /* Branch instructions.  */
5909     BASE(branch,    6000, f000);
5910     /* Disable long branch instructions, then add back the ones we want.  */
5911     BASE(undef,     60ff, f0ff); /* All long branches.  */
5912     INSN(branch,    60ff, f0ff, CF_ISA_B);
5913     INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
5914     INSN(branch,    60ff, ffff, BRAL);
5915     INSN(branch,    60ff, f0ff, BCCL);
5916 
5917     BASE(moveq,     7000, f100);
5918     INSN(mvzs,      7100, f100, CF_ISA_B);
5919     BASE(or,        8000, f000);
5920     BASE(divw,      80c0, f0c0);
5921     INSN(sbcd_reg,  8100, f1f8, M68K);
5922     INSN(sbcd_mem,  8108, f1f8, M68K);
5923     BASE(addsub,    9000, f000);
5924     INSN(undef,     90c0, f0c0, CF_ISA_A);
5925     INSN(subx_reg,  9180, f1f8, CF_ISA_A);
5926     INSN(subx_reg,  9100, f138, M68K);
5927     INSN(subx_mem,  9108, f138, M68K);
5928     INSN(suba,      91c0, f1c0, CF_ISA_A);
5929     INSN(suba,      90c0, f0c0, M68K);
5930 
5931     BASE(undef_mac, a000, f000);
5932     INSN(mac,       a000, f100, CF_EMAC);
5933     INSN(from_mac,  a180, f9b0, CF_EMAC);
5934     INSN(move_mac,  a110, f9fc, CF_EMAC);
5935     INSN(from_macsr,a980, f9f0, CF_EMAC);
5936     INSN(from_mask, ad80, fff0, CF_EMAC);
5937     INSN(from_mext, ab80, fbf0, CF_EMAC);
5938     INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
5939     INSN(to_mac,    a100, f9c0, CF_EMAC);
5940     INSN(to_macsr,  a900, ffc0, CF_EMAC);
5941     INSN(to_mext,   ab00, fbc0, CF_EMAC);
5942     INSN(to_mask,   ad00, ffc0, CF_EMAC);
5943 
5944     INSN(mov3q,     a140, f1c0, CF_ISA_B);
5945     INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
5946     INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
5947     INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
5948     INSN(cmp,       b080, f1c0, CF_ISA_A);
5949     INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
5950     INSN(cmp,       b000, f100, M68K);
5951     INSN(eor,       b100, f100, M68K);
5952     INSN(cmpm,      b108, f138, M68K);
5953     INSN(cmpa,      b0c0, f0c0, M68K);
5954     INSN(eor,       b180, f1c0, CF_ISA_A);
5955     BASE(and,       c000, f000);
5956     INSN(exg_dd,    c140, f1f8, M68K);
5957     INSN(exg_aa,    c148, f1f8, M68K);
5958     INSN(exg_da,    c188, f1f8, M68K);
5959     BASE(mulw,      c0c0, f0c0);
5960     INSN(abcd_reg,  c100, f1f8, M68K);
5961     INSN(abcd_mem,  c108, f1f8, M68K);
5962     BASE(addsub,    d000, f000);
5963     INSN(undef,     d0c0, f0c0, CF_ISA_A);
5964     INSN(addx_reg,      d180, f1f8, CF_ISA_A);
5965     INSN(addx_reg,  d100, f138, M68K);
5966     INSN(addx_mem,  d108, f138, M68K);
5967     INSN(adda,      d1c0, f1c0, CF_ISA_A);
5968     INSN(adda,      d0c0, f0c0, M68K);
5969     INSN(shift_im,  e080, f0f0, CF_ISA_A);
5970     INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
5971     INSN(shift8_im, e000, f0f0, M68K);
5972     INSN(shift16_im, e040, f0f0, M68K);
5973     INSN(shift_im,  e080, f0f0, M68K);
5974     INSN(shift8_reg, e020, f0f0, M68K);
5975     INSN(shift16_reg, e060, f0f0, M68K);
5976     INSN(shift_reg, e0a0, f0f0, M68K);
5977     INSN(shift_mem, e0c0, fcc0, M68K);
5978     INSN(rotate_im, e090, f0f0, M68K);
5979     INSN(rotate8_im, e010, f0f0, M68K);
5980     INSN(rotate16_im, e050, f0f0, M68K);
5981     INSN(rotate_reg, e0b0, f0f0, M68K);
5982     INSN(rotate8_reg, e030, f0f0, M68K);
5983     INSN(rotate16_reg, e070, f0f0, M68K);
5984     INSN(rotate_mem, e4c0, fcc0, M68K);
5985     INSN(bfext_mem, e9c0, fdc0, BITFIELD);  /* bfextu & bfexts */
5986     INSN(bfext_reg, e9c0, fdf8, BITFIELD);
5987     INSN(bfins_mem, efc0, ffc0, BITFIELD);
5988     INSN(bfins_reg, efc0, fff8, BITFIELD);
5989     INSN(bfop_mem, eac0, ffc0, BITFIELD);   /* bfchg */
5990     INSN(bfop_reg, eac0, fff8, BITFIELD);   /* bfchg */
5991     INSN(bfop_mem, ecc0, ffc0, BITFIELD);   /* bfclr */
5992     INSN(bfop_reg, ecc0, fff8, BITFIELD);   /* bfclr */
5993     INSN(bfop_mem, edc0, ffc0, BITFIELD);   /* bfffo */
5994     INSN(bfop_reg, edc0, fff8, BITFIELD);   /* bfffo */
5995     INSN(bfop_mem, eec0, ffc0, BITFIELD);   /* bfset */
5996     INSN(bfop_reg, eec0, fff8, BITFIELD);   /* bfset */
5997     INSN(bfop_mem, e8c0, ffc0, BITFIELD);   /* bftst */
5998     INSN(bfop_reg, e8c0, fff8, BITFIELD);   /* bftst */
5999     BASE(undef_fpu, f000, f000);
6000     INSN(fpu,       f200, ffc0, CF_FPU);
6001     INSN(fbcc,      f280, ffc0, CF_FPU);
6002     INSN(fpu,       f200, ffc0, FPU);
6003     INSN(fscc,      f240, ffc0, FPU);
6004     INSN(ftrapcc,   f27a, fffe, FPU);       /* opmode 010, 011 */
6005     INSN(ftrapcc,   f27c, ffff, FPU);       /* opmode 100 */
6006     INSN(fbcc,      f280, ff80, FPU);
6007 #if defined(CONFIG_SOFTMMU)
6008     INSN(frestore,  f340, ffc0, CF_FPU);
6009     INSN(fsave,     f300, ffc0, CF_FPU);
6010     INSN(frestore,  f340, ffc0, FPU);
6011     INSN(fsave,     f300, ffc0, FPU);
6012     INSN(intouch,   f340, ffc0, CF_ISA_A);
6013     INSN(cpushl,    f428, ff38, CF_ISA_A);
6014     INSN(cpush,     f420, ff20, M68040);
6015     INSN(cinv,      f400, ff20, M68040);
6016     INSN(pflush,    f500, ffe0, M68040);
6017     INSN(ptest,     f548, ffd8, M68040);
6018     INSN(wddata,    fb00, ff00, CF_ISA_A);
6019     INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
6020 #endif
6021     INSN(move16_mem, f600, ffe0, M68040);
6022     INSN(move16_reg, f620, fff8, M68040);
6023 #undef INSN
6024 }
6025 
6026 static void m68k_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
6027 {
6028     DisasContext *dc = container_of(dcbase, DisasContext, base);
6029     CPUM68KState *env = cpu->env_ptr;
6030 
6031     dc->env = env;
6032     dc->pc = dc->base.pc_first;
6033     /* This value will always be filled in properly before m68k_tr_tb_stop. */
6034     dc->pc_prev = 0xdeadbeef;
6035     dc->cc_op = CC_OP_DYNAMIC;
6036     dc->cc_op_synced = 1;
6037     dc->done_mac = 0;
6038     dc->writeback_mask = 0;
6039 
6040     dc->ss_active = (M68K_SR_TRACE(env->sr) == M68K_SR_TRACE_ANY_INS);
6041     /* If architectural single step is active, limit the TB to one insn. */
6042     if (dc->ss_active) {
6043         dc->base.max_insns = 1;
6044     }
6045 }
6046 
6047 static void m68k_tr_tb_start(DisasContextBase *dcbase, CPUState *cpu)
6048 {
6049 }
6050 
6051 static void m68k_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
6052 {
6053     DisasContext *dc = container_of(dcbase, DisasContext, base);
6054     tcg_gen_insn_start(dc->base.pc_next, dc->cc_op);
6055 }
6056 
6057 static void m68k_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
6058 {
6059     DisasContext *dc = container_of(dcbase, DisasContext, base);
6060     CPUM68KState *env = cpu->env_ptr;
6061     uint16_t insn = read_im16(env, dc);
6062 
6063     opcode_table[insn](env, dc, insn);
6064     do_writebacks(dc);
6065 
6066     dc->pc_prev = dc->base.pc_next;
6067     dc->base.pc_next = dc->pc;
6068 
6069     if (dc->base.is_jmp == DISAS_NEXT) {
6070         /*
6071          * Stop translation when the next insn might touch a new page.
6072          * This ensures that prefetch aborts at the right place.
6073          *
6074          * We cannot determine the size of the next insn without
6075          * completely decoding it.  However, the maximum insn size
6076          * is 32 bytes, so end if we do not have that much remaining.
6077          * This may produce several small TBs at the end of each page,
6078          * but they will all be linked with goto_tb.
6079          *
6080          * ??? ColdFire maximum is 4 bytes; MC68000's maximum is also
6081          * smaller than MC68020's.
6082          */
6083         target_ulong start_page_offset
6084             = dc->pc - (dc->base.pc_first & TARGET_PAGE_MASK);
6085 
6086         if (start_page_offset >= TARGET_PAGE_SIZE - 32) {
6087             dc->base.is_jmp = DISAS_TOO_MANY;
6088         }
6089     }
6090 }
6091 
6092 static void m68k_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
6093 {
6094     DisasContext *dc = container_of(dcbase, DisasContext, base);
6095 
6096     switch (dc->base.is_jmp) {
6097     case DISAS_NORETURN:
6098         break;
6099     case DISAS_TOO_MANY:
6100         update_cc_op(dc);
6101         gen_jmp_tb(dc, 0, dc->pc, dc->pc_prev);
6102         break;
6103     case DISAS_JUMP:
6104         /* We updated CC_OP and PC in gen_jmp/gen_jmp_im.  */
6105         if (dc->ss_active) {
6106             gen_raise_exception_format2(dc, EXCP_TRACE, dc->pc_prev);
6107         } else {
6108             tcg_gen_lookup_and_goto_ptr();
6109         }
6110         break;
6111     case DISAS_EXIT:
6112         /*
6113          * We updated CC_OP and PC in gen_exit_tb, but also modified
6114          * other state that may require returning to the main loop.
6115          */
6116         if (dc->ss_active) {
6117             gen_raise_exception_format2(dc, EXCP_TRACE, dc->pc_prev);
6118         } else {
6119             tcg_gen_exit_tb(NULL, 0);
6120         }
6121         break;
6122     default:
6123         g_assert_not_reached();
6124     }
6125 }
6126 
6127 static void m68k_tr_disas_log(const DisasContextBase *dcbase,
6128                               CPUState *cpu, FILE *logfile)
6129 {
6130     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
6131     target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
6132 }
6133 
6134 static const TranslatorOps m68k_tr_ops = {
6135     .init_disas_context = m68k_tr_init_disas_context,
6136     .tb_start           = m68k_tr_tb_start,
6137     .insn_start         = m68k_tr_insn_start,
6138     .translate_insn     = m68k_tr_translate_insn,
6139     .tb_stop            = m68k_tr_tb_stop,
6140     .disas_log          = m68k_tr_disas_log,
6141 };
6142 
6143 void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int *max_insns,
6144                            target_ulong pc, void *host_pc)
6145 {
6146     DisasContext dc;
6147     translator_loop(cpu, tb, max_insns, pc, host_pc, &m68k_tr_ops, &dc.base);
6148 }
6149 
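/*
 * Convert an 80-bit extended-precision register image to a host double;
 * used only by m68k_cpu_dump_state() below for the human-readable %g
 * output, so the possible loss of precision is acceptable.
 */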
6150 static double floatx80_to_double(CPUM68KState *env, uint16_t high, uint64_t low)
6151 {
6152     floatx80 a = { .high = high, .low = low };
6153     union {
6154         float64 f64;
6155         double d;
6156     } u;
6157 
6158     u.f64 = floatx80_to_float64(a, &env->fp_status);
6159     return u.d;
6160 }
6161 
6162 void m68k_cpu_dump_state(CPUState *cs, FILE *f, int flags)
6163 {
6164     M68kCPU *cpu = M68K_CPU(cs);
6165     CPUM68KState *env = &cpu->env;
6166     int i;
6167     uint16_t sr;
6168     for (i = 0; i < 8; i++) {
6169         qemu_fprintf(f, "D%d = %08x   A%d = %08x   "
6170                      "F%d = %04x %016"PRIx64"  (%12g)\n",
6171                      i, env->dregs[i], i, env->aregs[i],
6172                      i, env->fregs[i].l.upper, env->fregs[i].l.lower,
6173                      floatx80_to_double(env, env->fregs[i].l.upper,
6174                                         env->fregs[i].l.lower));
6175     }
6176     qemu_fprintf(f, "PC = %08x   ", env->pc);
6177     sr = env->sr | cpu_m68k_get_ccr(env);
6178     qemu_fprintf(f, "SR = %04x T:%x I:%x %c%c %c%c%c%c%c\n",
6179                  sr, (sr & SR_T) >> SR_T_SHIFT, (sr & SR_I) >> SR_I_SHIFT,
6180                  (sr & SR_S) ? 'S' : 'U', (sr & SR_M) ? '%' : 'I',
6181                  (sr & CCF_X) ? 'X' : '-', (sr & CCF_N) ? 'N' : '-',
6182                  (sr & CCF_Z) ? 'Z' : '-', (sr & CCF_V) ? 'V' : '-',
6183                  (sr & CCF_C) ? 'C' : '-');
6184     qemu_fprintf(f, "FPSR = %08x %c%c%c%c ", env->fpsr,
6185                  (env->fpsr & FPSR_CC_A) ? 'A' : '-',
6186                  (env->fpsr & FPSR_CC_I) ? 'I' : '-',
6187                  (env->fpsr & FPSR_CC_Z) ? 'Z' : '-',
6188                  (env->fpsr & FPSR_CC_N) ? 'N' : '-');
6189     qemu_fprintf(f, "\n                                "
6190                  "FPCR =     %04x ", env->fpcr);
6191     switch (env->fpcr & FPCR_PREC_MASK) {
6192     case FPCR_PREC_X:
6193         qemu_fprintf(f, "X ");
6194         break;
6195     case FPCR_PREC_S:
6196         qemu_fprintf(f, "S ");
6197         break;
6198     case FPCR_PREC_D:
6199         qemu_fprintf(f, "D ");
6200         break;
6201     }
6202     switch (env->fpcr & FPCR_RND_MASK) {
6203     case FPCR_RND_N:
6204         qemu_fprintf(f, "RN ");
6205         break;
6206     case FPCR_RND_Z:
6207         qemu_fprintf(f, "RZ ");
6208         break;
6209     case FPCR_RND_M:
6210         qemu_fprintf(f, "RM ");
6211         break;
6212     case FPCR_RND_P:
6213         qemu_fprintf(f, "RP ");
6214         break;
6215     }
6216     qemu_fprintf(f, "\n");
6217 #ifdef CONFIG_SOFTMMU
6218     qemu_fprintf(f, "%sA7(MSP) = %08x %sA7(USP) = %08x %sA7(ISP) = %08x\n",
6219                  env->current_sp == M68K_SSP ? "->" : "  ", env->sp[M68K_SSP],
6220                  env->current_sp == M68K_USP ? "->" : "  ", env->sp[M68K_USP],
6221                  env->current_sp == M68K_ISP ? "->" : "  ", env->sp[M68K_ISP]);
6222     qemu_fprintf(f, "VBR = 0x%08x\n", env->vbr);
6223     qemu_fprintf(f, "SFC = %x DFC %x\n", env->sfc, env->dfc);
6224     qemu_fprintf(f, "SSW %08x TCR %08x URP %08x SRP %08x\n",
6225                  env->mmu.ssw, env->mmu.tcr, env->mmu.urp, env->mmu.srp);
6226     qemu_fprintf(f, "DTTR0/1: %08x/%08x ITTR0/1: %08x/%08x\n",
6227                  env->mmu.ttr[M68K_DTTR0], env->mmu.ttr[M68K_DTTR1],
6228                  env->mmu.ttr[M68K_ITTR0], env->mmu.ttr[M68K_ITTR1]);
6229     qemu_fprintf(f, "MMUSR %08x, fault at %08x\n",
6230                  env->mmu.mmusr, env->mmu.ar);
6231 #endif
6232 }
6233