xref: /openbmc/qemu/target/m68k/translate.c (revision d53106c997e5c8e61e37ae9ff9f0e1f243b03968)
1 /*
2  *  m68k translation
3  *
4  *  Copyright (c) 2005-2007 CodeSourcery
5  *  Written by Paul Brook
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "disas/disas.h"
24 #include "exec/exec-all.h"
25 #include "tcg/tcg-op.h"
26 #include "qemu/log.h"
27 #include "qemu/qemu-print.h"
28 #include "exec/cpu_ldst.h"
29 #include "exec/translator.h"
30 
31 #include "exec/helper-proto.h"
32 #include "exec/helper-gen.h"
33 
34 #include "exec/log.h"
35 #include "fpu/softfloat.h"
36 
37 #define HELPER_H "helper.h"
38 #include "exec/helper-info.c.inc"
39 #undef  HELPER_H
40 
41 //#define DEBUG_DISPATCH 1
42 
43 #define DEFO32(name, offset) static TCGv QREG_##name;
44 #define DEFO64(name, offset) static TCGv_i64 QREG_##name;
45 #include "qregs.h.inc"
46 #undef DEFO32
47 #undef DEFO64
48 
49 static TCGv_i32 cpu_halted;
50 static TCGv_i32 cpu_exception_index;
51 
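/* 8 x "Dn\0" + 8 x "An\0" (3 bytes each) plus 4 x "ACCn\0" (5 bytes each) */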
52 static char cpu_reg_names[2 * 8 * 3 + 5 * 4];
53 static TCGv cpu_dregs[8];
54 static TCGv cpu_aregs[8];
55 static TCGv_i64 cpu_macc[4];
56 
57 #define REG(insn, pos)  (((insn) >> (pos)) & 7)
58 #define DREG(insn, pos) cpu_dregs[REG(insn, pos)]
59 #define AREG(insn, pos) get_areg(s, REG(insn, pos))
60 #define MACREG(acc)     cpu_macc[acc]
61 #define QREG_SP         get_areg(s, 7)
62 
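/*
 * Sentinel returned by gen_lea()/gen_ea() for invalid addressing modes;
 * it is only ever compared by identity (IS_NULL_QREG) and is never read
 * or written by generated code.
 */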
63 static TCGv NULL_QREG;
64 #define IS_NULL_QREG(t) (t == NULL_QREG)
65 /* Used to distinguish stores from bad addressing modes.  */
66 static TCGv store_dummy;
67 
68 #include "exec/gen-icount.h"
69 
70 void m68k_tcg_init(void)
71 {
72     char *p;
73     int i;
74 
75 #define DEFO32(name, offset) \
76     QREG_##name = tcg_global_mem_new_i32(cpu_env, \
77         offsetof(CPUM68KState, offset), #name);
78 #define DEFO64(name, offset) \
79     QREG_##name = tcg_global_mem_new_i64(cpu_env, \
80         offsetof(CPUM68KState, offset), #name);
81 #include "qregs.h.inc"
82 #undef DEFO32
83 #undef DEFO64
84 
85     cpu_halted = tcg_global_mem_new_i32(cpu_env,
86                                         -offsetof(M68kCPU, env) +
87                                         offsetof(CPUState, halted), "HALTED");
88     cpu_exception_index = tcg_global_mem_new_i32(cpu_env,
89                                                  -offsetof(M68kCPU, env) +
90                                                  offsetof(CPUState, exception_index),
91                                                  "EXCEPTION");
92 
93     p = cpu_reg_names;
94     for (i = 0; i < 8; i++) {
95         sprintf(p, "D%d", i);
96         cpu_dregs[i] = tcg_global_mem_new(cpu_env,
97                                           offsetof(CPUM68KState, dregs[i]), p);
98         p += 3;
99         sprintf(p, "A%d", i);
100         cpu_aregs[i] = tcg_global_mem_new(cpu_env,
101                                           offsetof(CPUM68KState, aregs[i]), p);
102         p += 3;
103     }
104     for (i = 0; i < 4; i++) {
105         sprintf(p, "ACC%d", i);
106         cpu_macc[i] = tcg_global_mem_new_i64(cpu_env,
107                                          offsetof(CPUM68KState, macc[i]), p);
108         p += 5;
109     }
110 
111     NULL_QREG = tcg_global_mem_new(cpu_env, -4, "NULL");
112     store_dummy = tcg_global_mem_new(cpu_env, -8, "NULL");
113 }
114 
115 /* internal defines */
116 typedef struct DisasContext {
117     DisasContextBase base;
118     CPUM68KState *env;
119     target_ulong pc;
120     target_ulong pc_prev;
121     CCOp cc_op; /* Current CC operation */
122     int cc_op_synced;
123     TCGv_i64 mactmp;
124     int done_mac;
125     int writeback_mask;
126     TCGv writeback[8];
127     bool ss_active;
128 } DisasContext;
129 
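/*
 * Address register updates from (An)+ and -(An) addressing modes are
 * staged in writeback[]/writeback_mask and only copied into cpu_aregs[]
 * by do_writebacks(); until then get_areg() returns the staged value,
 * so a faulting access does not commit a partial register update.
 */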
130 static TCGv get_areg(DisasContext *s, unsigned regno)
131 {
132     if (s->writeback_mask & (1 << regno)) {
133         return s->writeback[regno];
134     } else {
135         return cpu_aregs[regno];
136     }
137 }
138 
139 static void delay_set_areg(DisasContext *s, unsigned regno,
140                            TCGv val, bool give_temp)
141 {
142     if (s->writeback_mask & (1 << regno)) {
143         if (give_temp) {
144             s->writeback[regno] = val;
145         } else {
146             tcg_gen_mov_i32(s->writeback[regno], val);
147         }
148     } else {
149         s->writeback_mask |= 1 << regno;
150         if (give_temp) {
151             s->writeback[regno] = val;
152         } else {
153             TCGv tmp = tcg_temp_new();
154             s->writeback[regno] = tmp;
155             tcg_gen_mov_i32(tmp, val);
156         }
157     }
158 }
159 
160 static void do_writebacks(DisasContext *s)
161 {
162     unsigned mask = s->writeback_mask;
163     if (mask) {
164         s->writeback_mask = 0;
165         do {
166             unsigned regno = ctz32(mask);
167             tcg_gen_mov_i32(cpu_aregs[regno], s->writeback[regno]);
168             mask &= mask - 1;
169         } while (mask);
170     }
171 }
172 
173 /* is_jmp field values */
174 #define DISAS_JUMP      DISAS_TARGET_0 /* only pc was modified dynamically */
175 #define DISAS_EXIT      DISAS_TARGET_1 /* cpu state was modified dynamically */
176 
177 #if defined(CONFIG_USER_ONLY)
178 #define IS_USER(s) 1
179 #else
180 #define IS_USER(s)   (!(s->base.tb->flags & TB_FLAGS_MSR_S))
181 #define SFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_SFC_S) ? \
182                       MMU_KERNEL_IDX : MMU_USER_IDX)
183 #define DFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_DFC_S) ? \
184                       MMU_KERNEL_IDX : MMU_USER_IDX)
185 #endif
186 
187 typedef void (*disas_proc)(CPUM68KState *env, DisasContext *s, uint16_t insn);
188 
189 #ifdef DEBUG_DISPATCH
190 #define DISAS_INSN(name)                                                \
191     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
192                                   uint16_t insn);                       \
193     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
194                              uint16_t insn)                             \
195     {                                                                   \
196         qemu_log("Dispatch " #name "\n");                               \
197         real_disas_##name(env, s, insn);                                \
198     }                                                                   \
199     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
200                                   uint16_t insn)
201 #else
202 #define DISAS_INSN(name)                                                \
203     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
204                              uint16_t insn)
205 #endif
206 
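/*
 * For each CC_OP, the set of QREG_CC_* values that hold live data.
 * When switching ops, set_cc_op() discards whatever is live in the old
 * op but not in the new one (X and N are never discarded).
 */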
207 static const uint8_t cc_op_live[CC_OP_NB] = {
208     [CC_OP_DYNAMIC] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
209     [CC_OP_FLAGS] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
210     [CC_OP_ADDB ... CC_OP_ADDL] = CCF_X | CCF_N | CCF_V,
211     [CC_OP_SUBB ... CC_OP_SUBL] = CCF_X | CCF_N | CCF_V,
212     [CC_OP_CMPB ... CC_OP_CMPL] = CCF_X | CCF_N | CCF_V,
213     [CC_OP_LOGIC] = CCF_X | CCF_N
214 };
215 
216 static void set_cc_op(DisasContext *s, CCOp op)
217 {
218     CCOp old_op = s->cc_op;
219     int dead;
220 
221     if (old_op == op) {
222         return;
223     }
224     s->cc_op = op;
225     s->cc_op_synced = 0;
226 
227     /*
228      * Discard CC computation that will no longer be used.
229      * Note that X and N are never dead.
230      */
231     dead = cc_op_live[old_op] & ~cc_op_live[op];
232     if (dead & CCF_C) {
233         tcg_gen_discard_i32(QREG_CC_C);
234     }
235     if (dead & CCF_Z) {
236         tcg_gen_discard_i32(QREG_CC_Z);
237     }
238     if (dead & CCF_V) {
239         tcg_gen_discard_i32(QREG_CC_V);
240     }
241 }
242 
243 /* Update the CPU env CC_OP state.  */
244 static void update_cc_op(DisasContext *s)
245 {
246     if (!s->cc_op_synced) {
247         s->cc_op_synced = 1;
248         tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
249     }
250 }
251 
252 /* Generate a jump to an immediate address.  */
253 static void gen_jmp_im(DisasContext *s, uint32_t dest)
254 {
255     update_cc_op(s);
256     tcg_gen_movi_i32(QREG_PC, dest);
257     s->base.is_jmp = DISAS_JUMP;
258 }
259 
260 /* Generate a jump to the address in qreg DEST.  */
261 static void gen_jmp(DisasContext *s, TCGv dest)
262 {
263     update_cc_op(s);
264     tcg_gen_mov_i32(QREG_PC, dest);
265     s->base.is_jmp = DISAS_JUMP;
266 }
267 
268 static void gen_raise_exception(int nr)
269 {
270     gen_helper_raise_exception(cpu_env, tcg_constant_i32(nr));
271 }
272 
273 static void gen_raise_exception_format2(DisasContext *s, int nr,
274                                         target_ulong this_pc)
275 {
276     /*
277      * Pass the address of the insn to the exception handler,
278      * for recording in the Format $2 (6-word) stack frame.
279      * Re-use mmu.ar for the purpose, since that's only valid
280      * after tlb_fill.
281      */
282     tcg_gen_st_i32(tcg_constant_i32(this_pc), cpu_env,
283                    offsetof(CPUM68KState, mmu.ar));
284     gen_raise_exception(nr);
285     s->base.is_jmp = DISAS_NORETURN;
286 }
287 
288 static void gen_exception(DisasContext *s, uint32_t dest, int nr)
289 {
290     update_cc_op(s);
291     tcg_gen_movi_i32(QREG_PC, dest);
292 
293     gen_raise_exception(nr);
294 
295     s->base.is_jmp = DISAS_NORETURN;
296 }
297 
298 static inline void gen_addr_fault(DisasContext *s)
299 {
300     gen_exception(s, s->base.pc_next, EXCP_ADDRESS);
301 }
302 
303 /*
304  * Generate a load from the specified address.  Narrow values are
305  * sign or zero extended to full register width according to SIGN.
306  */
307 static inline TCGv gen_load(DisasContext *s, int opsize, TCGv addr,
308                             int sign, int index)
309 {
310     TCGv tmp = tcg_temp_new_i32();
311 
312     switch (opsize) {
313     case OS_BYTE:
314     case OS_WORD:
315     case OS_LONG:
316         tcg_gen_qemu_ld_tl(tmp, addr, index,
317                            opsize | (sign ? MO_SIGN : 0) | MO_TE);
318         break;
319     default:
320         g_assert_not_reached();
321     }
322     return tmp;
323 }
324 
325 /* Generate a store.  */
326 static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val,
327                              int index)
328 {
329     switch (opsize) {
330     case OS_BYTE:
331     case OS_WORD:
332     case OS_LONG:
333         tcg_gen_qemu_st_tl(val, addr, index, opsize | MO_TE);
334         break;
335     default:
336         g_assert_not_reached();
337     }
338 }
339 
340 typedef enum {
341     EA_STORE,
342     EA_LOADU,
343     EA_LOADS
344 } ea_what;
345 
346 /*
347  * Generate an unsigned load if WHAT is EA_LOADU, a signed load if WHAT is
348  * EA_LOADS; otherwise generate a store.
349  */
350 static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
351                      ea_what what, int index)
352 {
353     if (what == EA_STORE) {
354         gen_store(s, opsize, addr, val, index);
355         return store_dummy;
356     } else {
357         return gen_load(s, opsize, addr, what == EA_LOADS, index);
358     }
359 }
360 
361 /* Read a 16-bit immediate constant */
362 static inline uint16_t read_im16(CPUM68KState *env, DisasContext *s)
363 {
364     uint16_t im;
365     im = translator_lduw(env, &s->base, s->pc);
366     s->pc += 2;
367     return im;
368 }
369 
370 /* Read an 8-bit immediate constant */
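/* A byte immediate occupies the low byte of a 16-bit extension word.  */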
371 static inline uint8_t read_im8(CPUM68KState *env, DisasContext *s)
372 {
373     return read_im16(env, s);
374 }
375 
376 /* Read a 32-bit immediate constant.  */
377 static inline uint32_t read_im32(CPUM68KState *env, DisasContext *s)
378 {
379     uint32_t im;
380     im = read_im16(env, s) << 16;
381     im |= 0xffff & read_im16(env, s);
382     return im;
383 }
384 
385 /* Read a 64-bit immediate constant.  */
386 static inline uint64_t read_im64(CPUM68KState *env, DisasContext *s)
387 {
388     uint64_t im;
389     im = (uint64_t)read_im32(env, s) << 32;
390     im |= (uint64_t)read_im32(env, s);
391     return im;
392 }
393 
394 /* Calculate an address index.  */
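/*
 * Both brief and full extension words encode the index register as:
 *   bit 15      D/A   (0 = data register, 1 = address register)
 *   bits 14-12  index register number
 *   bit 11      W/L   (0 = sign-extended word, 1 = long)
 *   bits 10-9   scale (shift count 0-3, i.e. x1/x2/x4/x8)
 */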
395 static TCGv gen_addr_index(DisasContext *s, uint16_t ext, TCGv tmp)
396 {
397     TCGv add;
398     int scale;
399 
400     add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
401     if ((ext & 0x800) == 0) {
402         tcg_gen_ext16s_i32(tmp, add);
403         add = tmp;
404     }
405     scale = (ext >> 9) & 3;
406     if (scale != 0) {
407         tcg_gen_shli_i32(tmp, add, scale);
408         add = tmp;
409     }
410     return add;
411 }
412 
413 /*
414  * Handle a base + index + displacement effective address.
415  * A NULL_QREG base means pc-relative.
416  */
417 static TCGv gen_lea_indexed(CPUM68KState *env, DisasContext *s, TCGv base)
418 {
419     uint32_t offset;
420     uint16_t ext;
421     TCGv add;
422     TCGv tmp;
423     uint32_t bd, od;
424 
425     offset = s->pc;
426     ext = read_im16(env, s);
427 
428     if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
429         return NULL_QREG;
430 
431     if (m68k_feature(s->env, M68K_FEATURE_M68K) &&
432         !m68k_feature(s->env, M68K_FEATURE_SCALED_INDEX)) {
433         ext &= ~(3 << 9);
434     }
435 
436     if (ext & 0x100) {
437         /* full extension word format */
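        /*
         * Full format: BS (base suppress) = bit 7, IS (index suppress) = bit 6,
         * BD SIZE = bits 5-4 (01 null, 10 word, 11 long),
         * I/IS = bits 2-0 (memory indirection and pre/post-indexing).
         */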
438         if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
439             return NULL_QREG;
440 
441         if ((ext & 0x30) > 0x10) {
442             /* base displacement */
443             if ((ext & 0x30) == 0x20) {
444                 bd = (int16_t)read_im16(env, s);
445             } else {
446                 bd = read_im32(env, s);
447             }
448         } else {
449             bd = 0;
450         }
451         tmp = tcg_temp_new();
452         if ((ext & 0x44) == 0) {
453             /* pre-index */
454             add = gen_addr_index(s, ext, tmp);
455         } else {
456             add = NULL_QREG;
457         }
458         if ((ext & 0x80) == 0) {
459             /* base not suppressed */
460             if (IS_NULL_QREG(base)) {
461                 base = tcg_constant_i32(offset + bd);
462                 bd = 0;
463             }
464             if (!IS_NULL_QREG(add)) {
465                 tcg_gen_add_i32(tmp, add, base);
466                 add = tmp;
467             } else {
468                 add = base;
469             }
470         }
471         if (!IS_NULL_QREG(add)) {
472             if (bd != 0) {
473                 tcg_gen_addi_i32(tmp, add, bd);
474                 add = tmp;
475             }
476         } else {
477             add = tcg_constant_i32(bd);
478         }
479         if ((ext & 3) != 0) {
480             /* memory indirect */
481             base = gen_load(s, OS_LONG, add, 0, IS_USER(s));
482             if ((ext & 0x44) == 4) {
483                 add = gen_addr_index(s, ext, tmp);
484                 tcg_gen_add_i32(tmp, add, base);
485                 add = tmp;
486             } else {
487                 add = base;
488             }
489             if ((ext & 3) > 1) {
490                 /* outer displacement */
491                 if ((ext & 3) == 2) {
492                     od = (int16_t)read_im16(env, s);
493                 } else {
494                     od = read_im32(env, s);
495                 }
496             } else {
497                 od = 0;
498             }
499             if (od != 0) {
500                 tcg_gen_addi_i32(tmp, add, od);
501                 add = tmp;
502             }
503         }
504     } else {
505         /* brief extension word format */
506         tmp = tcg_temp_new();
507         add = gen_addr_index(s, ext, tmp);
508         if (!IS_NULL_QREG(base)) {
509             tcg_gen_add_i32(tmp, add, base);
510             if ((int8_t)ext)
511                 tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
512         } else {
513             tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
514         }
515         add = tmp;
516     }
517     return add;
518 }
519 
520 /* Sign or zero extend a value.  */
521 
522 static inline void gen_ext(TCGv res, TCGv val, int opsize, int sign)
523 {
524     switch (opsize) {
525     case OS_BYTE:
526         if (sign) {
527             tcg_gen_ext8s_i32(res, val);
528         } else {
529             tcg_gen_ext8u_i32(res, val);
530         }
531         break;
532     case OS_WORD:
533         if (sign) {
534             tcg_gen_ext16s_i32(res, val);
535         } else {
536             tcg_gen_ext16u_i32(res, val);
537         }
538         break;
539     case OS_LONG:
540         tcg_gen_mov_i32(res, val);
541         break;
542     default:
543         g_assert_not_reached();
544     }
545 }
546 
547 /* Evaluate all the CC flags.  */
548 
549 static void gen_flush_flags(DisasContext *s)
550 {
551     TCGv t0, t1;
552 
553     switch (s->cc_op) {
554     case CC_OP_FLAGS:
555         return;
556 
557     case CC_OP_ADDB:
558     case CC_OP_ADDW:
559     case CC_OP_ADDL:
560         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
561         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
562         /* Compute signed overflow for addition.  */
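        /*
         * V := (res ^ src) & ~(src ^ dest): set iff both addends have the
         * same sign and the result's sign differs.
         */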
563         t0 = tcg_temp_new();
564         t1 = tcg_temp_new();
565         tcg_gen_sub_i32(t0, QREG_CC_N, QREG_CC_V);
566         gen_ext(t0, t0, s->cc_op - CC_OP_ADDB, 1);
567         tcg_gen_xor_i32(t1, QREG_CC_N, QREG_CC_V);
568         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
569         tcg_gen_andc_i32(QREG_CC_V, t1, QREG_CC_V);
570         break;
571 
572     case CC_OP_SUBB:
573     case CC_OP_SUBW:
574     case CC_OP_SUBL:
575         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
576         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
577         /* Compute signed overflow for subtraction.  */
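        /*
         * V := (src ^ dest) & (res ^ dest): set iff the operands differ in
         * sign and the result's sign differs from dest.
         */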
578         t0 = tcg_temp_new();
579         t1 = tcg_temp_new();
580         tcg_gen_add_i32(t0, QREG_CC_N, QREG_CC_V);
581         gen_ext(t0, t0, s->cc_op - CC_OP_SUBB, 1);
582         tcg_gen_xor_i32(t1, QREG_CC_N, t0);
583         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
584         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t1);
585         break;
586 
587     case CC_OP_CMPB:
588     case CC_OP_CMPW:
589     case CC_OP_CMPL:
590         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_C, QREG_CC_N, QREG_CC_V);
591         tcg_gen_sub_i32(QREG_CC_Z, QREG_CC_N, QREG_CC_V);
592         gen_ext(QREG_CC_Z, QREG_CC_Z, s->cc_op - CC_OP_CMPB, 1);
593         /* Compute signed overflow for subtraction.  */
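        /*
         * Same formula as SUB: V := (src ^ dest) & (res ^ dest); N is then
         * replaced by the result so that N, Z and V all describe dest - src.
         */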
594         t0 = tcg_temp_new();
595         tcg_gen_xor_i32(t0, QREG_CC_Z, QREG_CC_N);
596         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, QREG_CC_N);
597         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t0);
598         tcg_gen_mov_i32(QREG_CC_N, QREG_CC_Z);
599         break;
600 
601     case CC_OP_LOGIC:
602         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
603         tcg_gen_movi_i32(QREG_CC_C, 0);
604         tcg_gen_movi_i32(QREG_CC_V, 0);
605         break;
606 
607     case CC_OP_DYNAMIC:
608         gen_helper_flush_flags(cpu_env, QREG_CC_OP);
609         s->cc_op_synced = 1;
610         break;
611 
612     default:
613         gen_helper_flush_flags(cpu_env, tcg_constant_i32(s->cc_op));
614         s->cc_op_synced = 1;
615         break;
616     }
617 
618     /* Note that the flush_flags helper also assigns CC_OP_FLAGS to env->cc_op.  */
619     s->cc_op = CC_OP_FLAGS;
620 }
621 
622 static inline TCGv gen_extend(DisasContext *s, TCGv val, int opsize, int sign)
623 {
624     TCGv tmp;
625 
626     if (opsize == OS_LONG) {
627         tmp = val;
628     } else {
629         tmp = tcg_temp_new();
630         gen_ext(tmp, val, opsize, sign);
631     }
632 
633     return tmp;
634 }
635 
636 static void gen_logic_cc(DisasContext *s, TCGv val, int opsize)
637 {
638     gen_ext(QREG_CC_N, val, opsize, 1);
639     set_cc_op(s, CC_OP_LOGIC);
640 }
641 
642 static void gen_update_cc_cmp(DisasContext *s, TCGv dest, TCGv src, int opsize)
643 {
644     tcg_gen_mov_i32(QREG_CC_N, dest);
645     tcg_gen_mov_i32(QREG_CC_V, src);
646     set_cc_op(s, CC_OP_CMPB + opsize);
647 }
648 
649 static void gen_update_cc_add(TCGv dest, TCGv src, int opsize)
650 {
651     gen_ext(QREG_CC_N, dest, opsize, 1);
652     tcg_gen_mov_i32(QREG_CC_V, src);
653 }
654 
655 static inline int opsize_bytes(int opsize)
656 {
657     switch (opsize) {
658     case OS_BYTE: return 1;
659     case OS_WORD: return 2;
660     case OS_LONG: return 4;
661     case OS_SINGLE: return 4;
662     case OS_DOUBLE: return 8;
663     case OS_EXTENDED: return 12;
664     case OS_PACKED: return 12;
665     default:
666         g_assert_not_reached();
667     }
668 }
669 
670 static inline int insn_opsize(int insn)
671 {
672     switch ((insn >> 6) & 3) {
673     case 0: return OS_BYTE;
674     case 1: return OS_WORD;
675     case 2: return OS_LONG;
676     default:
677         g_assert_not_reached();
678     }
679 }
680 
681 static inline int ext_opsize(int ext, int pos)
682 {
683     switch ((ext >> pos) & 7) {
684     case 0: return OS_LONG;
685     case 1: return OS_SINGLE;
686     case 2: return OS_EXTENDED;
687     case 3: return OS_PACKED;
688     case 4: return OS_WORD;
689     case 5: return OS_DOUBLE;
690     case 6: return OS_BYTE;
691     default:
692         g_assert_not_reached();
693     }
694 }
695 
696 /*
697  * Assign value to a register.  If the width is less than the register width
698  * only the low part of the register is set.
699  */
700 static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
701 {
702     TCGv tmp;
703     switch (opsize) {
704     case OS_BYTE:
705         tcg_gen_andi_i32(reg, reg, 0xffffff00);
706         tmp = tcg_temp_new();
707         tcg_gen_ext8u_i32(tmp, val);
708         tcg_gen_or_i32(reg, reg, tmp);
709         break;
710     case OS_WORD:
711         tcg_gen_andi_i32(reg, reg, 0xffff0000);
712         tmp = tcg_temp_new();
713         tcg_gen_ext16u_i32(tmp, val);
714         tcg_gen_or_i32(reg, reg, tmp);
715         break;
716     case OS_LONG:
717     case OS_SINGLE:
718         tcg_gen_mov_i32(reg, val);
719         break;
720     default:
721         g_assert_not_reached();
722     }
723 }
724 
725 /*
726  * Generate code for an "effective address".  Does not adjust the base
727  * register for autoincrement addressing modes.
728  */
729 static TCGv gen_lea_mode(CPUM68KState *env, DisasContext *s,
730                          int mode, int reg0, int opsize)
731 {
732     TCGv reg;
733     TCGv tmp;
734     uint16_t ext;
735     uint32_t offset;
736 
737     switch (mode) {
738     case 0: /* Data register direct.  */
739     case 1: /* Address register direct.  */
740         return NULL_QREG;
741     case 3: /* Indirect postincrement.  */
742         if (opsize == OS_UNSIZED) {
743             return NULL_QREG;
744         }
745         /* fallthru */
746     case 2: /* Indirect register */
747         return get_areg(s, reg0);
748     case 4: /* Indirect predecrement.  */
749         if (opsize == OS_UNSIZED) {
750             return NULL_QREG;
751         }
752         reg = get_areg(s, reg0);
753         tmp = tcg_temp_new();
754         if (reg0 == 7 && opsize == OS_BYTE &&
755             m68k_feature(s->env, M68K_FEATURE_M68K)) {
756             tcg_gen_subi_i32(tmp, reg, 2);
757         } else {
758             tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
759         }
760         return tmp;
761     case 5: /* Indirect displacement.  */
762         reg = get_areg(s, reg0);
763         tmp = tcg_temp_new();
764         ext = read_im16(env, s);
765         tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
766         return tmp;
767     case 6: /* Indirect index + displacement.  */
768         reg = get_areg(s, reg0);
769         return gen_lea_indexed(env, s, reg);
770     case 7: /* Other */
771         switch (reg0) {
772         case 0: /* Absolute short.  */
773             offset = (int16_t)read_im16(env, s);
774             return tcg_constant_i32(offset);
775         case 1: /* Absolute long.  */
776             offset = read_im32(env, s);
777             return tcg_constant_i32(offset);
778         case 2: /* pc displacement  */
779             offset = s->pc;
780             offset += (int16_t)read_im16(env, s);
781             return tcg_constant_i32(offset);
782         case 3: /* pc index+displacement.  */
783             return gen_lea_indexed(env, s, NULL_QREG);
784         case 4: /* Immediate.  */
785         default:
786             return NULL_QREG;
787         }
788     }
789     /* Should never happen.  */
790     return NULL_QREG;
791 }
792 
793 static TCGv gen_lea(CPUM68KState *env, DisasContext *s, uint16_t insn,
794                     int opsize)
795 {
796     int mode = extract32(insn, 3, 3);
797     int reg0 = REG(insn, 0);
798     return gen_lea_mode(env, s, mode, reg0, opsize);
799 }
800 
801 /*
802  * Generate code to load/store a value from/into an EA.  WHAT selects a write
803  * (EA_STORE) or a read (EA_LOADS sign extends, EA_LOADU zero extends).
804  * ADDRP is non-null for readwrite operands.
805  */
806 static TCGv gen_ea_mode(CPUM68KState *env, DisasContext *s, int mode, int reg0,
807                         int opsize, TCGv val, TCGv *addrp, ea_what what,
808                         int index)
809 {
810     TCGv reg, tmp, result;
811     int32_t offset;
812 
813     switch (mode) {
814     case 0: /* Data register direct.  */
815         reg = cpu_dregs[reg0];
816         if (what == EA_STORE) {
817             gen_partset_reg(opsize, reg, val);
818             return store_dummy;
819         } else {
820             return gen_extend(s, reg, opsize, what == EA_LOADS);
821         }
822     case 1: /* Address register direct.  */
823         reg = get_areg(s, reg0);
824         if (what == EA_STORE) {
825             tcg_gen_mov_i32(reg, val);
826             return store_dummy;
827         } else {
828             return gen_extend(s, reg, opsize, what == EA_LOADS);
829         }
830     case 2: /* Indirect register */
831         reg = get_areg(s, reg0);
832         return gen_ldst(s, opsize, reg, val, what, index);
833     case 3: /* Indirect postincrement.  */
834         reg = get_areg(s, reg0);
835         result = gen_ldst(s, opsize, reg, val, what, index);
836         if (what == EA_STORE || !addrp) {
837             TCGv tmp = tcg_temp_new();
838             if (reg0 == 7 && opsize == OS_BYTE &&
839                 m68k_feature(s->env, M68K_FEATURE_M68K)) {
840                 tcg_gen_addi_i32(tmp, reg, 2);
841             } else {
842                 tcg_gen_addi_i32(tmp, reg, opsize_bytes(opsize));
843             }
844             delay_set_areg(s, reg0, tmp, true);
845         }
846         return result;
847     case 4: /* Indirect predecrement.  */
848         if (addrp && what == EA_STORE) {
849             tmp = *addrp;
850         } else {
851             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
852             if (IS_NULL_QREG(tmp)) {
853                 return tmp;
854             }
855             if (addrp) {
856                 *addrp = tmp;
857             }
858         }
859         result = gen_ldst(s, opsize, tmp, val, what, index);
860         if (what == EA_STORE || !addrp) {
861             delay_set_areg(s, reg0, tmp, false);
862         }
863         return result;
864     case 5: /* Indirect displacement.  */
865     case 6: /* Indirect index + displacement.  */
866     do_indirect:
867         if (addrp && what == EA_STORE) {
868             tmp = *addrp;
869         } else {
870             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
871             if (IS_NULL_QREG(tmp)) {
872                 return tmp;
873             }
874             if (addrp) {
875                 *addrp = tmp;
876             }
877         }
878         return gen_ldst(s, opsize, tmp, val, what, index);
879     case 7: /* Other */
880         switch (reg0) {
881         case 0: /* Absolute short.  */
882         case 1: /* Absolute long.  */
883         case 2: /* pc displacement  */
884         case 3: /* pc index+displacement.  */
885             goto do_indirect;
886         case 4: /* Immediate.  */
887             /* Sign extend values for consistency.  */
888             switch (opsize) {
889             case OS_BYTE:
890                 if (what == EA_LOADS) {
891                     offset = (int8_t)read_im8(env, s);
892                 } else {
893                     offset = read_im8(env, s);
894                 }
895                 break;
896             case OS_WORD:
897                 if (what == EA_LOADS) {
898                     offset = (int16_t)read_im16(env, s);
899                 } else {
900                     offset = read_im16(env, s);
901                 }
902                 break;
903             case OS_LONG:
904                 offset = read_im32(env, s);
905                 break;
906             default:
907                 g_assert_not_reached();
908             }
909             return tcg_constant_i32(offset);
910         default:
911             return NULL_QREG;
912         }
913     }
914     /* Should never happen.  */
915     return NULL_QREG;
916 }
917 
918 static TCGv gen_ea(CPUM68KState *env, DisasContext *s, uint16_t insn,
919                    int opsize, TCGv val, TCGv *addrp, ea_what what, int index)
920 {
921     int mode = extract32(insn, 3, 3);
922     int reg0 = REG(insn, 0);
923     return gen_ea_mode(env, s, mode, reg0, opsize, val, addrp, what, index);
924 }
925 
926 static TCGv_ptr gen_fp_ptr(int freg)
927 {
928     TCGv_ptr fp = tcg_temp_new_ptr();
929     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fregs[freg]));
930     return fp;
931 }
932 
933 static TCGv_ptr gen_fp_result_ptr(void)
934 {
935     TCGv_ptr fp = tcg_temp_new_ptr();
936     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fp_result));
937     return fp;
938 }
939 
940 static void gen_fp_move(TCGv_ptr dest, TCGv_ptr src)
941 {
942     TCGv t32;
943     TCGv_i64 t64;
944 
945     t32 = tcg_temp_new();
946     tcg_gen_ld16u_i32(t32, src, offsetof(FPReg, l.upper));
947     tcg_gen_st16_i32(t32, dest, offsetof(FPReg, l.upper));
948 
949     t64 = tcg_temp_new_i64();
950     tcg_gen_ld_i64(t64, src, offsetof(FPReg, l.lower));
951     tcg_gen_st_i64(t64, dest, offsetof(FPReg, l.lower));
952 }
953 
954 static void gen_load_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
955                         int index)
956 {
957     TCGv tmp;
958     TCGv_i64 t64;
959 
960     t64 = tcg_temp_new_i64();
961     tmp = tcg_temp_new();
962     switch (opsize) {
963     case OS_BYTE:
964     case OS_WORD:
965     case OS_LONG:
966         tcg_gen_qemu_ld_tl(tmp, addr, index, opsize | MO_SIGN | MO_TE);
967         gen_helper_exts32(cpu_env, fp, tmp);
968         break;
969     case OS_SINGLE:
970         tcg_gen_qemu_ld_tl(tmp, addr, index, MO_TEUL);
971         gen_helper_extf32(cpu_env, fp, tmp);
972         break;
973     case OS_DOUBLE:
974         tcg_gen_qemu_ld_i64(t64, addr, index, MO_TEUQ);
975         gen_helper_extf64(cpu_env, fp, t64);
976         break;
977     case OS_EXTENDED:
978         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
979             gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
980             break;
981         }
982         tcg_gen_qemu_ld_i32(tmp, addr, index, MO_TEUL);
983         tcg_gen_shri_i32(tmp, tmp, 16);
984         tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
985         tcg_gen_addi_i32(tmp, addr, 4);
986         tcg_gen_qemu_ld_i64(t64, tmp, index, MO_TEUQ);
987         tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
988         break;
989     case OS_PACKED:
990         /*
991          * unimplemented data type on 68040/ColdFire
992          * FIXME if needed for another FPU
993          */
994         gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
995         break;
996     default:
997         g_assert_not_reached();
998     }
999 }
1000 
1001 static void gen_store_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
1002                          int index)
1003 {
1004     TCGv tmp;
1005     TCGv_i64 t64;
1006 
1007     t64 = tcg_temp_new_i64();
1008     tmp = tcg_temp_new();
1009     switch (opsize) {
1010     case OS_BYTE:
1011     case OS_WORD:
1012     case OS_LONG:
1013         gen_helper_reds32(tmp, cpu_env, fp);
1014         tcg_gen_qemu_st_tl(tmp, addr, index, opsize | MO_TE);
1015         break;
1016     case OS_SINGLE:
1017         gen_helper_redf32(tmp, cpu_env, fp);
1018         tcg_gen_qemu_st_tl(tmp, addr, index, MO_TEUL);
1019         break;
1020     case OS_DOUBLE:
1021         gen_helper_redf64(t64, cpu_env, fp);
1022         tcg_gen_qemu_st_i64(t64, addr, index, MO_TEUQ);
1023         break;
1024     case OS_EXTENDED:
1025         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1026             gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1027             break;
1028         }
1029         tcg_gen_ld16u_i32(tmp, fp, offsetof(FPReg, l.upper));
1030         tcg_gen_shli_i32(tmp, tmp, 16);
1031         tcg_gen_qemu_st_i32(tmp, addr, index, MO_TEUL);
1032         tcg_gen_addi_i32(tmp, addr, 4);
1033         tcg_gen_ld_i64(t64, fp, offsetof(FPReg, l.lower));
1034         tcg_gen_qemu_st_i64(t64, tmp, index, MO_TEUQ);
1035         break;
1036     case OS_PACKED:
1037         /*
1038          * unimplemented data type on 68040/ColdFire
1039          * FIXME if needed for another FPU
1040          */
1041         gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1042         break;
1043     default:
1044         g_assert_not_reached();
1045     }
1046 }
1047 
1048 static void gen_ldst_fp(DisasContext *s, int opsize, TCGv addr,
1049                         TCGv_ptr fp, ea_what what, int index)
1050 {
1051     if (what == EA_STORE) {
1052         gen_store_fp(s, opsize, addr, fp, index);
1053     } else {
1054         gen_load_fp(s, opsize, addr, fp, index);
1055     }
1056 }
1057 
1058 static int gen_ea_mode_fp(CPUM68KState *env, DisasContext *s, int mode,
1059                           int reg0, int opsize, TCGv_ptr fp, ea_what what,
1060                           int index)
1061 {
1062     TCGv reg, addr, tmp;
1063     TCGv_i64 t64;
1064 
1065     switch (mode) {
1066     case 0: /* Data register direct.  */
1067         reg = cpu_dregs[reg0];
1068         if (what == EA_STORE) {
1069             switch (opsize) {
1070             case OS_BYTE:
1071             case OS_WORD:
1072             case OS_LONG:
1073                 gen_helper_reds32(reg, cpu_env, fp);
1074                 break;
1075             case OS_SINGLE:
1076                 gen_helper_redf32(reg, cpu_env, fp);
1077                 break;
1078             default:
1079                 g_assert_not_reached();
1080             }
1081         } else {
1082             tmp = tcg_temp_new();
1083             switch (opsize) {
1084             case OS_BYTE:
1085                 tcg_gen_ext8s_i32(tmp, reg);
1086                 gen_helper_exts32(cpu_env, fp, tmp);
1087                 break;
1088             case OS_WORD:
1089                 tcg_gen_ext16s_i32(tmp, reg);
1090                 gen_helper_exts32(cpu_env, fp, tmp);
1091                 break;
1092             case OS_LONG:
1093                 gen_helper_exts32(cpu_env, fp, reg);
1094                 break;
1095             case OS_SINGLE:
1096                 gen_helper_extf32(cpu_env, fp, reg);
1097                 break;
1098             default:
1099                 g_assert_not_reached();
1100             }
1101         }
1102         return 0;
1103     case 1: /* Address register direct.  */
1104         return -1;
1105     case 2: /* Indirect register */
1106         addr = get_areg(s, reg0);
1107         gen_ldst_fp(s, opsize, addr, fp, what, index);
1108         return 0;
1109     case 3: /* Indirect postincrement.  */
1110         addr = cpu_aregs[reg0];
1111         gen_ldst_fp(s, opsize, addr, fp, what, index);
1112         tcg_gen_addi_i32(addr, addr, opsize_bytes(opsize));
1113         return 0;
1114     case 4: /* Indirect predecrement.  */
1115         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1116         if (IS_NULL_QREG(addr)) {
1117             return -1;
1118         }
1119         gen_ldst_fp(s, opsize, addr, fp, what, index);
1120         tcg_gen_mov_i32(cpu_aregs[reg0], addr);
1121         return 0;
1122     case 5: /* Indirect displacement.  */
1123     case 6: /* Indirect index + displacement.  */
1124     do_indirect:
1125         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1126         if (IS_NULL_QREG(addr)) {
1127             return -1;
1128         }
1129         gen_ldst_fp(s, opsize, addr, fp, what, index);
1130         return 0;
1131     case 7: /* Other */
1132         switch (reg0) {
1133         case 0: /* Absolute short.  */
1134         case 1: /* Absolute long.  */
1135         case 2: /* pc displacement  */
1136         case 3: /* pc index+displacement.  */
1137             goto do_indirect;
1138         case 4: /* Immediate.  */
1139             if (what == EA_STORE) {
1140                 return -1;
1141             }
1142             switch (opsize) {
1143             case OS_BYTE:
1144                 tmp = tcg_constant_i32((int8_t)read_im8(env, s));
1145                 gen_helper_exts32(cpu_env, fp, tmp);
1146                 break;
1147             case OS_WORD:
1148                 tmp = tcg_constant_i32((int16_t)read_im16(env, s));
1149                 gen_helper_exts32(cpu_env, fp, tmp);
1150                 break;
1151             case OS_LONG:
1152                 tmp = tcg_constant_i32(read_im32(env, s));
1153                 gen_helper_exts32(cpu_env, fp, tmp);
1154                 break;
1155             case OS_SINGLE:
1156                 tmp = tcg_constant_i32(read_im32(env, s));
1157                 gen_helper_extf32(cpu_env, fp, tmp);
1158                 break;
1159             case OS_DOUBLE:
1160                 t64 = tcg_constant_i64(read_im64(env, s));
1161                 gen_helper_extf64(cpu_env, fp, t64);
1162                 break;
1163             case OS_EXTENDED:
1164                 if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1165                     gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1166                     break;
1167                 }
1168                 tmp = tcg_constant_i32(read_im32(env, s) >> 16);
1169                 tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1170                 t64 = tcg_constant_i64(read_im64(env, s));
1171                 tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1172                 break;
1173             case OS_PACKED:
1174                 /*
1175                  * unimplemented data type on 68040/ColdFire
1176                  * FIXME if needed for another FPU
1177                  */
1178                 gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1179                 break;
1180             default:
1181                 g_assert_not_reached();
1182             }
1183             return 0;
1184         default:
1185             return -1;
1186         }
1187     }
1188     return -1;
1189 }
1190 
1191 static int gen_ea_fp(CPUM68KState *env, DisasContext *s, uint16_t insn,
1192                        int opsize, TCGv_ptr fp, ea_what what, int index)
1193 {
1194     int mode = extract32(insn, 3, 3);
1195     int reg0 = REG(insn, 0);
1196     return gen_ea_mode_fp(env, s, mode, reg0, opsize, fp, what, index);
1197 }
1198 
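/* A condition materialized for TCG: test tcond applied to (v1, v2).  */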
1199 typedef struct {
1200     TCGCond tcond;
1201     TCGv v1;
1202     TCGv v2;
1203 } DisasCompare;
1204 
1205 static void gen_cc_cond(DisasCompare *c, DisasContext *s, int cond)
1206 {
1207     TCGv tmp, tmp2;
1208     TCGCond tcond;
1209     CCOp op = s->cc_op;
1210 
1211     /* The CC_OP_CMP form can handle most normal comparisons directly.  */
1212     if (op == CC_OP_CMPB || op == CC_OP_CMPW || op == CC_OP_CMPL) {
1213         c->v1 = QREG_CC_N;
1214         c->v2 = QREG_CC_V;
1215         switch (cond) {
1216         case 2: /* HI */
1217         case 3: /* LS */
1218             tcond = TCG_COND_LEU;
1219             goto done;
1220         case 4: /* CC */
1221         case 5: /* CS */
1222             tcond = TCG_COND_LTU;
1223             goto done;
1224         case 6: /* NE */
1225         case 7: /* EQ */
1226             tcond = TCG_COND_EQ;
1227             goto done;
1228         case 10: /* PL */
1229         case 11: /* MI */
1230             c->v2 = tcg_constant_i32(0);
1231             c->v1 = tmp = tcg_temp_new();
1232             tcg_gen_sub_i32(tmp, QREG_CC_N, QREG_CC_V);
1233             gen_ext(tmp, tmp, op - CC_OP_CMPB, 1);
1234             /* fallthru */
1235         case 12: /* GE */
1236         case 13: /* LT */
1237             tcond = TCG_COND_LT;
1238             goto done;
1239         case 14: /* GT */
1240         case 15: /* LE */
1241             tcond = TCG_COND_LE;
1242             goto done;
1243         }
1244     }
1245 
1246     c->v2 = tcg_constant_i32(0);
1247 
1248     switch (cond) {
1249     case 0: /* T */
1250     case 1: /* F */
1251         c->v1 = c->v2;
1252         tcond = TCG_COND_NEVER;
1253         goto done;
1254     case 14: /* GT (!(Z || (N ^ V))) */
1255     case 15: /* LE (Z || (N ^ V)) */
1256         /*
1257          * Logic operations clear V, which simplifies LE to (Z || N),
1258          * and since Z and N are co-located, this becomes a normal
1259          * comparison vs N.
1260          */
1261         if (op == CC_OP_LOGIC) {
1262             c->v1 = QREG_CC_N;
1263             tcond = TCG_COND_LE;
1264             goto done;
1265         }
1266         break;
1267     case 12: /* GE (!(N ^ V)) */
1268     case 13: /* LT (N ^ V) */
1269         /* Logic operations clear V, which simplifies this to N.  */
1270         if (op != CC_OP_LOGIC) {
1271             break;
1272         }
1273         /* fallthru */
1274     case 10: /* PL (!N) */
1275     case 11: /* MI (N) */
1276         /* Several cases represent N normally.  */
1277         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1278             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1279             op == CC_OP_LOGIC) {
1280             c->v1 = QREG_CC_N;
1281             tcond = TCG_COND_LT;
1282             goto done;
1283         }
1284         break;
1285     case 6: /* NE (!Z) */
1286     case 7: /* EQ (Z) */
1287         /* Some cases fold Z into N.  */
1288         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1289             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1290             op == CC_OP_LOGIC) {
1291             tcond = TCG_COND_EQ;
1292             c->v1 = QREG_CC_N;
1293             goto done;
1294         }
1295         break;
1296     case 4: /* CC (!C) */
1297     case 5: /* CS (C) */
1298         /* Some cases fold C into X.  */
1299         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1300             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL) {
1301             tcond = TCG_COND_NE;
1302             c->v1 = QREG_CC_X;
1303             goto done;
1304         }
1305         /* fallthru */
1306     case 8: /* VC (!V) */
1307     case 9: /* VS (V) */
1308         /* Logic operations clear V and C.  */
1309         if (op == CC_OP_LOGIC) {
1310             tcond = TCG_COND_NEVER;
1311             c->v1 = c->v2;
1312             goto done;
1313         }
1314         break;
1315     }
1316 
1317     /* Otherwise, flush flag state to CC_OP_FLAGS.  */
1318     gen_flush_flags(s);
1319 
1320     switch (cond) {
1321     case 0: /* T */
1322     case 1: /* F */
1323     default:
1324         /* Invalid, or handled above.  */
1325         abort();
1326     case 2: /* HI (!C && !Z) -> !(C || Z) */
1327     case 3: /* LS (C || Z) */
1328         c->v1 = tmp = tcg_temp_new();
1329         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1330         tcg_gen_or_i32(tmp, tmp, QREG_CC_C);
1331         tcond = TCG_COND_NE;
1332         break;
1333     case 4: /* CC (!C) */
1334     case 5: /* CS (C) */
1335         c->v1 = QREG_CC_C;
1336         tcond = TCG_COND_NE;
1337         break;
1338     case 6: /* NE (!Z) */
1339     case 7: /* EQ (Z) */
1340         c->v1 = QREG_CC_Z;
1341         tcond = TCG_COND_EQ;
1342         break;
1343     case 8: /* VC (!V) */
1344     case 9: /* VS (V) */
1345         c->v1 = QREG_CC_V;
1346         tcond = TCG_COND_LT;
1347         break;
1348     case 10: /* PL (!N) */
1349     case 11: /* MI (N) */
1350         c->v1 = QREG_CC_N;
1351         tcond = TCG_COND_LT;
1352         break;
1353     case 12: /* GE (!(N ^ V)) */
1354     case 13: /* LT (N ^ V) */
1355         c->v1 = tmp = tcg_temp_new();
1356         tcg_gen_xor_i32(tmp, QREG_CC_N, QREG_CC_V);
1357         tcond = TCG_COND_LT;
1358         break;
1359     case 14: /* GT (!(Z || (N ^ V))) */
1360     case 15: /* LE (Z || (N ^ V)) */
1361         c->v1 = tmp = tcg_temp_new();
1362         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1363         tcg_gen_neg_i32(tmp, tmp);
1364         tmp2 = tcg_temp_new();
1365         tcg_gen_xor_i32(tmp2, QREG_CC_N, QREG_CC_V);
1366         tcg_gen_or_i32(tmp, tmp, tmp2);
1367         tcond = TCG_COND_LT;
1368         break;
1369     }
1370 
1371  done:
1372     if ((cond & 1) == 0) {
1373         tcond = tcg_invert_cond(tcond);
1374     }
1375     c->tcond = tcond;
1376 }
1377 
1378 static void gen_jmpcc(DisasContext *s, int cond, TCGLabel *l1)
1379 {
1380     DisasCompare c;
1381 
1382     gen_cc_cond(&c, s, cond);
1383     update_cc_op(s);
1384     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
1385 }
1386 
1387 /* Force a TB lookup after an instruction that changes the CPU state.  */
1388 static void gen_exit_tb(DisasContext *s)
1389 {
1390     update_cc_op(s);
1391     tcg_gen_movi_i32(QREG_PC, s->pc);
1392     s->base.is_jmp = DISAS_EXIT;
1393 }
1394 
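/*
 * Evaluate the effective address encoded in INSN and load from it (SRC_EA)
 * or store VAL to it (DEST_EA).  On an invalid addressing mode, generate an
 * address fault and return from the calling disas function.
 */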
1395 #define SRC_EA(env, result, opsize, op_sign, addrp) do {                \
1396         result = gen_ea(env, s, insn, opsize, NULL_QREG, addrp,         \
1397                         op_sign ? EA_LOADS : EA_LOADU, IS_USER(s));     \
1398         if (IS_NULL_QREG(result)) {                                     \
1399             gen_addr_fault(s);                                          \
1400             return;                                                     \
1401         }                                                               \
1402     } while (0)
1403 
1404 #define DEST_EA(env, insn, opsize, val, addrp) do {                     \
1405         TCGv ea_result = gen_ea(env, s, insn, opsize, val, addrp,       \
1406                                 EA_STORE, IS_USER(s));                  \
1407         if (IS_NULL_QREG(ea_result)) {                                  \
1408             gen_addr_fault(s);                                          \
1409             return;                                                     \
1410         }                                                               \
1411     } while (0)
1412 
1413 /* Generate a direct jump to an immediate address, chaining TBs when possible.  */
1414 static void gen_jmp_tb(DisasContext *s, int n, target_ulong dest,
1415                        target_ulong src)
1416 {
1417     if (unlikely(s->ss_active)) {
1418         update_cc_op(s);
1419         tcg_gen_movi_i32(QREG_PC, dest);
1420         gen_raise_exception_format2(s, EXCP_TRACE, src);
1421     } else if (translator_use_goto_tb(&s->base, dest)) {
1422         tcg_gen_goto_tb(n);
1423         tcg_gen_movi_i32(QREG_PC, dest);
1424         tcg_gen_exit_tb(s->base.tb, n);
1425     } else {
1426         gen_jmp_im(s, dest);
1427         tcg_gen_exit_tb(NULL, 0);
1428     }
1429     s->base.is_jmp = DISAS_NORETURN;
1430 }
1431 
1432 DISAS_INSN(scc)
1433 {
1434     DisasCompare c;
1435     int cond;
1436     TCGv tmp;
1437 
1438     cond = (insn >> 8) & 0xf;
1439     gen_cc_cond(&c, s, cond);
1440 
1441     tmp = tcg_temp_new();
1442     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
1443 
1444     tcg_gen_neg_i32(tmp, tmp);
1445     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
1446 }
1447 
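/*
 * DBcc Dn,<disp>: if the condition holds, fall through to the next insn;
 * otherwise decrement the low word of Dn and branch back unless the counter
 * wraps to -1.
 */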
1448 DISAS_INSN(dbcc)
1449 {
1450     TCGLabel *l1;
1451     TCGv reg;
1452     TCGv tmp;
1453     int16_t offset;
1454     uint32_t base;
1455 
1456     reg = DREG(insn, 0);
1457     base = s->pc;
1458     offset = (int16_t)read_im16(env, s);
1459     l1 = gen_new_label();
1460     gen_jmpcc(s, (insn >> 8) & 0xf, l1);
1461 
1462     tmp = tcg_temp_new();
1463     tcg_gen_ext16s_i32(tmp, reg);
1464     tcg_gen_addi_i32(tmp, tmp, -1);
1465     gen_partset_reg(OS_WORD, reg, tmp);
1466     tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, -1, l1);
1467     gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
1468     gen_set_label(l1);
1469     gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
1470 }
1471 
1472 DISAS_INSN(undef_mac)
1473 {
1474     gen_exception(s, s->base.pc_next, EXCP_LINEA);
1475 }
1476 
1477 DISAS_INSN(undef_fpu)
1478 {
1479     gen_exception(s, s->base.pc_next, EXCP_LINEF);
1480 }
1481 
1482 DISAS_INSN(undef)
1483 {
1484     /*
1485      * ??? This covers both instructions that are as yet unimplemented
1486      * for the 680x0 series, and those that are implemented but are
1487      * actually illegal for CPU32 or pre-68020.
1488      */
1489     qemu_log_mask(LOG_UNIMP, "Illegal instruction: %04x @ %08x\n",
1490                   insn, s->base.pc_next);
1491     gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
1492 }
1493 
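/* mulX.w <EA>,Dn    16*16 -> 32; bit 8 of the opcode selects muls vs mulu */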
1494 DISAS_INSN(mulw)
1495 {
1496     TCGv reg;
1497     TCGv tmp;
1498     TCGv src;
1499     int sign;
1500 
1501     sign = (insn & 0x100) != 0;
1502     reg = DREG(insn, 9);
1503     tmp = tcg_temp_new();
1504     if (sign)
1505         tcg_gen_ext16s_i32(tmp, reg);
1506     else
1507         tcg_gen_ext16u_i32(tmp, reg);
1508     SRC_EA(env, src, OS_WORD, sign, NULL);
1509     tcg_gen_mul_i32(tmp, tmp, src);
1510     tcg_gen_mov_i32(reg, tmp);
1511     gen_logic_cc(s, tmp, OS_LONG);
1512 }
1513 
1514 DISAS_INSN(divw)
1515 {
1516     int sign;
1517     TCGv src;
1518     TCGv destr;
1519     TCGv ilen;
1520 
1521     /* divX.w <EA>,Dn    32/16 -> 16r:16q */
1522 
1523     sign = (insn & 0x100) != 0;
1524 
1525     /* dest.l / src.w */
1526 
1527     SRC_EA(env, src, OS_WORD, sign, NULL);
1528     destr = tcg_constant_i32(REG(insn, 9));
1529     ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1530     if (sign) {
1531         gen_helper_divsw(cpu_env, destr, src, ilen);
1532     } else {
1533         gen_helper_divuw(cpu_env, destr, src, ilen);
1534     }
1535 
1536     set_cc_op(s, CC_OP_FLAGS);
1537 }
1538 
1539 DISAS_INSN(divl)
1540 {
1541     TCGv num, reg, den, ilen;
1542     int sign;
1543     uint16_t ext;
1544 
1545     ext = read_im16(env, s);
1546 
1547     sign = (ext & 0x0800) != 0;
1548 
1549     if (ext & 0x400) {
1550         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
1551             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
1552             return;
1553         }
1554 
1555         /* divX.l <EA>, Dr:Dq    64/32 -> 32r:32q */
1556 
1557         SRC_EA(env, den, OS_LONG, 0, NULL);
1558         num = tcg_constant_i32(REG(ext, 12));
1559         reg = tcg_constant_i32(REG(ext, 0));
1560         ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1561         if (sign) {
1562             gen_helper_divsll(cpu_env, num, reg, den, ilen);
1563         } else {
1564             gen_helper_divull(cpu_env, num, reg, den, ilen);
1565         }
1566         set_cc_op(s, CC_OP_FLAGS);
1567         return;
1568     }
1569 
1570     /* divX.l <EA>, Dq        32/32 -> 32q     */
1571     /* divXl.l <EA>, Dr:Dq    32/32 -> 32r:32q */
1572 
1573     SRC_EA(env, den, OS_LONG, 0, NULL);
1574     num = tcg_constant_i32(REG(ext, 12));
1575     reg = tcg_constant_i32(REG(ext, 0));
1576     ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1577     if (sign) {
1578         gen_helper_divsl(cpu_env, num, reg, den, ilen);
1579     } else {
1580         gen_helper_divul(cpu_env, num, reg, den, ilen);
1581     }
1582 
1583     set_cc_op(s, CC_OP_FLAGS);
1584 }
1585 
1586 static void bcd_add(TCGv dest, TCGv src)
1587 {
1588     TCGv t0, t1;
1589 
1590     /*
1591      * dest10 = dest10 + src10 + X
1592      *
1593      *        t1 = src
1594      *        t2 = t1 + 0x066
1595      *        t3 = t2 + dest + X
1596      *        t4 = t2 ^ dest
1597      *        t5 = t3 ^ t4
1598      *        t6 = ~t5 & 0x110
1599      *        t7 = (t6 >> 2) | (t6 >> 3)
1600      *        return t3 - t7
1601      */
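    /*
     * e.g. dest = 0x19, src = 0x03, X = 0: t1 = 0x82; the low digit
     * carried but the high one did not, so 0x60 is subtracted back,
     * giving 0x22.
     */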
1602 
1603     /*
1604      * t1 = (src + 0x066) + dest + X
1605      *    = result, possibly with an extra 0x6 in some digits
1606      */
1607 
1608     t0 = tcg_temp_new();
1609     tcg_gen_addi_i32(t0, src, 0x066);
1610 
1611     t1 = tcg_temp_new();
1612     tcg_gen_add_i32(t1, t0, dest);
1613     tcg_gen_add_i32(t1, t1, QREG_CC_X);
1614 
1615     /* we will remove the extra 0x6 from digits that did not produce a carry */
1616 
1617     /*
1618      * t0 = (src + 0x0066) ^ dest
1619      *    = t1 without carries
1620      */
1621 
1622     tcg_gen_xor_i32(t0, t0, dest);
1623 
1624     /*
1625      * extract the carries
1626      * t0 = t0 ^ t1
1627      *    = only the carries
1628      */
1629 
1630     tcg_gen_xor_i32(t0, t0, t1);
1631 
1632     /*
1633      * generate a 0x2/0x20 marker for each digit that did not carry,
1634      * then triple it to form the 0x6/0x60 to remove
1635      */
1636 
1637     tcg_gen_shri_i32(t0, t0, 3);
1638     tcg_gen_not_i32(t0, t0);
1639     tcg_gen_andi_i32(t0, t0, 0x22);
1640     tcg_gen_add_i32(dest, t0, t0);
1641     tcg_gen_add_i32(dest, dest, t0);
1642 
1643     /*
1644      * remove the extra 0x6
1645      * for digits that have not generated a carry
1646      */
1647 
1648     tcg_gen_sub_i32(dest, t1, dest);
1649 }
1650 
1651 static void bcd_sub(TCGv dest, TCGv src)
1652 {
1653     TCGv t0, t1, t2;
1654 
1655     /*
1656      *  dest10 = dest10 - src10 - X
1657      *         = bcd_add(dest + 1 - X, 0x199 - src)
1658      */
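    /*
     * e.g. dest = 0x42, src = 0x17, X = 0: t1 = 0x22b and the correction
     * is 0x06, so dest becomes 0x225 -> low byte 0x25 with bit 8 clear,
     * i.e. no borrow.
     */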
1659 
1660     /* t0 = 0x066 + (0x199 - src) */
1661 
1662     t0 = tcg_temp_new();
1663     tcg_gen_subfi_i32(t0, 0x1ff, src);
1664 
1665     /* t1 = t0 + dest + 1 - X */
1666 
1667     t1 = tcg_temp_new();
1668     tcg_gen_add_i32(t1, t0, dest);
1669     tcg_gen_addi_i32(t1, t1, 1);
1670     tcg_gen_sub_i32(t1, t1, QREG_CC_X);
1671 
1672     /* t2 = t0 ^ dest */
1673 
1674     t2 = tcg_temp_new();
1675     tcg_gen_xor_i32(t2, t0, dest);
1676 
1677     /* t0 = t1 ^ t2 */
1678 
1679     tcg_gen_xor_i32(t0, t1, t2);
1680 
1681     /*
1682      * t2 = ~t0 & 0x110
1683      * t0 = (t2 >> 2) | (t2 >> 3)
1684      *
1685      * to fit in 8-bit operands, this is rewritten as:
1686      *
1687      * t2 = ~(t0 >> 3) & 0x22
1688      * t0 = t2 + t2
1689      * t0 = t0 + t2
1690      */
1691 
1692     tcg_gen_shri_i32(t2, t0, 3);
1693     tcg_gen_not_i32(t2, t2);
1694     tcg_gen_andi_i32(t2, t2, 0x22);
1695     tcg_gen_add_i32(t0, t2, t2);
1696     tcg_gen_add_i32(t0, t0, t2);
1697 
1698     /* return t1 - t0 */
1699 
1700     tcg_gen_sub_i32(dest, t1, t0);
1701 }
1702 
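/*
 * C and X come from bit 8 of the raw result; Z is only ever cleared,
 * never set, so it stays sticky across a chain of BCD operations.
 */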
1703 static void bcd_flags(TCGv val)
1704 {
1705     tcg_gen_andi_i32(QREG_CC_C, val, 0x0ff);
1706     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_C);
1707 
1708     tcg_gen_extract_i32(QREG_CC_C, val, 8, 1);
1709 
1710     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
1711 }
1712 
1713 DISAS_INSN(abcd_reg)
1714 {
1715     TCGv src;
1716     TCGv dest;
1717 
1718     gen_flush_flags(s); /* !Z is sticky */
1719 
1720     src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
1721     dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
1722     bcd_add(dest, src);
1723     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1724 
1725     bcd_flags(dest);
1726 }
1727 
1728 DISAS_INSN(abcd_mem)
1729 {
1730     TCGv src, dest, addr;
1731 
1732     gen_flush_flags(s); /* !Z is sticky */
1733 
1734     /* Indirect pre-decrement load (mode 4) */
1735 
1736     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1737                       NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1738     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1739                        NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1740 
1741     bcd_add(dest, src);
1742 
1743     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1744                 EA_STORE, IS_USER(s));
1745 
1746     bcd_flags(dest);
1747 }
1748 
1749 DISAS_INSN(sbcd_reg)
1750 {
1751     TCGv src, dest;
1752 
1753     gen_flush_flags(s); /* !Z is sticky */
1754 
1755     src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
1756     dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
1757 
1758     bcd_sub(dest, src);
1759 
1760     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1761 
1762     bcd_flags(dest);
1763 }
1764 
1765 DISAS_INSN(sbcd_mem)
1766 {
1767     TCGv src, dest, addr;
1768 
1769     gen_flush_flags(s); /* !Z is sticky */
1770 
1771     /* Indirect pre-decrement load (mode 4) */
1772 
1773     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1774                       NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1775     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1776                        NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1777 
1778     bcd_sub(dest, src);
1779 
1780     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1781                 EA_STORE, IS_USER(s));
1782 
1783     bcd_flags(dest);
1784 }
1785 
1786 DISAS_INSN(nbcd)
1787 {
1788     TCGv src, dest;
1789     TCGv addr;
1790 
1791     gen_flush_flags(s); /* !Z is sticky */
1792 
1793     SRC_EA(env, src, OS_BYTE, 0, &addr);
1794 
1795     dest = tcg_temp_new();
1796     tcg_gen_movi_i32(dest, 0);
1797     bcd_sub(dest, src);
1798 
1799     DEST_EA(env, insn, OS_BYTE, dest, &addr);
1800 
1801     bcd_flags(dest);
1802 }
1803 
1804 DISAS_INSN(addsub)
1805 {
1806     TCGv reg;
1807     TCGv dest;
1808     TCGv src;
1809     TCGv tmp;
1810     TCGv addr;
1811     int add;
1812     int opsize;
1813 
1814     add = (insn & 0x4000) != 0;
1815     opsize = insn_opsize(insn);
1816     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
1817     dest = tcg_temp_new();
1818     if (insn & 0x100) {
1819         SRC_EA(env, tmp, opsize, 1, &addr);
1820         src = reg;
1821     } else {
1822         tmp = reg;
1823         SRC_EA(env, src, opsize, 1, NULL);
1824     }
1825     if (add) {
1826         tcg_gen_add_i32(dest, tmp, src);
1827         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, src);
1828         set_cc_op(s, CC_OP_ADDB + opsize);
1829     } else {
1830         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, tmp, src);
1831         tcg_gen_sub_i32(dest, tmp, src);
1832         set_cc_op(s, CC_OP_SUBB + opsize);
1833     }
1834     gen_update_cc_add(dest, src, opsize);
1835     if (insn & 0x100) {
1836         DEST_EA(env, insn, opsize, dest, &addr);
1837     } else {
1838         gen_partset_reg(opsize, DREG(insn, 9), dest);
1839     }
1840 }
1841 
1842 /* Reverse the order of the bits in REG.  */
1843 DISAS_INSN(bitrev)
1844 {
1845     TCGv reg;
1846     reg = DREG(insn, 0);
1847     gen_helper_bitrev(reg, reg);
1848 }
1849 
1850 DISAS_INSN(bitop_reg)
1851 {
1852     int opsize;
1853     int op;
1854     TCGv src1;
1855     TCGv src2;
1856     TCGv tmp;
1857     TCGv addr;
1858     TCGv dest;
1859 
1860     if ((insn & 0x38) != 0)
1861         opsize = OS_BYTE;
1862     else
1863         opsize = OS_LONG;
1864     op = (insn >> 6) & 3;
1865     SRC_EA(env, src1, opsize, 0, op ? &addr : NULL);
1866 
1867     gen_flush_flags(s);
1868     src2 = tcg_temp_new();
1869     if (opsize == OS_BYTE)
1870         tcg_gen_andi_i32(src2, DREG(insn, 9), 7);
1871     else
1872         tcg_gen_andi_i32(src2, DREG(insn, 9), 31);
1873 
1874     tmp = tcg_temp_new();
1875     tcg_gen_shl_i32(tmp, tcg_constant_i32(1), src2);
1876 
1877     tcg_gen_and_i32(QREG_CC_Z, src1, tmp);
1878 
1879     dest = tcg_temp_new();
1880     switch (op) {
1881     case 1: /* bchg */
1882         tcg_gen_xor_i32(dest, src1, tmp);
1883         break;
1884     case 2: /* bclr */
1885         tcg_gen_andc_i32(dest, src1, tmp);
1886         break;
1887     case 3: /* bset */
1888         tcg_gen_or_i32(dest, src1, tmp);
1889         break;
1890     default: /* btst */
1891         break;
1892     }
1893     if (op) {
1894         DEST_EA(env, insn, opsize, dest, &addr);
1895     }
1896 }
1897 
1898 DISAS_INSN(sats)
1899 {
1900     TCGv reg;
1901     reg = DREG(insn, 0);
1902     gen_flush_flags(s);
1903     gen_helper_sats(reg, reg, QREG_CC_V);
1904     gen_logic_cc(s, reg, OS_LONG);
1905 }
1906 
1907 static void gen_push(DisasContext *s, TCGv val)
1908 {
1909     TCGv tmp;
1910 
1911     tmp = tcg_temp_new();
1912     tcg_gen_subi_i32(tmp, QREG_SP, 4);
1913     gen_store(s, OS_LONG, tmp, val, IS_USER(s));
1914     tcg_gen_mov_i32(QREG_SP, tmp);
1915 }
1916 
1917 static TCGv mreg(int reg)
1918 {
1919     if (reg < 8) {
1920         /* Dx */
1921         return cpu_dregs[reg];
1922     }
1923     /* Ax */
1924     return cpu_aregs[reg & 7];
1925 }
1926 
1927 DISAS_INSN(movem)
1928 {
1929     TCGv addr, incr, tmp, r[16];
1930     int is_load = (insn & 0x0400) != 0;
1931     int opsize = (insn & 0x40) != 0 ? OS_LONG : OS_WORD;
1932     uint16_t mask = read_im16(env, s);
1933     int mode = extract32(insn, 3, 3);
1934     int reg0 = REG(insn, 0);
1935     int i;
1936 
1937     tmp = cpu_aregs[reg0];
1938 
1939     switch (mode) {
1940     case 0: /* data register direct */
1941     case 1: /* addr register direct */
1942     do_addr_fault:
1943         gen_addr_fault(s);
1944         return;
1945 
1946     case 2: /* indirect */
1947         break;
1948 
1949     case 3: /* indirect post-increment */
1950         if (!is_load) {
1951             /* post-increment is not allowed */
1952             goto do_addr_fault;
1953         }
1954         break;
1955 
1956     case 4: /* indirect pre-decrement */
1957         if (is_load) {
1958             /* pre-decrement is not allowed */
1959             goto do_addr_fault;
1960         }
1961         /*
1962          * We want a bare copy of the address reg, without any pre-decrement
1963          * adjustment, as gen_lea would provide.
1964          */
1965         break;
1966 
1967     default:
1968         tmp = gen_lea_mode(env, s, mode, reg0, opsize);
1969         if (IS_NULL_QREG(tmp)) {
1970             goto do_addr_fault;
1971         }
1972         break;
1973     }
1974 
1975     addr = tcg_temp_new();
1976     tcg_gen_mov_i32(addr, tmp);
1977     incr = tcg_constant_i32(opsize_bytes(opsize));
1978 
1979     if (is_load) {
1980         /* memory to register */
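             /*
              * Load every value into a temporary before writing any
              * register, so a fault during the loads leaves the register
              * file unchanged.
              */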
1981         for (i = 0; i < 16; i++) {
1982             if (mask & (1 << i)) {
1983                 r[i] = gen_load(s, opsize, addr, 1, IS_USER(s));
1984                 tcg_gen_add_i32(addr, addr, incr);
1985             }
1986         }
1987         for (i = 0; i < 16; i++) {
1988             if (mask & (1 << i)) {
1989                 tcg_gen_mov_i32(mreg(i), r[i]);
1990             }
1991         }
1992         if (mode == 3) {
1993             /* post-increment: movem (An)+,X */
1994             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
1995         }
1996     } else {
1997         /* register to memory */
1998         if (mode == 4) {
1999             /* pre-decrement: movem X,-(An) */
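                 /*
                  * In pre-decrement mode the register mask is bit-reversed
                  * (bit 0 = A7 ... bit 15 = D0), hence the test of
                  * (mask << i) & 0x8000 while walking i from 15 down to 0.
                  */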
2000             for (i = 15; i >= 0; i--) {
2001                 if ((mask << i) & 0x8000) {
2002                     tcg_gen_sub_i32(addr, addr, incr);
2003                     if (reg0 + 8 == i &&
2004                         m68k_feature(s->env, M68K_FEATURE_EXT_FULL)) {
2005                         /*
2006                          * M68020+: if the addressing register is the
2007                          * register moved to memory, the value written
2008                          * is the initial value decremented by the size of
2009                          * the operation, regardless of how many actual
2010                          * stores have been performed until this point.
2011                          * M68000/M68010: the value is the initial value.
2012                          */
2013                         tmp = tcg_temp_new();
2014                         tcg_gen_sub_i32(tmp, cpu_aregs[reg0], incr);
2015                         gen_store(s, opsize, addr, tmp, IS_USER(s));
2016                     } else {
2017                         gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2018                     }
2019                 }
2020             }
2021             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2022         } else {
2023             for (i = 0; i < 16; i++) {
2024                 if (mask & (1 << i)) {
2025                     gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2026                     tcg_gen_add_i32(addr, addr, incr);
2027                 }
2028             }
2029         }
2030     }
2031 }
2032 
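     /*
      * MOVEP transfers a word or long between a data register and
      * alternate memory bytes (every other byte starting at d16(An)),
      * most significant byte first.
      */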
2033 DISAS_INSN(movep)
2034 {
2035     uint8_t i;
2036     int16_t displ;
2037     TCGv reg;
2038     TCGv addr;
2039     TCGv abuf;
2040     TCGv dbuf;
2041 
2042     displ = read_im16(env, s);
2043 
2044     addr = AREG(insn, 0);
2045     reg = DREG(insn, 9);
2046 
2047     abuf = tcg_temp_new();
2048     tcg_gen_addi_i32(abuf, addr, displ);
2049     dbuf = tcg_temp_new();
2050 
2051     if (insn & 0x40) {
2052         i = 4;
2053     } else {
2054         i = 2;
2055     }
2056 
2057     if (insn & 0x80) {
2058         for ( ; i > 0 ; i--) {
2059             tcg_gen_shri_i32(dbuf, reg, (i - 1) * 8);
2060             tcg_gen_qemu_st_i32(dbuf, abuf, IS_USER(s), MO_UB);
2061             if (i > 1) {
2062                 tcg_gen_addi_i32(abuf, abuf, 2);
2063             }
2064         }
2065     } else {
2066         for ( ; i > 0 ; i--) {
2067             tcg_gen_qemu_ld_tl(dbuf, abuf, IS_USER(s), MO_UB);
2068             tcg_gen_deposit_i32(reg, reg, dbuf, (i - 1) * 8, 8);
2069             if (i > 1) {
2070                 tcg_gen_addi_i32(abuf, abuf, 2);
2071             }
2072         }
2073     }
2074 }
2075 
2076 DISAS_INSN(bitop_im)
2077 {
2078     int opsize;
2079     int op;
2080     TCGv src1;
2081     uint32_t mask;
2082     int bitnum;
2083     TCGv tmp;
2084     TCGv addr;
2085 
2086     if ((insn & 0x38) != 0)
2087         opsize = OS_BYTE;
2088     else
2089         opsize = OS_LONG;
2090     op = (insn >> 6) & 3;
2091 
2092     bitnum = read_im16(env, s);
2093     if (m68k_feature(s->env, M68K_FEATURE_M68K)) {
2094         if (bitnum & 0xfe00) {
2095             disas_undef(env, s, insn);
2096             return;
2097         }
2098     } else {
2099         if (bitnum & 0xff00) {
2100             disas_undef(env, s, insn);
2101             return;
2102         }
2103     }
2104 
2105     SRC_EA(env, src1, opsize, 0, op ? &addr : NULL);
2106 
2107     gen_flush_flags(s);
2108     if (opsize == OS_BYTE)
2109         bitnum &= 7;
2110     else
2111         bitnum &= 31;
2112     mask = 1 << bitnum;
2113 
2114     tcg_gen_andi_i32(QREG_CC_Z, src1, mask);
2115 
2116     if (op) {
2117         tmp = tcg_temp_new();
2118         switch (op) {
2119         case 1: /* bchg */
2120             tcg_gen_xori_i32(tmp, src1, mask);
2121             break;
2122         case 2: /* bclr */
2123             tcg_gen_andi_i32(tmp, src1, ~mask);
2124             break;
2125         case 3: /* bset */
2126             tcg_gen_ori_i32(tmp, src1, mask);
2127             break;
2128         default: /* btst */
2129             break;
2130         }
2131         DEST_EA(env, insn, opsize, tmp, &addr);
2132     }
2133 }
2134 
2135 static TCGv gen_get_ccr(DisasContext *s)
2136 {
2137     TCGv dest;
2138 
2139     update_cc_op(s);
2140     dest = tcg_temp_new();
2141     gen_helper_get_ccr(dest, cpu_env);
2142     return dest;
2143 }
2144 
2145 static TCGv gen_get_sr(DisasContext *s)
2146 {
2147     TCGv ccr;
2148     TCGv sr;
2149 
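         /* Combine the privileged bits of SR with the lazily computed CCR. */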
2150     ccr = gen_get_ccr(s);
2151     sr = tcg_temp_new();
2152     tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
2153     tcg_gen_or_i32(sr, sr, ccr);
2154     return sr;
2155 }
2156 
2157 static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
2158 {
2159     if (ccr_only) {
2160         tcg_gen_movi_i32(QREG_CC_C, val & CCF_C ? 1 : 0);
2161         tcg_gen_movi_i32(QREG_CC_V, val & CCF_V ? -1 : 0);
2162         tcg_gen_movi_i32(QREG_CC_Z, val & CCF_Z ? 0 : 1);
2163         tcg_gen_movi_i32(QREG_CC_N, val & CCF_N ? -1 : 0);
2164         tcg_gen_movi_i32(QREG_CC_X, val & CCF_X ? 1 : 0);
2165     } else {
2166         /* Must writeback before changing security state. */
2167         do_writebacks(s);
2168         gen_helper_set_sr(cpu_env, tcg_constant_i32(val));
2169     }
2170     set_cc_op(s, CC_OP_FLAGS);
2171 }
2172 
2173 static void gen_set_sr(DisasContext *s, TCGv val, int ccr_only)
2174 {
2175     if (ccr_only) {
2176         gen_helper_set_ccr(cpu_env, val);
2177     } else {
2178         /* Must writeback before changing security state. */
2179         do_writebacks(s);
2180         gen_helper_set_sr(cpu_env, val);
2181     }
2182     set_cc_op(s, CC_OP_FLAGS);
2183 }
2184 
2185 static void gen_move_to_sr(CPUM68KState *env, DisasContext *s, uint16_t insn,
2186                            bool ccr_only)
2187 {
2188     if ((insn & 0x3f) == 0x3c) {
2189         uint16_t val;
2190         val = read_im16(env, s);
2191         gen_set_sr_im(s, val, ccr_only);
2192     } else {
2193         TCGv src;
2194         SRC_EA(env, src, OS_WORD, 0, NULL);
2195         gen_set_sr(s, src, ccr_only);
2196     }
2197 }
2198 
2199 DISAS_INSN(arith_im)
2200 {
2201     int op;
2202     TCGv im;
2203     TCGv src1;
2204     TCGv dest;
2205     TCGv addr;
2206     int opsize;
2207     bool with_SR = ((insn & 0x3f) == 0x3c);
2208 
2209     op = (insn >> 9) & 7;
2210     opsize = insn_opsize(insn);
2211     switch (opsize) {
2212     case OS_BYTE:
2213         im = tcg_constant_i32((int8_t)read_im8(env, s));
2214         break;
2215     case OS_WORD:
2216         im = tcg_constant_i32((int16_t)read_im16(env, s));
2217         break;
2218     case OS_LONG:
2219         im = tcg_constant_i32(read_im32(env, s));
2220         break;
2221     default:
2222         g_assert_not_reached();
2223     }
2224 
2225     if (with_SR) {
2226         /* SR/CCR can only be used with andi/eori/ori */
2227         if (op == 2 || op == 3 || op == 6) {
2228             disas_undef(env, s, insn);
2229             return;
2230         }
2231         switch (opsize) {
2232         case OS_BYTE:
2233             src1 = gen_get_ccr(s);
2234             break;
2235         case OS_WORD:
2236             if (IS_USER(s)) {
2237                 gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2238                 return;
2239             }
2240             src1 = gen_get_sr(s);
2241             break;
2242         default:
2243             /* OS_LONG; others already g_assert_not_reached.  */
2244             disas_undef(env, s, insn);
2245             return;
2246         }
2247     } else {
2248         SRC_EA(env, src1, opsize, 1, (op == 6) ? NULL : &addr);
2249     }
2250     dest = tcg_temp_new();
2251     switch (op) {
2252     case 0: /* ori */
2253         tcg_gen_or_i32(dest, src1, im);
2254         if (with_SR) {
2255             gen_set_sr(s, dest, opsize == OS_BYTE);
2256             gen_exit_tb(s);
2257         } else {
2258             DEST_EA(env, insn, opsize, dest, &addr);
2259             gen_logic_cc(s, dest, opsize);
2260         }
2261         break;
2262     case 1: /* andi */
2263         tcg_gen_and_i32(dest, src1, im);
2264         if (with_SR) {
2265             gen_set_sr(s, dest, opsize == OS_BYTE);
2266             gen_exit_tb(s);
2267         } else {
2268             DEST_EA(env, insn, opsize, dest, &addr);
2269             gen_logic_cc(s, dest, opsize);
2270         }
2271         break;
2272     case 2: /* subi */
2273         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, src1, im);
2274         tcg_gen_sub_i32(dest, src1, im);
2275         gen_update_cc_add(dest, im, opsize);
2276         set_cc_op(s, CC_OP_SUBB + opsize);
2277         DEST_EA(env, insn, opsize, dest, &addr);
2278         break;
2279     case 3: /* addi */
2280         tcg_gen_add_i32(dest, src1, im);
2281         gen_update_cc_add(dest, im, opsize);
2282         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, im);
2283         set_cc_op(s, CC_OP_ADDB + opsize);
2284         DEST_EA(env, insn, opsize, dest, &addr);
2285         break;
2286     case 5: /* eori */
2287         tcg_gen_xor_i32(dest, src1, im);
2288         if (with_SR) {
2289             gen_set_sr(s, dest, opsize == OS_BYTE);
2290             gen_exit_tb(s);
2291         } else {
2292             DEST_EA(env, insn, opsize, dest, &addr);
2293             gen_logic_cc(s, dest, opsize);
2294         }
2295         break;
2296     case 6: /* cmpi */
2297         gen_update_cc_cmp(s, src1, im, opsize);
2298         break;
2299     default:
2300         abort();
2301     }
2302 }
2303 
2304 DISAS_INSN(cas)
2305 {
2306     int opsize;
2307     TCGv addr;
2308     uint16_t ext;
2309     TCGv load;
2310     TCGv cmp;
2311     MemOp opc;
2312 
2313     switch ((insn >> 9) & 3) {
2314     case 1:
2315         opsize = OS_BYTE;
2316         opc = MO_SB;
2317         break;
2318     case 2:
2319         opsize = OS_WORD;
2320         opc = MO_TESW;
2321         break;
2322     case 3:
2323         opsize = OS_LONG;
2324         opc = MO_TESL;
2325         break;
2326     default:
2327         g_assert_not_reached();
2328     }
2329 
2330     ext = read_im16(env, s);
2331 
2332     /* cas Dc,Du,<EA> */
2333 
2334     addr = gen_lea(env, s, insn, opsize);
2335     if (IS_NULL_QREG(addr)) {
2336         gen_addr_fault(s);
2337         return;
2338     }
2339 
2340     cmp = gen_extend(s, DREG(ext, 0), opsize, 1);
2341 
2342     /*
2343      * if  <EA> == Dc then
2344      *     <EA> = Du
2345      *     Dc = <EA> (because <EA> == Dc)
2346      * else
2347      *     Dc = <EA>
2348      */
2349 
2350     load = tcg_temp_new();
2351     tcg_gen_atomic_cmpxchg_i32(load, addr, cmp, DREG(ext, 6),
2352                                IS_USER(s), opc);
2353     /* update flags before setting cmp to load */
2354     gen_update_cc_cmp(s, load, cmp, opsize);
2355     gen_partset_reg(opsize, DREG(ext, 0), load);
2356 
2357     switch (extract32(insn, 3, 3)) {
2358     case 3: /* Indirect postincrement.  */
2359         tcg_gen_addi_i32(AREG(insn, 0), addr, opsize_bytes(opsize));
2360         break;
2361     case 4: /* Indirect predecrement.  */
2362         tcg_gen_mov_i32(AREG(insn, 0), addr);
2363         break;
2364     }
2365 }
2366 
2367 DISAS_INSN(cas2w)
2368 {
2369     uint16_t ext1, ext2;
2370     TCGv addr1, addr2;
2371 
2372     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2373 
2374     ext1 = read_im16(env, s);
2375 
2376     if (ext1 & 0x8000) {
2377         /* Address Register */
2378         addr1 = AREG(ext1, 12);
2379     } else {
2380         /* Data Register */
2381         addr1 = DREG(ext1, 12);
2382     }
2383 
2384     ext2 = read_im16(env, s);
2385     if (ext2 & 0x8000) {
2386         /* Address Register */
2387         addr2 = AREG(ext2, 12);
2388     } else {
2389         /* Data Register */
2390         addr2 = DREG(ext2, 12);
2391     }
2392 
2393     /*
2394      * if (R1) == Dc1 && (R2) == Dc2 then
2395      *     (R1) = Du1
2396      *     (R2) = Du2
2397      * else
2398      *     Dc1 = (R1)
2399      *     Dc2 = (R2)
2400      */
2401 
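         /*
          * There is no parallel-safe helper for the word-sized double CAS,
          * so with CF_PARALLEL we bail out and let the instruction be
          * re-executed exclusively.
          */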
2402     if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2403         gen_helper_exit_atomic(cpu_env);
2404     } else {
2405         TCGv regs = tcg_constant_i32(REG(ext2, 6) |
2406                                      (REG(ext1, 6) << 3) |
2407                                      (REG(ext2, 0) << 6) |
2408                                      (REG(ext1, 0) << 9));
2409         gen_helper_cas2w(cpu_env, regs, addr1, addr2);
2410     }
2411 
2412     /* Note that the cas2w helper also assigns to env->cc_op.  */
2413     s->cc_op = CC_OP_CMPW;
2414     s->cc_op_synced = 1;
2415 }
2416 
2417 DISAS_INSN(cas2l)
2418 {
2419     uint16_t ext1, ext2;
2420     TCGv addr1, addr2, regs;
2421 
2422     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2423 
2424     ext1 = read_im16(env, s);
2425 
2426     if (ext1 & 0x8000) {
2427         /* Address Register */
2428         addr1 = AREG(ext1, 12);
2429     } else {
2430         /* Data Register */
2431         addr1 = DREG(ext1, 12);
2432     }
2433 
2434     ext2 = read_im16(env, s);
2435     if (ext2 & 0x8000) {
2436         /* Address Register */
2437         addr2 = AREG(ext2, 12);
2438     } else {
2439         /* Data Register */
2440         addr2 = DREG(ext2, 12);
2441     }
2442 
2443     /*
2444      * if (R1) == Dc1 && (R2) == Dc2 then
2445      *     (R1) = Du1
2446      *     (R2) = Du2
2447      * else
2448      *     Dc1 = (R1)
2449      *     Dc2 = (R2)
2450      */
2451 
2452     regs = tcg_constant_i32(REG(ext2, 6) |
2453                             (REG(ext1, 6) << 3) |
2454                             (REG(ext2, 0) << 6) |
2455                             (REG(ext1, 0) << 9));
2456     if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2457         gen_helper_cas2l_parallel(cpu_env, regs, addr1, addr2);
2458     } else {
2459         gen_helper_cas2l(cpu_env, regs, addr1, addr2);
2460     }
2461 
2462     /* Note that the cas2l helpers also assign to env->cc_op.  */
2463     s->cc_op = CC_OP_CMPL;
2464     s->cc_op_synced = 1;
2465 }
2466 
2467 DISAS_INSN(byterev)
2468 {
2469     TCGv reg;
2470 
2471     reg = DREG(insn, 0);
2472     tcg_gen_bswap32_i32(reg, reg);
2473 }
2474 
2475 DISAS_INSN(move)
2476 {
2477     TCGv src;
2478     TCGv dest;
2479     int op;
2480     int opsize;
2481 
2482     switch (insn >> 12) {
2483     case 1: /* move.b */
2484         opsize = OS_BYTE;
2485         break;
2486     case 2: /* move.l */
2487         opsize = OS_LONG;
2488         break;
2489     case 3: /* move.w */
2490         opsize = OS_WORD;
2491         break;
2492     default:
2493         abort();
2494     }
2495     SRC_EA(env, src, opsize, 1, NULL);
2496     op = (insn >> 6) & 7;
2497     if (op == 1) {
2498         /* movea */
2499         /* The value will already have been sign extended.  */
2500         dest = AREG(insn, 9);
2501         tcg_gen_mov_i32(dest, src);
2502     } else {
2503         /* normal move */
2504         uint16_t dest_ea;
2505         dest_ea = ((insn >> 9) & 7) | (op << 3);
2506         DEST_EA(env, dest_ea, opsize, src, NULL);
2507         /* This will be correct because loads sign extend.  */
2508         gen_logic_cc(s, src, opsize);
2509     }
2510 }
2511 
2512 DISAS_INSN(negx)
2513 {
2514     TCGv z;
2515     TCGv src;
2516     TCGv addr;
2517     int opsize;
2518 
2519     opsize = insn_opsize(insn);
2520     SRC_EA(env, src, opsize, 1, &addr);
2521 
2522     gen_flush_flags(s); /* compute old Z */
2523 
2524     /*
2525      * Perform subtract with borrow.
2526      * (X, N) =  -(src + X);
2527      */
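         /*
          * Two double-word operations: first N:X = src + X, then
          * N:X = 0 - N:X, which leaves the borrow in X (masked to
          * bit 0 below).
          */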
2528 
2529     z = tcg_constant_i32(0);
2530     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, z, QREG_CC_X, z);
2531     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, z, z, QREG_CC_N, QREG_CC_X);
2532     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
2533 
2534     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
2535 
2536     /*
2537      * Compute signed-overflow for negation.  The normal formula for
2538      * subtraction is (res ^ src) & (src ^ dest), but with dest==0
2539      * this simplifies to res & src.
2540      */
2541 
2542     tcg_gen_and_i32(QREG_CC_V, QREG_CC_N, src);
2543 
2544     /* Copy the rest of the results into place.  */
2545     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
2546     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
2547 
2548     set_cc_op(s, CC_OP_FLAGS);
2549 
2550     /* result is in QREG_CC_N */
2551 
2552     DEST_EA(env, insn, opsize, QREG_CC_N, &addr);
2553 }
2554 
2555 DISAS_INSN(lea)
2556 {
2557     TCGv reg;
2558     TCGv tmp;
2559 
2560     reg = AREG(insn, 9);
2561     tmp = gen_lea(env, s, insn, OS_LONG);
2562     if (IS_NULL_QREG(tmp)) {
2563         gen_addr_fault(s);
2564         return;
2565     }
2566     tcg_gen_mov_i32(reg, tmp);
2567 }
2568 
2569 DISAS_INSN(clr)
2570 {
2571     int opsize;
2572     TCGv zero;
2573 
2574     zero = tcg_constant_i32(0);
2575     opsize = insn_opsize(insn);
2576     DEST_EA(env, insn, opsize, zero, NULL);
2577     gen_logic_cc(s, zero, opsize);
2578 }
2579 
2580 DISAS_INSN(move_from_ccr)
2581 {
2582     TCGv ccr;
2583 
2584     ccr = gen_get_ccr(s);
2585     DEST_EA(env, insn, OS_WORD, ccr, NULL);
2586 }
2587 
2588 DISAS_INSN(neg)
2589 {
2590     TCGv src1;
2591     TCGv dest;
2592     TCGv addr;
2593     int opsize;
2594 
2595     opsize = insn_opsize(insn);
2596     SRC_EA(env, src1, opsize, 1, &addr);
2597     dest = tcg_temp_new();
2598     tcg_gen_neg_i32(dest, src1);
2599     set_cc_op(s, CC_OP_SUBB + opsize);
2600     gen_update_cc_add(dest, src1, opsize);
2601     tcg_gen_setcondi_i32(TCG_COND_NE, QREG_CC_X, dest, 0);
2602     DEST_EA(env, insn, opsize, dest, &addr);
2603 }
2604 
2605 DISAS_INSN(move_to_ccr)
2606 {
2607     gen_move_to_sr(env, s, insn, true);
2608 }
2609 
2610 DISAS_INSN(not)
2611 {
2612     TCGv src1;
2613     TCGv dest;
2614     TCGv addr;
2615     int opsize;
2616 
2617     opsize = insn_opsize(insn);
2618     SRC_EA(env, src1, opsize, 1, &addr);
2619     dest = tcg_temp_new();
2620     tcg_gen_not_i32(dest, src1);
2621     DEST_EA(env, insn, opsize, dest, &addr);
2622     gen_logic_cc(s, dest, opsize);
2623 }
2624 
2625 DISAS_INSN(swap)
2626 {
2627     TCGv src1;
2628     TCGv src2;
2629     TCGv reg;
2630 
2631     src1 = tcg_temp_new();
2632     src2 = tcg_temp_new();
2633     reg = DREG(insn, 0);
2634     tcg_gen_shli_i32(src1, reg, 16);
2635     tcg_gen_shri_i32(src2, reg, 16);
2636     tcg_gen_or_i32(reg, src1, src2);
2637     gen_logic_cc(s, reg, OS_LONG);
2638 }
2639 
2640 DISAS_INSN(bkpt)
2641 {
2642 #if defined(CONFIG_SOFTMMU)
2643     gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2644 #else
2645     gen_exception(s, s->base.pc_next, EXCP_DEBUG);
2646 #endif
2647 }
2648 
2649 DISAS_INSN(pea)
2650 {
2651     TCGv tmp;
2652 
2653     tmp = gen_lea(env, s, insn, OS_LONG);
2654     if (IS_NULL_QREG(tmp)) {
2655         gen_addr_fault(s);
2656         return;
2657     }
2658     gen_push(s, tmp);
2659 }
2660 
2661 DISAS_INSN(ext)
2662 {
2663     int op;
2664     TCGv reg;
2665     TCGv tmp;
2666 
2667     reg = DREG(insn, 0);
2668     op = (insn >> 6) & 7;
2669     tmp = tcg_temp_new();
2670     if (op == 3)
2671         tcg_gen_ext16s_i32(tmp, reg);
2672     else
2673         tcg_gen_ext8s_i32(tmp, reg);
2674     if (op == 2)
2675         gen_partset_reg(OS_WORD, reg, tmp);
2676     else
2677         tcg_gen_mov_i32(reg, tmp);
2678     gen_logic_cc(s, tmp, OS_LONG);
2679 }
2680 
2681 DISAS_INSN(tst)
2682 {
2683     int opsize;
2684     TCGv tmp;
2685 
2686     opsize = insn_opsize(insn);
2687     SRC_EA(env, tmp, opsize, 1, NULL);
2688     gen_logic_cc(s, tmp, opsize);
2689 }
2690 
2691 DISAS_INSN(pulse)
2692 {
2693     /* Implemented as a NOP.  */
2694 }
2695 
2696 DISAS_INSN(illegal)
2697 {
2698     gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2699 }
2700 
2701 DISAS_INSN(tas)
2702 {
2703     int mode = extract32(insn, 3, 3);
2704     int reg0 = REG(insn, 0);
2705 
2706     if (mode == 0) {
2707         /* data register direct */
2708         TCGv dest = cpu_dregs[reg0];
2709         gen_logic_cc(s, dest, OS_BYTE);
2710         tcg_gen_ori_tl(dest, dest, 0x80);
2711     } else {
2712         TCGv src1, addr;
2713 
2714         addr = gen_lea_mode(env, s, mode, reg0, OS_BYTE);
2715         if (IS_NULL_QREG(addr)) {
2716             gen_addr_fault(s);
2717             return;
2718         }
2719         src1 = tcg_temp_new();
2720         tcg_gen_atomic_fetch_or_tl(src1, addr, tcg_constant_tl(0x80),
2721                                    IS_USER(s), MO_SB);
2722         gen_logic_cc(s, src1, OS_BYTE);
2723 
2724         switch (mode) {
2725         case 3: /* Indirect postincrement.  */
2726             tcg_gen_addi_i32(AREG(insn, 0), addr, 1);
2727             break;
2728         case 4: /* Indirect predecrement.  */
2729             tcg_gen_mov_i32(AREG(insn, 0), addr);
2730             break;
2731         }
2732     }
2733 }
2734 
2735 DISAS_INSN(mull)
2736 {
2737     uint16_t ext;
2738     TCGv src1;
2739     int sign;
2740 
2741     ext = read_im16(env, s);
2742 
2743     sign = ext & 0x800;
2744 
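         /*
          * Bit 11 of the extension word selects a signed multiply; bit 10
          * requests a 64-bit Dh:Dl result, which needs the QUAD_MULDIV
          * feature.
          */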
2745     if (ext & 0x400) {
2746         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
2747             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2748             return;
2749         }
2750 
2751         SRC_EA(env, src1, OS_LONG, 0, NULL);
2752 
2753         if (sign) {
2754             tcg_gen_muls2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2755         } else {
2756             tcg_gen_mulu2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2757         }
2758         /* if Dl == Dh, 68040 returns low word */
2759         tcg_gen_mov_i32(DREG(ext, 0), QREG_CC_N);
2760         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_Z);
2761         tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N);
2762 
2763         tcg_gen_movi_i32(QREG_CC_V, 0);
2764         tcg_gen_movi_i32(QREG_CC_C, 0);
2765 
2766         set_cc_op(s, CC_OP_FLAGS);
2767         return;
2768     }
2769     SRC_EA(env, src1, OS_LONG, 0, NULL);
2770     if (m68k_feature(s->env, M68K_FEATURE_M68K)) {
2771         tcg_gen_movi_i32(QREG_CC_C, 0);
2772         if (sign) {
2773             tcg_gen_muls2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2774             /* QREG_CC_V is -(QREG_CC_V != (QREG_CC_N >> 31)) */
2775             tcg_gen_sari_i32(QREG_CC_Z, QREG_CC_N, 31);
2776             tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_Z);
2777         } else {
2778             tcg_gen_mulu2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2779             /* QREG_CC_V is -(QREG_CC_V != 0), use QREG_CC_C as 0 */
2780             tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_C);
2781         }
2782         tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
2783         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_N);
2784 
2785         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
2786 
2787         set_cc_op(s, CC_OP_FLAGS);
2788     } else {
2789         /*
2790          * The upper 32 bits of the product are discarded, so
2791          * muls.l and mulu.l are functionally equivalent.
2792          */
2793         tcg_gen_mul_i32(DREG(ext, 12), src1, DREG(ext, 12));
2794         gen_logic_cc(s, DREG(ext, 12), OS_LONG);
2795     }
2796 }
2797 
2798 static void gen_link(DisasContext *s, uint16_t insn, int32_t offset)
2799 {
2800     TCGv reg;
2801     TCGv tmp;
2802 
2803     reg = AREG(insn, 0);
2804     tmp = tcg_temp_new();
2805     tcg_gen_subi_i32(tmp, QREG_SP, 4);
2806     gen_store(s, OS_LONG, tmp, reg, IS_USER(s));
2807     if ((insn & 7) != 7) {
2808         tcg_gen_mov_i32(reg, tmp);
2809     }
2810     tcg_gen_addi_i32(QREG_SP, tmp, offset);
2811 }
2812 
2813 DISAS_INSN(link)
2814 {
2815     int16_t offset;
2816 
2817     offset = read_im16(env, s);
2818     gen_link(s, insn, offset);
2819 }
2820 
2821 DISAS_INSN(linkl)
2822 {
2823     int32_t offset;
2824 
2825     offset = read_im32(env, s);
2826     gen_link(s, insn, offset);
2827 }
2828 
2829 DISAS_INSN(unlk)
2830 {
2831     TCGv src;
2832     TCGv reg;
2833     TCGv tmp;
2834 
2835     src = tcg_temp_new();
2836     reg = AREG(insn, 0);
2837     tcg_gen_mov_i32(src, reg);
2838     tmp = gen_load(s, OS_LONG, src, 0, IS_USER(s));
2839     tcg_gen_mov_i32(reg, tmp);
2840     tcg_gen_addi_i32(QREG_SP, src, 4);
2841 }
2842 
2843 #if defined(CONFIG_SOFTMMU)
2844 DISAS_INSN(reset)
2845 {
2846     if (IS_USER(s)) {
2847         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2848         return;
2849     }
2850 
2851     gen_helper_reset(cpu_env);
2852 }
2853 #endif
2854 
2855 DISAS_INSN(nop)
2856 {
2857 }
2858 
2859 DISAS_INSN(rtd)
2860 {
2861     TCGv tmp;
2862     int16_t offset = read_im16(env, s);
2863 
2864     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2865     tcg_gen_addi_i32(QREG_SP, QREG_SP, offset + 4);
2866     gen_jmp(s, tmp);
2867 }
2868 
2869 DISAS_INSN(rtr)
2870 {
2871     TCGv tmp;
2872     TCGv ccr;
2873     TCGv sp;
2874 
2875     sp = tcg_temp_new();
2876     ccr = gen_load(s, OS_WORD, QREG_SP, 0, IS_USER(s));
2877     tcg_gen_addi_i32(sp, QREG_SP, 2);
2878     tmp = gen_load(s, OS_LONG, sp, 0, IS_USER(s));
2879     tcg_gen_addi_i32(QREG_SP, sp, 4);
2880 
2881     gen_set_sr(s, ccr, true);
2882 
2883     gen_jmp(s, tmp);
2884 }
2885 
2886 DISAS_INSN(rts)
2887 {
2888     TCGv tmp;
2889 
2890     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2891     tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
2892     gen_jmp(s, tmp);
2893 }
2894 
2895 DISAS_INSN(jump)
2896 {
2897     TCGv tmp;
2898 
2899     /*
2900      * Load the target address first to ensure correct exception
2901      * behavior.
2902      */
2903     tmp = gen_lea(env, s, insn, OS_LONG);
2904     if (IS_NULL_QREG(tmp)) {
2905         gen_addr_fault(s);
2906         return;
2907     }
2908     if ((insn & 0x40) == 0) {
2909         /* jsr */
2910         gen_push(s, tcg_constant_i32(s->pc));
2911     }
2912     gen_jmp(s, tmp);
2913 }
2914 
2915 DISAS_INSN(addsubq)
2916 {
2917     TCGv src;
2918     TCGv dest;
2919     TCGv val;
2920     int imm;
2921     TCGv addr;
2922     int opsize;
2923 
2924     if ((insn & 070) == 010) {
2925         /* Operation on address register is always long.  */
2926         opsize = OS_LONG;
2927     } else {
2928         opsize = insn_opsize(insn);
2929     }
2930     SRC_EA(env, src, opsize, 1, &addr);
2931     imm = (insn >> 9) & 7;
2932     if (imm == 0) {
2933         imm = 8;
2934     }
2935     val = tcg_constant_i32(imm);
2936     dest = tcg_temp_new();
2937     tcg_gen_mov_i32(dest, src);
2938     if ((insn & 0x38) == 0x08) {
2939         /*
2940          * Don't update condition codes if the destination is an
2941          * address register.
2942          */
2943         if (insn & 0x0100) {
2944             tcg_gen_sub_i32(dest, dest, val);
2945         } else {
2946             tcg_gen_add_i32(dest, dest, val);
2947         }
2948     } else {
2949         if (insn & 0x0100) {
2950             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
2951             tcg_gen_sub_i32(dest, dest, val);
2952             set_cc_op(s, CC_OP_SUBB + opsize);
2953         } else {
2954             tcg_gen_add_i32(dest, dest, val);
2955             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
2956             set_cc_op(s, CC_OP_ADDB + opsize);
2957         }
2958         gen_update_cc_add(dest, val, opsize);
2959     }
2960     DEST_EA(env, insn, opsize, dest, &addr);
2961 }
2962 
2963 DISAS_INSN(branch)
2964 {
2965     int32_t offset;
2966     uint32_t base;
2967     int op;
2968 
2969     base = s->pc;
2970     op = (insn >> 8) & 0xf;
2971     offset = (int8_t)insn;
2972     if (offset == 0) {
2973         offset = (int16_t)read_im16(env, s);
2974     } else if (offset == -1) {
2975         offset = read_im32(env, s);
2976     }
2977     if (op == 1) {
2978         /* bsr */
2979         gen_push(s, tcg_constant_i32(s->pc));
2980     }
2981     if (op > 1) {
2982         /* Bcc */
2983         TCGLabel *l1 = gen_new_label();
2984         gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
2985         gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
2986         gen_set_label(l1);
2987         gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
2988     } else {
2989         /* Unconditional branch.  */
2990         update_cc_op(s);
2991         gen_jmp_tb(s, 0, base + offset, s->base.pc_next);
2992     }
2993 }
2994 
2995 DISAS_INSN(moveq)
2996 {
2997     tcg_gen_movi_i32(DREG(insn, 9), (int8_t)insn);
2998     gen_logic_cc(s, DREG(insn, 9), OS_LONG);
2999 }
3000 
3001 DISAS_INSN(mvzs)
3002 {
3003     int opsize;
3004     TCGv src;
3005     TCGv reg;
3006 
3007     if (insn & 0x40)
3008         opsize = OS_WORD;
3009     else
3010         opsize = OS_BYTE;
3011     SRC_EA(env, src, opsize, (insn & 0x80) == 0, NULL);
3012     reg = DREG(insn, 9);
3013     tcg_gen_mov_i32(reg, src);
3014     gen_logic_cc(s, src, opsize);
3015 }
3016 
3017 DISAS_INSN(or)
3018 {
3019     TCGv reg;
3020     TCGv dest;
3021     TCGv src;
3022     TCGv addr;
3023     int opsize;
3024 
3025     opsize = insn_opsize(insn);
3026     reg = gen_extend(s, DREG(insn, 9), opsize, 0);
3027     dest = tcg_temp_new();
3028     if (insn & 0x100) {
3029         SRC_EA(env, src, opsize, 0, &addr);
3030         tcg_gen_or_i32(dest, src, reg);
3031         DEST_EA(env, insn, opsize, dest, &addr);
3032     } else {
3033         SRC_EA(env, src, opsize, 0, NULL);
3034         tcg_gen_or_i32(dest, src, reg);
3035         gen_partset_reg(opsize, DREG(insn, 9), dest);
3036     }
3037     gen_logic_cc(s, dest, opsize);
3038 }
3039 
3040 DISAS_INSN(suba)
3041 {
3042     TCGv src;
3043     TCGv reg;
3044 
3045     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3046     reg = AREG(insn, 9);
3047     tcg_gen_sub_i32(reg, reg, src);
3048 }
3049 
3050 static inline void gen_subx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3051 {
3052     TCGv tmp, zero;
3053 
3054     gen_flush_flags(s); /* compute old Z */
3055 
3056     /*
3057      * Perform subtract with borrow.
3058      * (X, N) = dest - (src + X);
3059      */
3060 
3061     zero = tcg_constant_i32(0);
3062     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, zero, QREG_CC_X, zero);
3063     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, dest, zero, QREG_CC_N, QREG_CC_X);
3064     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3065     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
3066 
3067     /* Compute signed-overflow for subtract.  */
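         /*
          * V = (res ^ dest) & (dest ^ src): the sign bit is set when the
          * operands have different signs and the result's sign differs
          * from dest's.
          */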
3068 
3069     tmp = tcg_temp_new();
3070     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, dest);
3071     tcg_gen_xor_i32(tmp, dest, src);
3072     tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, tmp);
3073 
3074     /* Copy the rest of the results into place.  */
3075     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3076     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3077 
3078     set_cc_op(s, CC_OP_FLAGS);
3079 
3080     /* result is in QREG_CC_N */
3081 }
3082 
3083 DISAS_INSN(subx_reg)
3084 {
3085     TCGv dest;
3086     TCGv src;
3087     int opsize;
3088 
3089     opsize = insn_opsize(insn);
3090 
3091     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3092     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3093 
3094     gen_subx(s, src, dest, opsize);
3095 
3096     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3097 }
3098 
3099 DISAS_INSN(subx_mem)
3100 {
3101     TCGv src;
3102     TCGv addr_src;
3103     TCGv dest;
3104     TCGv addr_dest;
3105     int opsize;
3106 
3107     opsize = insn_opsize(insn);
3108 
3109     addr_src = AREG(insn, 0);
3110     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3111     src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3112 
3113     addr_dest = AREG(insn, 9);
3114     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3115     dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3116 
3117     gen_subx(s, src, dest, opsize);
3118 
3119     gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3120 }
3121 
3122 DISAS_INSN(mov3q)
3123 {
3124     TCGv src;
3125     int val;
3126 
3127     val = (insn >> 9) & 7;
3128     if (val == 0) {
3129         val = -1;
3130     }
3131     src = tcg_constant_i32(val);
3132     gen_logic_cc(s, src, OS_LONG);
3133     DEST_EA(env, insn, OS_LONG, src, NULL);
3134 }
3135 
3136 DISAS_INSN(cmp)
3137 {
3138     TCGv src;
3139     TCGv reg;
3140     int opsize;
3141 
3142     opsize = insn_opsize(insn);
3143     SRC_EA(env, src, opsize, 1, NULL);
3144     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
3145     gen_update_cc_cmp(s, reg, src, opsize);
3146 }
3147 
3148 DISAS_INSN(cmpa)
3149 {
3150     int opsize;
3151     TCGv src;
3152     TCGv reg;
3153 
3154     if (insn & 0x100) {
3155         opsize = OS_LONG;
3156     } else {
3157         opsize = OS_WORD;
3158     }
3159     SRC_EA(env, src, opsize, 1, NULL);
3160     reg = AREG(insn, 9);
3161     gen_update_cc_cmp(s, reg, src, OS_LONG);
3162 }
3163 
3164 DISAS_INSN(cmpm)
3165 {
3166     int opsize = insn_opsize(insn);
3167     TCGv src, dst;
3168 
3169     /* Post-increment load (mode 3) from Ay.  */
3170     src = gen_ea_mode(env, s, 3, REG(insn, 0), opsize,
3171                       NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3172     /* Post-increment load (mode 3) from Ax.  */
3173     dst = gen_ea_mode(env, s, 3, REG(insn, 9), opsize,
3174                       NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3175 
3176     gen_update_cc_cmp(s, dst, src, opsize);
3177 }
3178 
3179 DISAS_INSN(eor)
3180 {
3181     TCGv src;
3182     TCGv dest;
3183     TCGv addr;
3184     int opsize;
3185 
3186     opsize = insn_opsize(insn);
3187 
3188     SRC_EA(env, src, opsize, 0, &addr);
3189     dest = tcg_temp_new();
3190     tcg_gen_xor_i32(dest, src, DREG(insn, 9));
3191     gen_logic_cc(s, dest, opsize);
3192     DEST_EA(env, insn, opsize, dest, &addr);
3193 }
3194 
3195 static void do_exg(TCGv reg1, TCGv reg2)
3196 {
3197     TCGv temp = tcg_temp_new();
3198     tcg_gen_mov_i32(temp, reg1);
3199     tcg_gen_mov_i32(reg1, reg2);
3200     tcg_gen_mov_i32(reg2, temp);
3201 }
3202 
3203 DISAS_INSN(exg_dd)
3204 {
3205     /* exchange Dx and Dy */
3206     do_exg(DREG(insn, 9), DREG(insn, 0));
3207 }
3208 
3209 DISAS_INSN(exg_aa)
3210 {
3211     /* exchange Ax and Ay */
3212     do_exg(AREG(insn, 9), AREG(insn, 0));
3213 }
3214 
3215 DISAS_INSN(exg_da)
3216 {
3217     /* exchange Dx and Ay */
3218     do_exg(DREG(insn, 9), AREG(insn, 0));
3219 }
3220 
3221 DISAS_INSN(and)
3222 {
3223     TCGv src;
3224     TCGv reg;
3225     TCGv dest;
3226     TCGv addr;
3227     int opsize;
3228 
3229     dest = tcg_temp_new();
3230 
3231     opsize = insn_opsize(insn);
3232     reg = DREG(insn, 9);
3233     if (insn & 0x100) {
3234         SRC_EA(env, src, opsize, 0, &addr);
3235         tcg_gen_and_i32(dest, src, reg);
3236         DEST_EA(env, insn, opsize, dest, &addr);
3237     } else {
3238         SRC_EA(env, src, opsize, 0, NULL);
3239         tcg_gen_and_i32(dest, src, reg);
3240         gen_partset_reg(opsize, reg, dest);
3241     }
3242     gen_logic_cc(s, dest, opsize);
3243 }
3244 
3245 DISAS_INSN(adda)
3246 {
3247     TCGv src;
3248     TCGv reg;
3249 
3250     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3251     reg = AREG(insn, 9);
3252     tcg_gen_add_i32(reg, reg, src);
3253 }
3254 
3255 static inline void gen_addx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3256 {
3257     TCGv tmp, zero;
3258 
3259     gen_flush_flags(s); /* compute old Z */
3260 
3261     /*
3262      * Perform addition with carry.
3263      * (X, N) = src + dest + X;
3264      */
3265 
3266     zero = tcg_constant_i32(0);
3267     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_X, zero, dest, zero);
3268     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_N, QREG_CC_X, src, zero);
3269     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3270 
3271     /* Compute signed-overflow for addition.  */
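         /*
          * V = (res ^ src) & ~(dest ^ src): the sign bit is set when the
          * operands have the same sign but the result's sign differs.
          */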
3272 
3273     tmp = tcg_temp_new();
3274     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3275     tcg_gen_xor_i32(tmp, dest, src);
3276     tcg_gen_andc_i32(QREG_CC_V, QREG_CC_V, tmp);
3277 
3278     /* Copy the rest of the results into place.  */
3279     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3280     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3281 
3282     set_cc_op(s, CC_OP_FLAGS);
3283 
3284     /* result is in QREG_CC_N */
3285 }
3286 
3287 DISAS_INSN(addx_reg)
3288 {
3289     TCGv dest;
3290     TCGv src;
3291     int opsize;
3292 
3293     opsize = insn_opsize(insn);
3294 
3295     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3296     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3297 
3298     gen_addx(s, src, dest, opsize);
3299 
3300     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3301 }
3302 
3303 DISAS_INSN(addx_mem)
3304 {
3305     TCGv src;
3306     TCGv addr_src;
3307     TCGv dest;
3308     TCGv addr_dest;
3309     int opsize;
3310 
3311     opsize = insn_opsize(insn);
3312 
3313     addr_src = AREG(insn, 0);
3314     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3315     src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3316 
3317     addr_dest = AREG(insn, 9);
3318     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3319     dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3320 
3321     gen_addx(s, src, dest, opsize);
3322 
3323     gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3324 }
3325 
3326 static inline void shift_im(DisasContext *s, uint16_t insn, int opsize)
3327 {
3328     int count = (insn >> 9) & 7;
3329     int logical = insn & 8;
3330     int left = insn & 0x100;
3331     int bits = opsize_bytes(opsize) * 8;
3332     TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
3333 
3334     if (count == 0) {
3335         count = 8;
3336     }
3337 
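         /*
          * C (and X) receive the last bit shifted out: bit (bits - count)
          * for a left shift, bit (count - 1) for a right shift; only the
          * low bit of QREG_CC_C is kept below.
          */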
3338     tcg_gen_movi_i32(QREG_CC_V, 0);
3339     if (left) {
3340         tcg_gen_shri_i32(QREG_CC_C, reg, bits - count);
3341         tcg_gen_shli_i32(QREG_CC_N, reg, count);
3342 
3343         /*
3344          * Note that ColdFire always clears V (done above),
3345          * while M68000 sets V if the most significant bit changes at
3346          * any time during the shift operation.
3347          */
3348         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
3349             /* if shift count >= bits, V is (reg != 0) */
3350             if (count >= bits) {
3351                 tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, reg, QREG_CC_V);
3352             } else {
3353                 TCGv t0 = tcg_temp_new();
3354                 tcg_gen_sari_i32(QREG_CC_V, reg, bits - 1);
3355                 tcg_gen_sari_i32(t0, reg, bits - count - 1);
3356                 tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, t0);
3357             }
3358             tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3359         }
3360     } else {
3361         tcg_gen_shri_i32(QREG_CC_C, reg, count - 1);
3362         if (logical) {
3363             tcg_gen_shri_i32(QREG_CC_N, reg, count);
3364         } else {
3365             tcg_gen_sari_i32(QREG_CC_N, reg, count);
3366         }
3367     }
3368 
3369     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3370     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3371     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3372     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3373 
3374     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3375     set_cc_op(s, CC_OP_FLAGS);
3376 }
3377 
3378 static inline void shift_reg(DisasContext *s, uint16_t insn, int opsize)
3379 {
3380     int logical = insn & 8;
3381     int left = insn & 0x100;
3382     int bits = opsize_bytes(opsize) * 8;
3383     TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
3384     TCGv s32;
3385     TCGv_i64 t64, s64;
3386 
3387     t64 = tcg_temp_new_i64();
3388     s64 = tcg_temp_new_i64();
3389     s32 = tcg_temp_new();
3390 
3391     /*
3392      * Note that m68k truncates the shift count modulo 64, not 32.
3393      * In addition, a 64-bit shift makes it easy to find "the last
3394      * bit shifted out", for the carry flag.
3395      */
3396     tcg_gen_andi_i32(s32, DREG(insn, 9), 63);
3397     tcg_gen_extu_i32_i64(s64, s32);
3398     tcg_gen_extu_i32_i64(t64, reg);
3399 
3400     /* Optimistically set V=0.  Also used as a zero source below.  */
3401     tcg_gen_movi_i32(QREG_CC_V, 0);
3402     if (left) {
3403         tcg_gen_shl_i64(t64, t64, s64);
3404 
3405         if (opsize == OS_LONG) {
3406             tcg_gen_extr_i64_i32(QREG_CC_N, QREG_CC_C, t64);
3407             /* Note that C=0 if shift count is 0, and we get that for free.  */
3408         } else {
3409             TCGv zero = tcg_constant_i32(0);
3410             tcg_gen_extrl_i64_i32(QREG_CC_N, t64);
3411             tcg_gen_shri_i32(QREG_CC_C, QREG_CC_N, bits);
3412             tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3413                                 s32, zero, zero, QREG_CC_C);
3414         }
3415         tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3416 
3417         /* X = C, but only if the shift count was non-zero.  */
3418         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3419                             QREG_CC_C, QREG_CC_X);
3420 
3421         /*
3422          * M68000 sets V if the most significant bit is changed at
3423          * any time during the shift operation.  Do this via creating
3424          * an extension of the sign bit, comparing, and discarding
3425          * the bits below the sign bit.  I.e.
3426          *     int64_t s = (intN_t)reg;
3427          *     int64_t t = (int64_t)(intN_t)reg << count;
3428          *     V = ((s ^ t) & (-1 << (bits - 1))) != 0
3429          */
3430         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
3431             TCGv_i64 tt = tcg_constant_i64(32);
3432             /* if shift is greater than 32, use 32 */
3433             tcg_gen_movcond_i64(TCG_COND_GT, s64, s64, tt, tt, s64);
3434             /* Sign extend the input to 64 bits; re-do the shift.  */
3435             tcg_gen_ext_i32_i64(t64, reg);
3436             tcg_gen_shl_i64(s64, t64, s64);
3437             /* Clear all bits that are unchanged.  */
3438             tcg_gen_xor_i64(t64, t64, s64);
3439             /* Ignore the bits below the sign bit.  */
3440             tcg_gen_andi_i64(t64, t64, -1ULL << (bits - 1));
3441             /* If any bits remain set, we have overflow.  */
3442             tcg_gen_setcondi_i64(TCG_COND_NE, t64, t64, 0);
3443             tcg_gen_extrl_i64_i32(QREG_CC_V, t64);
3444             tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3445         }
3446     } else {
3447         tcg_gen_shli_i64(t64, t64, 32);
3448         if (logical) {
3449             tcg_gen_shr_i64(t64, t64, s64);
3450         } else {
3451             tcg_gen_sar_i64(t64, t64, s64);
3452         }
3453         tcg_gen_extr_i64_i32(QREG_CC_C, QREG_CC_N, t64);
3454 
3455         /* Note that C=0 if shift count is 0, and we get that for free.  */
3456         tcg_gen_shri_i32(QREG_CC_C, QREG_CC_C, 31);
3457 
3458         /* X = C, but only if the shift count was non-zero.  */
3459         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3460                             QREG_CC_C, QREG_CC_X);
3461     }
3462     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3463     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3464 
3465     /* Write back the result.  */
3466     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3467     set_cc_op(s, CC_OP_FLAGS);
3468 }
3469 
3470 DISAS_INSN(shift8_im)
3471 {
3472     shift_im(s, insn, OS_BYTE);
3473 }
3474 
3475 DISAS_INSN(shift16_im)
3476 {
3477     shift_im(s, insn, OS_WORD);
3478 }
3479 
3480 DISAS_INSN(shift_im)
3481 {
3482     shift_im(s, insn, OS_LONG);
3483 }
3484 
3485 DISAS_INSN(shift8_reg)
3486 {
3487     shift_reg(s, insn, OS_BYTE);
3488 }
3489 
3490 DISAS_INSN(shift16_reg)
3491 {
3492     shift_reg(s, insn, OS_WORD);
3493 }
3494 
3495 DISAS_INSN(shift_reg)
3496 {
3497     shift_reg(s, insn, OS_LONG);
3498 }
3499 
3500 DISAS_INSN(shift_mem)
3501 {
3502     int logical = insn & 8;
3503     int left = insn & 0x100;
3504     TCGv src;
3505     TCGv addr;
3506 
3507     SRC_EA(env, src, OS_WORD, !logical, &addr);
3508     tcg_gen_movi_i32(QREG_CC_V, 0);
3509     if (left) {
3510         tcg_gen_shri_i32(QREG_CC_C, src, 15);
3511         tcg_gen_shli_i32(QREG_CC_N, src, 1);
3512 
3513         /*
3514          * Note that ColdFire always clears V,
3515          * while M68000 sets V if the most significant bit changes at
3516          * any time during the shift operation.
3517          */
3518         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
3519             src = gen_extend(s, src, OS_WORD, 1);
3520             tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3521         }
3522     } else {
3523         tcg_gen_mov_i32(QREG_CC_C, src);
3524         if (logical) {
3525             tcg_gen_shri_i32(QREG_CC_N, src, 1);
3526         } else {
3527             tcg_gen_sari_i32(QREG_CC_N, src, 1);
3528         }
3529     }
3530 
3531     gen_ext(QREG_CC_N, QREG_CC_N, OS_WORD, 1);
3532     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3533     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3534     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3535 
3536     DEST_EA(env, insn, OS_WORD, QREG_CC_N, &addr);
3537     set_cc_op(s, CC_OP_FLAGS);
3538 }
3539 
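     /*
      * Plain rotate (ROL/ROR): X is not affected; C receives the bit that
      * crossed the word boundary (the new LSB for a left rotate, the new
      * MSB for a right rotate); V is always cleared.
      */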
3540 static void rotate(TCGv reg, TCGv shift, int left, int size)
3541 {
3542     switch (size) {
3543     case 8:
3544         /* Replicate the 8-bit input so that a 32-bit rotate works.  */
3545         tcg_gen_ext8u_i32(reg, reg);
3546         tcg_gen_muli_i32(reg, reg, 0x01010101);
3547         goto do_long;
3548     case 16:
3549         /* Replicate the 16-bit input so that a 32-bit rotate works.  */
3550         tcg_gen_deposit_i32(reg, reg, reg, 16, 16);
3551         goto do_long;
3552     do_long:
3553     default:
3554         if (left) {
3555             tcg_gen_rotl_i32(reg, reg, shift);
3556         } else {
3557             tcg_gen_rotr_i32(reg, reg, shift);
3558         }
3559     }
3560 
3561     /* compute flags */
3562 
3563     switch (size) {
3564     case 8:
3565         tcg_gen_ext8s_i32(reg, reg);
3566         break;
3567     case 16:
3568         tcg_gen_ext16s_i32(reg, reg);
3569         break;
3570     default:
3571         break;
3572     }
3573 
3574     /* QREG_CC_X is not affected */
3575 
3576     tcg_gen_mov_i32(QREG_CC_N, reg);
3577     tcg_gen_mov_i32(QREG_CC_Z, reg);
3578 
3579     if (left) {
3580         tcg_gen_andi_i32(QREG_CC_C, reg, 1);
3581     } else {
3582         tcg_gen_shri_i32(QREG_CC_C, reg, 31);
3583     }
3584 
3585     tcg_gen_movi_i32(QREG_CC_V, 0); /* always cleared */
3586 }
3587 
3588 static void rotate_x_flags(TCGv reg, TCGv X, int size)
3589 {
3590     switch (size) {
3591     case 8:
3592         tcg_gen_ext8s_i32(reg, reg);
3593         break;
3594     case 16:
3595         tcg_gen_ext16s_i32(reg, reg);
3596         break;
3597     default:
3598         break;
3599     }
3600     tcg_gen_mov_i32(QREG_CC_N, reg);
3601     tcg_gen_mov_i32(QREG_CC_Z, reg);
3602     tcg_gen_mov_i32(QREG_CC_X, X);
3603     tcg_gen_mov_i32(QREG_CC_C, X);
3604     tcg_gen_movi_i32(QREG_CC_V, 0);
3605 }
3606 
3607 /* Result of rotate_x() is valid if 0 <= shift <= size */
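     /*
      * ROXL/ROXR rotate through X, i.e. rotate the (size + 1)-bit value
      * X:reg.  The result is assembled from two shifts of reg plus X
      * inserted at position shift - 1 (left) or size - shift (right);
      * the new X is bit 'size' of the assembled value.
      */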
3608 static TCGv rotate_x(TCGv reg, TCGv shift, int left, int size)
3609 {
3610     TCGv X, shl, shr, shx, sz, zero;
3611 
3612     sz = tcg_constant_i32(size);
3613 
3614     shr = tcg_temp_new();
3615     shl = tcg_temp_new();
3616     shx = tcg_temp_new();
3617     if (left) {
3618         tcg_gen_mov_i32(shl, shift);      /* shl = shift */
3619         tcg_gen_movi_i32(shr, size + 1);
3620         tcg_gen_sub_i32(shr, shr, shift); /* shr = size + 1 - shift */
3621         tcg_gen_subi_i32(shx, shift, 1);  /* shx = shift - 1 */
3622         /* shx = shx < 0 ? size : shx; */
3623         zero = tcg_constant_i32(0);
3624         tcg_gen_movcond_i32(TCG_COND_LT, shx, shx, zero, sz, shx);
3625     } else {
3626         tcg_gen_mov_i32(shr, shift);      /* shr = shift */
3627         tcg_gen_movi_i32(shl, size + 1);
3628         tcg_gen_sub_i32(shl, shl, shift); /* shl = size + 1 - shift */
3629         tcg_gen_sub_i32(shx, sz, shift); /* shx = size - shift */
3630     }
3631 
3632     /* reg = (reg << shl) | (reg >> shr) | (x << shx); */
3633 
3634     tcg_gen_shl_i32(shl, reg, shl);
3635     tcg_gen_shr_i32(shr, reg, shr);
3636     tcg_gen_or_i32(reg, shl, shr);
3637     tcg_gen_shl_i32(shx, QREG_CC_X, shx);
3638     tcg_gen_or_i32(reg, reg, shx);
3639 
3640     /* X = (reg >> size) & 1 */
3641 
3642     X = tcg_temp_new();
3643     tcg_gen_extract_i32(X, reg, size, 1);
3644 
3645     return X;
3646 }
3647 
3648 /* Result of rotate32_x() is valid if 0 <= shift < 33 */
3649 static TCGv rotate32_x(TCGv reg, TCGv shift, int left)
3650 {
3651     TCGv_i64 t0, shift64;
3652     TCGv X, lo, hi, zero;
3653 
3654     shift64 = tcg_temp_new_i64();
3655     tcg_gen_extu_i32_i64(shift64, shift);
3656 
3657     t0 = tcg_temp_new_i64();
3658 
3659     X = tcg_temp_new();
3660     lo = tcg_temp_new();
3661     hi = tcg_temp_new();
3662 
3663     if (left) {
3664         /* create [reg:X:..] */
3665 
3666         tcg_gen_shli_i32(lo, QREG_CC_X, 31);
3667         tcg_gen_concat_i32_i64(t0, lo, reg);
3668 
3669         /* rotate */
3670 
3671         tcg_gen_rotl_i64(t0, t0, shift64);
3672 
3673         /* result is [reg:..:reg:X] */
3674 
3675         tcg_gen_extr_i64_i32(lo, hi, t0);
3676         tcg_gen_andi_i32(X, lo, 1);
3677 
3678         tcg_gen_shri_i32(lo, lo, 1);
3679     } else {
3680         /* create [..:X:reg] */
3681 
3682         tcg_gen_concat_i32_i64(t0, reg, QREG_CC_X);
3683 
3684         tcg_gen_rotr_i64(t0, t0, shift64);
3685 
3686         /* result is [X:reg:..:reg] */
3687 
3688         tcg_gen_extr_i64_i32(lo, hi, t0);
3689 
3690         /* extract X */
3691 
3692         tcg_gen_shri_i32(X, hi, 31);
3693 
3694         /* extract result */
3695 
3696         tcg_gen_shli_i32(hi, hi, 1);
3697     }
3698     tcg_gen_or_i32(lo, lo, hi);
3699 
3700     /* if shift == 0, register and X are not affected */
3701 
3702     zero = tcg_constant_i32(0);
3703     tcg_gen_movcond_i32(TCG_COND_EQ, X, shift, zero, QREG_CC_X, X);
3704     tcg_gen_movcond_i32(TCG_COND_EQ, reg, shift, zero, reg, lo);
3705 
3706     return X;
3707 }
3708 
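/*
 * Immediate-count rotates.  The count field encodes 1..8 (0 meaning 8),
 * bit 8 of the opcode selects left vs. right, and bit 3 selects a plain
 * rotate (ROL/ROR) vs. a rotate through X (ROXL/ROXR).
 */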
3709 DISAS_INSN(rotate_im)
3710 {
3711     TCGv shift;
3712     int tmp;
3713     int left = (insn & 0x100);
3714 
3715     tmp = (insn >> 9) & 7;
3716     if (tmp == 0) {
3717         tmp = 8;
3718     }
3719 
3720     shift = tcg_constant_i32(tmp);
3721     if (insn & 8) {
3722         rotate(DREG(insn, 0), shift, left, 32);
3723     } else {
3724         TCGv X = rotate32_x(DREG(insn, 0), shift, left);
3725         rotate_x_flags(DREG(insn, 0), X, 32);
3726     }
3727 
3728     set_cc_op(s, CC_OP_FLAGS);
3729 }
3730 
3731 DISAS_INSN(rotate8_im)
3732 {
3733     int left = (insn & 0x100);
3734     TCGv reg;
3735     TCGv shift;
3736     int tmp;
3737 
3738     reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
3739 
3740     tmp = (insn >> 9) & 7;
3741     if (tmp == 0) {
3742         tmp = 8;
3743     }
3744 
3745     shift = tcg_constant_i32(tmp);
3746     if (insn & 8) {
3747         rotate(reg, shift, left, 8);
3748     } else {
3749         TCGv X = rotate_x(reg, shift, left, 8);
3750         rotate_x_flags(reg, X, 8);
3751     }
3752     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3753     set_cc_op(s, CC_OP_FLAGS);
3754 }
3755 
3756 DISAS_INSN(rotate16_im)
3757 {
3758     int left = (insn & 0x100);
3759     TCGv reg;
3760     TCGv shift;
3761     int tmp;
3762 
3763     reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
3764     tmp = (insn >> 9) & 7;
3765     if (tmp == 0) {
3766         tmp = 8;
3767     }
3768 
3769     shift = tcg_constant_i32(tmp);
3770     if (insn & 8) {
3771         rotate(reg, shift, left, 16);
3772     } else {
3773         TCGv X = rotate_x(reg, shift, left, 16);
3774         rotate_x_flags(reg, X, 16);
3775     }
3776     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3777     set_cc_op(s, CC_OP_FLAGS);
3778 }
3779 
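/*
 * Register-count rotates.  The raw count is taken modulo 64; plain
 * rotates then use it modulo 32 (with C cleared when the full count is
 * zero), while rotates through X reduce it modulo 33 so that the X bit
 * takes part in the rotation.
 */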
3780 DISAS_INSN(rotate_reg)
3781 {
3782     TCGv reg;
3783     TCGv src;
3784     TCGv t0, t1;
3785     int left = (insn & 0x100);
3786 
3787     reg = DREG(insn, 0);
3788     src = DREG(insn, 9);
3789     /* shift in [0..63] */
3790     t0 = tcg_temp_new();
3791     tcg_gen_andi_i32(t0, src, 63);
3792     t1 = tcg_temp_new_i32();
3793     if (insn & 8) {
3794         tcg_gen_andi_i32(t1, src, 31);
3795         rotate(reg, t1, left, 32);
3796         /* if shift == 0, clear C */
3797         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3798                             t0, QREG_CC_V /* 0 */,
3799                             QREG_CC_V /* 0 */, QREG_CC_C);
3800     } else {
3801         TCGv X;
3802         /* modulo 33 */
3803         tcg_gen_movi_i32(t1, 33);
3804         tcg_gen_remu_i32(t1, t0, t1);
3805         X = rotate32_x(DREG(insn, 0), t1, left);
3806         rotate_x_flags(DREG(insn, 0), X, 32);
3807     }
3808     set_cc_op(s, CC_OP_FLAGS);
3809 }
3810 
3811 DISAS_INSN(rotate8_reg)
3812 {
3813     TCGv reg;
3814     TCGv src;
3815     TCGv t0, t1;
3816     int left = (insn & 0x100);
3817 
3818     reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
3819     src = DREG(insn, 9);
3820     /* shift in [0..63] */
3821     t0 = tcg_temp_new_i32();
3822     tcg_gen_andi_i32(t0, src, 63);
3823     t1 = tcg_temp_new_i32();
3824     if (insn & 8) {
3825         tcg_gen_andi_i32(t1, src, 7);
3826         rotate(reg, t1, left, 8);
3827         /* if shift == 0, clear C */
3828         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3829                             t0, QREG_CC_V /* 0 */,
3830                             QREG_CC_V /* 0 */, QREG_CC_C);
3831     } else {
3832         TCGv X;
3833         /* modulo 9 */
3834         tcg_gen_movi_i32(t1, 9);
3835         tcg_gen_remu_i32(t1, t0, t1);
3836         X = rotate_x(reg, t1, left, 8);
3837         rotate_x_flags(reg, X, 8);
3838     }
3839     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3840     set_cc_op(s, CC_OP_FLAGS);
3841 }
3842 
3843 DISAS_INSN(rotate16_reg)
3844 {
3845     TCGv reg;
3846     TCGv src;
3847     TCGv t0, t1;
3848     int left = (insn & 0x100);
3849 
3850     reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
3851     src = DREG(insn, 9);
3852     /* shift in [0..63] */
3853     t0 = tcg_temp_new_i32();
3854     tcg_gen_andi_i32(t0, src, 63);
3855     t1 = tcg_temp_new_i32();
3856     if (insn & 8) {
3857         tcg_gen_andi_i32(t1, src, 15);
3858         rotate(reg, t1, left, 16);
3859         /* if shift == 0, clear C */
3860         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3861                             t0, QREG_CC_V /* 0 */,
3862                             QREG_CC_V /* 0 */, QREG_CC_C);
3863     } else {
3864         TCGv X;
3865         /* modulo 17 */
3866         tcg_gen_movi_i32(t1, 17);
3867         tcg_gen_remu_i32(t1, t0, t1);
3868         X = rotate_x(reg, t1, left, 16);
3869         rotate_x_flags(reg, X, 16);
3870     }
3871     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3872     set_cc_op(s, CC_OP_FLAGS);
3873 }
3874 
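/* Memory rotates always operate on a word and rotate by exactly one bit. */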
3875 DISAS_INSN(rotate_mem)
3876 {
3877     TCGv src;
3878     TCGv addr;
3879     TCGv shift;
3880     int left = (insn & 0x100);
3881 
3882     SRC_EA(env, src, OS_WORD, 0, &addr);
3883 
3884     shift = tcg_constant_i32(1);
3885     if (insn & 0x0200) {
3886         rotate(src, shift, left, 16);
3887     } else {
3888         TCGv X = rotate_x(src, shift, left, 16);
3889         rotate_x_flags(src, X, 16);
3890     }
3891     DEST_EA(env, insn, OS_WORD, src, &addr);
3892     set_cc_op(s, CC_OP_FLAGS);
3893 }
3894 
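/*
 * BFEXTU/BFEXTS from a data register.  The extension word supplies the
 * offset and width, each either as an immediate or in a data register;
 * a width field of 0 means 32 bits and offsets are counted from the
 * most significant bit.
 */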
3895 DISAS_INSN(bfext_reg)
3896 {
3897     int ext = read_im16(env, s);
3898     int is_sign = insn & 0x200;
3899     TCGv src = DREG(insn, 0);
3900     TCGv dst = DREG(ext, 12);
3901     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
3902     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
3903     int pos = 32 - ofs - len;        /* little bit-endian */
3904     TCGv tmp = tcg_temp_new();
3905     TCGv shift;
3906 
3907     /*
3908      * In general, we're going to rotate the field so that it's at the
3909      * top of the word and then right-shift by the complement of the
3910      * width to extend the field.
3911      */
3912     if (ext & 0x20) {
3913         /* Variable width.  */
3914         if (ext & 0x800) {
3915             /* Variable offset.  */
3916             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3917             tcg_gen_rotl_i32(tmp, src, tmp);
3918         } else {
3919             tcg_gen_rotli_i32(tmp, src, ofs);
3920         }
3921 
3922         shift = tcg_temp_new();
3923         tcg_gen_neg_i32(shift, DREG(ext, 0));
3924         tcg_gen_andi_i32(shift, shift, 31);
3925         tcg_gen_sar_i32(QREG_CC_N, tmp, shift);
3926         if (is_sign) {
3927             tcg_gen_mov_i32(dst, QREG_CC_N);
3928         } else {
3929             tcg_gen_shr_i32(dst, tmp, shift);
3930         }
3931     } else {
3932         /* Immediate width.  */
3933         if (ext & 0x800) {
3934             /* Variable offset */
3935             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3936             tcg_gen_rotl_i32(tmp, src, tmp);
3937             src = tmp;
3938             pos = 32 - len;
3939         } else {
3940             /*
3941              * Immediate offset.  If the field doesn't wrap around the
3942              * end of the word, rely on (s)extract completely.
3943              */
3944             if (pos < 0) {
3945                 tcg_gen_rotli_i32(tmp, src, ofs);
3946                 src = tmp;
3947                 pos = 32 - len;
3948             }
3949         }
3950 
3951         tcg_gen_sextract_i32(QREG_CC_N, src, pos, len);
3952         if (is_sign) {
3953             tcg_gen_mov_i32(dst, QREG_CC_N);
3954         } else {
3955             tcg_gen_extract_i32(dst, src, pos, len);
3956         }
3957     }
3958 
3959     set_cc_op(s, CC_OP_LOGIC);
3960 }
3961 
3962 DISAS_INSN(bfext_mem)
3963 {
3964     int ext = read_im16(env, s);
3965     int is_sign = insn & 0x200;
3966     TCGv dest = DREG(ext, 12);
3967     TCGv addr, len, ofs;
3968 
3969     addr = gen_lea(env, s, insn, OS_UNSIZED);
3970     if (IS_NULL_QREG(addr)) {
3971         gen_addr_fault(s);
3972         return;
3973     }
3974 
3975     if (ext & 0x20) {
3976         len = DREG(ext, 0);
3977     } else {
3978         len = tcg_constant_i32(extract32(ext, 0, 5));
3979     }
3980     if (ext & 0x800) {
3981         ofs = DREG(ext, 6);
3982     } else {
3983         ofs = tcg_constant_i32(extract32(ext, 6, 5));
3984     }
3985 
3986     if (is_sign) {
3987         gen_helper_bfexts_mem(dest, cpu_env, addr, ofs, len);
3988         tcg_gen_mov_i32(QREG_CC_N, dest);
3989     } else {
3990         TCGv_i64 tmp = tcg_temp_new_i64();
3991         gen_helper_bfextu_mem(tmp, cpu_env, addr, ofs, len);
3992         tcg_gen_extr_i64_i32(dest, QREG_CC_N, tmp);
3993     }
3994     set_cc_op(s, CC_OP_LOGIC);
3995 }
3996 
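/*
 * BFCHG/BFCLR/BFSET/BFTST/BFFFO on a data register.  QREG_CC_N receives
 * the field rotated up to the top of the word for the flags, while
 * 'mask' is built with zeros over the field, so that (roughly)
 * bfclr is src &= mask, bfset is src |= ~mask and bfchg is src ^= ~mask.
 */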
3997 DISAS_INSN(bfop_reg)
3998 {
3999     int ext = read_im16(env, s);
4000     TCGv src = DREG(insn, 0);
4001     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4002     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4003     TCGv mask, tofs = NULL, tlen = NULL;
4004     bool is_bfffo = (insn & 0x0f00) == 0x0d00;
4005 
4006     if ((ext & 0x820) == 0) {
4007         /* Immediate width and offset.  */
4008         uint32_t maski = 0x7fffffffu >> (len - 1);
4009         if (ofs + len <= 32) {
4010             tcg_gen_shli_i32(QREG_CC_N, src, ofs);
4011         } else {
4012             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4013         }
4014         tcg_gen_andi_i32(QREG_CC_N, QREG_CC_N, ~maski);
4015 
4016         mask = tcg_constant_i32(ror32(maski, ofs));
4017         if (is_bfffo) {
4018             tofs = tcg_constant_i32(ofs);
4019             tlen = tcg_constant_i32(len);
4020         }
4021     } else {
4022         TCGv tmp = tcg_temp_new();
4023 
4024         mask = tcg_temp_new();
4025         if (ext & 0x20) {
4026             /* Variable width */
4027             tcg_gen_subi_i32(tmp, DREG(ext, 0), 1);
4028             tcg_gen_andi_i32(tmp, tmp, 31);
4029             tcg_gen_shr_i32(mask, tcg_constant_i32(0x7fffffffu), tmp);
4030             if (is_bfffo) {
4031                 tlen = tcg_temp_new();
4032                 tcg_gen_addi_i32(tlen, tmp, 1);
4033             }
4034         } else {
4035             /* Immediate width */
4036             tcg_gen_movi_i32(mask, 0x7fffffffu >> (len - 1));
4037             if (is_bfffo) {
4038                 tlen = tcg_constant_i32(len);
4039             }
4040         }
4041 
4042         if (ext & 0x800) {
4043             /* Variable offset */
4044             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4045             tcg_gen_rotl_i32(QREG_CC_N, src, tmp);
4046             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4047             tcg_gen_rotr_i32(mask, mask, tmp);
4048             if (is_bfffo) {
4049                 tofs = tmp;
4050             }
4051         } else {
4052             /* Immediate offset (and variable width) */
4053             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4054             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4055             tcg_gen_rotri_i32(mask, mask, ofs);
4056             if (is_bfffo) {
4057                 tofs = tcg_constant_i32(ofs);
4058             }
4059         }
4060     }
4061     set_cc_op(s, CC_OP_LOGIC);
4062 
4063     switch (insn & 0x0f00) {
4064     case 0x0a00: /* bfchg */
4065         tcg_gen_eqv_i32(src, src, mask);
4066         break;
4067     case 0x0c00: /* bfclr */
4068         tcg_gen_and_i32(src, src, mask);
4069         break;
4070     case 0x0d00: /* bfffo */
4071         gen_helper_bfffo_reg(DREG(ext, 12), QREG_CC_N, tofs, tlen);
4072         break;
4073     case 0x0e00: /* bfset */
4074         tcg_gen_orc_i32(src, src, mask);
4075         break;
4076     case 0x0800: /* bftst */
4077         /* flags already set; no other work to do.  */
4078         break;
4079     default:
4080         g_assert_not_reached();
4081     }
4082 }
4083 
4084 DISAS_INSN(bfop_mem)
4085 {
4086     int ext = read_im16(env, s);
4087     TCGv addr, len, ofs;
4088     TCGv_i64 t64;
4089 
4090     addr = gen_lea(env, s, insn, OS_UNSIZED);
4091     if (IS_NULL_QREG(addr)) {
4092         gen_addr_fault(s);
4093         return;
4094     }
4095 
4096     if (ext & 0x20) {
4097         len = DREG(ext, 0);
4098     } else {
4099         len = tcg_constant_i32(extract32(ext, 0, 5));
4100     }
4101     if (ext & 0x800) {
4102         ofs = DREG(ext, 6);
4103     } else {
4104         ofs = tcg_constant_i32(extract32(ext, 6, 5));
4105     }
4106 
4107     switch (insn & 0x0f00) {
4108     case 0x0a00: /* bfchg */
4109         gen_helper_bfchg_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4110         break;
4111     case 0x0c00: /* bfclr */
4112         gen_helper_bfclr_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4113         break;
4114     case 0x0d00: /* bfffo */
4115         t64 = tcg_temp_new_i64();
4116         gen_helper_bfffo_mem(t64, cpu_env, addr, ofs, len);
4117         tcg_gen_extr_i64_i32(DREG(ext, 12), QREG_CC_N, t64);
4118         break;
4119     case 0x0e00: /* bfset */
4120         gen_helper_bfset_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4121         break;
4122     case 0x0800: /* bftst */
4123         gen_helper_bfexts_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4124         break;
4125     default:
4126         g_assert_not_reached();
4127     }
4128     set_cc_op(s, CC_OP_LOGIC);
4129 }
4130 
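/*
 * BFINS into a data register.  With immediate offset and width the
 * field is placed with a single deposit when it does not wrap around
 * bit 0; otherwise the source is masked, rotated into position and
 * merged by hand.
 */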
4131 DISAS_INSN(bfins_reg)
4132 {
4133     int ext = read_im16(env, s);
4134     TCGv dst = DREG(insn, 0);
4135     TCGv src = DREG(ext, 12);
4136     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4137     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4138     int pos = 32 - ofs - len;        /* little bit-endian */
4139     TCGv tmp;
4140 
4141     tmp = tcg_temp_new();
4142 
4143     if (ext & 0x20) {
4144         /* Variable width */
4145         tcg_gen_neg_i32(tmp, DREG(ext, 0));
4146         tcg_gen_andi_i32(tmp, tmp, 31);
4147         tcg_gen_shl_i32(QREG_CC_N, src, tmp);
4148     } else {
4149         /* Immediate width */
4150         tcg_gen_shli_i32(QREG_CC_N, src, 32 - len);
4151     }
4152     set_cc_op(s, CC_OP_LOGIC);
4153 
4154     /* Immediate width and offset */
4155     if ((ext & 0x820) == 0) {
4156         /* Check for suitability for deposit.  */
4157         if (pos >= 0) {
4158             tcg_gen_deposit_i32(dst, dst, src, pos, len);
4159         } else {
4160             uint32_t maski = -2U << (len - 1);
4161             uint32_t roti = (ofs + len) & 31;
4162             tcg_gen_andi_i32(tmp, src, ~maski);
4163             tcg_gen_rotri_i32(tmp, tmp, roti);
4164             tcg_gen_andi_i32(dst, dst, ror32(maski, roti));
4165             tcg_gen_or_i32(dst, dst, tmp);
4166         }
4167     } else {
4168         TCGv mask = tcg_temp_new();
4169         TCGv rot = tcg_temp_new();
4170 
4171         if (ext & 0x20) {
4172             /* Variable width */
4173             tcg_gen_subi_i32(rot, DREG(ext, 0), 1);
4174             tcg_gen_andi_i32(rot, rot, 31);
4175             tcg_gen_movi_i32(mask, -2);
4176             tcg_gen_shl_i32(mask, mask, rot);
4177             tcg_gen_mov_i32(rot, DREG(ext, 0));
4178             tcg_gen_andc_i32(tmp, src, mask);
4179         } else {
4180             /* Immediate width (variable offset) */
4181             uint32_t maski = -2U << (len - 1);
4182             tcg_gen_andi_i32(tmp, src, ~maski);
4183             tcg_gen_movi_i32(mask, maski);
4184             tcg_gen_movi_i32(rot, len & 31);
4185         }
4186         if (ext & 0x800) {
4187             /* Variable offset */
4188             tcg_gen_add_i32(rot, rot, DREG(ext, 6));
4189         } else {
4190             /* Immediate offset (variable width) */
4191             tcg_gen_addi_i32(rot, rot, ofs);
4192         }
4193         tcg_gen_andi_i32(rot, rot, 31);
4194         tcg_gen_rotr_i32(mask, mask, rot);
4195         tcg_gen_rotr_i32(tmp, tmp, rot);
4196         tcg_gen_and_i32(dst, dst, mask);
4197         tcg_gen_or_i32(dst, dst, tmp);
4198     }
4199 }
4200 
4201 DISAS_INSN(bfins_mem)
4202 {
4203     int ext = read_im16(env, s);
4204     TCGv src = DREG(ext, 12);
4205     TCGv addr, len, ofs;
4206 
4207     addr = gen_lea(env, s, insn, OS_UNSIZED);
4208     if (IS_NULL_QREG(addr)) {
4209         gen_addr_fault(s);
4210         return;
4211     }
4212 
4213     if (ext & 0x20) {
4214         len = DREG(ext, 0);
4215     } else {
4216         len = tcg_constant_i32(extract32(ext, 0, 5));
4217     }
4218     if (ext & 0x800) {
4219         ofs = DREG(ext, 6);
4220     } else {
4221         ofs = tcg_constant_i32(extract32(ext, 6, 5));
4222     }
4223 
4224     gen_helper_bfins_mem(QREG_CC_N, cpu_env, addr, src, ofs, len);
4225     set_cc_op(s, CC_OP_LOGIC);
4226 }
4227 
4228 DISAS_INSN(ff1)
4229 {
4230     TCGv reg;
4231     reg = DREG(insn, 0);
4232     gen_logic_cc(s, reg, OS_LONG);
4233     gen_helper_ff1(reg, reg);
4234 }
4235 
4236 DISAS_INSN(chk)
4237 {
4238     TCGv src, reg;
4239     int opsize;
4240 
4241     switch ((insn >> 7) & 3) {
4242     case 3:
4243         opsize = OS_WORD;
4244         break;
4245     case 2:
4246         if (m68k_feature(env, M68K_FEATURE_CHK2)) {
4247             opsize = OS_LONG;
4248             break;
4249         }
4250         /* fallthru */
4251     default:
4252         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4253         return;
4254     }
4255     SRC_EA(env, src, opsize, 1, NULL);
4256     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
4257 
4258     gen_flush_flags(s);
4259     gen_helper_chk(cpu_env, reg, src);
4260 }
4261 
4262 DISAS_INSN(chk2)
4263 {
4264     uint16_t ext;
4265     TCGv addr1, addr2, bound1, bound2, reg;
4266     int opsize;
4267 
4268     switch ((insn >> 9) & 3) {
4269     case 0:
4270         opsize = OS_BYTE;
4271         break;
4272     case 1:
4273         opsize = OS_WORD;
4274         break;
4275     case 2:
4276         opsize = OS_LONG;
4277         break;
4278     default:
4279         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4280         return;
4281     }
4282 
4283     ext = read_im16(env, s);
4284     if ((ext & 0x0800) == 0) {
4285         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4286         return;
4287     }
4288 
4289     addr1 = gen_lea(env, s, insn, OS_UNSIZED);
4290     addr2 = tcg_temp_new();
4291     tcg_gen_addi_i32(addr2, addr1, opsize_bytes(opsize));
4292 
4293     bound1 = gen_load(s, opsize, addr1, 1, IS_USER(s));
4294     bound2 = gen_load(s, opsize, addr2, 1, IS_USER(s));
4295 
4296     reg = tcg_temp_new();
4297     if (ext & 0x8000) {
4298         tcg_gen_mov_i32(reg, AREG(ext, 12));
4299     } else {
4300         gen_ext(reg, DREG(ext, 12), opsize, 1);
4301     }
4302 
4303     gen_flush_flags(s);
4304     gen_helper_chk2(cpu_env, reg, bound1, bound2);
4305 }
4306 
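/*
 * MOVE16 helper: copy one 16-byte line with two 64-bit loads and two
 * 64-bit stores.  Both the source and destination addresses are forced
 * to 16-byte alignment first.
 */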
4307 static void m68k_copy_line(TCGv dst, TCGv src, int index)
4308 {
4309     TCGv addr;
4310     TCGv_i64 t0, t1;
4311 
4312     addr = tcg_temp_new();
4313 
4314     t0 = tcg_temp_new_i64();
4315     t1 = tcg_temp_new_i64();
4316 
4317     tcg_gen_andi_i32(addr, src, ~15);
4318     tcg_gen_qemu_ld_i64(t0, addr, index, MO_TEUQ);
4319     tcg_gen_addi_i32(addr, addr, 8);
4320     tcg_gen_qemu_ld_i64(t1, addr, index, MO_TEUQ);
4321 
4322     tcg_gen_andi_i32(addr, dst, ~15);
4323     tcg_gen_qemu_st_i64(t0, addr, index, MO_TEUQ);
4324     tcg_gen_addi_i32(addr, addr, 8);
4325     tcg_gen_qemu_st_i64(t1, addr, index, MO_TEUQ);
4326 }
4327 
4328 DISAS_INSN(move16_reg)
4329 {
4330     int index = IS_USER(s);
4331     TCGv tmp;
4332     uint16_t ext;
4333 
4334     ext = read_im16(env, s);
4335     if ((ext & (1 << 15)) == 0) {
4336         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4337     }
4338 
4339     m68k_copy_line(AREG(ext, 12), AREG(insn, 0), index);
4340 
4341     /* Ax can be Ay, so save Ay before incrementing Ax */
4342     tmp = tcg_temp_new();
4343     tcg_gen_mov_i32(tmp, AREG(ext, 12));
4344     tcg_gen_addi_i32(AREG(insn, 0), AREG(insn, 0), 16);
4345     tcg_gen_addi_i32(AREG(ext, 12), tmp, 16);
4346 }
4347 
4348 DISAS_INSN(move16_mem)
4349 {
4350     int index = IS_USER(s);
4351     TCGv reg, addr;
4352 
4353     reg = AREG(insn, 0);
4354     addr = tcg_constant_i32(read_im32(env, s));
4355 
4356     if ((insn >> 3) & 1) {
4357         /* MOVE16 (xxx).L, (Ay) */
4358         m68k_copy_line(reg, addr, index);
4359     } else {
4360         /* MOVE16 (Ay), (xxx).L */
4361         m68k_copy_line(addr, reg, index);
4362     }
4363 
4364     if (((insn >> 3) & 2) == 0) {
4365         /* (Ay)+ */
4366         tcg_gen_addi_i32(reg, reg, 16);
4367     }
4368 }
4369 
4370 DISAS_INSN(strldsr)
4371 {
4372     uint16_t ext;
4373     uint32_t addr;
4374 
4375     addr = s->pc - 2;
4376     ext = read_im16(env, s);
4377     if (ext != 0x46FC) {
4378         gen_exception(s, addr, EXCP_ILLEGAL);
4379         return;
4380     }
4381     ext = read_im16(env, s);
4382     if (IS_USER(s) || (ext & SR_S) == 0) {
4383         gen_exception(s, addr, EXCP_PRIVILEGE);
4384         return;
4385     }
4386     gen_push(s, gen_get_sr(s));
4387     gen_set_sr_im(s, ext, 0);
4388     gen_exit_tb(s);
4389 }
4390 
4391 DISAS_INSN(move_from_sr)
4392 {
4393     TCGv sr;
4394 
4395     if (IS_USER(s) && m68k_feature(env, M68K_FEATURE_MOVEFROMSR_PRIV)) {
4396         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4397         return;
4398     }
4399     sr = gen_get_sr(s);
4400     DEST_EA(env, insn, OS_WORD, sr, NULL);
4401 }
4402 
4403 #if defined(CONFIG_SOFTMMU)
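/*
 * MOVES: privileged move using the alternate address spaces selected by
 * SFC (for loads) and DFC (for stores), with the usual postincrement and
 * predecrement writeback.
 */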
4404 DISAS_INSN(moves)
4405 {
4406     int opsize;
4407     uint16_t ext;
4408     TCGv reg;
4409     TCGv addr;
4410     int extend;
4411 
4412     if (IS_USER(s)) {
4413         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4414         return;
4415     }
4416 
4417     ext = read_im16(env, s);
4418 
4419     opsize = insn_opsize(insn);
4420 
4421     if (ext & 0x8000) {
4422         /* address register */
4423         reg = AREG(ext, 12);
4424         extend = 1;
4425     } else {
4426         /* data register */
4427         reg = DREG(ext, 12);
4428         extend = 0;
4429     }
4430 
4431     addr = gen_lea(env, s, insn, opsize);
4432     if (IS_NULL_QREG(addr)) {
4433         gen_addr_fault(s);
4434         return;
4435     }
4436 
4437     if (ext & 0x0800) {
4438         /* from reg to ea */
4439         gen_store(s, opsize, addr, reg, DFC_INDEX(s));
4440     } else {
4441         /* from ea to reg */
4442         TCGv tmp = gen_load(s, opsize, addr, 0, SFC_INDEX(s));
4443         if (extend) {
4444             gen_ext(reg, tmp, opsize, 1);
4445         } else {
4446             gen_partset_reg(opsize, reg, tmp);
4447         }
4448     }
4449     switch (extract32(insn, 3, 3)) {
4450     case 3: /* Indirect postincrement.  */
4451         tcg_gen_addi_i32(AREG(insn, 0), addr,
4452                          REG(insn, 0) == 7 && opsize == OS_BYTE
4453                          ? 2
4454                          : opsize_bytes(opsize));
4455         break;
4456     case 4: /* Indirect predecrement.  */
4457         tcg_gen_mov_i32(AREG(insn, 0), addr);
4458         break;
4459     }
4460 }
4461 
4462 DISAS_INSN(move_to_sr)
4463 {
4464     if (IS_USER(s)) {
4465         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4466         return;
4467     }
4468     gen_move_to_sr(env, s, insn, false);
4469     gen_exit_tb(s);
4470 }
4471 
4472 DISAS_INSN(move_from_usp)
4473 {
4474     if (IS_USER(s)) {
4475         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4476         return;
4477     }
4478     tcg_gen_ld_i32(AREG(insn, 0), cpu_env,
4479                    offsetof(CPUM68KState, sp[M68K_USP]));
4480 }
4481 
4482 DISAS_INSN(move_to_usp)
4483 {
4484     if (IS_USER(s)) {
4485         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4486         return;
4487     }
4488     tcg_gen_st_i32(AREG(insn, 0), cpu_env,
4489                    offsetof(CPUM68KState, sp[M68K_USP]));
4490 }
4491 
4492 DISAS_INSN(halt)
4493 {
4494     if (IS_USER(s)) {
4495         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4496         return;
4497     }
4498 
4499     gen_exception(s, s->pc, EXCP_HALT_INSN);
4500 }
4501 
4502 DISAS_INSN(stop)
4503 {
4504     uint16_t ext;
4505 
4506     if (IS_USER(s)) {
4507         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4508         return;
4509     }
4510 
4511     ext = read_im16(env, s);
4512 
4513     gen_set_sr_im(s, ext, 0);
4514     tcg_gen_movi_i32(cpu_halted, 1);
4515     gen_exception(s, s->pc, EXCP_HLT);
4516 }
4517 
4518 DISAS_INSN(rte)
4519 {
4520     if (IS_USER(s)) {
4521         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4522         return;
4523     }
4524     gen_exception(s, s->base.pc_next, EXCP_RTE);
4525 }
4526 
4527 DISAS_INSN(cf_movec)
4528 {
4529     uint16_t ext;
4530     TCGv reg;
4531 
4532     if (IS_USER(s)) {
4533         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4534         return;
4535     }
4536 
4537     ext = read_im16(env, s);
4538 
4539     if (ext & 0x8000) {
4540         reg = AREG(ext, 12);
4541     } else {
4542         reg = DREG(ext, 12);
4543     }
4544     gen_helper_cf_movec_to(cpu_env, tcg_constant_i32(ext & 0xfff), reg);
4545     gen_exit_tb(s);
4546 }
4547 
4548 DISAS_INSN(m68k_movec)
4549 {
4550     uint16_t ext;
4551     TCGv reg, creg;
4552 
4553     if (IS_USER(s)) {
4554         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4555         return;
4556     }
4557 
4558     ext = read_im16(env, s);
4559 
4560     if (ext & 0x8000) {
4561         reg = AREG(ext, 12);
4562     } else {
4563         reg = DREG(ext, 12);
4564     }
4565     creg = tcg_constant_i32(ext & 0xfff);
4566     if (insn & 1) {
4567         gen_helper_m68k_movec_to(cpu_env, creg, reg);
4568     } else {
4569         gen_helper_m68k_movec_from(reg, cpu_env, creg);
4570     }
4571     gen_exit_tb(s);
4572 }
4573 
4574 DISAS_INSN(intouch)
4575 {
4576     if (IS_USER(s)) {
4577         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4578         return;
4579     }
4580     /* ICache fetch.  Implement as no-op.  */
4581 }
4582 
4583 DISAS_INSN(cpushl)
4584 {
4585     if (IS_USER(s)) {
4586         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4587         return;
4588     }
4589     /* Cache push/invalidate.  Implement as no-op.  */
4590 }
4591 
4592 DISAS_INSN(cpush)
4593 {
4594     if (IS_USER(s)) {
4595         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4596         return;
4597     }
4598     /* Cache push/invalidate.  Implement as no-op.  */
4599 }
4600 
4601 DISAS_INSN(cinv)
4602 {
4603     if (IS_USER(s)) {
4604         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4605         return;
4606     }
4607     /* Invalidate cache line.  Implement as no-op.  */
4608 }
4609 
4610 #if defined(CONFIG_SOFTMMU)
4611 DISAS_INSN(pflush)
4612 {
4613     TCGv opmode;
4614 
4615     if (IS_USER(s)) {
4616         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4617         return;
4618     }
4619 
4620     opmode = tcg_constant_i32((insn >> 3) & 3);
4621     gen_helper_pflush(cpu_env, AREG(insn, 0), opmode);
4622 }
4623 
4624 DISAS_INSN(ptest)
4625 {
4626     TCGv is_read;
4627 
4628     if (IS_USER(s)) {
4629         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4630         return;
4631     }
4632     is_read = tcg_constant_i32((insn >> 5) & 1);
4633     gen_helper_ptest(cpu_env, AREG(insn, 0), is_read);
4634 }
4635 #endif
4636 
4637 DISAS_INSN(wddata)
4638 {
4639     gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4640 }
4641 
4642 DISAS_INSN(wdebug)
4643 {
4644     if (IS_USER(s)) {
4645         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4646         return;
4647     }
4648     /* TODO: Implement wdebug.  */
4649     cpu_abort(env_cpu(env), "WDEBUG not implemented");
4650 }
4651 #endif
4652 
4653 DISAS_INSN(trap)
4654 {
4655     gen_exception(s, s->pc, EXCP_TRAP0 + (insn & 0xf));
4656 }
4657 
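/*
 * Common TRAPcc/FTRAPcc tail: when the condition holds (it may be a
 * compile-time constant), point PC past the instruction and raise a
 * format 2 TRAPCC exception; otherwise fall through to the next
 * instruction.
 */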
4658 static void do_trapcc(DisasContext *s, DisasCompare *c)
4659 {
4660     if (c->tcond != TCG_COND_NEVER) {
4661         TCGLabel *over = NULL;
4662 
4663         update_cc_op(s);
4664 
4665         if (c->tcond != TCG_COND_ALWAYS) {
4666             /* Jump over if !c. */
4667             over = gen_new_label();
4668             tcg_gen_brcond_i32(tcg_invert_cond(c->tcond), c->v1, c->v2, over);
4669         }
4670 
4671         tcg_gen_movi_i32(QREG_PC, s->pc);
4672         gen_raise_exception_format2(s, EXCP_TRAPCC, s->base.pc_next);
4673 
4674         if (over != NULL) {
4675             gen_set_label(over);
4676             s->base.is_jmp = DISAS_NEXT;
4677         }
4678     }
4679 }
4680 
4681 DISAS_INSN(trapcc)
4682 {
4683     DisasCompare c;
4684 
4685     /* Consume and discard the immediate operand. */
4686     switch (extract32(insn, 0, 3)) {
4687     case 2: /* trapcc.w */
4688         (void)read_im16(env, s);
4689         break;
4690     case 3: /* trapcc.l */
4691         (void)read_im32(env, s);
4692         break;
4693     case 4: /* trapcc (no operand) */
4694         break;
4695     default:
4696         /* trapcc registered with only valid opmodes */
4697         g_assert_not_reached();
4698     }
4699 
4700     gen_cc_cond(&c, s, extract32(insn, 8, 4));
4701     do_trapcc(s, &c);
4702 }
4703 
4704 DISAS_INSN(trapv)
4705 {
4706     DisasCompare c;
4707 
4708     gen_cc_cond(&c, s, 9); /* V set */
4709     do_trapcc(s, &c);
4710 }
4711 
4712 static void gen_load_fcr(DisasContext *s, TCGv res, int reg)
4713 {
4714     switch (reg) {
4715     case M68K_FPIAR:
4716         tcg_gen_movi_i32(res, 0);
4717         break;
4718     case M68K_FPSR:
4719         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpsr));
4720         break;
4721     case M68K_FPCR:
4722         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpcr));
4723         break;
4724     }
4725 }
4726 
4727 static void gen_store_fcr(DisasContext *s, TCGv val, int reg)
4728 {
4729     switch (reg) {
4730     case M68K_FPIAR:
4731         break;
4732     case M68K_FPSR:
4733         tcg_gen_st_i32(val, cpu_env, offsetof(CPUM68KState, fpsr));
4734         break;
4735     case M68K_FPCR:
4736         gen_helper_set_fpcr(cpu_env, val);
4737         break;
4738     }
4739 }
4740 
4741 static void gen_qemu_store_fcr(DisasContext *s, TCGv addr, int reg)
4742 {
4743     int index = IS_USER(s);
4744     TCGv tmp;
4745 
4746     tmp = tcg_temp_new();
4747     gen_load_fcr(s, tmp, reg);
4748     tcg_gen_qemu_st_tl(tmp, addr, index, MO_TEUL);
4749 }
4750 
4751 static void gen_qemu_load_fcr(DisasContext *s, TCGv addr, int reg)
4752 {
4753     int index = IS_USER(s);
4754     TCGv tmp;
4755 
4756     tmp = tcg_temp_new();
4757     tcg_gen_qemu_ld_tl(tmp, addr, index, MO_TEUL);
4758     gen_store_fcr(s, tmp, reg);
4759 }
4760 
4761 
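/*
 * FMOVE/FMOVEM of the floating-point control registers (FPCR, FPSR,
 * FPIAR).  Register and immediate operands are handled inline; the
 * memory forms below transfer the selected registers one longword at a
 * time and write the final address back for the -(An) and (An)+ modes.
 */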
4762 static void gen_op_fmove_fcr(CPUM68KState *env, DisasContext *s,
4763                              uint32_t insn, uint32_t ext)
4764 {
4765     int mask = (ext >> 10) & 7;
4766     int is_write = (ext >> 13) & 1;
4767     int mode = extract32(insn, 3, 3);
4768     int i;
4769     TCGv addr, tmp;
4770 
4771     switch (mode) {
4772     case 0: /* Dn */
4773         if (mask != M68K_FPIAR && mask != M68K_FPSR && mask != M68K_FPCR) {
4774             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4775             return;
4776         }
4777         if (is_write) {
4778             gen_load_fcr(s, DREG(insn, 0), mask);
4779         } else {
4780             gen_store_fcr(s, DREG(insn, 0), mask);
4781         }
4782         return;
4783     case 1: /* An, only with FPIAR */
4784         if (mask != M68K_FPIAR) {
4785             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4786             return;
4787         }
4788         if (is_write) {
4789             gen_load_fcr(s, AREG(insn, 0), mask);
4790         } else {
4791             gen_store_fcr(s, AREG(insn, 0), mask);
4792         }
4793         return;
4794     case 7: /* Immediate */
4795         if (REG(insn, 0) == 4) {
4796             if (is_write ||
4797                 (mask != M68K_FPIAR && mask != M68K_FPSR &&
4798                  mask != M68K_FPCR)) {
4799                 gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4800                 return;
4801             }
4802             tmp = tcg_constant_i32(read_im32(env, s));
4803             gen_store_fcr(s, tmp, mask);
4804             return;
4805         }
4806         break;
4807     default:
4808         break;
4809     }
4810 
4811     tmp = gen_lea(env, s, insn, OS_LONG);
4812     if (IS_NULL_QREG(tmp)) {
4813         gen_addr_fault(s);
4814         return;
4815     }
4816 
4817     addr = tcg_temp_new();
4818     tcg_gen_mov_i32(addr, tmp);
4819 
4820     /*
4821      * mask:
4822      *
4823      * 0b100 Floating-Point Control Register
4824      * 0b010 Floating-Point Status Register
4825      * 0b001 Floating-Point Instruction Address Register
4826      *
4827      */
4828 
4829     if (is_write && mode == 4) {
4830         for (i = 2; i >= 0; i--, mask >>= 1) {
4831             if (mask & 1) {
4832                 gen_qemu_store_fcr(s, addr, 1 << i);
4833                 if (mask != 1) {
4834                     tcg_gen_subi_i32(addr, addr, opsize_bytes(OS_LONG));
4835                 }
4836             }
4837         }
4838         tcg_gen_mov_i32(AREG(insn, 0), addr);
4839     } else {
4840         for (i = 0; i < 3; i++, mask >>= 1) {
4841             if (mask & 1) {
4842                 if (is_write) {
4843                     gen_qemu_store_fcr(s, addr, 1 << i);
4844                 } else {
4845                     gen_qemu_load_fcr(s, addr, 1 << i);
4846                 }
4847                 if (mask != 1 || mode == 3) {
4848                     tcg_gen_addi_i32(addr, addr, opsize_bytes(OS_LONG));
4849                 }
4850             }
4851         }
4852         if (mode == 3) {
4853             tcg_gen_mov_i32(AREG(insn, 0), addr);
4854         }
4855     }
4856 }
4857 
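/*
 * FMOVEM of floating-point data registers.  Bit 11 of the extension
 * word selects a dynamic (register-supplied) rather than static register
 * list, and the predecrement form is only valid for stores; the helpers
 * return the final address so that -(An)/(An)+ can be written back.
 */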
4858 static void gen_op_fmovem(CPUM68KState *env, DisasContext *s,
4859                           uint32_t insn, uint32_t ext)
4860 {
4861     int opsize;
4862     TCGv addr, tmp;
4863     int mode = (ext >> 11) & 0x3;
4864     int is_load = ((ext & 0x2000) == 0);
4865 
4866     if (m68k_feature(s->env, M68K_FEATURE_FPU)) {
4867         opsize = OS_EXTENDED;
4868     } else {
4869         opsize = OS_DOUBLE;  /* FIXME */
4870     }
4871 
4872     addr = gen_lea(env, s, insn, opsize);
4873     if (IS_NULL_QREG(addr)) {
4874         gen_addr_fault(s);
4875         return;
4876     }
4877 
4878     tmp = tcg_temp_new();
4879     if (mode & 0x1) {
4880         /* Dynamic register list */
4881         tcg_gen_ext8u_i32(tmp, DREG(ext, 4));
4882     } else {
4883         /* Static register list */
4884         tcg_gen_movi_i32(tmp, ext & 0xff);
4885     }
4886 
4887     if (!is_load && (mode & 2) == 0) {
4888         /*
4889          * The predecrement addressing mode is only available when
4890          * storing registers to memory.
4891          */
4892         if (opsize == OS_EXTENDED) {
4893             gen_helper_fmovemx_st_predec(tmp, cpu_env, addr, tmp);
4894         } else {
4895             gen_helper_fmovemd_st_predec(tmp, cpu_env, addr, tmp);
4896         }
4897     } else {
4898         /* postincrement addressing mode */
4899         if (opsize == OS_EXTENDED) {
4900             if (is_load) {
4901                 gen_helper_fmovemx_ld_postinc(tmp, cpu_env, addr, tmp);
4902             } else {
4903                 gen_helper_fmovemx_st_postinc(tmp, cpu_env, addr, tmp);
4904             }
4905         } else {
4906             if (is_load) {
4907                 gen_helper_fmovemd_ld_postinc(tmp, cpu_env, addr, tmp);
4908             } else {
4909                 gen_helper_fmovemd_st_postinc(tmp, cpu_env, addr, tmp);
4910             }
4911         }
4912     }
4913     if ((insn & 070) == 030 || (insn & 070) == 040) {
4914         tcg_gen_mov_i32(AREG(insn, 0), tmp);
4915     }
4916 }
4917 
4918 /*
4919  * ??? FP exceptions are not implemented.  Most exceptions are deferred until
4920  * immediately before the next FP instruction is executed.
4921  */
4922 DISAS_INSN(fpu)
4923 {
4924     uint16_t ext;
4925     int opmode;
4926     int opsize;
4927     TCGv_ptr cpu_src, cpu_dest;
4928 
4929     ext = read_im16(env, s);
4930     opmode = ext & 0x7f;
4931     switch ((ext >> 13) & 7) {
4932     case 0:
4933         break;
4934     case 1:
4935         goto undef;
4936     case 2:
4937         if (insn == 0xf200 && (ext & 0xfc00) == 0x5c00) {
4938             /* fmovecr */
4939             TCGv rom_offset = tcg_constant_i32(opmode);
4940             cpu_dest = gen_fp_ptr(REG(ext, 7));
4941             gen_helper_fconst(cpu_env, cpu_dest, rom_offset);
4942             return;
4943         }
4944         break;
4945     case 3: /* fmove out */
4946         cpu_src = gen_fp_ptr(REG(ext, 7));
4947         opsize = ext_opsize(ext, 10);
4948         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
4949                       EA_STORE, IS_USER(s)) == -1) {
4950             gen_addr_fault(s);
4951         }
4952         gen_helper_ftst(cpu_env, cpu_src);
4953         return;
4954     case 4: /* fmove to control register.  */
4955     case 5: /* fmove from control register.  */
4956         gen_op_fmove_fcr(env, s, insn, ext);
4957         return;
4958     case 6: /* fmovem */
4959     case 7:
4960         if ((ext & 0x1000) == 0 && !m68k_feature(s->env, M68K_FEATURE_FPU)) {
4961             goto undef;
4962         }
4963         gen_op_fmovem(env, s, insn, ext);
4964         return;
4965     }
4966     if (ext & (1 << 14)) {
4967         /* Source effective address.  */
4968         opsize = ext_opsize(ext, 10);
4969         cpu_src = gen_fp_result_ptr();
4970         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
4971                       EA_LOADS, IS_USER(s)) == -1) {
4972             gen_addr_fault(s);
4973             return;
4974         }
4975     } else {
4976         /* Source register.  */
4977         opsize = OS_EXTENDED;
4978         cpu_src = gen_fp_ptr(REG(ext, 10));
4979     }
4980     cpu_dest = gen_fp_ptr(REG(ext, 7));
4981     switch (opmode) {
4982     case 0: /* fmove */
4983         gen_fp_move(cpu_dest, cpu_src);
4984         break;
4985     case 0x40: /* fsmove */
4986         gen_helper_fsround(cpu_env, cpu_dest, cpu_src);
4987         break;
4988     case 0x44: /* fdmove */
4989         gen_helper_fdround(cpu_env, cpu_dest, cpu_src);
4990         break;
4991     case 1: /* fint */
4992         gen_helper_firound(cpu_env, cpu_dest, cpu_src);
4993         break;
4994     case 2: /* fsinh */
4995         gen_helper_fsinh(cpu_env, cpu_dest, cpu_src);
4996         break;
4997     case 3: /* fintrz */
4998         gen_helper_fitrunc(cpu_env, cpu_dest, cpu_src);
4999         break;
5000     case 4: /* fsqrt */
5001         gen_helper_fsqrt(cpu_env, cpu_dest, cpu_src);
5002         break;
5003     case 0x41: /* fssqrt */
5004         gen_helper_fssqrt(cpu_env, cpu_dest, cpu_src);
5005         break;
5006     case 0x45: /* fdsqrt */
5007         gen_helper_fdsqrt(cpu_env, cpu_dest, cpu_src);
5008         break;
5009     case 0x06: /* flognp1 */
5010         gen_helper_flognp1(cpu_env, cpu_dest, cpu_src);
5011         break;
5012     case 0x08: /* fetoxm1 */
5013         gen_helper_fetoxm1(cpu_env, cpu_dest, cpu_src);
5014         break;
5015     case 0x09: /* ftanh */
5016         gen_helper_ftanh(cpu_env, cpu_dest, cpu_src);
5017         break;
5018     case 0x0a: /* fatan */
5019         gen_helper_fatan(cpu_env, cpu_dest, cpu_src);
5020         break;
5021     case 0x0c: /* fasin */
5022         gen_helper_fasin(cpu_env, cpu_dest, cpu_src);
5023         break;
5024     case 0x0d: /* fatanh */
5025         gen_helper_fatanh(cpu_env, cpu_dest, cpu_src);
5026         break;
5027     case 0x0e: /* fsin */
5028         gen_helper_fsin(cpu_env, cpu_dest, cpu_src);
5029         break;
5030     case 0x0f: /* ftan */
5031         gen_helper_ftan(cpu_env, cpu_dest, cpu_src);
5032         break;
5033     case 0x10: /* fetox */
5034         gen_helper_fetox(cpu_env, cpu_dest, cpu_src);
5035         break;
5036     case 0x11: /* ftwotox */
5037         gen_helper_ftwotox(cpu_env, cpu_dest, cpu_src);
5038         break;
5039     case 0x12: /* ftentox */
5040         gen_helper_ftentox(cpu_env, cpu_dest, cpu_src);
5041         break;
5042     case 0x14: /* flogn */
5043         gen_helper_flogn(cpu_env, cpu_dest, cpu_src);
5044         break;
5045     case 0x15: /* flog10 */
5046         gen_helper_flog10(cpu_env, cpu_dest, cpu_src);
5047         break;
5048     case 0x16: /* flog2 */
5049         gen_helper_flog2(cpu_env, cpu_dest, cpu_src);
5050         break;
5051     case 0x18: /* fabs */
5052         gen_helper_fabs(cpu_env, cpu_dest, cpu_src);
5053         break;
5054     case 0x58: /* fsabs */
5055         gen_helper_fsabs(cpu_env, cpu_dest, cpu_src);
5056         break;
5057     case 0x5c: /* fdabs */
5058         gen_helper_fdabs(cpu_env, cpu_dest, cpu_src);
5059         break;
5060     case 0x19: /* fcosh */
5061         gen_helper_fcosh(cpu_env, cpu_dest, cpu_src);
5062         break;
5063     case 0x1a: /* fneg */
5064         gen_helper_fneg(cpu_env, cpu_dest, cpu_src);
5065         break;
5066     case 0x5a: /* fsneg */
5067         gen_helper_fsneg(cpu_env, cpu_dest, cpu_src);
5068         break;
5069     case 0x5e: /* fdneg */
5070         gen_helper_fdneg(cpu_env, cpu_dest, cpu_src);
5071         break;
5072     case 0x1c: /* facos */
5073         gen_helper_facos(cpu_env, cpu_dest, cpu_src);
5074         break;
5075     case 0x1d: /* fcos */
5076         gen_helper_fcos(cpu_env, cpu_dest, cpu_src);
5077         break;
5078     case 0x1e: /* fgetexp */
5079         gen_helper_fgetexp(cpu_env, cpu_dest, cpu_src);
5080         break;
5081     case 0x1f: /* fgetman */
5082         gen_helper_fgetman(cpu_env, cpu_dest, cpu_src);
5083         break;
5084     case 0x20: /* fdiv */
5085         gen_helper_fdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5086         break;
5087     case 0x60: /* fsdiv */
5088         gen_helper_fsdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5089         break;
5090     case 0x64: /* fddiv */
5091         gen_helper_fddiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5092         break;
5093     case 0x21: /* fmod */
5094         gen_helper_fmod(cpu_env, cpu_dest, cpu_src, cpu_dest);
5095         break;
5096     case 0x22: /* fadd */
5097         gen_helper_fadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5098         break;
5099     case 0x62: /* fsadd */
5100         gen_helper_fsadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5101         break;
5102     case 0x66: /* fdadd */
5103         gen_helper_fdadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5104         break;
5105     case 0x23: /* fmul */
5106         gen_helper_fmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5107         break;
5108     case 0x63: /* fsmul */
5109         gen_helper_fsmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5110         break;
5111     case 0x67: /* fdmul */
5112         gen_helper_fdmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5113         break;
5114     case 0x24: /* fsgldiv */
5115         gen_helper_fsgldiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5116         break;
5117     case 0x25: /* frem */
5118         gen_helper_frem(cpu_env, cpu_dest, cpu_src, cpu_dest);
5119         break;
5120     case 0x26: /* fscale */
5121         gen_helper_fscale(cpu_env, cpu_dest, cpu_src, cpu_dest);
5122         break;
5123     case 0x27: /* fsglmul */
5124         gen_helper_fsglmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5125         break;
5126     case 0x28: /* fsub */
5127         gen_helper_fsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5128         break;
5129     case 0x68: /* fssub */
5130         gen_helper_fssub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5131         break;
5132     case 0x6c: /* fdsub */
5133         gen_helper_fdsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5134         break;
5135     case 0x30: case 0x31: case 0x32:
5136     case 0x33: case 0x34: case 0x35:
5137     case 0x36: case 0x37: {
5138             TCGv_ptr cpu_dest2 = gen_fp_ptr(REG(ext, 0));
5139             gen_helper_fsincos(cpu_env, cpu_dest, cpu_dest2, cpu_src);
5140         }
5141         break;
5142     case 0x38: /* fcmp */
5143         gen_helper_fcmp(cpu_env, cpu_src, cpu_dest);
5144         return;
5145     case 0x3a: /* ftst */
5146         gen_helper_ftst(cpu_env, cpu_src);
5147         return;
5148     default:
5149         goto undef;
5150     }
5151     gen_helper_ftst(cpu_env, cpu_dest);
5152     return;
5153 undef:
5154     /* FIXME: Is this right for offset addressing modes?  */
5155     s->pc -= 2;
5156     disas_undef_fpu(env, s, insn);
5157 }
5158 
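/*
 * Map one of the 32 FPU conditional predicates onto a TCG comparison of
 * the FPSR condition-code bits (N, Z and the NAN bit, written 'A' in
 * the comments below).  Predicates 16-31 differ from 0-15 only in that
 * they should signal BSUN on an unordered result, which is not
 * implemented here (see the TODO below).
 */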
5159 static void gen_fcc_cond(DisasCompare *c, DisasContext *s, int cond)
5160 {
5161     TCGv fpsr;
5162 
5163     c->v2 = tcg_constant_i32(0);
5164     /* TODO: Raise BSUN exception.  */
5165     fpsr = tcg_temp_new();
5166     gen_load_fcr(s, fpsr, M68K_FPSR);
5167     switch (cond) {
5168     case 0:  /* False */
5169     case 16: /* Signaling False */
5170         c->v1 = c->v2;
5171         c->tcond = TCG_COND_NEVER;
5172         break;
5173     case 1:  /* EQual Z */
5174     case 17: /* Signaling EQual Z */
5175         c->v1 = tcg_temp_new();
5176         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5177         c->tcond = TCG_COND_NE;
5178         break;
5179     case 2:  /* Ordered Greater Than !(A || Z || N) */
5180     case 18: /* Greater Than !(A || Z || N) */
5181         c->v1 = tcg_temp_new();
5182         tcg_gen_andi_i32(c->v1, fpsr,
5183                          FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5184         c->tcond = TCG_COND_EQ;
5185         break;
5186     case 3:  /* Ordered Greater than or Equal Z || !(A || N) */
5187     case 19: /* Greater than or Equal Z || !(A || N) */
5188         c->v1 = tcg_temp_new();
5189         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5190         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5191         tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_Z | FPSR_CC_N);
5192         tcg_gen_or_i32(c->v1, c->v1, fpsr);
5193         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5194         c->tcond = TCG_COND_NE;
5195         break;
5196     case 4:  /* Ordered Less Than !(!N || A || Z); */
5197     case 20: /* Less Than !(!N || A || Z); */
5198         c->v1 = tcg_temp_new();
5199         tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
5200         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_N | FPSR_CC_A | FPSR_CC_Z);
5201         c->tcond = TCG_COND_EQ;
5202         break;
5203     case 5:  /* Ordered Less than or Equal Z || (N && !A) */
5204     case 21: /* Less than or Equal Z || (N && !A) */
5205         c->v1 = tcg_temp_new();
5206         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5207         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5208         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5209         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_Z | FPSR_CC_N);
5210         c->tcond = TCG_COND_NE;
5211         break;
5212     case 6:  /* Ordered Greater or Less than !(A || Z) */
5213     case 22: /* Greater or Less than !(A || Z) */
5214         c->v1 = tcg_temp_new();
5215         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5216         c->tcond = TCG_COND_EQ;
5217         break;
5218     case 7:  /* Ordered !A */
5219     case 23: /* Greater, Less or Equal !A */
5220         c->v1 = tcg_temp_new();
5221         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5222         c->tcond = TCG_COND_EQ;
5223         break;
5224     case 8:  /* Unordered A */
5225     case 24: /* Not Greater, Less or Equal A */
5226         c->v1 = tcg_temp_new();
5227         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5228         c->tcond = TCG_COND_NE;
5229         break;
5230     case 9:  /* Unordered or Equal A || Z */
5231     case 25: /* Not Greater or Less than A || Z */
5232         c->v1 = tcg_temp_new();
5233         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5234         c->tcond = TCG_COND_NE;
5235         break;
5236     case 10: /* Unordered or Greater Than A || !(N || Z) */
5237     case 26: /* Not Less or Equal A || !(N || Z) */
5238         c->v1 = tcg_temp_new();
5239         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5240         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5241         tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_A | FPSR_CC_N);
5242         tcg_gen_or_i32(c->v1, c->v1, fpsr);
5243         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5244         c->tcond = TCG_COND_NE;
5245         break;
5246     case 11: /* Unordered or Greater or Equal A || Z || !N */
5247     case 27: /* Not Less Than A || Z || !N */
5248         c->v1 = tcg_temp_new();
5249         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5250         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5251         c->tcond = TCG_COND_NE;
5252         break;
5253     case 12: /* Unordered or Less Than A || (N && !Z) */
5254     case 28: /* Not Greater than or Equal A || (N && !Z) */
5255         c->v1 = tcg_temp_new();
5256         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5257         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5258         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5259         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_A | FPSR_CC_N);
5260         c->tcond = TCG_COND_NE;
5261         break;
5262     case 13: /* Unordered or Less or Equal A || Z || N */
5263     case 29: /* Not Greater Than A || Z || N */
5264         c->v1 = tcg_temp_new();
5265         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5266         c->tcond = TCG_COND_NE;
5267         break;
5268     case 14: /* Not Equal !Z */
5269     case 30: /* Signaling Not Equal !Z */
5270         c->v1 = tcg_temp_new();
5271         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5272         c->tcond = TCG_COND_EQ;
5273         break;
5274     case 15: /* True */
5275     case 31: /* Signaling True */
5276         c->v1 = c->v2;
5277         c->tcond = TCG_COND_ALWAYS;
5278         break;
5279     }
5280 }
5281 
5282 static void gen_fjmpcc(DisasContext *s, int cond, TCGLabel *l1)
5283 {
5284     DisasCompare c;
5285 
5286     gen_fcc_cond(&c, s, cond);
5287     update_cc_op(s);
5288     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
5289 }
5290 
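/*
 * FBcc: conditional FPU branch.  Bit 6 of the opcode selects a 16- or
 * 32-bit displacement, measured from the address of the first extension
 * word.
 */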
5291 DISAS_INSN(fbcc)
5292 {
5293     uint32_t offset;
5294     uint32_t base;
5295     TCGLabel *l1;
5296 
5297     base = s->pc;
5298     offset = (int16_t)read_im16(env, s);
5299     if (insn & (1 << 6)) {
5300         offset = (offset << 16) | read_im16(env, s);
5301     }
5302 
5303     l1 = gen_new_label();
5304     update_cc_op(s);
5305     gen_fjmpcc(s, insn & 0x3f, l1);
5306     gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
5307     gen_set_label(l1);
5308     gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
5309 }
5310 
5311 DISAS_INSN(fscc)
5312 {
5313     DisasCompare c;
5314     int cond;
5315     TCGv tmp;
5316     uint16_t ext;
5317 
5318     ext = read_im16(env, s);
5319     cond = ext & 0x3f;
5320     gen_fcc_cond(&c, s, cond);
5321 
5322     tmp = tcg_temp_new();
5323     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
5324 
5325     tcg_gen_neg_i32(tmp, tmp);
5326     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
5327 }
5328 
5329 DISAS_INSN(ftrapcc)
5330 {
5331     DisasCompare c;
5332     uint16_t ext;
5333     int cond;
5334 
5335     ext = read_im16(env, s);
5336     cond = ext & 0x3f;
5337 
5338     /* Consume and discard the immediate operand. */
5339     switch (extract32(insn, 0, 3)) {
5340     case 2: /* ftrapcc.w */
5341         (void)read_im16(env, s);
5342         break;
5343     case 3: /* ftrapcc.l */
5344         (void)read_im32(env, s);
5345         break;
5346     case 4: /* ftrapcc (no operand) */
5347         break;
5348     default:
5349         /* ftrapcc registered with only valid opmodes */
5350         g_assert_not_reached();
5351     }
5352 
5353     gen_fcc_cond(&c, s, cond);
5354     do_trapcc(s, &c);
5355 }
5356 
5357 #if defined(CONFIG_SOFTMMU)
5358 DISAS_INSN(frestore)
5359 {
5360     TCGv addr;
5361 
5362     if (IS_USER(s)) {
5363         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5364         return;
5365     }
5366     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5367         SRC_EA(env, addr, OS_LONG, 0, NULL);
5368         /* FIXME: check the state frame */
5369     } else {
5370         disas_undef(env, s, insn);
5371     }
5372 }
5373 
5374 DISAS_INSN(fsave)
5375 {
5376     if (IS_USER(s)) {
5377         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5378         return;
5379     }
5380 
5381     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5382         /* always write IDLE */
5383         TCGv idle = tcg_constant_i32(0x41000000);
5384         DEST_EA(env, insn, OS_LONG, idle, NULL);
5385     } else {
5386         disas_undef(env, s, insn);
5387     }
5388 }
5389 #endif
5390 
5391 static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
5392 {
5393     TCGv tmp = tcg_temp_new();
5394     if (s->env->macsr & MACSR_FI) {
5395         if (upper)
5396             tcg_gen_andi_i32(tmp, val, 0xffff0000);
5397         else
5398             tcg_gen_shli_i32(tmp, val, 16);
5399     } else if (s->env->macsr & MACSR_SU) {
5400         if (upper)
5401             tcg_gen_sari_i32(tmp, val, 16);
5402         else
5403             tcg_gen_ext16s_i32(tmp, val);
5404     } else {
5405         if (upper)
5406             tcg_gen_shri_i32(tmp, val, 16);
5407         else
5408             tcg_gen_ext16u_i32(tmp, val);
5409     }
5410     return tmp;
5411 }
5412 
5413 static void gen_mac_clear_flags(void)
5414 {
5415     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
5416                      ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
5417 }
5418 
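/*
 * ColdFire MAC/EMAC multiply-accumulate.  The extension word selects the
 * accumulator, operand halves and shift; the dual-accumulate form (which
 * also accumulates into a second accumulator) requires the EMAC_B
 * feature.  The parallel-load variant performs the memory read up front
 * for correct exception behaviour and writes the loaded value to its
 * destination register only after the accumulate.
 */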
5419 DISAS_INSN(mac)
5420 {
5421     TCGv rx;
5422     TCGv ry;
5423     uint16_t ext;
5424     int acc;
5425     TCGv tmp;
5426     TCGv addr;
5427     TCGv loadval;
5428     int dual;
5429     TCGv saved_flags;
5430 
5431     if (!s->done_mac) {
5432         s->mactmp = tcg_temp_new_i64();
5433         s->done_mac = 1;
5434     }
5435 
5436     ext = read_im16(env, s);
5437 
5438     acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
5439     dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
5440     if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
5441         disas_undef(env, s, insn);
5442         return;
5443     }
5444     if (insn & 0x30) {
5445         /* MAC with load.  */
5446         tmp = gen_lea(env, s, insn, OS_LONG);
5447         addr = tcg_temp_new();
5448         tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
5449         /*
5450          * Load the value now to ensure correct exception behavior.
5451          * Perform writeback after reading the MAC inputs.
5452          */
5453         loadval = gen_load(s, OS_LONG, addr, 0, IS_USER(s));
5454 
5455         acc ^= 1;
5456         rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
5457         ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
5458     } else {
5459         loadval = addr = NULL_QREG;
5460         rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5461         ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5462     }
5463 
5464     gen_mac_clear_flags();
5465 #if 0
5466     l1 = -1;
5467     /* Disabled because conditional branches clobber temporary vars.  */
5468     if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
5469         /* Skip the multiply if we know we will ignore it.  */
5470         l1 = gen_new_label();
5471         tmp = tcg_temp_new();
5472         tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
5473         gen_op_jmp_nz32(tmp, l1);
5474     }
5475 #endif
5476 
5477     if ((ext & 0x0800) == 0) {
5478         /* Word.  */
5479         rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
5480         ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
5481     }
5482     if (s->env->macsr & MACSR_FI) {
5483         gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
5484     } else {
5485         if (s->env->macsr & MACSR_SU)
5486             gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
5487         else
5488             gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
5489         switch ((ext >> 9) & 3) {
5490         case 1:
5491             tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
5492             break;
5493         case 3:
5494             tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
5495             break;
5496         }
5497     }
5498 
5499     if (dual) {
5500         /* Save the overflow flag from the multiply.  */
5501         saved_flags = tcg_temp_new();
5502         tcg_gen_mov_i32(saved_flags, QREG_MACSR);
5503     } else {
5504         saved_flags = NULL_QREG;
5505     }
5506 
5507 #if 0
5508     /* Disabled because conditional branches clobber temporary vars.  */
5509     if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
5510         /* Skip the accumulate if the value is already saturated.  */
5511         l1 = gen_new_label();
5512         tmp = tcg_temp_new();
5513         gen_op_and32(tmp, QREG_MACSR, tcg_constant_i32(MACSR_PAV0 << acc));
5514         gen_op_jmp_nz32(tmp, l1);
5515     }
5516 #endif
5517 
5518     if (insn & 0x100)
5519         tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5520     else
5521         tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5522 
5523     if (s->env->macsr & MACSR_FI)
5524         gen_helper_macsatf(cpu_env, tcg_constant_i32(acc));
5525     else if (s->env->macsr & MACSR_SU)
5526         gen_helper_macsats(cpu_env, tcg_constant_i32(acc));
5527     else
5528         gen_helper_macsatu(cpu_env, tcg_constant_i32(acc));
5529 
5530 #if 0
5531     /* Disabled because conditional branches clobber temporary vars.  */
5532     if (l1 != -1)
5533         gen_set_label(l1);
5534 #endif
5535 
5536     if (dual) {
5537         /* Dual accumulate variant.  */
5538         acc = (ext >> 2) & 3;
5539         /* Restore the overflow flag from the multiplier.  */
5540         tcg_gen_mov_i32(QREG_MACSR, saved_flags);
5541 #if 0
5542         /* Disabled because conditional branches clobber temporary vars.  */
5543         if ((s->env->macsr & MACSR_OMC) != 0) {
5544             /* Skip the accumulate if the value is already saturated.  */
5545             l1 = gen_new_label();
5546             tmp = tcg_temp_new();
5547             gen_op_and32(tmp, QREG_MACSR, tcg_constant_i32(MACSR_PAV0 << acc));
5548             gen_op_jmp_nz32(tmp, l1);
5549         }
5550 #endif
5551         if (ext & 2)
5552             tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5553         else
5554             tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5555         if (s->env->macsr & MACSR_FI)
5556             gen_helper_macsatf(cpu_env, tcg_constant_i32(acc));
5557         else if (s->env->macsr & MACSR_SU)
5558             gen_helper_macsats(cpu_env, tcg_constant_i32(acc));
5559         else
5560             gen_helper_macsatu(cpu_env, tcg_constant_i32(acc));
5561 #if 0
5562         /* Disabled because conditional branches clobber temporary vars.  */
5563         if (l1 != -1)
5564             gen_set_label(l1);
5565 #endif
5566     }
5567     gen_helper_mac_set_flags(cpu_env, tcg_constant_i32(acc));
5568 
5569     if (insn & 0x30) {
5570         TCGv rw;
5571         rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5572         tcg_gen_mov_i32(rw, loadval);
5573         /*
5574          * FIXME: Should address writeback happen with the masked or
5575          * unmasked value?
5576          */
5577         switch ((insn >> 3) & 7) {
5578         case 3: /* Post-increment.  */
5579             tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
5580             break;
5581         case 4: /* Pre-decrement.  */
5582             tcg_gen_mov_i32(AREG(insn, 0), addr);
5583         }
5584     }
5585 }
5586 
5587 DISAS_INSN(from_mac)
5588 {
5589     TCGv rx;
5590     TCGv_i64 acc;
5591     int accnum;
5592 
5593     rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5594     accnum = (insn >> 9) & 3;
5595     acc = MACREG(accnum);
5596     if (s->env->macsr & MACSR_FI) {
5597         gen_helper_get_macf(rx, cpu_env, acc);
5598     } else if ((s->env->macsr & MACSR_OMC) == 0) {
5599         tcg_gen_extrl_i64_i32(rx, acc);
5600     } else if (s->env->macsr & MACSR_SU) {
5601         gen_helper_get_macs(rx, acc);
5602     } else {
5603         gen_helper_get_macu(rx, acc);
5604     }
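    /*
     * Opcode bit 6 selects the move-and-clear variant: after the read,
     * the accumulator is zeroed and its accumulator-overflow (PAV) flag
     * in MACSR is cleared.
     */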
5605     if (insn & 0x40) {
5606         tcg_gen_movi_i64(acc, 0);
5607         tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5608     }
5609 }
5610 
5611 DISAS_INSN(move_mac)
5612 {
5613     /* FIXME: This can be done without a helper.  */
5614     int src;
5615     TCGv dest;
5616     src = insn & 3;
5617     dest = tcg_constant_i32((insn >> 9) & 3);
5618     gen_helper_mac_move(cpu_env, dest, tcg_constant_i32(src));
5619     gen_mac_clear_flags();
5620     gen_helper_mac_set_flags(cpu_env, dest);
5621 }
5622 
5623 DISAS_INSN(from_macsr)
5624 {
5625     TCGv reg;
5626 
5627     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5628     tcg_gen_mov_i32(reg, QREG_MACSR);
5629 }
5630 
5631 DISAS_INSN(from_mask)
5632 {
5633     TCGv reg;
5634     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5635     tcg_gen_mov_i32(reg, QREG_MAC_MASK);
5636 }
5637 
5638 DISAS_INSN(from_mext)
5639 {
5640     TCGv reg;
5641     TCGv acc;
5642     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5643     acc = tcg_constant_i32((insn & 0x400) ? 2 : 0);
5644     if (s->env->macsr & MACSR_FI)
5645         gen_helper_get_mac_extf(reg, cpu_env, acc);
5646     else
5647         gen_helper_get_mac_exti(reg, cpu_env, acc);
5648 }
5649 
5650 DISAS_INSN(macsr_to_ccr)
5651 {
5652     TCGv tmp = tcg_temp_new();
5653 
5654     /* Note that X and C are always cleared. */
5655     tcg_gen_andi_i32(tmp, QREG_MACSR, CCF_N | CCF_Z | CCF_V);
5656     gen_helper_set_ccr(cpu_env, tmp);
5657     set_cc_op(s, CC_OP_FLAGS);
5658 }
5659 
5660 DISAS_INSN(to_mac)
5661 {
5662     TCGv_i64 acc;
5663     TCGv val;
5664     int accnum;
5665     accnum = (insn >> 9) & 3;
5666     acc = MACREG(accnum);
5667     SRC_EA(env, val, OS_LONG, 0, NULL);
5668     if (s->env->macsr & MACSR_FI) {
5669         tcg_gen_ext_i32_i64(acc, val);
5670         tcg_gen_shli_i64(acc, acc, 8);
5671     } else if (s->env->macsr & MACSR_SU) {
5672         tcg_gen_ext_i32_i64(acc, val);
5673     } else {
5674         tcg_gen_extu_i32_i64(acc, val);
5675     }
5676     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5677     gen_mac_clear_flags();
5678     gen_helper_mac_set_flags(cpu_env, tcg_constant_i32(accnum));
5679 }
5680 
5681 DISAS_INSN(to_macsr)
5682 {
5683     TCGv val;
5684     SRC_EA(env, val, OS_LONG, 0, NULL);
5685     gen_helper_set_macsr(cpu_env, val);
5686     gen_exit_tb(s);
5687 }
5688 
5689 DISAS_INSN(to_mask)
5690 {
5691     TCGv val;
5692     SRC_EA(env, val, OS_LONG, 0, NULL);
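    /*
     * Only the low 16 bits of MASK are significant; the upper half is
     * forced to ones so that the AND with QREG_MAC_MASK in the
     * MAC-with-load path leaves the upper address bits untouched.
     */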
5693     tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
5694 }
5695 
5696 DISAS_INSN(to_mext)
5697 {
5698     TCGv val;
5699     TCGv acc;
5700     SRC_EA(env, val, OS_LONG, 0, NULL);
5701     acc = tcg_constant_i32((insn & 0x400) ? 2 : 0);
5702     if (s->env->macsr & MACSR_FI)
5703         gen_helper_set_mac_extf(cpu_env, val, acc);
5704     else if (s->env->macsr & MACSR_SU)
5705         gen_helper_set_mac_exts(cpu_env, val, acc);
5706     else
5707         gen_helper_set_mac_extu(cpu_env, val, acc);
5708 }
5709 
5710 static disas_proc opcode_table[65536];
5711 
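/*
 * Install a handler for every 16-bit pattern matching opcode/mask.  Only
 * the block below the highest clear mask bit is scanned: for example,
 * register_opcode(disas_lea, 0x41c0, 0xf1c0) stops at the clear bit
 * 0x0800, walks entries 0x4000..0x4fff and installs disas_lea in the
 * 512 slots where (i & 0xf1c0) == 0x41c0, while a fully specified
 * pattern such as register_opcode(disas_nop, 0x4e71, 0xffff) fills
 * exactly one slot.
 */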
5712 static void
5713 register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
5714 {
5715   int i;
5716   int from;
5717   int to;
5718 
5719   /* Sanity check.  All set bits must be included in the mask.  */
5720   if (opcode & ~mask) {
5721       fprintf(stderr,
5722               "qemu internal error: bogus opcode definition %04x/%04x\n",
5723               opcode, mask);
5724       abort();
5725   }
5726   /*
5727    * This could probably be cleverer.  For now just optimize the case where
5728    * the top bits are known.
5729    */
5730   /* Find the first zero bit in the mask.  */
5731   i = 0x8000;
5732   while ((i & mask) != 0)
5733       i >>= 1;
5734   /* Iterate over all combinations of this and lower bits.  */
5735   if (i == 0)
5736       i = 1;
5737   else
5738       i <<= 1;
5739   from = opcode & ~(i - 1);
5740   to = from + i;
5741   for (i = from; i < to; i++) {
5742       if ((i & mask) == opcode)
5743           opcode_table[i] = proc;
5744   }
5745 }
5746 
5747 /*
5748  * Register m68k opcode handlers.  Order is important.
5749  * Later insns override earlier ones.
5750  */
5751 void register_m68k_insns (CPUM68KState *env)
5752 {
5753     /*
5754      * Build the opcode table only once to avoid
5755      * multithreading issues.
5756      */
5757     if (opcode_table[0] != NULL) {
5758         return;
5759     }
5760 
5761     /*
5762      * Use BASE() for instructions available
5763      * on both CF_ISA_A and M68000.
5764      */
5765 #define BASE(name, opcode, mask) \
5766     register_opcode(disas_##name, 0x##opcode, 0x##mask)
5767 #define INSN(name, opcode, mask, feature) do { \
5768     if (m68k_feature(env, M68K_FEATURE_##feature)) \
5769         BASE(name, opcode, mask); \
5770     } while(0)
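    /*
     * INSN() registers a handler only when the CPU has the named feature;
     * e.g. INSN(chk, 4000, f040, M68K) expands (inside the do/while) to
     *     if (m68k_feature(env, M68K_FEATURE_M68K))
     *         register_opcode(disas_chk, 0x4000, 0xf040);
     */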
5771     BASE(undef,     0000, 0000);
5772     INSN(arith_im,  0080, fff8, CF_ISA_A);
5773     INSN(arith_im,  0000, ff00, M68K);
5774     INSN(chk2,      00c0, f9c0, CHK2);
5775     INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
5776     BASE(bitop_reg, 0100, f1c0);
5777     BASE(bitop_reg, 0140, f1c0);
5778     BASE(bitop_reg, 0180, f1c0);
5779     BASE(bitop_reg, 01c0, f1c0);
5780     INSN(movep,     0108, f138, MOVEP);
5781     INSN(arith_im,  0280, fff8, CF_ISA_A);
5782     INSN(arith_im,  0200, ff00, M68K);
5783     INSN(undef,     02c0, ffc0, M68K);
5784     INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
5785     INSN(arith_im,  0480, fff8, CF_ISA_A);
5786     INSN(arith_im,  0400, ff00, M68K);
5787     INSN(undef,     04c0, ffc0, M68K);
5788     INSN(arith_im,  0600, ff00, M68K);
5789     INSN(undef,     06c0, ffc0, M68K);
5790     INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
5791     INSN(arith_im,  0680, fff8, CF_ISA_A);
5792     INSN(arith_im,  0c00, ff38, CF_ISA_A);
5793     INSN(arith_im,  0c00, ff00, M68K);
5794     BASE(bitop_im,  0800, ffc0);
5795     BASE(bitop_im,  0840, ffc0);
5796     BASE(bitop_im,  0880, ffc0);
5797     BASE(bitop_im,  08c0, ffc0);
5798     INSN(arith_im,  0a80, fff8, CF_ISA_A);
5799     INSN(arith_im,  0a00, ff00, M68K);
5800 #if defined(CONFIG_SOFTMMU)
5801     INSN(moves,     0e00, ff00, M68K);
5802 #endif
5803     INSN(cas,       0ac0, ffc0, CAS);
5804     INSN(cas,       0cc0, ffc0, CAS);
5805     INSN(cas,       0ec0, ffc0, CAS);
5806     INSN(cas2w,     0cfc, ffff, CAS);
5807     INSN(cas2l,     0efc, ffff, CAS);
5808     BASE(move,      1000, f000);
5809     BASE(move,      2000, f000);
5810     BASE(move,      3000, f000);
5811     INSN(chk,       4000, f040, M68K);
5812     INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
5813     INSN(negx,      4080, fff8, CF_ISA_A);
5814     INSN(negx,      4000, ff00, M68K);
5815     INSN(undef,     40c0, ffc0, M68K);
5816     INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
5817     INSN(move_from_sr, 40c0, ffc0, M68K);
5818     BASE(lea,       41c0, f1c0);
5819     BASE(clr,       4200, ff00);
5820     BASE(undef,     42c0, ffc0);
5821     INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
5822     INSN(move_from_ccr, 42c0, ffc0, M68K);
5823     INSN(neg,       4480, fff8, CF_ISA_A);
5824     INSN(neg,       4400, ff00, M68K);
5825     INSN(undef,     44c0, ffc0, M68K);
5826     BASE(move_to_ccr, 44c0, ffc0);
5827     INSN(not,       4680, fff8, CF_ISA_A);
5828     INSN(not,       4600, ff00, M68K);
5829 #if defined(CONFIG_SOFTMMU)
5830     BASE(move_to_sr, 46c0, ffc0);
5831 #endif
5832     INSN(nbcd,      4800, ffc0, M68K);
5833     INSN(linkl,     4808, fff8, M68K);
5834     BASE(pea,       4840, ffc0);
5835     BASE(swap,      4840, fff8);
5836     INSN(bkpt,      4848, fff8, BKPT);
5837     INSN(movem,     48d0, fbf8, CF_ISA_A);
5838     INSN(movem,     48e8, fbf8, CF_ISA_A);
5839     INSN(movem,     4880, fb80, M68K);
5840     BASE(ext,       4880, fff8);
5841     BASE(ext,       48c0, fff8);
5842     BASE(ext,       49c0, fff8);
5843     BASE(tst,       4a00, ff00);
5844     INSN(tas,       4ac0, ffc0, CF_ISA_B);
5845     INSN(tas,       4ac0, ffc0, M68K);
5846 #if defined(CONFIG_SOFTMMU)
5847     INSN(halt,      4ac8, ffff, CF_ISA_A);
5848     INSN(halt,      4ac8, ffff, M68K);
5849 #endif
5850     INSN(pulse,     4acc, ffff, CF_ISA_A);
5851     BASE(illegal,   4afc, ffff);
5852     INSN(mull,      4c00, ffc0, CF_ISA_A);
5853     INSN(mull,      4c00, ffc0, LONG_MULDIV);
5854     INSN(divl,      4c40, ffc0, CF_ISA_A);
5855     INSN(divl,      4c40, ffc0, LONG_MULDIV);
5856     INSN(sats,      4c80, fff8, CF_ISA_B);
5857     BASE(trap,      4e40, fff0);
5858     BASE(link,      4e50, fff8);
5859     BASE(unlk,      4e58, fff8);
5860 #if defined(CONFIG_SOFTMMU)
5861     INSN(move_to_usp, 4e60, fff8, USP);
5862     INSN(move_from_usp, 4e68, fff8, USP);
5863     INSN(reset,     4e70, ffff, M68K);
5864     BASE(stop,      4e72, ffff);
5865     BASE(rte,       4e73, ffff);
5866     INSN(cf_movec,  4e7b, ffff, CF_ISA_A);
5867     INSN(m68k_movec, 4e7a, fffe, MOVEC);
5868 #endif
5869     BASE(nop,       4e71, ffff);
5870     INSN(rtd,       4e74, ffff, RTD);
5871     BASE(rts,       4e75, ffff);
5872     INSN(trapv,     4e76, ffff, M68K);
5873     INSN(rtr,       4e77, ffff, M68K);
5874     BASE(jump,      4e80, ffc0);
5875     BASE(jump,      4ec0, ffc0);
5876     INSN(addsubq,   5000, f080, M68K);
5877     BASE(addsubq,   5080, f0c0);
5878     INSN(scc,       50c0, f0f8, CF_ISA_A); /* Scc.B Dx   */
5879     INSN(scc,       50c0, f0c0, M68K);     /* Scc.B <EA> */
5880     INSN(dbcc,      50c8, f0f8, M68K);
5881     INSN(trapcc,    50fa, f0fe, TRAPCC);   /* opmode 010, 011 */
5882     INSN(trapcc,    50fc, f0ff, TRAPCC);   /* opmode 100 */
5883     INSN(trapcc,    51fa, fffe, CF_ISA_A); /* TPF (trapf) opmode 010, 011 */
5884     INSN(trapcc,    51fc, ffff, CF_ISA_A); /* TPF (trapf) opmode 100 */
5885 
5886     /* Branch instructions.  */
5887     BASE(branch,    6000, f000);
5888     /* Disable long branch instructions, then add back the ones we want.  */
5889     BASE(undef,     60ff, f0ff); /* All long branches.  */
5890     INSN(branch,    60ff, f0ff, CF_ISA_B);
5891     INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
5892     INSN(branch,    60ff, ffff, BRAL);
5893     INSN(branch,    60ff, f0ff, BCCL);
5894 
5895     BASE(moveq,     7000, f100);
5896     INSN(mvzs,      7100, f100, CF_ISA_B);
5897     BASE(or,        8000, f000);
5898     BASE(divw,      80c0, f0c0);
5899     INSN(sbcd_reg,  8100, f1f8, M68K);
5900     INSN(sbcd_mem,  8108, f1f8, M68K);
5901     BASE(addsub,    9000, f000);
5902     INSN(undef,     90c0, f0c0, CF_ISA_A);
5903     INSN(subx_reg,  9180, f1f8, CF_ISA_A);
5904     INSN(subx_reg,  9100, f138, M68K);
5905     INSN(subx_mem,  9108, f138, M68K);
5906     INSN(suba,      91c0, f1c0, CF_ISA_A);
5907     INSN(suba,      90c0, f0c0, M68K);
5908 
5909     BASE(undef_mac, a000, f000);
5910     INSN(mac,       a000, f100, CF_EMAC);
5911     INSN(from_mac,  a180, f9b0, CF_EMAC);
5912     INSN(move_mac,  a110, f9fc, CF_EMAC);
5913     INSN(from_macsr,a980, f9f0, CF_EMAC);
5914     INSN(from_mask, ad80, fff0, CF_EMAC);
5915     INSN(from_mext, ab80, fbf0, CF_EMAC);
5916     INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
5917     INSN(to_mac,    a100, f9c0, CF_EMAC);
5918     INSN(to_macsr,  a900, ffc0, CF_EMAC);
5919     INSN(to_mext,   ab00, fbc0, CF_EMAC);
5920     INSN(to_mask,   ad00, ffc0, CF_EMAC);
5921 
5922     INSN(mov3q,     a140, f1c0, CF_ISA_B);
5923     INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
5924     INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
5925     INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
5926     INSN(cmp,       b080, f1c0, CF_ISA_A);
5927     INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
5928     INSN(cmp,       b000, f100, M68K);
5929     INSN(eor,       b100, f100, M68K);
5930     INSN(cmpm,      b108, f138, M68K);
5931     INSN(cmpa,      b0c0, f0c0, M68K);
5932     INSN(eor,       b180, f1c0, CF_ISA_A);
5933     BASE(and,       c000, f000);
5934     INSN(exg_dd,    c140, f1f8, M68K);
5935     INSN(exg_aa,    c148, f1f8, M68K);
5936     INSN(exg_da,    c188, f1f8, M68K);
5937     BASE(mulw,      c0c0, f0c0);
5938     INSN(abcd_reg,  c100, f1f8, M68K);
5939     INSN(abcd_mem,  c108, f1f8, M68K);
5940     BASE(addsub,    d000, f000);
5941     INSN(undef,     d0c0, f0c0, CF_ISA_A);
5942     INSN(addx_reg,      d180, f1f8, CF_ISA_A);
5943     INSN(addx_reg,  d100, f138, M68K);
5944     INSN(addx_mem,  d108, f138, M68K);
5945     INSN(adda,      d1c0, f1c0, CF_ISA_A);
5946     INSN(adda,      d0c0, f0c0, M68K);
5947     INSN(shift_im,  e080, f0f0, CF_ISA_A);
5948     INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
5949     INSN(shift8_im, e000, f0f0, M68K);
5950     INSN(shift16_im, e040, f0f0, M68K);
5951     INSN(shift_im,  e080, f0f0, M68K);
5952     INSN(shift8_reg, e020, f0f0, M68K);
5953     INSN(shift16_reg, e060, f0f0, M68K);
5954     INSN(shift_reg, e0a0, f0f0, M68K);
5955     INSN(shift_mem, e0c0, fcc0, M68K);
5956     INSN(rotate_im, e090, f0f0, M68K);
5957     INSN(rotate8_im, e010, f0f0, M68K);
5958     INSN(rotate16_im, e050, f0f0, M68K);
5959     INSN(rotate_reg, e0b0, f0f0, M68K);
5960     INSN(rotate8_reg, e030, f0f0, M68K);
5961     INSN(rotate16_reg, e070, f0f0, M68K);
5962     INSN(rotate_mem, e4c0, fcc0, M68K);
5963     INSN(bfext_mem, e9c0, fdc0, BITFIELD);  /* bfextu & bfexts */
5964     INSN(bfext_reg, e9c0, fdf8, BITFIELD);
5965     INSN(bfins_mem, efc0, ffc0, BITFIELD);
5966     INSN(bfins_reg, efc0, fff8, BITFIELD);
5967     INSN(bfop_mem, eac0, ffc0, BITFIELD);   /* bfchg */
5968     INSN(bfop_reg, eac0, fff8, BITFIELD);   /* bfchg */
5969     INSN(bfop_mem, ecc0, ffc0, BITFIELD);   /* bfclr */
5970     INSN(bfop_reg, ecc0, fff8, BITFIELD);   /* bfclr */
5971     INSN(bfop_mem, edc0, ffc0, BITFIELD);   /* bfffo */
5972     INSN(bfop_reg, edc0, fff8, BITFIELD);   /* bfffo */
5973     INSN(bfop_mem, eec0, ffc0, BITFIELD);   /* bfset */
5974     INSN(bfop_reg, eec0, fff8, BITFIELD);   /* bfset */
5975     INSN(bfop_mem, e8c0, ffc0, BITFIELD);   /* bftst */
5976     INSN(bfop_reg, e8c0, fff8, BITFIELD);   /* bftst */
5977     BASE(undef_fpu, f000, f000);
5978     INSN(fpu,       f200, ffc0, CF_FPU);
5979     INSN(fbcc,      f280, ffc0, CF_FPU);
5980     INSN(fpu,       f200, ffc0, FPU);
5981     INSN(fscc,      f240, ffc0, FPU);
5982     INSN(ftrapcc,   f27a, fffe, FPU);       /* opmode 010, 011 */
5983     INSN(ftrapcc,   f27c, ffff, FPU);       /* opmode 100 */
5984     INSN(fbcc,      f280, ff80, FPU);
5985 #if defined(CONFIG_SOFTMMU)
5986     INSN(frestore,  f340, ffc0, CF_FPU);
5987     INSN(fsave,     f300, ffc0, CF_FPU);
5988     INSN(frestore,  f340, ffc0, FPU);
5989     INSN(fsave,     f300, ffc0, FPU);
5990     INSN(intouch,   f340, ffc0, CF_ISA_A);
5991     INSN(cpushl,    f428, ff38, CF_ISA_A);
5992     INSN(cpush,     f420, ff20, M68040);
5993     INSN(cinv,      f400, ff20, M68040);
5994     INSN(pflush,    f500, ffe0, M68040);
5995     INSN(ptest,     f548, ffd8, M68040);
5996     INSN(wddata,    fb00, ff00, CF_ISA_A);
5997     INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
5998 #endif
5999     INSN(move16_mem, f600, ffe0, M68040);
6000     INSN(move16_reg, f620, fff8, M68040);
6001 #undef INSN
6002 }
6003 
6004 static void m68k_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
6005 {
6006     DisasContext *dc = container_of(dcbase, DisasContext, base);
6007     CPUM68KState *env = cpu->env_ptr;
6008 
6009     dc->env = env;
6010     dc->pc = dc->base.pc_first;
6011     /* This value will always be filled in properly before m68k_tr_tb_stop. */
6012     dc->pc_prev = 0xdeadbeef;
6013     dc->cc_op = CC_OP_DYNAMIC;
6014     dc->cc_op_synced = 1;
6015     dc->done_mac = 0;
6016     dc->writeback_mask = 0;
6017 
6018     dc->ss_active = (M68K_SR_TRACE(env->sr) == M68K_SR_TRACE_ANY_INS);
6019     /* If architectural single-step is active, limit the TB to one insn. */
6020     if (dc->ss_active) {
6021         dc->base.max_insns = 1;
6022     }
6023 }
6024 
6025 static void m68k_tr_tb_start(DisasContextBase *dcbase, CPUState *cpu)
6026 {
6027 }
6028 
6029 static void m68k_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
6030 {
6031     DisasContext *dc = container_of(dcbase, DisasContext, base);
6032     tcg_gen_insn_start(dc->base.pc_next, dc->cc_op);
6033 }
6034 
6035 static void m68k_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
6036 {
6037     DisasContext *dc = container_of(dcbase, DisasContext, base);
6038     CPUM68KState *env = cpu->env_ptr;
6039     uint16_t insn = read_im16(env, dc);
6040 
6041     opcode_table[insn](env, dc, insn);
6042     do_writebacks(dc);
6043 
6044     dc->pc_prev = dc->base.pc_next;
6045     dc->base.pc_next = dc->pc;
6046 
6047     if (dc->base.is_jmp == DISAS_NEXT) {
6048         /*
6049          * Stop translation when the next insn might touch a new page.
6050          * This ensures that prefetch aborts at the right place.
6051          *
6052          * We cannot determine the size of the next insn without
6053          * completely decoding it.  However, the maximum insn size
6054          * is 32 bytes, so end if we do not have that much remaining.
6055          * This may produce several small TBs at the end of each page,
6056          * but they will all be linked with goto_tb.
6057          *
6058          * ??? ColdFire maximum is 4 bytes; MC68000's maximum is also
6059          * smaller than MC68020's.
6060          */
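        /*
         * For instance, with 4 KiB target pages a TB starting at 0x1000
         * ends once an insn finishes at or beyond 0x1fe0, i.e. when fewer
         * than 32 bytes remain before the next page boundary.
         */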
6061         target_ulong start_page_offset
6062             = dc->pc - (dc->base.pc_first & TARGET_PAGE_MASK);
6063 
6064         if (start_page_offset >= TARGET_PAGE_SIZE - 32) {
6065             dc->base.is_jmp = DISAS_TOO_MANY;
6066         }
6067     }
6068 }
6069 
6070 static void m68k_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
6071 {
6072     DisasContext *dc = container_of(dcbase, DisasContext, base);
6073 
6074     switch (dc->base.is_jmp) {
6075     case DISAS_NORETURN:
6076         break;
6077     case DISAS_TOO_MANY:
6078         update_cc_op(dc);
6079         gen_jmp_tb(dc, 0, dc->pc, dc->pc_prev);
6080         break;
6081     case DISAS_JUMP:
6082         /* We updated CC_OP and PC in gen_jmp/gen_jmp_im.  */
6083         if (dc->ss_active) {
6084             gen_raise_exception_format2(dc, EXCP_TRACE, dc->pc_prev);
6085         } else {
6086             tcg_gen_lookup_and_goto_ptr();
6087         }
6088         break;
6089     case DISAS_EXIT:
6090         /*
6091          * We updated CC_OP and PC in gen_exit_tb, but also modified
6092          * other state that may require returning to the main loop.
6093          */
6094         if (dc->ss_active) {
6095             gen_raise_exception_format2(dc, EXCP_TRACE, dc->pc_prev);
6096         } else {
6097             tcg_gen_exit_tb(NULL, 0);
6098         }
6099         break;
6100     default:
6101         g_assert_not_reached();
6102     }
6103 }
6104 
6105 static void m68k_tr_disas_log(const DisasContextBase *dcbase,
6106                               CPUState *cpu, FILE *logfile)
6107 {
6108     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
6109     target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
6110 }
6111 
6112 static const TranslatorOps m68k_tr_ops = {
6113     .init_disas_context = m68k_tr_init_disas_context,
6114     .tb_start           = m68k_tr_tb_start,
6115     .insn_start         = m68k_tr_insn_start,
6116     .translate_insn     = m68k_tr_translate_insn,
6117     .tb_stop            = m68k_tr_tb_stop,
6118     .disas_log          = m68k_tr_disas_log,
6119 };
6120 
6121 void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int *max_insns,
6122                            target_ulong pc, void *host_pc)
6123 {
6124     DisasContext dc;
6125     translator_loop(cpu, tb, max_insns, pc, host_pc, &m68k_tr_ops, &dc.base);
6126 }
6127 
6128 static double floatx80_to_double(CPUM68KState *env, uint16_t high, uint64_t low)
6129 {
6130     floatx80 a = { .high = high, .low = low };
6131     union {
6132         float64 f64;
6133         double d;
6134     } u;
6135 
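    /*
     * float64 holds raw IEEE-754 binary64 bits, so punning through the
     * union recovers the equivalent host double for printing.
     */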
6136     u.f64 = floatx80_to_float64(a, &env->fp_status);
6137     return u.d;
6138 }
6139 
6140 void m68k_cpu_dump_state(CPUState *cs, FILE *f, int flags)
6141 {
6142     M68kCPU *cpu = M68K_CPU(cs);
6143     CPUM68KState *env = &cpu->env;
6144     int i;
6145     uint16_t sr;
6146     for (i = 0; i < 8; i++) {
6147         qemu_fprintf(f, "D%d = %08x   A%d = %08x   "
6148                      "F%d = %04x %016"PRIx64"  (%12g)\n",
6149                      i, env->dregs[i], i, env->aregs[i],
6150                      i, env->fregs[i].l.upper, env->fregs[i].l.lower,
6151                      floatx80_to_double(env, env->fregs[i].l.upper,
6152                                         env->fregs[i].l.lower));
6153     }
6154     qemu_fprintf(f, "PC = %08x   ", env->pc);
6155     sr = env->sr | cpu_m68k_get_ccr(env);
6156     qemu_fprintf(f, "SR = %04x T:%x I:%x %c%c %c%c%c%c%c\n",
6157                  sr, (sr & SR_T) >> SR_T_SHIFT, (sr & SR_I) >> SR_I_SHIFT,
6158                  (sr & SR_S) ? 'S' : 'U', (sr & SR_M) ? '%' : 'I',
6159                  (sr & CCF_X) ? 'X' : '-', (sr & CCF_N) ? 'N' : '-',
6160                  (sr & CCF_Z) ? 'Z' : '-', (sr & CCF_V) ? 'V' : '-',
6161                  (sr & CCF_C) ? 'C' : '-');
6162     qemu_fprintf(f, "FPSR = %08x %c%c%c%c ", env->fpsr,
6163                  (env->fpsr & FPSR_CC_A) ? 'A' : '-',
6164                  (env->fpsr & FPSR_CC_I) ? 'I' : '-',
6165                  (env->fpsr & FPSR_CC_Z) ? 'Z' : '-',
6166                  (env->fpsr & FPSR_CC_N) ? 'N' : '-');
6167     qemu_fprintf(f, "\n                                "
6168                  "FPCR =     %04x ", env->fpcr);
6169     switch (env->fpcr & FPCR_PREC_MASK) {
6170     case FPCR_PREC_X:
6171         qemu_fprintf(f, "X ");
6172         break;
6173     case FPCR_PREC_S:
6174         qemu_fprintf(f, "S ");
6175         break;
6176     case FPCR_PREC_D:
6177         qemu_fprintf(f, "D ");
6178         break;
6179     }
6180     switch (env->fpcr & FPCR_RND_MASK) {
6181     case FPCR_RND_N:
6182         qemu_fprintf(f, "RN ");
6183         break;
6184     case FPCR_RND_Z:
6185         qemu_fprintf(f, "RZ ");
6186         break;
6187     case FPCR_RND_M:
6188         qemu_fprintf(f, "RM ");
6189         break;
6190     case FPCR_RND_P:
6191         qemu_fprintf(f, "RP ");
6192         break;
6193     }
6194     qemu_fprintf(f, "\n");
6195 #ifdef CONFIG_SOFTMMU
6196     qemu_fprintf(f, "%sA7(MSP) = %08x %sA7(USP) = %08x %sA7(ISP) = %08x\n",
6197                  env->current_sp == M68K_SSP ? "->" : "  ", env->sp[M68K_SSP],
6198                  env->current_sp == M68K_USP ? "->" : "  ", env->sp[M68K_USP],
6199                  env->current_sp == M68K_ISP ? "->" : "  ", env->sp[M68K_ISP]);
6200     qemu_fprintf(f, "VBR = 0x%08x\n", env->vbr);
6201     qemu_fprintf(f, "SFC = %x DFC %x\n", env->sfc, env->dfc);
6202     qemu_fprintf(f, "SSW %08x TCR %08x URP %08x SRP %08x\n",
6203                  env->mmu.ssw, env->mmu.tcr, env->mmu.urp, env->mmu.srp);
6204     qemu_fprintf(f, "DTTR0/1: %08x/%08x ITTR0/1: %08x/%08x\n",
6205                  env->mmu.ttr[M68K_DTTR0], env->mmu.ttr[M68K_DTTR1],
6206                  env->mmu.ttr[M68K_ITTR0], env->mmu.ttr[M68K_ITTR1]);
6207     qemu_fprintf(f, "MMUSR %08x, fault at %08x\n",
6208                  env->mmu.mmusr, env->mmu.ar);
6209 #endif
6210 }
6211