xref: /openbmc/qemu/target/m68k/translate.c (revision 9c489ea6)
1 /*
2  *  m68k translation
3  *
4  *  Copyright (c) 2005-2007 CodeSourcery
5  *  Written by Paul Brook
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "disas/disas.h"
24 #include "exec/exec-all.h"
25 #include "tcg-op.h"
26 #include "qemu/log.h"
27 #include "exec/cpu_ldst.h"
28 
29 #include "exec/helper-proto.h"
30 #include "exec/helper-gen.h"
31 
32 #include "trace-tcg.h"
33 #include "exec/log.h"
34 
35 //#define DEBUG_DISPATCH 1
36 
37 #define DEFO32(name, offset) static TCGv QREG_##name;
38 #define DEFO64(name, offset) static TCGv_i64 QREG_##name;
39 #include "qregs.def"
40 #undef DEFO32
41 #undef DEFO64
42 
43 static TCGv_i32 cpu_halted;
44 static TCGv_i32 cpu_exception_index;
45 
46 static TCGv_env cpu_env;
47 
48 static char cpu_reg_names[2 * 8 * 3 + 5 * 4];
49 static TCGv cpu_dregs[8];
50 static TCGv cpu_aregs[8];
51 static TCGv_i64 cpu_macc[4];
52 
53 #define REG(insn, pos)  (((insn) >> (pos)) & 7)
54 #define DREG(insn, pos) cpu_dregs[REG(insn, pos)]
55 #define AREG(insn, pos) get_areg(s, REG(insn, pos))
56 #define MACREG(acc)     cpu_macc[acc]
57 #define QREG_SP         get_areg(s, 7)
58 
59 static TCGv NULL_QREG;
60 #define IS_NULL_QREG(t) (TCGV_EQUAL(t, NULL_QREG))
61 /* Used to distinguish stores from bad addressing modes.  */
62 static TCGv store_dummy;
63 
64 #include "exec/gen-icount.h"
65 
66 void m68k_tcg_init(void)
67 {
68     char *p;
69     int i;
70 
71     cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
72     tcg_ctx.tcg_env = cpu_env;
73 
74 #define DEFO32(name, offset) \
75     QREG_##name = tcg_global_mem_new_i32(cpu_env, \
76         offsetof(CPUM68KState, offset), #name);
77 #define DEFO64(name, offset) \
78     QREG_##name = tcg_global_mem_new_i64(cpu_env, \
79         offsetof(CPUM68KState, offset), #name);
80 #include "qregs.def"
81 #undef DEFO32
82 #undef DEFO64
83 
84     cpu_halted = tcg_global_mem_new_i32(cpu_env,
85                                         -offsetof(M68kCPU, env) +
86                                         offsetof(CPUState, halted), "HALTED");
87     cpu_exception_index = tcg_global_mem_new_i32(cpu_env,
88                                                  -offsetof(M68kCPU, env) +
89                                                  offsetof(CPUState, exception_index),
90                                                  "EXCEPTION");
91 
92     p = cpu_reg_names;
93     for (i = 0; i < 8; i++) {
94         sprintf(p, "D%d", i);
95         cpu_dregs[i] = tcg_global_mem_new(cpu_env,
96                                           offsetof(CPUM68KState, dregs[i]), p);
97         p += 3;
98         sprintf(p, "A%d", i);
99         cpu_aregs[i] = tcg_global_mem_new(cpu_env,
100                                           offsetof(CPUM68KState, aregs[i]), p);
101         p += 3;
102     }
103     for (i = 0; i < 4; i++) {
104         sprintf(p, "ACC%d", i);
105         cpu_macc[i] = tcg_global_mem_new_i64(cpu_env,
106                                          offsetof(CPUM68KState, macc[i]), p);
107         p += 5;
108     }
109 
110     NULL_QREG = tcg_global_mem_new(cpu_env, -4, "NULL");
111     store_dummy = tcg_global_mem_new(cpu_env, -8, "NULL");
112 }
113 
114 /* internal defines */
115 typedef struct DisasContext {
116     CPUM68KState *env;
117     target_ulong insn_pc; /* Start of the current instruction.  */
118     target_ulong pc;
119     int is_jmp;
120     CCOp cc_op; /* Current CC operation */
121     int cc_op_synced;
122     int user;
123     struct TranslationBlock *tb;
124     int singlestep_enabled;
125     TCGv_i64 mactmp;
126     int done_mac;
127     int writeback_mask;
128     TCGv writeback[8];
129 } DisasContext;
130 
131 static TCGv get_areg(DisasContext *s, unsigned regno)
132 {
133     if (s->writeback_mask & (1 << regno)) {
134         return s->writeback[regno];
135     } else {
136         return cpu_aregs[regno];
137     }
138 }
139 
140 static void delay_set_areg(DisasContext *s, unsigned regno,
141                            TCGv val, bool give_temp)
142 {
143     if (s->writeback_mask & (1 << regno)) {
144         if (give_temp) {
145             tcg_temp_free(s->writeback[regno]);
146             s->writeback[regno] = val;
147         } else {
148             tcg_gen_mov_i32(s->writeback[regno], val);
149         }
150     } else {
151         s->writeback_mask |= 1 << regno;
152         if (give_temp) {
153             s->writeback[regno] = val;
154         } else {
155             TCGv tmp = tcg_temp_new();
156             s->writeback[regno] = tmp;
157             tcg_gen_mov_i32(tmp, val);
158         }
159     }
160 }
161 
162 static void do_writebacks(DisasContext *s)
163 {
164     unsigned mask = s->writeback_mask;
165     if (mask) {
166         s->writeback_mask = 0;
167         do {
168             unsigned regno = ctz32(mask);
169             tcg_gen_mov_i32(cpu_aregs[regno], s->writeback[regno]);
170             tcg_temp_free(s->writeback[regno]);
171             mask &= mask - 1;
172         } while (mask);
173     }
174 }
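
/*
 * Address register updates from postincrement/predecrement addressing modes
 * are not committed immediately: delay_set_areg() stages the new value in
 * s->writeback[], get_areg() makes later uses within the same instruction see
 * the staged value, and do_writebacks() copies everything back to cpu_aregs[]
 * once the rest of the instruction has been emitted.  A rough sketch of the
 * usual pattern (illustrative only; "..." stands for the real EA handling):
 *
 *     TCGv addr = get_areg(s, reg0);           // may be a staged temp
 *     ... generate the load/store using addr ...
 *     TCGv inc = tcg_temp_new();
 *     tcg_gen_addi_i32(inc, addr, opsize_bytes(opsize));
 *     delay_set_areg(s, reg0, inc, true);      // committed by do_writebacks()
 */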
175 
176 #define DISAS_JUMP_NEXT 4
177 
178 #if defined(CONFIG_USER_ONLY)
179 #define IS_USER(s) 1
180 #else
181 #define IS_USER(s) s->user
182 #endif
183 
184 /* XXX: move that elsewhere */
185 /* ??? Fix exceptions.  */
186 static void *gen_throws_exception;
187 #define gen_last_qop NULL
188 
189 typedef void (*disas_proc)(CPUM68KState *env, DisasContext *s, uint16_t insn);
190 
191 #ifdef DEBUG_DISPATCH
192 #define DISAS_INSN(name)                                                \
193     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
194                                   uint16_t insn);                       \
195     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
196                              uint16_t insn)                             \
197     {                                                                   \
198         qemu_log("Dispatch " #name "\n");                               \
199         real_disas_##name(env, s, insn);                                \
200     }                                                                   \
201     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
202                                   uint16_t insn)
203 #else
204 #define DISAS_INSN(name)                                                \
205     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
206                              uint16_t insn)
207 #endif
208 
209 static const uint8_t cc_op_live[CC_OP_NB] = {
210     [CC_OP_FLAGS] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
211     [CC_OP_ADDB ... CC_OP_ADDL] = CCF_X | CCF_N | CCF_V,
212     [CC_OP_SUBB ... CC_OP_SUBL] = CCF_X | CCF_N | CCF_V,
213     [CC_OP_CMPB ... CC_OP_CMPL] = CCF_X | CCF_N | CCF_V,
214     [CC_OP_LOGIC] = CCF_X | CCF_N
215 };
216 
217 static void set_cc_op(DisasContext *s, CCOp op)
218 {
219     CCOp old_op = s->cc_op;
220     int dead;
221 
222     if (old_op == op) {
223         return;
224     }
225     s->cc_op = op;
226     s->cc_op_synced = 0;
227 
228     /* Discard CC computation that will no longer be used.
229        Note that X and N are never dead.  */
230     dead = cc_op_live[old_op] & ~cc_op_live[op];
231     if (dead & CCF_C) {
232         tcg_gen_discard_i32(QREG_CC_C);
233     }
234     if (dead & CCF_Z) {
235         tcg_gen_discard_i32(QREG_CC_Z);
236     }
237     if (dead & CCF_V) {
238         tcg_gen_discard_i32(QREG_CC_V);
239     }
240 }
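
/*
 * Condition codes are evaluated lazily: each arithmetic op only records the
 * inputs listed in cc_op_live[] and remembers which CC_OP_* produced them;
 * gen_flush_flags() below reconstructs the remaining flags on demand.  For
 * example, after an ADD.L the translator keeps
 *
 *     QREG_CC_X = carry out          (computed with a setcond)
 *     QREG_CC_N = the result
 *     QREG_CC_V = the second operand
 *
 * and gen_flush_flags() later derives C = X, Z from N, and V from the
 * (result, operand) pair only if some instruction actually needs them.
 */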
241 
242 /* Update the CPU env CC_OP state.  */
243 static void update_cc_op(DisasContext *s)
244 {
245     if (!s->cc_op_synced) {
246         s->cc_op_synced = 1;
247         tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
248     }
249 }
250 
251 /* Generate a jump to an immediate address.  */
252 static void gen_jmp_im(DisasContext *s, uint32_t dest)
253 {
254     update_cc_op(s);
255     tcg_gen_movi_i32(QREG_PC, dest);
256     s->is_jmp = DISAS_JUMP;
257 }
258 
259 /* Generate a jump to the address in qreg DEST.  */
260 static void gen_jmp(DisasContext *s, TCGv dest)
261 {
262     update_cc_op(s);
263     tcg_gen_mov_i32(QREG_PC, dest);
264     s->is_jmp = DISAS_JUMP;
265 }
266 
267 static void gen_raise_exception(int nr)
268 {
269     TCGv_i32 tmp = tcg_const_i32(nr);
270 
271     gen_helper_raise_exception(cpu_env, tmp);
272     tcg_temp_free_i32(tmp);
273 }
274 
275 static void gen_exception(DisasContext *s, uint32_t where, int nr)
276 {
277     update_cc_op(s);
278     gen_jmp_im(s, where);
279     gen_raise_exception(nr);
280 }
281 
282 static inline void gen_addr_fault(DisasContext *s)
283 {
284     gen_exception(s, s->insn_pc, EXCP_ADDRESS);
285 }
286 
287 /* Generate a load from the specified address.  Narrow values are sign
288    or zero extended to full register width, according to SIGN.  */
289 static inline TCGv gen_load(DisasContext * s, int opsize, TCGv addr, int sign)
290 {
291     TCGv tmp;
292     int index = IS_USER(s);
293     tmp = tcg_temp_new_i32();
294     switch (opsize) {
295     case OS_BYTE:
296         if (sign)
297             tcg_gen_qemu_ld8s(tmp, addr, index);
298         else
299             tcg_gen_qemu_ld8u(tmp, addr, index);
300         break;
301     case OS_WORD:
302         if (sign)
303             tcg_gen_qemu_ld16s(tmp, addr, index);
304         else
305             tcg_gen_qemu_ld16u(tmp, addr, index);
306         break;
307     case OS_LONG:
308         tcg_gen_qemu_ld32u(tmp, addr, index);
309         break;
310     default:
311         g_assert_not_reached();
312     }
313     gen_throws_exception = gen_last_qop;
314     return tmp;
315 }
316 
317 /* Generate a store.  */
318 static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val)
319 {
320     int index = IS_USER(s);
321     switch (opsize) {
322     case OS_BYTE:
323         tcg_gen_qemu_st8(val, addr, index);
324         break;
325     case OS_WORD:
326         tcg_gen_qemu_st16(val, addr, index);
327         break;
328     case OS_LONG:
329         tcg_gen_qemu_st32(val, addr, index);
330         break;
331     default:
332         g_assert_not_reached();
333     }
334     gen_throws_exception = gen_last_qop;
335 }
336 
337 typedef enum {
338     EA_STORE,
339     EA_LOADU,
340     EA_LOADS
341 } ea_what;
342 
343 /* Generate an unsigned load if WHAT is EA_LOADU, a signed load if it is
344    EA_LOADS, otherwise generate a store.  */
345 static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
346                      ea_what what)
347 {
348     if (what == EA_STORE) {
349         gen_store(s, opsize, addr, val);
350         return store_dummy;
351     } else {
352         return gen_load(s, opsize, addr, what == EA_LOADS);
353     }
354 }
355 
356 /* Read a 16-bit immediate constant */
357 static inline uint16_t read_im16(CPUM68KState *env, DisasContext *s)
358 {
359     uint16_t im;
360     im = cpu_lduw_code(env, s->pc);
361     s->pc += 2;
362     return im;
363 }
364 
365 /* Read an 8-bit immediate constant */
366 static inline uint8_t read_im8(CPUM68KState *env, DisasContext *s)
367 {
368     return read_im16(env, s);
369 }
370 
371 /* Read a 32-bit immediate constant.  */
372 static inline uint32_t read_im32(CPUM68KState *env, DisasContext *s)
373 {
374     uint32_t im;
375     im = read_im16(env, s) << 16;
376     im |= 0xffff & read_im16(env, s);
377     return im;
378 }
379 
380 /* Read a 64-bit immediate constant.  */
381 static inline uint64_t read_im64(CPUM68KState *env, DisasContext *s)
382 {
383     uint64_t im;
384     im = (uint64_t)read_im32(env, s) << 32;
385     im |= (uint64_t)read_im32(env, s);
386     return im;
387 }
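
/*
 * Extension words are fetched in big-endian order, most significant word
 * first.  For example, an immediate long encoded as the two extension words
 * 0x1234, 0x5678 yields read_im32() == 0x12345678, and read_im64()
 * concatenates two such longs the same way.
 */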
388 
389 /* Calculate an address index.  */
390 static TCGv gen_addr_index(DisasContext *s, uint16_t ext, TCGv tmp)
391 {
392     TCGv add;
393     int scale;
394 
395     add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
396     if ((ext & 0x800) == 0) {
397         tcg_gen_ext16s_i32(tmp, add);
398         add = tmp;
399     }
400     scale = (ext >> 9) & 3;
401     if (scale != 0) {
402         tcg_gen_shli_i32(tmp, add, scale);
403         add = tmp;
404     }
405     return add;
406 }
407 
408 /* Handle a base + index + displacement effective address.
409    A NULL_QREG base means pc-relative.  */
410 static TCGv gen_lea_indexed(CPUM68KState *env, DisasContext *s, TCGv base)
411 {
412     uint32_t offset;
413     uint16_t ext;
414     TCGv add;
415     TCGv tmp;
416     uint32_t bd, od;
417 
418     offset = s->pc;
419     ext = read_im16(env, s);
420 
421     if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
422         return NULL_QREG;
423 
424     if (m68k_feature(s->env, M68K_FEATURE_M68000) &&
425         !m68k_feature(s->env, M68K_FEATURE_SCALED_INDEX)) {
426         ext &= ~(3 << 9);
427     }
428 
429     if (ext & 0x100) {
430         /* full extension word format */
431         if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
432             return NULL_QREG;
433 
434         if ((ext & 0x30) > 0x10) {
435             /* base displacement */
436             if ((ext & 0x30) == 0x20) {
437                 bd = (int16_t)read_im16(env, s);
438             } else {
439                 bd = read_im32(env, s);
440             }
441         } else {
442             bd = 0;
443         }
444         tmp = tcg_temp_new();
445         if ((ext & 0x44) == 0) {
446             /* pre-index */
447             add = gen_addr_index(s, ext, tmp);
448         } else {
449             add = NULL_QREG;
450         }
451         if ((ext & 0x80) == 0) {
452             /* base not suppressed */
453             if (IS_NULL_QREG(base)) {
454                 base = tcg_const_i32(offset + bd);
455                 bd = 0;
456             }
457             if (!IS_NULL_QREG(add)) {
458                 tcg_gen_add_i32(tmp, add, base);
459                 add = tmp;
460             } else {
461                 add = base;
462             }
463         }
464         if (!IS_NULL_QREG(add)) {
465             if (bd != 0) {
466                 tcg_gen_addi_i32(tmp, add, bd);
467                 add = tmp;
468             }
469         } else {
470             add = tcg_const_i32(bd);
471         }
472         if ((ext & 3) != 0) {
473             /* memory indirect */
474             base = gen_load(s, OS_LONG, add, 0);
475             if ((ext & 0x44) == 4) {
476                 add = gen_addr_index(s, ext, tmp);
477                 tcg_gen_add_i32(tmp, add, base);
478                 add = tmp;
479             } else {
480                 add = base;
481             }
482             if ((ext & 3) > 1) {
483                 /* outer displacement */
484                 if ((ext & 3) == 2) {
485                     od = (int16_t)read_im16(env, s);
486                 } else {
487                     od = read_im32(env, s);
488                 }
489             } else {
490                 od = 0;
491             }
492             if (od != 0) {
493                 tcg_gen_addi_i32(tmp, add, od);
494                 add = tmp;
495             }
496         }
497     } else {
498         /* brief extension word format */
499         tmp = tcg_temp_new();
500         add = gen_addr_index(s, ext, tmp);
501         if (!IS_NULL_QREG(base)) {
502             tcg_gen_add_i32(tmp, add, base);
503             if ((int8_t)ext)
504                 tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
505         } else {
506             tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
507         }
508         add = tmp;
509     }
510     return add;
511 }
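
/*
 * The index portion of an extension word (decoded by gen_addr_index above)
 * uses bit 15 to select a data or address register, bits 14-12 for the
 * register number, bit 11 for word vs. long index, and bits 10-9 for the
 * scale.  As a worked example, the brief extension word 0x7204 in the
 * "(d8,An,Xn)" mode means:
 *
 *     bit 15     = 0    -> data register index
 *     bits 14-12 = 111  -> D7
 *     bit 11     = 0    -> sign-extended word index
 *     bits 10-9  = 01   -> scale by 2
 *     bit 8      = 0    -> brief format (no base/outer displacements)
 *     bits 7-0   = 0x04 -> 8-bit displacement +4
 *
 * so the effective address is An (or PC) + (int16_t)D7 * 2 + 4.
 */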
512 
513 /* Sign or zero extend a value.  */
514 
515 static inline void gen_ext(TCGv res, TCGv val, int opsize, int sign)
516 {
517     switch (opsize) {
518     case OS_BYTE:
519         if (sign) {
520             tcg_gen_ext8s_i32(res, val);
521         } else {
522             tcg_gen_ext8u_i32(res, val);
523         }
524         break;
525     case OS_WORD:
526         if (sign) {
527             tcg_gen_ext16s_i32(res, val);
528         } else {
529             tcg_gen_ext16u_i32(res, val);
530         }
531         break;
532     case OS_LONG:
533         tcg_gen_mov_i32(res, val);
534         break;
535     default:
536         g_assert_not_reached();
537     }
538 }
539 
540 /* Evaluate all the CC flags.  */
541 
542 static void gen_flush_flags(DisasContext *s)
543 {
544     TCGv t0, t1;
545 
546     switch (s->cc_op) {
547     case CC_OP_FLAGS:
548         return;
549 
550     case CC_OP_ADDB:
551     case CC_OP_ADDW:
552     case CC_OP_ADDL:
553         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
554         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
555         /* Compute signed overflow for addition.  */
556         t0 = tcg_temp_new();
557         t1 = tcg_temp_new();
558         tcg_gen_sub_i32(t0, QREG_CC_N, QREG_CC_V);
559         gen_ext(t0, t0, s->cc_op - CC_OP_ADDB, 1);
560         tcg_gen_xor_i32(t1, QREG_CC_N, QREG_CC_V);
561         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
562         tcg_temp_free(t0);
563         tcg_gen_andc_i32(QREG_CC_V, t1, QREG_CC_V);
564         tcg_temp_free(t1);
565         break;
566 
567     case CC_OP_SUBB:
568     case CC_OP_SUBW:
569     case CC_OP_SUBL:
570         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
571         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
572         /* Compute signed overflow for subtraction.  */
573         t0 = tcg_temp_new();
574         t1 = tcg_temp_new();
575         tcg_gen_add_i32(t0, QREG_CC_N, QREG_CC_V);
576         gen_ext(t0, t0, s->cc_op - CC_OP_SUBB, 1);
577         tcg_gen_xor_i32(t1, QREG_CC_N, t0);
578         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
579         tcg_temp_free(t0);
580         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t1);
581         tcg_temp_free(t1);
582         break;
583 
584     case CC_OP_CMPB:
585     case CC_OP_CMPW:
586     case CC_OP_CMPL:
587         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_C, QREG_CC_N, QREG_CC_V);
588         tcg_gen_sub_i32(QREG_CC_Z, QREG_CC_N, QREG_CC_V);
589         gen_ext(QREG_CC_Z, QREG_CC_Z, s->cc_op - CC_OP_CMPB, 1);
590         /* Compute signed overflow for subtraction.  */
591         t0 = tcg_temp_new();
592         tcg_gen_xor_i32(t0, QREG_CC_Z, QREG_CC_N);
593         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, QREG_CC_N);
594         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t0);
595         tcg_temp_free(t0);
596         tcg_gen_mov_i32(QREG_CC_N, QREG_CC_Z);
597         break;
598 
599     case CC_OP_LOGIC:
600         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
601         tcg_gen_movi_i32(QREG_CC_C, 0);
602         tcg_gen_movi_i32(QREG_CC_V, 0);
603         break;
604 
605     case CC_OP_DYNAMIC:
606         gen_helper_flush_flags(cpu_env, QREG_CC_OP);
607         s->cc_op_synced = 1;
608         break;
609 
610     default:
611         t0 = tcg_const_i32(s->cc_op);
612         gen_helper_flush_flags(cpu_env, t0);
613         tcg_temp_free(t0);
614         s->cc_op_synced = 1;
615         break;
616     }
617 
618     /* Note that flush_flags also assigns CC_OP_FLAGS to env->cc_op.  */
619     s->cc_op = CC_OP_FLAGS;
620 }
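
/*
 * The signed overflow reconstruction above relies on the identity
 * V = ~(a ^ b) & (res ^ b) for res = a + b: overflow happens exactly when
 * both operands have the same sign and the result has the opposite sign.
 * E.g. for a byte add 0x70 + 0x20 = 0x90, both operands are positive and the
 * result is negative, so V is set; C and Z are simply recovered from X and N
 * respectively.
 */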
621 
622 static inline TCGv gen_extend(TCGv val, int opsize, int sign)
623 {
624     TCGv tmp;
625 
626     if (opsize == OS_LONG) {
627         tmp = val;
628     } else {
629         tmp = tcg_temp_new();
630         gen_ext(tmp, val, opsize, sign);
631     }
632 
633     return tmp;
634 }
635 
636 static void gen_logic_cc(DisasContext *s, TCGv val, int opsize)
637 {
638     gen_ext(QREG_CC_N, val, opsize, 1);
639     set_cc_op(s, CC_OP_LOGIC);
640 }
641 
642 static void gen_update_cc_cmp(DisasContext *s, TCGv dest, TCGv src, int opsize)
643 {
644     tcg_gen_mov_i32(QREG_CC_N, dest);
645     tcg_gen_mov_i32(QREG_CC_V, src);
646     set_cc_op(s, CC_OP_CMPB + opsize);
647 }
648 
649 static void gen_update_cc_add(TCGv dest, TCGv src, int opsize)
650 {
651     gen_ext(QREG_CC_N, dest, opsize, 1);
652     tcg_gen_mov_i32(QREG_CC_V, src);
653 }
654 
655 static inline int opsize_bytes(int opsize)
656 {
657     switch (opsize) {
658     case OS_BYTE: return 1;
659     case OS_WORD: return 2;
660     case OS_LONG: return 4;
661     case OS_SINGLE: return 4;
662     case OS_DOUBLE: return 8;
663     case OS_EXTENDED: return 12;
664     case OS_PACKED: return 12;
665     default:
666         g_assert_not_reached();
667     }
668 }
669 
670 static inline int insn_opsize(int insn)
671 {
672     switch ((insn >> 6) & 3) {
673     case 0: return OS_BYTE;
674     case 1: return OS_WORD;
675     case 2: return OS_LONG;
676     default:
677         g_assert_not_reached();
678     }
679 }
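
/*
 * Most integer instructions encode their size in bits 7-6 of the opcode word,
 * which is what insn_opsize() extracts.  For example ADD.L D1,D0 is encoded
 * as 0xd081: (0xd081 >> 6) & 3 == 2, i.e. OS_LONG.
 */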
680 
681 static inline int ext_opsize(int ext, int pos)
682 {
683     switch ((ext >> pos) & 7) {
684     case 0: return OS_LONG;
685     case 1: return OS_SINGLE;
686     case 2: return OS_EXTENDED;
687     case 3: return OS_PACKED;
688     case 4: return OS_WORD;
689     case 5: return OS_DOUBLE;
690     case 6: return OS_BYTE;
691     default:
692         g_assert_not_reached();
693     }
694 }
695 
696 /* Assign value to a register.  If the width is less than the register width
697    only the low part of the register is set.  */
698 static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
699 {
700     TCGv tmp;
701     switch (opsize) {
702     case OS_BYTE:
703         tcg_gen_andi_i32(reg, reg, 0xffffff00);
704         tmp = tcg_temp_new();
705         tcg_gen_ext8u_i32(tmp, val);
706         tcg_gen_or_i32(reg, reg, tmp);
707         tcg_temp_free(tmp);
708         break;
709     case OS_WORD:
710         tcg_gen_andi_i32(reg, reg, 0xffff0000);
711         tmp = tcg_temp_new();
712         tcg_gen_ext16u_i32(tmp, val);
713         tcg_gen_or_i32(reg, reg, tmp);
714         tcg_temp_free(tmp);
715         break;
716     case OS_LONG:
717     case OS_SINGLE:
718         tcg_gen_mov_i32(reg, val);
719         break;
720     default:
721         g_assert_not_reached();
722     }
723 }
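
/*
 * Byte and word writes only replace the low bits of the destination data
 * register, as on real hardware.  E.g. if D0 holds 0x12345678, a MOVE.B
 * storing 0xab into it leaves D0 == 0x123456ab, and a MOVE.W storing 0xbeef
 * leaves D0 == 0x1234beef.
 */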
724 
725 /* Generate code for an "effective address".  Does not adjust the base
726    register for autoincrement addressing modes.  */
727 static TCGv gen_lea_mode(CPUM68KState *env, DisasContext *s,
728                          int mode, int reg0, int opsize)
729 {
730     TCGv reg;
731     TCGv tmp;
732     uint16_t ext;
733     uint32_t offset;
734 
735     switch (mode) {
736     case 0: /* Data register direct.  */
737     case 1: /* Address register direct.  */
738         return NULL_QREG;
739     case 3: /* Indirect postincrement.  */
740         if (opsize == OS_UNSIZED) {
741             return NULL_QREG;
742         }
743         /* fallthru */
744     case 2: /* Indirect register */
745         return get_areg(s, reg0);
746     case 4: /* Indirect predecrement.  */
747         if (opsize == OS_UNSIZED) {
748             return NULL_QREG;
749         }
750         reg = get_areg(s, reg0);
751         tmp = tcg_temp_new();
752         if (reg0 == 7 && opsize == OS_BYTE &&
753             m68k_feature(s->env, M68K_FEATURE_M68000)) {
754             tcg_gen_subi_i32(tmp, reg, 2);
755         } else {
756             tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
757         }
758         return tmp;
759     case 5: /* Indirect displacement.  */
760         reg = get_areg(s, reg0);
761         tmp = tcg_temp_new();
762         ext = read_im16(env, s);
763         tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
764         return tmp;
765     case 6: /* Indirect index + displacement.  */
766         reg = get_areg(s, reg0);
767         return gen_lea_indexed(env, s, reg);
768     case 7: /* Other */
769         switch (reg0) {
770         case 0: /* Absolute short.  */
771             offset = (int16_t)read_im16(env, s);
772             return tcg_const_i32(offset);
773         case 1: /* Absolute long.  */
774             offset = read_im32(env, s);
775             return tcg_const_i32(offset);
776         case 2: /* pc displacement  */
777             offset = s->pc;
778             offset += (int16_t)read_im16(env, s);
779             return tcg_const_i32(offset);
780         case 3: /* pc index+displacement.  */
781             return gen_lea_indexed(env, s, NULL_QREG);
782         case 4: /* Immediate.  */
783         default:
784             return NULL_QREG;
785         }
786     }
787     /* Should never happen.  */
788     return NULL_QREG;
789 }
790 
791 static TCGv gen_lea(CPUM68KState *env, DisasContext *s, uint16_t insn,
792                     int opsize)
793 {
794     int mode = extract32(insn, 3, 3);
795     int reg0 = REG(insn, 0);
796     return gen_lea_mode(env, s, mode, reg0, opsize);
797 }
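
/*
 * The standard 6-bit effective-address field is split into a 3-bit mode
 * (bits 5-3) and a 3-bit register number (bits 2-0).  For instance an
 * instruction whose EA bits are 101 010 has mode == 5 and reg0 == 2, i.e.
 * the "(d16,A2)" indirect-with-displacement form handled above.
 */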
798 
799 /* Generate code to load/store a value from/into an EA.  If WHAT is EA_STORE
800    this is a write, otherwise it is a read (EA_LOADS sign-extends, EA_LOADU
801    zero-extends).  ADDRP is non-null for read/write operands.  */
802 static TCGv gen_ea_mode(CPUM68KState *env, DisasContext *s, int mode, int reg0,
803                         int opsize, TCGv val, TCGv *addrp, ea_what what)
804 {
805     TCGv reg, tmp, result;
806     int32_t offset;
807 
808     switch (mode) {
809     case 0: /* Data register direct.  */
810         reg = cpu_dregs[reg0];
811         if (what == EA_STORE) {
812             gen_partset_reg(opsize, reg, val);
813             return store_dummy;
814         } else {
815             return gen_extend(reg, opsize, what == EA_LOADS);
816         }
817     case 1: /* Address register direct.  */
818         reg = get_areg(s, reg0);
819         if (what == EA_STORE) {
820             tcg_gen_mov_i32(reg, val);
821             return store_dummy;
822         } else {
823             return gen_extend(reg, opsize, what == EA_LOADS);
824         }
825     case 2: /* Indirect register */
826         reg = get_areg(s, reg0);
827         return gen_ldst(s, opsize, reg, val, what);
828     case 3: /* Indirect postincrement.  */
829         reg = get_areg(s, reg0);
830         result = gen_ldst(s, opsize, reg, val, what);
831         if (what == EA_STORE || !addrp) {
832             TCGv tmp = tcg_temp_new();
833             if (reg0 == 7 && opsize == OS_BYTE &&
834                 m68k_feature(s->env, M68K_FEATURE_M68000)) {
835                 tcg_gen_addi_i32(tmp, reg, 2);
836             } else {
837                 tcg_gen_addi_i32(tmp, reg, opsize_bytes(opsize));
838             }
839             delay_set_areg(s, reg0, tmp, true);
840         }
841         return result;
842     case 4: /* Indirect predecrement.  */
843         if (addrp && what == EA_STORE) {
844             tmp = *addrp;
845         } else {
846             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
847             if (IS_NULL_QREG(tmp)) {
848                 return tmp;
849             }
850             if (addrp) {
851                 *addrp = tmp;
852             }
853         }
854         result = gen_ldst(s, opsize, tmp, val, what);
855         if (what == EA_STORE || !addrp) {
856             delay_set_areg(s, reg0, tmp, false);
857         }
858         return result;
859     case 5: /* Indirect displacement.  */
860     case 6: /* Indirect index + displacement.  */
861     do_indirect:
862         if (addrp && what == EA_STORE) {
863             tmp = *addrp;
864         } else {
865             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
866             if (IS_NULL_QREG(tmp)) {
867                 return tmp;
868             }
869             if (addrp) {
870                 *addrp = tmp;
871             }
872         }
873         return gen_ldst(s, opsize, tmp, val, what);
874     case 7: /* Other */
875         switch (reg0) {
876         case 0: /* Absolute short.  */
877         case 1: /* Absolute long.  */
878         case 2: /* pc displacement  */
879         case 3: /* pc index+displacement.  */
880             goto do_indirect;
881         case 4: /* Immediate.  */
882             /* Sign extend values for consistency.  */
883             switch (opsize) {
884             case OS_BYTE:
885                 if (what == EA_LOADS) {
886                     offset = (int8_t)read_im8(env, s);
887                 } else {
888                     offset = read_im8(env, s);
889                 }
890                 break;
891             case OS_WORD:
892                 if (what == EA_LOADS) {
893                     offset = (int16_t)read_im16(env, s);
894                 } else {
895                     offset = read_im16(env, s);
896                 }
897                 break;
898             case OS_LONG:
899                 offset = read_im32(env, s);
900                 break;
901             default:
902                 g_assert_not_reached();
903             }
904             return tcg_const_i32(offset);
905         default:
906             return NULL_QREG;
907         }
908     }
909     /* Should never happen.  */
910     return NULL_QREG;
911 }
912 
913 static TCGv gen_ea(CPUM68KState *env, DisasContext *s, uint16_t insn,
914                    int opsize, TCGv val, TCGv *addrp, ea_what what)
915 {
916     int mode = extract32(insn, 3, 3);
917     int reg0 = REG(insn, 0);
918     return gen_ea_mode(env, s, mode, reg0, opsize, val, addrp, what);
919 }
920 
921 static TCGv_ptr gen_fp_ptr(int freg)
922 {
923     TCGv_ptr fp = tcg_temp_new_ptr();
924     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fregs[freg]));
925     return fp;
926 }
927 
928 static TCGv_ptr gen_fp_result_ptr(void)
929 {
930     TCGv_ptr fp = tcg_temp_new_ptr();
931     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fp_result));
932     return fp;
933 }
934 
935 static void gen_fp_move(TCGv_ptr dest, TCGv_ptr src)
936 {
937     TCGv t32;
938     TCGv_i64 t64;
939 
940     t32 = tcg_temp_new();
941     tcg_gen_ld16u_i32(t32, src, offsetof(FPReg, l.upper));
942     tcg_gen_st16_i32(t32, dest, offsetof(FPReg, l.upper));
943     tcg_temp_free(t32);
944 
945     t64 = tcg_temp_new_i64();
946     tcg_gen_ld_i64(t64, src, offsetof(FPReg, l.lower));
947     tcg_gen_st_i64(t64, dest, offsetof(FPReg, l.lower));
948     tcg_temp_free_i64(t64);
949 }
950 
951 static void gen_load_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp)
952 {
953     TCGv tmp;
954     TCGv_i64 t64;
955     int index = IS_USER(s);
956 
957     t64 = tcg_temp_new_i64();
958     tmp = tcg_temp_new();
959     switch (opsize) {
960     case OS_BYTE:
961         tcg_gen_qemu_ld8s(tmp, addr, index);
962         gen_helper_exts32(cpu_env, fp, tmp);
963         break;
964     case OS_WORD:
965         tcg_gen_qemu_ld16s(tmp, addr, index);
966         gen_helper_exts32(cpu_env, fp, tmp);
967         break;
968     case OS_LONG:
969         tcg_gen_qemu_ld32u(tmp, addr, index);
970         gen_helper_exts32(cpu_env, fp, tmp);
971         break;
972     case OS_SINGLE:
973         tcg_gen_qemu_ld32u(tmp, addr, index);
974         gen_helper_extf32(cpu_env, fp, tmp);
975         break;
976     case OS_DOUBLE:
977         tcg_gen_qemu_ld64(t64, addr, index);
978         gen_helper_extf64(cpu_env, fp, t64);
979         tcg_temp_free_i64(t64);
980         break;
981     case OS_EXTENDED:
982         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
983             gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
984             break;
985         }
986         tcg_gen_qemu_ld32u(tmp, addr, index);
987         tcg_gen_shri_i32(tmp, tmp, 16);
988         tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
989         tcg_gen_addi_i32(tmp, addr, 4);
990         tcg_gen_qemu_ld64(t64, tmp, index);
991         tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
992         break;
993     case OS_PACKED:
994         /* unimplemented data type on 68040/ColdFire
995          * FIXME if needed for another FPU
996          */
997         gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
998         break;
999     default:
1000         g_assert_not_reached();
1001     }
1002     tcg_temp_free(tmp);
1003     tcg_temp_free_i64(t64);
1004     gen_throws_exception = gen_last_qop;
1005 }
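
/*
 * The 96-bit extended-precision memory format loaded above consists of a
 * 16-bit sign/exponent field in the upper half of the first long word (the
 * lower 16 bits are unused), followed by the 64-bit mantissa; FPReg stores
 * these as l.upper and l.lower respectively.
 */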
1006 
1007 static void gen_store_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp)
1008 {
1009     TCGv tmp;
1010     TCGv_i64 t64;
1011     int index = IS_USER(s);
1012 
1013     t64 = tcg_temp_new_i64();
1014     tmp = tcg_temp_new();
1015     switch (opsize) {
1016     case OS_BYTE:
1017         gen_helper_reds32(tmp, cpu_env, fp);
1018         tcg_gen_qemu_st8(tmp, addr, index);
1019         break;
1020     case OS_WORD:
1021         gen_helper_reds32(tmp, cpu_env, fp);
1022         tcg_gen_qemu_st16(tmp, addr, index);
1023         break;
1024     case OS_LONG:
1025         gen_helper_reds32(tmp, cpu_env, fp);
1026         tcg_gen_qemu_st32(tmp, addr, index);
1027         break;
1028     case OS_SINGLE:
1029         gen_helper_redf32(tmp, cpu_env, fp);
1030         tcg_gen_qemu_st32(tmp, addr, index);
1031         break;
1032     case OS_DOUBLE:
1033         gen_helper_redf64(t64, cpu_env, fp);
1034         tcg_gen_qemu_st64(t64, addr, index);
1035         break;
1036     case OS_EXTENDED:
1037         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1038             gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1039             break;
1040         }
1041         tcg_gen_ld16u_i32(tmp, fp, offsetof(FPReg, l.upper));
1042         tcg_gen_shli_i32(tmp, tmp, 16);
1043         tcg_gen_qemu_st32(tmp, addr, index);
1044         tcg_gen_addi_i32(tmp, addr, 4);
1045         tcg_gen_ld_i64(t64, fp, offsetof(FPReg, l.lower));
1046         tcg_gen_qemu_st64(t64, tmp, index);
1047         break;
1048     case OS_PACKED:
1049         /* unimplemented data type on 68040/ColdFire
1050          * FIXME if needed for another FPU
1051          */
1052         gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1053         break;
1054     default:
1055         g_assert_not_reached();
1056     }
1057     tcg_temp_free(tmp);
1058     tcg_temp_free_i64(t64);
1059     gen_throws_exception = gen_last_qop;
1060 }
1061 
1062 static void gen_ldst_fp(DisasContext *s, int opsize, TCGv addr,
1063                         TCGv_ptr fp, ea_what what)
1064 {
1065     if (what == EA_STORE) {
1066         gen_store_fp(s, opsize, addr, fp);
1067     } else {
1068         gen_load_fp(s, opsize, addr, fp);
1069     }
1070 }
1071 
1072 static int gen_ea_mode_fp(CPUM68KState *env, DisasContext *s, int mode,
1073                           int reg0, int opsize, TCGv_ptr fp, ea_what what)
1074 {
1075     TCGv reg, addr, tmp;
1076     TCGv_i64 t64;
1077 
1078     switch (mode) {
1079     case 0: /* Data register direct.  */
1080         reg = cpu_dregs[reg0];
1081         if (what == EA_STORE) {
1082             switch (opsize) {
1083             case OS_BYTE:
1084             case OS_WORD:
1085             case OS_LONG:
1086                 gen_helper_reds32(reg, cpu_env, fp);
1087                 break;
1088             case OS_SINGLE:
1089                 gen_helper_redf32(reg, cpu_env, fp);
1090                 break;
1091             default:
1092                 g_assert_not_reached();
1093             }
1094         } else {
1095             tmp = tcg_temp_new();
1096             switch (opsize) {
1097             case OS_BYTE:
1098                 tcg_gen_ext8s_i32(tmp, reg);
1099                 gen_helper_exts32(cpu_env, fp, tmp);
1100                 break;
1101             case OS_WORD:
1102                 tcg_gen_ext16s_i32(tmp, reg);
1103                 gen_helper_exts32(cpu_env, fp, tmp);
1104                 break;
1105             case OS_LONG:
1106                 gen_helper_exts32(cpu_env, fp, reg);
1107                 break;
1108             case OS_SINGLE:
1109                 gen_helper_extf32(cpu_env, fp, reg);
1110                 break;
1111             default:
1112                 g_assert_not_reached();
1113             }
1114             tcg_temp_free(tmp);
1115         }
1116         return 0;
1117     case 1: /* Address register direct.  */
1118         return -1;
1119     case 2: /* Indirect register */
1120         addr = get_areg(s, reg0);
1121         gen_ldst_fp(s, opsize, addr, fp, what);
1122         return 0;
1123     case 3: /* Indirect postincrement.  */
1124         addr = cpu_aregs[reg0];
1125         gen_ldst_fp(s, opsize, addr, fp, what);
1126         tcg_gen_addi_i32(addr, addr, opsize_bytes(opsize));
1127         return 0;
1128     case 4: /* Indirect predecrement.  */
1129         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1130         if (IS_NULL_QREG(addr)) {
1131             return -1;
1132         }
1133         gen_ldst_fp(s, opsize, addr, fp, what);
1134         tcg_gen_mov_i32(cpu_aregs[reg0], addr);
1135         return 0;
1136     case 5: /* Indirect displacement.  */
1137     case 6: /* Indirect index + displacement.  */
1138     do_indirect:
1139         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1140         if (IS_NULL_QREG(addr)) {
1141             return -1;
1142         }
1143         gen_ldst_fp(s, opsize, addr, fp, what);
1144         return 0;
1145     case 7: /* Other */
1146         switch (reg0) {
1147         case 0: /* Absolute short.  */
1148         case 1: /* Absolute long.  */
1149         case 2: /* pc displacement  */
1150         case 3: /* pc index+displacement.  */
1151             goto do_indirect;
1152         case 4: /* Immediate.  */
1153             if (what == EA_STORE) {
1154                 return -1;
1155             }
1156             switch (opsize) {
1157             case OS_BYTE:
1158                 tmp = tcg_const_i32((int8_t)read_im8(env, s));
1159                 gen_helper_exts32(cpu_env, fp, tmp);
1160                 tcg_temp_free(tmp);
1161                 break;
1162             case OS_WORD:
1163                 tmp = tcg_const_i32((int16_t)read_im16(env, s));
1164                 gen_helper_exts32(cpu_env, fp, tmp);
1165                 tcg_temp_free(tmp);
1166                 break;
1167             case OS_LONG:
1168                 tmp = tcg_const_i32(read_im32(env, s));
1169                 gen_helper_exts32(cpu_env, fp, tmp);
1170                 tcg_temp_free(tmp);
1171                 break;
1172             case OS_SINGLE:
1173                 tmp = tcg_const_i32(read_im32(env, s));
1174                 gen_helper_extf32(cpu_env, fp, tmp);
1175                 tcg_temp_free(tmp);
1176                 break;
1177             case OS_DOUBLE:
1178                 t64 = tcg_const_i64(read_im64(env, s));
1179                 gen_helper_extf64(cpu_env, fp, t64);
1180                 tcg_temp_free_i64(t64);
1181                 break;
1182             case OS_EXTENDED:
1183                 if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1184                     gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1185                     break;
1186                 }
1187                 tmp = tcg_const_i32(read_im32(env, s) >> 16);
1188                 tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1189                 tcg_temp_free(tmp);
1190                 t64 = tcg_const_i64(read_im64(env, s));
1191                 tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1192                 tcg_temp_free_i64(t64);
1193                 break;
1194             case OS_PACKED:
1195                 /* unimplemented data type on 68040/ColdFire
1196                  * FIXME if needed for another FPU
1197                  */
1198                 gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1199                 break;
1200             default:
1201                 g_assert_not_reached();
1202             }
1203             return 0;
1204         default:
1205             return -1;
1206         }
1207     }
1208     return -1;
1209 }
1210 
1211 static int gen_ea_fp(CPUM68KState *env, DisasContext *s, uint16_t insn,
1212                        int opsize, TCGv_ptr fp, ea_what what)
1213 {
1214     int mode = extract32(insn, 3, 3);
1215     int reg0 = REG(insn, 0);
1216     return gen_ea_mode_fp(env, s, mode, reg0, opsize, fp, what);
1217 }
1218 
1219 typedef struct {
1220     TCGCond tcond;
1221     bool g1;
1222     bool g2;
1223     TCGv v1;
1224     TCGv v2;
1225 } DisasCompare;
1226 
1227 static void gen_cc_cond(DisasCompare *c, DisasContext *s, int cond)
1228 {
1229     TCGv tmp, tmp2;
1230     TCGCond tcond;
1231     CCOp op = s->cc_op;
1232 
1233     /* The CC_OP_CMP form can handle most normal comparisons directly.  */
1234     if (op == CC_OP_CMPB || op == CC_OP_CMPW || op == CC_OP_CMPL) {
1235         c->g1 = c->g2 = 1;
1236         c->v1 = QREG_CC_N;
1237         c->v2 = QREG_CC_V;
1238         switch (cond) {
1239         case 2: /* HI */
1240         case 3: /* LS */
1241             tcond = TCG_COND_LEU;
1242             goto done;
1243         case 4: /* CC */
1244         case 5: /* CS */
1245             tcond = TCG_COND_LTU;
1246             goto done;
1247         case 6: /* NE */
1248         case 7: /* EQ */
1249             tcond = TCG_COND_EQ;
1250             goto done;
1251         case 10: /* PL */
1252         case 11: /* MI */
1253             c->g1 = c->g2 = 0;
1254             c->v2 = tcg_const_i32(0);
1255             c->v1 = tmp = tcg_temp_new();
1256             tcg_gen_sub_i32(tmp, QREG_CC_N, QREG_CC_V);
1257             gen_ext(tmp, tmp, op - CC_OP_CMPB, 1);
1258             /* fallthru */
1259         case 12: /* GE */
1260         case 13: /* LT */
1261             tcond = TCG_COND_LT;
1262             goto done;
1263         case 14: /* GT */
1264         case 15: /* LE */
1265             tcond = TCG_COND_LE;
1266             goto done;
1267         }
1268     }
1269 
1270     c->g1 = 1;
1271     c->g2 = 0;
1272     c->v2 = tcg_const_i32(0);
1273 
1274     switch (cond) {
1275     case 0: /* T */
1276     case 1: /* F */
1277         c->v1 = c->v2;
1278         tcond = TCG_COND_NEVER;
1279         goto done;
1280     case 14: /* GT (!(Z || (N ^ V))) */
1281     case 15: /* LE (Z || (N ^ V)) */
1282         /* Logic operations clear V, which simplifies LE to (Z || N),
1283            and since Z and N are co-located, this becomes a normal
1284            comparison vs N.  */
1285         if (op == CC_OP_LOGIC) {
1286             c->v1 = QREG_CC_N;
1287             tcond = TCG_COND_LE;
1288             goto done;
1289         }
1290         break;
1291     case 12: /* GE (!(N ^ V)) */
1292     case 13: /* LT (N ^ V) */
1293         /* Logic operations clear V, which simplifies this to N.  */
1294         if (op != CC_OP_LOGIC) {
1295             break;
1296         }
1297         /* fallthru */
1298     case 10: /* PL (!N) */
1299     case 11: /* MI (N) */
1300         /* Several cases represent N normally.  */
1301         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1302             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1303             op == CC_OP_LOGIC) {
1304             c->v1 = QREG_CC_N;
1305             tcond = TCG_COND_LT;
1306             goto done;
1307         }
1308         break;
1309     case 6: /* NE (!Z) */
1310     case 7: /* EQ (Z) */
1311         /* Some cases fold Z into N.  */
1312         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1313             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1314             op == CC_OP_LOGIC) {
1315             tcond = TCG_COND_EQ;
1316             c->v1 = QREG_CC_N;
1317             goto done;
1318         }
1319         break;
1320     case 4: /* CC (!C) */
1321     case 5: /* CS (C) */
1322         /* Some cases fold C into X.  */
1323         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1324             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL) {
1325             tcond = TCG_COND_NE;
1326             c->v1 = QREG_CC_X;
1327             goto done;
1328         }
1329         /* fallthru */
1330     case 8: /* VC (!V) */
1331     case 9: /* VS (V) */
1332         /* Logic operations clear V and C.  */
1333         if (op == CC_OP_LOGIC) {
1334             tcond = TCG_COND_NEVER;
1335             c->v1 = c->v2;
1336             goto done;
1337         }
1338         break;
1339     }
1340 
1341     /* Otherwise, flush flag state to CC_OP_FLAGS.  */
1342     gen_flush_flags(s);
1343 
1344     switch (cond) {
1345     case 0: /* T */
1346     case 1: /* F */
1347     default:
1348         /* Invalid, or handled above.  */
1349         abort();
1350     case 2: /* HI (!C && !Z) -> !(C || Z)*/
1351     case 3: /* LS (C || Z) */
1352         c->v1 = tmp = tcg_temp_new();
1353         c->g1 = 0;
1354         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1355         tcg_gen_or_i32(tmp, tmp, QREG_CC_C);
1356         tcond = TCG_COND_NE;
1357         break;
1358     case 4: /* CC (!C) */
1359     case 5: /* CS (C) */
1360         c->v1 = QREG_CC_C;
1361         tcond = TCG_COND_NE;
1362         break;
1363     case 6: /* NE (!Z) */
1364     case 7: /* EQ (Z) */
1365         c->v1 = QREG_CC_Z;
1366         tcond = TCG_COND_EQ;
1367         break;
1368     case 8: /* VC (!V) */
1369     case 9: /* VS (V) */
1370         c->v1 = QREG_CC_V;
1371         tcond = TCG_COND_LT;
1372         break;
1373     case 10: /* PL (!N) */
1374     case 11: /* MI (N) */
1375         c->v1 = QREG_CC_N;
1376         tcond = TCG_COND_LT;
1377         break;
1378     case 12: /* GE (!(N ^ V)) */
1379     case 13: /* LT (N ^ V) */
1380         c->v1 = tmp = tcg_temp_new();
1381         c->g1 = 0;
1382         tcg_gen_xor_i32(tmp, QREG_CC_N, QREG_CC_V);
1383         tcond = TCG_COND_LT;
1384         break;
1385     case 14: /* GT (!(Z || (N ^ V))) */
1386     case 15: /* LE (Z || (N ^ V)) */
1387         c->v1 = tmp = tcg_temp_new();
1388         c->g1 = 0;
1389         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1390         tcg_gen_neg_i32(tmp, tmp);
1391         tmp2 = tcg_temp_new();
1392         tcg_gen_xor_i32(tmp2, QREG_CC_N, QREG_CC_V);
1393         tcg_gen_or_i32(tmp, tmp, tmp2);
1394         tcg_temp_free(tmp2);
1395         tcond = TCG_COND_LT;
1396         break;
1397     }
1398 
1399  done:
1400     if ((cond & 1) == 0) {
1401         tcond = tcg_invert_cond(tcond);
1402     }
1403     c->tcond = tcond;
1404 }
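
/*
 * The 4-bit condition field decoded above follows the usual 680x0 encoding:
 *
 *     0 T   1 F   2 HI  3 LS  4 CC  5 CS  6 NE  7 EQ
 *     8 VC  9 VS 10 PL 11 MI 12 GE 13 LT 14 GT 15 LE
 *
 * Each even/odd pair tests the same expression with opposite polarity, which
 * is why the code above only builds the comparison once and lets the final
 * "(cond & 1) == 0" check invert the TCG condition.
 */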
1405 
1406 static void free_cond(DisasCompare *c)
1407 {
1408     if (!c->g1) {
1409         tcg_temp_free(c->v1);
1410     }
1411     if (!c->g2) {
1412         tcg_temp_free(c->v2);
1413     }
1414 }
1415 
1416 static void gen_jmpcc(DisasContext *s, int cond, TCGLabel *l1)
1417 {
1418   DisasCompare c;
1419 
1420   gen_cc_cond(&c, s, cond);
1421   update_cc_op(s);
1422   tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
1423   free_cond(&c);
1424 }
1425 
1426 /* Force a TB lookup after an instruction that changes the CPU state.  */
1427 static void gen_lookup_tb(DisasContext *s)
1428 {
1429     update_cc_op(s);
1430     tcg_gen_movi_i32(QREG_PC, s->pc);
1431     s->is_jmp = DISAS_UPDATE;
1432 }
1433 
1434 #define SRC_EA(env, result, opsize, op_sign, addrp) do {                \
1435         result = gen_ea(env, s, insn, opsize, NULL_QREG, addrp,         \
1436                         op_sign ? EA_LOADS : EA_LOADU);                 \
1437         if (IS_NULL_QREG(result)) {                                     \
1438             gen_addr_fault(s);                                          \
1439             return;                                                     \
1440         }                                                               \
1441     } while (0)
1442 
1443 #define DEST_EA(env, insn, opsize, val, addrp) do {                     \
1444         TCGv ea_result = gen_ea(env, s, insn, opsize, val, addrp, EA_STORE); \
1445         if (IS_NULL_QREG(ea_result)) {                                  \
1446             gen_addr_fault(s);                                          \
1447             return;                                                     \
1448         }                                                               \
1449     } while (0)
1450 
1451 static inline bool use_goto_tb(DisasContext *s, uint32_t dest)
1452 {
1453 #ifndef CONFIG_USER_ONLY
1454     return (s->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) ||
1455            (s->insn_pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
1456 #else
1457     return true;
1458 #endif
1459 }
1460 
1461 /* Generate a jump to an immediate address.  */
1462 static void gen_jmp_tb(DisasContext *s, int n, uint32_t dest)
1463 {
1464     if (unlikely(s->singlestep_enabled)) {
1465         gen_exception(s, dest, EXCP_DEBUG);
1466     } else if (use_goto_tb(s, dest)) {
1467         tcg_gen_goto_tb(n);
1468         tcg_gen_movi_i32(QREG_PC, dest);
1469         tcg_gen_exit_tb((uintptr_t)s->tb + n);
1470     } else {
1471         gen_jmp_im(s, dest);
1472         tcg_gen_exit_tb(0);
1473     }
1474     s->is_jmp = DISAS_TB_JUMP;
1475 }
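
/*
 * When the branch target lies on the same guest page as the current TB (or
 * as the current instruction), the two translation blocks can be chained
 * directly with tcg_gen_goto_tb()/tcg_gen_exit_tb((uintptr_t)s->tb + n), so
 * the jump never returns to the main execution loop.  Otherwise the PC is
 * stored and a plain exit_tb(0) forces a fresh TB lookup.
 */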
1476 
1477 DISAS_INSN(scc)
1478 {
1479     DisasCompare c;
1480     int cond;
1481     TCGv tmp;
1482 
1483     cond = (insn >> 8) & 0xf;
1484     gen_cc_cond(&c, s, cond);
1485 
1486     tmp = tcg_temp_new();
1487     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
1488     free_cond(&c);
1489 
1490     tcg_gen_neg_i32(tmp, tmp);
1491     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
1492     tcg_temp_free(tmp);
1493 }
1494 
1495 DISAS_INSN(dbcc)
1496 {
1497     TCGLabel *l1;
1498     TCGv reg;
1499     TCGv tmp;
1500     int16_t offset;
1501     uint32_t base;
1502 
1503     reg = DREG(insn, 0);
1504     base = s->pc;
1505     offset = (int16_t)read_im16(env, s);
1506     l1 = gen_new_label();
1507     gen_jmpcc(s, (insn >> 8) & 0xf, l1);
1508 
1509     tmp = tcg_temp_new();
1510     tcg_gen_ext16s_i32(tmp, reg);
1511     tcg_gen_addi_i32(tmp, tmp, -1);
1512     gen_partset_reg(OS_WORD, reg, tmp);
1513     tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, -1, l1);
1514     gen_jmp_tb(s, 1, base + offset);
1515     gen_set_label(l1);
1516     gen_jmp_tb(s, 0, s->pc);
1517 }
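
/*
 * DBcc Dn,<label>: if the condition is true the loop terminates and execution
 * falls through; otherwise the low word of Dn is decremented and the branch
 * is taken unless the counter has wrapped to -1.  So a loop ending in
 * "DBF D0,loop" with D0.W initially N executes the loop body N + 1 times.
 */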
1518 
1519 DISAS_INSN(undef_mac)
1520 {
1521     gen_exception(s, s->pc - 2, EXCP_LINEA);
1522 }
1523 
1524 DISAS_INSN(undef_fpu)
1525 {
1526     gen_exception(s, s->pc - 2, EXCP_LINEF);
1527 }
1528 
1529 DISAS_INSN(undef)
1530 {
1531     /* ??? This covers both instructions that are as yet unimplemented
1532        for the 680x0 series, as well as those that are implemented
1533        but actually illegal for CPU32 or pre-68020.  */
1534     qemu_log_mask(LOG_UNIMP, "Illegal instruction: %04x @ %08x\n",
1535                   insn, s->pc - 2);
1536     gen_exception(s, s->pc - 2, EXCP_UNSUPPORTED);
1537 }
1538 
1539 DISAS_INSN(mulw)
1540 {
1541     TCGv reg;
1542     TCGv tmp;
1543     TCGv src;
1544     int sign;
1545 
1546     sign = (insn & 0x100) != 0;
1547     reg = DREG(insn, 9);
1548     tmp = tcg_temp_new();
1549     if (sign)
1550         tcg_gen_ext16s_i32(tmp, reg);
1551     else
1552         tcg_gen_ext16u_i32(tmp, reg);
1553     SRC_EA(env, src, OS_WORD, sign, NULL);
1554     tcg_gen_mul_i32(tmp, tmp, src);
1555     tcg_gen_mov_i32(reg, tmp);
1556     gen_logic_cc(s, tmp, OS_LONG);
1557     tcg_temp_free(tmp);
1558 }
1559 
1560 DISAS_INSN(divw)
1561 {
1562     int sign;
1563     TCGv src;
1564     TCGv destr;
1565 
1566     /* divX.w <EA>,Dn    32/16 -> 16r:16q */
1567 
1568     sign = (insn & 0x100) != 0;
1569 
1570     /* dest.l / src.w */
1571 
1572     SRC_EA(env, src, OS_WORD, sign, NULL);
1573     destr = tcg_const_i32(REG(insn, 9));
1574     if (sign) {
1575         gen_helper_divsw(cpu_env, destr, src);
1576     } else {
1577         gen_helper_divuw(cpu_env, destr, src);
1578     }
1579     tcg_temp_free(destr);
1580 
1581     set_cc_op(s, CC_OP_FLAGS);
1582 }
1583 
1584 DISAS_INSN(divl)
1585 {
1586     TCGv num, reg, den;
1587     int sign;
1588     uint16_t ext;
1589 
1590     ext = read_im16(env, s);
1591 
1592     sign = (ext & 0x0800) != 0;
1593 
1594     if (ext & 0x400) {
1595         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
1596             gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
1597             return;
1598         }
1599 
1600         /* divX.l <EA>, Dr:Dq    64/32 -> 32r:32q */
1601 
1602         SRC_EA(env, den, OS_LONG, 0, NULL);
1603         num = tcg_const_i32(REG(ext, 12));
1604         reg = tcg_const_i32(REG(ext, 0));
1605         if (sign) {
1606             gen_helper_divsll(cpu_env, num, reg, den);
1607         } else {
1608             gen_helper_divull(cpu_env, num, reg, den);
1609         }
1610         tcg_temp_free(reg);
1611         tcg_temp_free(num);
1612         set_cc_op(s, CC_OP_FLAGS);
1613         return;
1614     }
1615 
1616     /* divX.l <EA>, Dq        32/32 -> 32q     */
1617     /* divXl.l <EA>, Dr:Dq    32/32 -> 32r:32q */
1618 
1619     SRC_EA(env, den, OS_LONG, 0, NULL);
1620     num = tcg_const_i32(REG(ext, 12));
1621     reg = tcg_const_i32(REG(ext, 0));
1622     if (sign) {
1623         gen_helper_divsl(cpu_env, num, reg, den);
1624     } else {
1625         gen_helper_divul(cpu_env, num, reg, den);
1626     }
1627     tcg_temp_free(reg);
1628     tcg_temp_free(num);
1629 
1630     set_cc_op(s, CC_OP_FLAGS);
1631 }
1632 
1633 static void bcd_add(TCGv dest, TCGv src)
1634 {
1635     TCGv t0, t1;
1636 
1637     /*  dest10 = dest10 + src10 + X
1638      *
1639      *        t1 = src
1640      *        t2 = t1 + 0x066
1641      *        t3 = t2 + dest + X
1642      *        t4 = t2 ^ dest
1643      *        t5 = t3 ^ t4
1644      *        t6 = ~t5 & 0x110
1645      *        t7 = (t6 >> 2) | (t6 >> 3)
1646      *        return t3 - t7
1647      */
1648 
1649     /* t1 = (src + 0x066) + dest + X
1650      *    = result with a possible excess 0x6 in each digit
1651      */
1652 
1653     t0 = tcg_const_i32(0x066);
1654     tcg_gen_add_i32(t0, t0, src);
1655 
1656     t1 = tcg_temp_new();
1657     tcg_gen_add_i32(t1, t0, dest);
1658     tcg_gen_add_i32(t1, t1, QREG_CC_X);
1659 
1660     /* we will remove the excess 0x6 where there is no carry */
1661 
1662     /* t0 = (src + 0x0066) ^ dest
1663      *    = t1 without carries
1664      */
1665 
1666     tcg_gen_xor_i32(t0, t0, dest);
1667 
1668     /* extract the carries
1669      * t0 = t0 ^ t1
1670      *    = only the carries
1671      */
1672 
1673     tcg_gen_xor_i32(t0, t0, t1);
1674 
1675     /* for each digit that did not produce a carry, generate 0x2
1676      * (via the 0x22 mask), then triple it to form the 0x6 to remove
1677      */
1678 
1679     tcg_gen_shri_i32(t0, t0, 3);
1680     tcg_gen_not_i32(t0, t0);
1681     tcg_gen_andi_i32(t0, t0, 0x22);
1682     tcg_gen_add_i32(dest, t0, t0);
1683     tcg_gen_add_i32(dest, dest, t0);
1684     tcg_temp_free(t0);
1685 
1686     /* remove the excess 0x6
1687      * from digits that did not generate a carry
1688      */
1689 
1690     tcg_gen_sub_i32(dest, t1, dest);
1691     tcg_temp_free(t1);
1692 }
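
/*
 * A plain C sketch of the same adjustment, useful for checking the constants
 * (illustration only, not used by the translator; the names below are made up
 * for this example):
 *
 *     static uint16_t bcd_add_ref(uint8_t dest, uint8_t src, int x)
 *     {
 *         uint16_t t0 = src + 0x066;
 *         uint16_t t1 = t0 + dest + x;
 *         uint16_t carries = (t0 ^ dest) ^ t1;          // digit carry-outs
 *         uint16_t fix = (~(carries >> 3) & 0x22) * 3;  // 0x6 per non-carrying digit
 *         return t1 - fix;
 *     }
 *
 * Worked example: dest = 0x19, src = 0x27, X = 0 (19 + 27 in BCD):
 * t0 = 0x8d, t1 = 0xa6, carries = 0x32 (the low digit carried, the high one
 * did not), fix = 0x60, result = 0xa6 - 0x60 = 0x46.
 */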
1693 
1694 static void bcd_sub(TCGv dest, TCGv src)
1695 {
1696     TCGv t0, t1, t2;
1697 
1698     /*  dest10 = dest10 - src10 - X
1699      *         = bcd_add(dest + 1 - X, 0x199 - src)
1700      */
1701 
1702     /* t0 = 0x066 + (0x199 - src) */
1703 
1704     t0 = tcg_temp_new();
1705     tcg_gen_subfi_i32(t0, 0x1ff, src);
1706 
1707     /* t1 = t0 + dest + 1 - X */
1708 
1709     t1 = tcg_temp_new();
1710     tcg_gen_add_i32(t1, t0, dest);
1711     tcg_gen_addi_i32(t1, t1, 1);
1712     tcg_gen_sub_i32(t1, t1, QREG_CC_X);
1713 
1714     /* t2 = t0 ^ dest */
1715 
1716     t2 = tcg_temp_new();
1717     tcg_gen_xor_i32(t2, t0, dest);
1718 
1719     /* t0 = t1 ^ t2 */
1720 
1721     tcg_gen_xor_i32(t0, t1, t2);
1722 
1723     /* t2 = ~t0 & 0x110
1724      * t0 = (t2 >> 2) | (t2 >> 3)
1725      *
1726      * to fit on 8bit operands, changed in:
1727      *
1728      * t2 = ~(t0 >> 3) & 0x22
1729      * t0 = t2 + t2
1730      * t0 = t0 + t2
1731      */
1732 
1733     tcg_gen_shri_i32(t2, t0, 3);
1734     tcg_gen_not_i32(t2, t2);
1735     tcg_gen_andi_i32(t2, t2, 0x22);
1736     tcg_gen_add_i32(t0, t2, t2);
1737     tcg_gen_add_i32(t0, t0, t2);
1738     tcg_temp_free(t2);
1739 
1740     /* return t1 - t0 */
1741 
1742     tcg_gen_sub_i32(dest, t1, t0);
1743     tcg_temp_free(t0);
1744     tcg_temp_free(t1);
1745 }
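
/*
 * Subtraction is reduced to the same digit-fixup scheme via a ten's
 * complement: in two-digit BCD, -src is the nine's complement (0x99 - src)
 * plus one, and the 0x066 digit bias is folded in to give the 0x1ff constant
 * above (0x066 + 0x199).  Quick check: dest = 0x42, src = 0x17, X = 0 gives
 * t0 = 0x1e8, t1 = 0x22b, a final fixup of 0x06 and a result of 0x225, i.e.
 * BCD 25 with no borrow (bit 8 clear), as expected for 42 - 17.
 */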
1746 
1747 static void bcd_flags(TCGv val)
1748 {
1749     tcg_gen_andi_i32(QREG_CC_C, val, 0x0ff);
1750     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_C);
1751 
1752     tcg_gen_extract_i32(QREG_CC_C, val, 8, 1);
1753 
1754     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
1755 }
1756 
1757 DISAS_INSN(abcd_reg)
1758 {
1759     TCGv src;
1760     TCGv dest;
1761 
1762     gen_flush_flags(s); /* !Z is sticky */
1763 
1764     src = gen_extend(DREG(insn, 0), OS_BYTE, 0);
1765     dest = gen_extend(DREG(insn, 9), OS_BYTE, 0);
1766     bcd_add(dest, src);
1767     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1768 
1769     bcd_flags(dest);
1770 }
1771 
1772 DISAS_INSN(abcd_mem)
1773 {
1774     TCGv src, dest, addr;
1775 
1776     gen_flush_flags(s); /* !Z is sticky */
1777 
1778     /* Indirect pre-decrement load (mode 4) */
1779 
1780     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1781                       NULL_QREG, NULL, EA_LOADU);
1782     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1783                        NULL_QREG, &addr, EA_LOADU);
1784 
1785     bcd_add(dest, src);
1786 
1787     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr, EA_STORE);
1788 
1789     bcd_flags(dest);
1790 }
1791 
1792 DISAS_INSN(sbcd_reg)
1793 {
1794     TCGv src, dest;
1795 
1796     gen_flush_flags(s); /* !Z is sticky */
1797 
1798     src = gen_extend(DREG(insn, 0), OS_BYTE, 0);
1799     dest = gen_extend(DREG(insn, 9), OS_BYTE, 0);
1800 
1801     bcd_sub(dest, src);
1802 
1803     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1804 
1805     bcd_flags(dest);
1806 }
1807 
1808 DISAS_INSN(sbcd_mem)
1809 {
1810     TCGv src, dest, addr;
1811 
1812     gen_flush_flags(s); /* !Z is sticky */
1813 
1814     /* Indirect pre-decrement load (mode 4) */
1815 
1816     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1817                       NULL_QREG, NULL, EA_LOADU);
1818     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1819                        NULL_QREG, &addr, EA_LOADU);
1820 
1821     bcd_sub(dest, src);
1822 
1823     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr, EA_STORE);
1824 
1825     bcd_flags(dest);
1826 }
1827 
1828 DISAS_INSN(nbcd)
1829 {
1830     TCGv src, dest;
1831     TCGv addr;
1832 
1833     gen_flush_flags(s); /* !Z is sticky */
1834 
1835     SRC_EA(env, src, OS_BYTE, 0, &addr);
1836 
1837     dest = tcg_const_i32(0);
1838     bcd_sub(dest, src);
1839 
1840     DEST_EA(env, insn, OS_BYTE, dest, &addr);
1841 
1842     bcd_flags(dest);
1843 
1844     tcg_temp_free(dest);
1845 }
1846 
1847 DISAS_INSN(addsub)
1848 {
1849     TCGv reg;
1850     TCGv dest;
1851     TCGv src;
1852     TCGv tmp;
1853     TCGv addr;
1854     int add;
1855     int opsize;
1856 
1857     add = (insn & 0x4000) != 0;
1858     opsize = insn_opsize(insn);
1859     reg = gen_extend(DREG(insn, 9), opsize, 1);
1860     dest = tcg_temp_new();
1861     if (insn & 0x100) {
1862         SRC_EA(env, tmp, opsize, 1, &addr);
1863         src = reg;
1864     } else {
1865         tmp = reg;
1866         SRC_EA(env, src, opsize, 1, NULL);
1867     }
1868     if (add) {
1869         tcg_gen_add_i32(dest, tmp, src);
1870         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, src);
1871         set_cc_op(s, CC_OP_ADDB + opsize);
1872     } else {
1873         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, tmp, src);
1874         tcg_gen_sub_i32(dest, tmp, src);
1875         set_cc_op(s, CC_OP_SUBB + opsize);
1876     }
1877     gen_update_cc_add(dest, src, opsize);
1878     if (insn & 0x100) {
1879         DEST_EA(env, insn, opsize, dest, &addr);
1880     } else {
1881         gen_partset_reg(opsize, DREG(insn, 9), dest);
1882     }
1883     tcg_temp_free(dest);
1884 }
1885 
1886 /* Reverse the order of the bits in REG.  */
1887 DISAS_INSN(bitrev)
1888 {
1889     TCGv reg;
1890     reg = DREG(insn, 0);
1891     gen_helper_bitrev(reg, reg);
1892 }
1893 
1894 DISAS_INSN(bitop_reg)
1895 {
1896     int opsize;
1897     int op;
1898     TCGv src1;
1899     TCGv src2;
1900     TCGv tmp;
1901     TCGv addr;
1902     TCGv dest;
1903 
1904     if ((insn & 0x38) != 0)
1905         opsize = OS_BYTE;
1906     else
1907         opsize = OS_LONG;
1908     op = (insn >> 6) & 3;
1909     SRC_EA(env, src1, opsize, 0, op ? &addr : NULL);
1910 
1911     gen_flush_flags(s);
1912     src2 = tcg_temp_new();
1913     if (opsize == OS_BYTE)
1914         tcg_gen_andi_i32(src2, DREG(insn, 9), 7);
1915     else
1916         tcg_gen_andi_i32(src2, DREG(insn, 9), 31);
1917 
1918     tmp = tcg_const_i32(1);
1919     tcg_gen_shl_i32(tmp, tmp, src2);
1920     tcg_temp_free(src2);
1921 
1922     tcg_gen_and_i32(QREG_CC_Z, src1, tmp);
1923 
1924     dest = tcg_temp_new();
1925     switch (op) {
1926     case 1: /* bchg */
1927         tcg_gen_xor_i32(dest, src1, tmp);
1928         break;
1929     case 2: /* bclr */
1930         tcg_gen_andc_i32(dest, src1, tmp);
1931         break;
1932     case 3: /* bset */
1933         tcg_gen_or_i32(dest, src1, tmp);
1934         break;
1935     default: /* btst */
1936         break;
1937     }
1938     tcg_temp_free(tmp);
1939     if (op) {
1940         DEST_EA(env, insn, opsize, dest, &addr);
1941     }
1942     tcg_temp_free(dest);
1943 }
1944 
1945 DISAS_INSN(sats)
1946 {
1947     TCGv reg;
1948     reg = DREG(insn, 0);
1949     gen_flush_flags(s);
1950     gen_helper_sats(reg, reg, QREG_CC_V);
1951     gen_logic_cc(s, reg, OS_LONG);
1952 }
1953 
1954 static void gen_push(DisasContext *s, TCGv val)
1955 {
1956     TCGv tmp;
1957 
1958     tmp = tcg_temp_new();
1959     tcg_gen_subi_i32(tmp, QREG_SP, 4);
1960     gen_store(s, OS_LONG, tmp, val);
1961     tcg_gen_mov_i32(QREG_SP, tmp);
1962     tcg_temp_free(tmp);
1963 }
1964 
1965 static TCGv mreg(int reg)
1966 {
1967     if (reg < 8) {
1968         /* Dx */
1969         return cpu_dregs[reg];
1970     }
1971     /* Ax */
1972     return cpu_aregs[reg & 7];
1973 }
1974 
1975 DISAS_INSN(movem)
1976 {
1977     TCGv addr, incr, tmp, r[16];
1978     int is_load = (insn & 0x0400) != 0;
1979     int opsize = (insn & 0x40) != 0 ? OS_LONG : OS_WORD;
1980     uint16_t mask = read_im16(env, s);
1981     int mode = extract32(insn, 3, 3);
1982     int reg0 = REG(insn, 0);
1983     int i;
1984 
1985     tmp = cpu_aregs[reg0];
1986 
1987     switch (mode) {
1988     case 0: /* data register direct */
1989     case 1: /* addr register direct */
1990     do_addr_fault:
1991         gen_addr_fault(s);
1992         return;
1993 
1994     case 2: /* indirect */
1995         break;
1996 
1997     case 3: /* indirect post-increment */
1998         if (!is_load) {
1999             /* post-increment is not allowed */
2000             goto do_addr_fault;
2001         }
2002         break;
2003 
2004     case 4: /* indirect pre-decrement */
2005         if (is_load) {
2006             /* pre-decrement is not allowed */
2007             goto do_addr_fault;
2008         }
2009         /* We want a bare copy of the address reg, without the
2010            pre-decrement adjustment that gen_lea would apply.  */
2011         break;
2012 
2013     default:
2014         tmp = gen_lea_mode(env, s, mode, reg0, opsize);
2015         if (IS_NULL_QREG(tmp)) {
2016             goto do_addr_fault;
2017         }
2018         break;
2019     }
2020 
2021     addr = tcg_temp_new();
2022     tcg_gen_mov_i32(addr, tmp);
2023     incr = tcg_const_i32(opsize_bytes(opsize));
2024 
2025     if (is_load) {
2026         /* memory to register */
2027         for (i = 0; i < 16; i++) {
2028             if (mask & (1 << i)) {
2029                 r[i] = gen_load(s, opsize, addr, 1);
2030                 tcg_gen_add_i32(addr, addr, incr);
2031             }
2032         }
2033         for (i = 0; i < 16; i++) {
2034             if (mask & (1 << i)) {
2035                 tcg_gen_mov_i32(mreg(i), r[i]);
2036                 tcg_temp_free(r[i]);
2037             }
2038         }
2039         if (mode == 3) {
2040             /* post-increment: movem (An)+,X */
2041             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2042         }
2043     } else {
2044         /* register to memory */
2045         if (mode == 4) {
2046             /* pre-decrement: movem X,-(An) */
2047             for (i = 15; i >= 0; i--) {
2048                 if ((mask << i) & 0x8000) {
2049                     tcg_gen_sub_i32(addr, addr, incr);
2050                     if (reg0 + 8 == i &&
2051                         m68k_feature(s->env, M68K_FEATURE_EXT_FULL)) {
2052                         /* M68020+: if the addressing register is the
2053                          * register moved to memory, the value written
2054                          * is the initial value decremented by the size of
2055                          * the operation, regardless of how many actual
2056                          * stores have been performed until this point.
2057                          * M68000/M68010: the value is the initial value.
2058                          */
2059                         tmp = tcg_temp_new();
2060                         tcg_gen_sub_i32(tmp, cpu_aregs[reg0], incr);
2061                         gen_store(s, opsize, addr, tmp);
2062                         tcg_temp_free(tmp);
2063                     } else {
2064                         gen_store(s, opsize, addr, mreg(i));
2065                     }
2066                 }
2067             }
2068             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2069         } else {
2070             for (i = 0; i < 16; i++) {
2071                 if (mask & (1 << i)) {
2072                     gen_store(s, opsize, addr, mreg(i));
2073                     tcg_gen_add_i32(addr, addr, incr);
2074                 }
2075             }
2076         }
2077     }
2078 
2079     tcg_temp_free(incr);
2080     tcg_temp_free(addr);
2081 }
2082 
2083 DISAS_INSN(bitop_im)
2084 {
2085     int opsize;
2086     int op;
2087     TCGv src1;
2088     uint32_t mask;
2089     int bitnum;
2090     TCGv tmp;
2091     TCGv addr;
2092 
2093     if ((insn & 0x38) != 0)
2094         opsize = OS_BYTE;
2095     else
2096         opsize = OS_LONG;
2097     op = (insn >> 6) & 3;
2098 
2099     bitnum = read_im16(env, s);
2100     if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
2101         if (bitnum & 0xfe00) {
2102             disas_undef(env, s, insn);
2103             return;
2104         }
2105     } else {
2106         if (bitnum & 0xff00) {
2107             disas_undef(env, s, insn);
2108             return;
2109         }
2110     }
2111 
2112     SRC_EA(env, src1, opsize, 0, op ? &addr : NULL);
2113 
2114     gen_flush_flags(s);
2115     if (opsize == OS_BYTE)
2116         bitnum &= 7;
2117     else
2118         bitnum &= 31;
2119     mask = 1 << bitnum;
2120 
2121     tcg_gen_andi_i32(QREG_CC_Z, src1, mask);
2122 
2123     if (op) {
2124         tmp = tcg_temp_new();
2125         switch (op) {
2126         case 1: /* bchg */
2127             tcg_gen_xori_i32(tmp, src1, mask);
2128             break;
2129         case 2: /* bclr */
2130             tcg_gen_andi_i32(tmp, src1, ~mask);
2131             break;
2132         case 3: /* bset */
2133             tcg_gen_ori_i32(tmp, src1, mask);
2134             break;
2135         default: /* btst */
2136             break;
2137         }
2138         DEST_EA(env, insn, opsize, tmp, &addr);
2139         tcg_temp_free(tmp);
2140     }
2141 }
2142 
2143 DISAS_INSN(arith_im)
2144 {
2145     int op;
2146     TCGv im;
2147     TCGv src1;
2148     TCGv dest;
2149     TCGv addr;
2150     int opsize;
2151 
2152     op = (insn >> 9) & 7;
2153     opsize = insn_opsize(insn);
2154     switch (opsize) {
2155     case OS_BYTE:
2156         im = tcg_const_i32((int8_t)read_im8(env, s));
2157         break;
2158     case OS_WORD:
2159         im = tcg_const_i32((int16_t)read_im16(env, s));
2160         break;
2161     case OS_LONG:
2162         im = tcg_const_i32(read_im32(env, s));
2163         break;
2164     default:
2165        abort();
2166     }
2167     SRC_EA(env, src1, opsize, 1, (op == 6) ? NULL : &addr);
2168     dest = tcg_temp_new();
2169     switch (op) {
2170     case 0: /* ori */
2171         tcg_gen_or_i32(dest, src1, im);
2172         gen_logic_cc(s, dest, opsize);
2173         break;
2174     case 1: /* andi */
2175         tcg_gen_and_i32(dest, src1, im);
2176         gen_logic_cc(s, dest, opsize);
2177         break;
2178     case 2: /* subi */
2179         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, src1, im);
2180         tcg_gen_sub_i32(dest, src1, im);
2181         gen_update_cc_add(dest, im, opsize);
2182         set_cc_op(s, CC_OP_SUBB + opsize);
2183         break;
2184     case 3: /* addi */
2185         tcg_gen_add_i32(dest, src1, im);
2186         gen_update_cc_add(dest, im, opsize);
2187         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, im);
2188         set_cc_op(s, CC_OP_ADDB + opsize);
2189         break;
2190     case 5: /* eori */
2191         tcg_gen_xor_i32(dest, src1, im);
2192         gen_logic_cc(s, dest, opsize);
2193         break;
2194     case 6: /* cmpi */
2195         gen_update_cc_cmp(s, src1, im, opsize);
2196         break;
2197     default:
2198         abort();
2199     }
2200     tcg_temp_free(im);
2201     if (op != 6) {
2202         DEST_EA(env, insn, opsize, dest, &addr);
2203     }
2204     tcg_temp_free(dest);
2205 }
2206 
2207 DISAS_INSN(cas)
2208 {
2209     int opsize;
2210     TCGv addr;
2211     uint16_t ext;
2212     TCGv load;
2213     TCGv cmp;
2214     TCGMemOp opc;
2215 
2216     switch ((insn >> 9) & 3) {
2217     case 1:
2218         opsize = OS_BYTE;
2219         opc = MO_SB;
2220         break;
2221     case 2:
2222         opsize = OS_WORD;
2223         opc = MO_TESW;
2224         break;
2225     case 3:
2226         opsize = OS_LONG;
2227         opc = MO_TESL;
2228         break;
2229     default:
2230         g_assert_not_reached();
2231     }
2232 
2233     ext = read_im16(env, s);
2234 
2235     /* cas Dc,Du,<EA> */
2236 
2237     addr = gen_lea(env, s, insn, opsize);
2238     if (IS_NULL_QREG(addr)) {
2239         gen_addr_fault(s);
2240         return;
2241     }
2242 
2243     cmp = gen_extend(DREG(ext, 0), opsize, 1);
2244 
2245     /* if  <EA> == Dc then
2246      *     <EA> = Du
2247      *     Dc = <EA> (because <EA> == Dc)
2248      * else
2249      *     Dc = <EA>
2250      */
2251 
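         /* tcg_gen_atomic_cmpxchg_i32 returns the old memory value in 'load'
          * whether or not the store took place, which is exactly what is
          * needed here: the flags and Dc are updated from it in both cases.
          */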
2252     load = tcg_temp_new();
2253     tcg_gen_atomic_cmpxchg_i32(load, addr, cmp, DREG(ext, 6),
2254                                IS_USER(s), opc);
2255     /* update flags before setting cmp to load */
2256     gen_update_cc_cmp(s, load, cmp, opsize);
2257     gen_partset_reg(opsize, DREG(ext, 0), load);
2258 
2259     tcg_temp_free(load);
2260 
2261     switch (extract32(insn, 3, 3)) {
2262     case 3: /* Indirect postincrement.  */
2263         tcg_gen_addi_i32(AREG(insn, 0), addr, opsize_bytes(opsize));
2264         break;
2265     case 4: /* Indirect predecrement.  */
2266         tcg_gen_mov_i32(AREG(insn, 0), addr);
2267         break;
2268     }
2269 }
2270 
2271 DISAS_INSN(cas2w)
2272 {
2273     uint16_t ext1, ext2;
2274     TCGv addr1, addr2;
2275     TCGv regs;
2276 
2277     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2278 
2279     ext1 = read_im16(env, s);
2280 
2281     if (ext1 & 0x8000) {
2282         /* Address Register */
2283         addr1 = AREG(ext1, 12);
2284     } else {
2285         /* Data Register */
2286         addr1 = DREG(ext1, 12);
2287     }
2288 
2289     ext2 = read_im16(env, s);
2290     if (ext2 & 0x8000) {
2291         /* Address Register */
2292         addr2 = AREG(ext2, 12);
2293     } else {
2294         /* Data Register */
2295         addr2 = DREG(ext2, 12);
2296     }
2297 
2298     /* if (R1) == Dc1 && (R2) == Dc2 then
2299      *     (R1) = Du1
2300      *     (R2) = Du2
2301      * else
2302      *     Dc1 = (R1)
2303      *     Dc2 = (R2)
2304      */
2305 
2306     regs = tcg_const_i32(REG(ext2, 6) |
2307                          (REG(ext1, 6) << 3) |
2308                          (REG(ext2, 0) << 6) |
2309                          (REG(ext1, 0) << 9));
2310     gen_helper_cas2w(cpu_env, regs, addr1, addr2);
2311     tcg_temp_free(regs);
2312 
2313     /* Note that cas2w also assigns to env->cc_op.  */
2314     s->cc_op = CC_OP_CMPW;
2315     s->cc_op_synced = 1;
2316 }
2317 
2318 DISAS_INSN(cas2l)
2319 {
2320     uint16_t ext1, ext2;
2321     TCGv addr1, addr2, regs;
2322 
2323     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2324 
2325     ext1 = read_im16(env, s);
2326 
2327     if (ext1 & 0x8000) {
2328         /* Address Register */
2329         addr1 = AREG(ext1, 12);
2330     } else {
2331         /* Data Register */
2332         addr1 = DREG(ext1, 12);
2333     }
2334 
2335     ext2 = read_im16(env, s);
2336     if (ext2 & 0x8000) {
2337         /* Address Register */
2338         addr2 = AREG(ext2, 12);
2339     } else {
2340         /* Data Register */
2341         addr2 = DREG(ext2, 12);
2342     }
2343 
2344     /* if (R1) == Dc1 && (R2) == Dc2 then
2345      *     (R1) = Du1
2346      *     (R2) = Du2
2347      * else
2348      *     Dc1 = (R1)
2349      *     Dc2 = (R2)
2350      */
2351 
2352     regs = tcg_const_i32(REG(ext2, 6) |
2353                          (REG(ext1, 6) << 3) |
2354                          (REG(ext2, 0) << 6) |
2355                          (REG(ext1, 0) << 9));
2356     gen_helper_cas2l(cpu_env, regs, addr1, addr2);
2357     tcg_temp_free(regs);
2358 
2359     /* Note that cas2l also assigns to env->cc_op.  */
2360     s->cc_op = CC_OP_CMPL;
2361     s->cc_op_synced = 1;
2362 }
2363 
2364 DISAS_INSN(byterev)
2365 {
2366     TCGv reg;
2367 
2368     reg = DREG(insn, 0);
2369     tcg_gen_bswap32_i32(reg, reg);
2370 }
2371 
2372 DISAS_INSN(move)
2373 {
2374     TCGv src;
2375     TCGv dest;
2376     int op;
2377     int opsize;
2378 
2379     switch (insn >> 12) {
2380     case 1: /* move.b */
2381         opsize = OS_BYTE;
2382         break;
2383     case 2: /* move.l */
2384         opsize = OS_LONG;
2385         break;
2386     case 3: /* move.w */
2387         opsize = OS_WORD;
2388         break;
2389     default:
2390         abort();
2391     }
2392     SRC_EA(env, src, opsize, 1, NULL);
2393     op = (insn >> 6) & 7;
2394     if (op == 1) {
2395         /* movea */
2396         /* The value will already have been sign extended.  */
2397         dest = AREG(insn, 9);
2398         tcg_gen_mov_i32(dest, src);
2399     } else {
2400         /* normal move */
2401         uint16_t dest_ea;
2402         dest_ea = ((insn >> 9) & 7) | (op << 3);
2403         DEST_EA(env, dest_ea, opsize, src, NULL);
2404         /* This will be correct because loads sign extend.  */
2405         gen_logic_cc(s, src, opsize);
2406     }
2407 }
2408 
2409 DISAS_INSN(negx)
2410 {
2411     TCGv z;
2412     TCGv src;
2413     TCGv addr;
2414     int opsize;
2415 
2416     opsize = insn_opsize(insn);
2417     SRC_EA(env, src, opsize, 1, &addr);
2418 
2419     gen_flush_flags(s); /* compute old Z */
2420 
2421     /* Perform subtract with borrow.
2422      * (X, N) = -(src + X);
2423      */
2424 
2425     z = tcg_const_i32(0);
2426     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, z, QREG_CC_X, z);
2427     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, z, z, QREG_CC_N, QREG_CC_X);
2428     tcg_temp_free(z);
2429     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
2430 
2431     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
2432 
2433     /* Compute signed-overflow for negation.  The normal formula for
2434      * subtraction is (res ^ dest) & (dest ^ src), but with dest==0
2435      * this simplifies to res & src.
2436      */
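         /* e.g. for a byte operand only src = -0x80 overflows: its negation
          * wraps back to -0x80, so res & src has the sign bit set; for any
          * other src the sign bit of res & src is clear and V stays 0.
          */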
2437 
2438     tcg_gen_and_i32(QREG_CC_V, QREG_CC_N, src);
2439 
2440     /* Copy the rest of the results into place.  */
2441     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
2442     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
2443 
2444     set_cc_op(s, CC_OP_FLAGS);
2445 
2446     /* result is in QREG_CC_N */
2447 
2448     DEST_EA(env, insn, opsize, QREG_CC_N, &addr);
2449 }
2450 
2451 DISAS_INSN(lea)
2452 {
2453     TCGv reg;
2454     TCGv tmp;
2455 
2456     reg = AREG(insn, 9);
2457     tmp = gen_lea(env, s, insn, OS_LONG);
2458     if (IS_NULL_QREG(tmp)) {
2459         gen_addr_fault(s);
2460         return;
2461     }
2462     tcg_gen_mov_i32(reg, tmp);
2463 }
2464 
2465 DISAS_INSN(clr)
2466 {
2467     int opsize;
2468     TCGv zero;
2469 
2470     zero = tcg_const_i32(0);
2471 
2472     opsize = insn_opsize(insn);
2473     DEST_EA(env, insn, opsize, zero, NULL);
2474     gen_logic_cc(s, zero, opsize);
2475     tcg_temp_free(zero);
2476 }
2477 
2478 static TCGv gen_get_ccr(DisasContext *s)
2479 {
2480     TCGv dest;
2481 
2482     gen_flush_flags(s);
2483     update_cc_op(s);
2484     dest = tcg_temp_new();
2485     gen_helper_get_ccr(dest, cpu_env);
2486     return dest;
2487 }
2488 
2489 DISAS_INSN(move_from_ccr)
2490 {
2491     TCGv ccr;
2492 
2493     ccr = gen_get_ccr(s);
2494     DEST_EA(env, insn, OS_WORD, ccr, NULL);
2495 }
2496 
2497 DISAS_INSN(neg)
2498 {
2499     TCGv src1;
2500     TCGv dest;
2501     TCGv addr;
2502     int opsize;
2503 
2504     opsize = insn_opsize(insn);
2505     SRC_EA(env, src1, opsize, 1, &addr);
2506     dest = tcg_temp_new();
2507     tcg_gen_neg_i32(dest, src1);
2508     set_cc_op(s, CC_OP_SUBB + opsize);
2509     gen_update_cc_add(dest, src1, opsize);
2510     tcg_gen_setcondi_i32(TCG_COND_NE, QREG_CC_X, dest, 0);
2511     DEST_EA(env, insn, opsize, dest, &addr);
2512     tcg_temp_free(dest);
2513 }
2514 
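     /* With cc_op == CC_OP_FLAGS, C and X live in bit 0 of their
      * temporaries, N and V in the sign bit, and Z is stored inverted
      * (the flag is set when QREG_CC_Z is zero), which is why the movi
      * values below are 1/0, -1/0 and 0/1 respectively.
      */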
2515 static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
2516 {
2517     if (ccr_only) {
2518         tcg_gen_movi_i32(QREG_CC_C, val & CCF_C ? 1 : 0);
2519         tcg_gen_movi_i32(QREG_CC_V, val & CCF_V ? -1 : 0);
2520         tcg_gen_movi_i32(QREG_CC_Z, val & CCF_Z ? 0 : 1);
2521         tcg_gen_movi_i32(QREG_CC_N, val & CCF_N ? -1 : 0);
2522         tcg_gen_movi_i32(QREG_CC_X, val & CCF_X ? 1 : 0);
2523     } else {
2524         gen_helper_set_sr(cpu_env, tcg_const_i32(val));
2525     }
2526     set_cc_op(s, CC_OP_FLAGS);
2527 }
2528 
2529 static void gen_set_sr(CPUM68KState *env, DisasContext *s, uint16_t insn,
2530                        int ccr_only)
2531 {
2532     if ((insn & 0x38) == 0) {
2533         if (ccr_only) {
2534             gen_helper_set_ccr(cpu_env, DREG(insn, 0));
2535         } else {
2536             gen_helper_set_sr(cpu_env, DREG(insn, 0));
2537         }
2538         set_cc_op(s, CC_OP_FLAGS);
2539     } else if ((insn & 0x3f) == 0x3c) {
2540         uint16_t val;
2541         val = read_im16(env, s);
2542         gen_set_sr_im(s, val, ccr_only);
2543     } else {
2544         disas_undef(env, s, insn);
2545     }
2546 }
2547 
2548 
2549 DISAS_INSN(move_to_ccr)
2550 {
2551     gen_set_sr(env, s, insn, 1);
2552 }
2553 
2554 DISAS_INSN(not)
2555 {
2556     TCGv src1;
2557     TCGv dest;
2558     TCGv addr;
2559     int opsize;
2560 
2561     opsize = insn_opsize(insn);
2562     SRC_EA(env, src1, opsize, 1, &addr);
2563     dest = tcg_temp_new();
2564     tcg_gen_not_i32(dest, src1);
2565     DEST_EA(env, insn, opsize, dest, &addr);
2566     gen_logic_cc(s, dest, opsize);
2567 }
2568 
2569 DISAS_INSN(swap)
2570 {
2571     TCGv src1;
2572     TCGv src2;
2573     TCGv reg;
2574 
2575     src1 = tcg_temp_new();
2576     src2 = tcg_temp_new();
2577     reg = DREG(insn, 0);
2578     tcg_gen_shli_i32(src1, reg, 16);
2579     tcg_gen_shri_i32(src2, reg, 16);
2580     tcg_gen_or_i32(reg, src1, src2);
2581     tcg_temp_free(src2);
2582     tcg_temp_free(src1);
2583     gen_logic_cc(s, reg, OS_LONG);
2584 }
2585 
2586 DISAS_INSN(bkpt)
2587 {
2588     gen_exception(s, s->pc - 2, EXCP_DEBUG);
2589 }
2590 
2591 DISAS_INSN(pea)
2592 {
2593     TCGv tmp;
2594 
2595     tmp = gen_lea(env, s, insn, OS_LONG);
2596     if (IS_NULL_QREG(tmp)) {
2597         gen_addr_fault(s);
2598         return;
2599     }
2600     gen_push(s, tmp);
2601 }
2602 
2603 DISAS_INSN(ext)
2604 {
2605     int op;
2606     TCGv reg;
2607     TCGv tmp;
2608 
2609     reg = DREG(insn, 0);
2610     op = (insn >> 6) & 7;
2611     tmp = tcg_temp_new();
2612     if (op == 3)
2613         tcg_gen_ext16s_i32(tmp, reg);
2614     else
2615         tcg_gen_ext8s_i32(tmp, reg);
2616     if (op == 2)
2617         gen_partset_reg(OS_WORD, reg, tmp);
2618     else
2619         tcg_gen_mov_i32(reg, tmp);
2620     gen_logic_cc(s, tmp, OS_LONG);
2621     tcg_temp_free(tmp);
2622 }
2623 
2624 DISAS_INSN(tst)
2625 {
2626     int opsize;
2627     TCGv tmp;
2628 
2629     opsize = insn_opsize(insn);
2630     SRC_EA(env, tmp, opsize, 1, NULL);
2631     gen_logic_cc(s, tmp, opsize);
2632 }
2633 
2634 DISAS_INSN(pulse)
2635 {
2636   /* Implemented as a NOP.  */
2637 }
2638 
2639 DISAS_INSN(illegal)
2640 {
2641     gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
2642 }
2643 
2644 /* ??? This should be atomic.  */
2645 DISAS_INSN(tas)
2646 {
2647     TCGv dest;
2648     TCGv src1;
2649     TCGv addr;
2650 
2651     dest = tcg_temp_new();
2652     SRC_EA(env, src1, OS_BYTE, 1, &addr);
2653     gen_logic_cc(s, src1, OS_BYTE);
2654     tcg_gen_ori_i32(dest, src1, 0x80);
2655     DEST_EA(env, insn, OS_BYTE, dest, &addr);
2656     tcg_temp_free(dest);
2657 }
2658 
2659 DISAS_INSN(mull)
2660 {
2661     uint16_t ext;
2662     TCGv src1;
2663     int sign;
2664 
2665     ext = read_im16(env, s);
2666 
2667     sign = ext & 0x800;
2668 
2669     if (ext & 0x400) {
2670         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
2671             gen_exception(s, s->pc - 4, EXCP_UNSUPPORTED);
2672             return;
2673         }
2674 
2675         SRC_EA(env, src1, OS_LONG, 0, NULL);
2676 
2677         if (sign) {
2678             tcg_gen_muls2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2679         } else {
2680             tcg_gen_mulu2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2681         }
2682         /* if Dl == Dh, 68040 returns low word */
2683         tcg_gen_mov_i32(DREG(ext, 0), QREG_CC_N);
2684         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_Z);
2685         tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N);
2686 
2687         tcg_gen_movi_i32(QREG_CC_V, 0);
2688         tcg_gen_movi_i32(QREG_CC_C, 0);
2689 
2690         set_cc_op(s, CC_OP_FLAGS);
2691         return;
2692     }
2693     SRC_EA(env, src1, OS_LONG, 0, NULL);
2694     if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
2695         tcg_gen_movi_i32(QREG_CC_C, 0);
2696         if (sign) {
2697             tcg_gen_muls2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2698             /* QREG_CC_V is -(QREG_CC_V != (QREG_CC_N >> 31)) */
2699             tcg_gen_sari_i32(QREG_CC_Z, QREG_CC_N, 31);
2700             tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_Z);
2701         } else {
2702             tcg_gen_mulu2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2703             /* QREG_CC_V is -(QREG_CC_V != 0), use QREG_CC_C as 0 */
2704             tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_C);
2705         }
2706         tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
2707         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_N);
2708 
2709         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
2710 
2711         set_cc_op(s, CC_OP_FLAGS);
2712     } else {
2713         /* The upper 32 bits of the product are discarded, so
2714            muls.l and mulu.l are functionally equivalent.  */
2715         tcg_gen_mul_i32(DREG(ext, 12), src1, DREG(ext, 12));
2716         gen_logic_cc(s, DREG(ext, 12), OS_LONG);
2717     }
2718 }
2719 
2720 static void gen_link(DisasContext *s, uint16_t insn, int32_t offset)
2721 {
2722     TCGv reg;
2723     TCGv tmp;
2724 
2725     reg = AREG(insn, 0);
2726     tmp = tcg_temp_new();
2727     tcg_gen_subi_i32(tmp, QREG_SP, 4);
2728     gen_store(s, OS_LONG, tmp, reg);
2729     if ((insn & 7) != 7) {
2730         tcg_gen_mov_i32(reg, tmp);
2731     }
2732     tcg_gen_addi_i32(QREG_SP, tmp, offset);
2733     tcg_temp_free(tmp);
2734 }
2735 
2736 DISAS_INSN(link)
2737 {
2738     int16_t offset;
2739 
2740     offset = read_im16(env, s);
2741     gen_link(s, insn, offset);
2742 }
2743 
2744 DISAS_INSN(linkl)
2745 {
2746     int32_t offset;
2747 
2748     offset = read_im32(env, s);
2749     gen_link(s, insn, offset);
2750 }
2751 
2752 DISAS_INSN(unlk)
2753 {
2754     TCGv src;
2755     TCGv reg;
2756     TCGv tmp;
2757 
2758     src = tcg_temp_new();
2759     reg = AREG(insn, 0);
2760     tcg_gen_mov_i32(src, reg);
2761     tmp = gen_load(s, OS_LONG, src, 0);
2762     tcg_gen_mov_i32(reg, tmp);
2763     tcg_gen_addi_i32(QREG_SP, src, 4);
2764     tcg_temp_free(src);
2765 }
2766 
2767 DISAS_INSN(nop)
2768 {
2769 }
2770 
2771 DISAS_INSN(rtd)
2772 {
2773     TCGv tmp;
2774     int16_t offset = read_im16(env, s);
2775 
2776     tmp = gen_load(s, OS_LONG, QREG_SP, 0);
2777     tcg_gen_addi_i32(QREG_SP, QREG_SP, offset + 4);
2778     gen_jmp(s, tmp);
2779 }
2780 
2781 DISAS_INSN(rts)
2782 {
2783     TCGv tmp;
2784 
2785     tmp = gen_load(s, OS_LONG, QREG_SP, 0);
2786     tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
2787     gen_jmp(s, tmp);
2788 }
2789 
2790 DISAS_INSN(jump)
2791 {
2792     TCGv tmp;
2793 
2794     /* Load the target address first to ensure correct exception
2795        behavior.  */
2796     tmp = gen_lea(env, s, insn, OS_LONG);
2797     if (IS_NULL_QREG(tmp)) {
2798         gen_addr_fault(s);
2799         return;
2800     }
2801     if ((insn & 0x40) == 0) {
2802         /* jsr */
2803         gen_push(s, tcg_const_i32(s->pc));
2804     }
2805     gen_jmp(s, tmp);
2806 }
2807 
2808 DISAS_INSN(addsubq)
2809 {
2810     TCGv src;
2811     TCGv dest;
2812     TCGv val;
2813     int imm;
2814     TCGv addr;
2815     int opsize;
2816 
2817     if ((insn & 070) == 010) {
2818         /* Operation on address register is always long.  */
2819         opsize = OS_LONG;
2820     } else {
2821         opsize = insn_opsize(insn);
2822     }
2823     SRC_EA(env, src, opsize, 1, &addr);
2824     imm = (insn >> 9) & 7;
2825     if (imm == 0) {
2826         imm = 8;
2827     }
2828     val = tcg_const_i32(imm);
2829     dest = tcg_temp_new();
2830     tcg_gen_mov_i32(dest, src);
2831     if ((insn & 0x38) == 0x08) {
2832         /* Don't update condition codes if the destination is an
2833            address register.  */
2834         if (insn & 0x0100) {
2835             tcg_gen_sub_i32(dest, dest, val);
2836         } else {
2837             tcg_gen_add_i32(dest, dest, val);
2838         }
2839     } else {
2840         if (insn & 0x0100) {
2841             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
2842             tcg_gen_sub_i32(dest, dest, val);
2843             set_cc_op(s, CC_OP_SUBB + opsize);
2844         } else {
2845             tcg_gen_add_i32(dest, dest, val);
2846             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
2847             set_cc_op(s, CC_OP_ADDB + opsize);
2848         }
2849         gen_update_cc_add(dest, val, opsize);
2850     }
2851     tcg_temp_free(val);
2852     DEST_EA(env, insn, opsize, dest, &addr);
2853     tcg_temp_free(dest);
2854 }
2855 
2856 DISAS_INSN(tpf)
2857 {
2858     switch (insn & 7) {
2859     case 2: /* One extension word.  */
2860         s->pc += 2;
2861         break;
2862     case 3: /* Two extension words.  */
2863         s->pc += 4;
2864         break;
2865     case 4: /* No extension words.  */
2866         break;
2867     default:
2868         disas_undef(env, s, insn);
2869     }
2870 }
2871 
2872 DISAS_INSN(branch)
2873 {
2874     int32_t offset;
2875     uint32_t base;
2876     int op;
2877     TCGLabel *l1;
2878 
2879     base = s->pc;
2880     op = (insn >> 8) & 0xf;
2881     offset = (int8_t)insn;
2882     if (offset == 0) {
2883         offset = (int16_t)read_im16(env, s);
2884     } else if (offset == -1) {
2885         offset = read_im32(env, s);
2886     }
2887     if (op == 1) {
2888         /* bsr */
2889         gen_push(s, tcg_const_i32(s->pc));
2890     }
2891     if (op > 1) {
2892         /* Bcc */
2893         l1 = gen_new_label();
2894         gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
2895         gen_jmp_tb(s, 1, base + offset);
2896         gen_set_label(l1);
2897         gen_jmp_tb(s, 0, s->pc);
2898     } else {
2899         /* Unconditional branch.  */
2900         gen_jmp_tb(s, 0, base + offset);
2901     }
2902 }
2903 
2904 DISAS_INSN(moveq)
2905 {
2906     tcg_gen_movi_i32(DREG(insn, 9), (int8_t)insn);
2907     gen_logic_cc(s, DREG(insn, 9), OS_LONG);
2908 }
2909 
2910 DISAS_INSN(mvzs)
2911 {
2912     int opsize;
2913     TCGv src;
2914     TCGv reg;
2915 
2916     if (insn & 0x40)
2917         opsize = OS_WORD;
2918     else
2919         opsize = OS_BYTE;
2920     SRC_EA(env, src, opsize, (insn & 0x80) == 0, NULL);
2921     reg = DREG(insn, 9);
2922     tcg_gen_mov_i32(reg, src);
2923     gen_logic_cc(s, src, opsize);
2924 }
2925 
2926 DISAS_INSN(or)
2927 {
2928     TCGv reg;
2929     TCGv dest;
2930     TCGv src;
2931     TCGv addr;
2932     int opsize;
2933 
2934     opsize = insn_opsize(insn);
2935     reg = gen_extend(DREG(insn, 9), opsize, 0);
2936     dest = tcg_temp_new();
2937     if (insn & 0x100) {
2938         SRC_EA(env, src, opsize, 0, &addr);
2939         tcg_gen_or_i32(dest, src, reg);
2940         DEST_EA(env, insn, opsize, dest, &addr);
2941     } else {
2942         SRC_EA(env, src, opsize, 0, NULL);
2943         tcg_gen_or_i32(dest, src, reg);
2944         gen_partset_reg(opsize, DREG(insn, 9), dest);
2945     }
2946     gen_logic_cc(s, dest, opsize);
2947     tcg_temp_free(dest);
2948 }
2949 
2950 DISAS_INSN(suba)
2951 {
2952     TCGv src;
2953     TCGv reg;
2954 
2955     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
2956     reg = AREG(insn, 9);
2957     tcg_gen_sub_i32(reg, reg, src);
2958 }
2959 
2960 static inline void gen_subx(DisasContext *s, TCGv src, TCGv dest, int opsize)
2961 {
2962     TCGv tmp;
2963 
2964     gen_flush_flags(s); /* compute old Z */
2965 
2966     /* Perform subtract with borrow.
2967      * (X, N) = dest - (src + X);
2968      */
2969 
2970     tmp = tcg_const_i32(0);
2971     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, tmp, QREG_CC_X, tmp);
2972     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, dest, tmp, QREG_CC_N, QREG_CC_X);
2973     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
2974     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
2975 
2976     /* Compute signed-overflow for subtraction.  */
2977 
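         /* V = (res ^ dest) & (dest ^ src): the operands had different signs
          * and the sign of the result differs from the sign of dest.
          */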
2978     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, dest);
2979     tcg_gen_xor_i32(tmp, dest, src);
2980     tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, tmp);
2981     tcg_temp_free(tmp);
2982 
2983     /* Copy the rest of the results into place.  */
2984     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
2985     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
2986 
2987     set_cc_op(s, CC_OP_FLAGS);
2988 
2989     /* result is in QREG_CC_N */
2990 }
2991 
2992 DISAS_INSN(subx_reg)
2993 {
2994     TCGv dest;
2995     TCGv src;
2996     int opsize;
2997 
2998     opsize = insn_opsize(insn);
2999 
3000     src = gen_extend(DREG(insn, 0), opsize, 1);
3001     dest = gen_extend(DREG(insn, 9), opsize, 1);
3002 
3003     gen_subx(s, src, dest, opsize);
3004 
3005     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3006 }
3007 
3008 DISAS_INSN(subx_mem)
3009 {
3010     TCGv src;
3011     TCGv addr_src;
3012     TCGv dest;
3013     TCGv addr_dest;
3014     int opsize;
3015 
3016     opsize = insn_opsize(insn);
3017 
3018     addr_src = AREG(insn, 0);
3019     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3020     src = gen_load(s, opsize, addr_src, 1);
3021 
3022     addr_dest = AREG(insn, 9);
3023     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3024     dest = gen_load(s, opsize, addr_dest, 1);
3025 
3026     gen_subx(s, src, dest, opsize);
3027 
3028     gen_store(s, opsize, addr_dest, QREG_CC_N);
3029 }
3030 
3031 DISAS_INSN(mov3q)
3032 {
3033     TCGv src;
3034     int val;
3035 
3036     val = (insn >> 9) & 7;
3037     if (val == 0)
3038         val = -1;
3039     src = tcg_const_i32(val);
3040     gen_logic_cc(s, src, OS_LONG);
3041     DEST_EA(env, insn, OS_LONG, src, NULL);
3042     tcg_temp_free(src);
3043 }
3044 
3045 DISAS_INSN(cmp)
3046 {
3047     TCGv src;
3048     TCGv reg;
3049     int opsize;
3050 
3051     opsize = insn_opsize(insn);
3052     SRC_EA(env, src, opsize, 1, NULL);
3053     reg = gen_extend(DREG(insn, 9), opsize, 1);
3054     gen_update_cc_cmp(s, reg, src, opsize);
3055 }
3056 
3057 DISAS_INSN(cmpa)
3058 {
3059     int opsize;
3060     TCGv src;
3061     TCGv reg;
3062 
3063     if (insn & 0x100) {
3064         opsize = OS_LONG;
3065     } else {
3066         opsize = OS_WORD;
3067     }
3068     SRC_EA(env, src, opsize, 1, NULL);
3069     reg = AREG(insn, 9);
3070     gen_update_cc_cmp(s, reg, src, OS_LONG);
3071 }
3072 
3073 DISAS_INSN(cmpm)
3074 {
3075     int opsize = insn_opsize(insn);
3076     TCGv src, dst;
3077 
3078     /* Post-increment load (mode 3) from Ay.  */
3079     src = gen_ea_mode(env, s, 3, REG(insn, 0), opsize,
3080                       NULL_QREG, NULL, EA_LOADS);
3081     /* Post-increment load (mode 3) from Ax.  */
3082     dst = gen_ea_mode(env, s, 3, REG(insn, 9), opsize,
3083                       NULL_QREG, NULL, EA_LOADS);
3084 
3085     gen_update_cc_cmp(s, dst, src, opsize);
3086 }
3087 
3088 DISAS_INSN(eor)
3089 {
3090     TCGv src;
3091     TCGv dest;
3092     TCGv addr;
3093     int opsize;
3094 
3095     opsize = insn_opsize(insn);
3096 
3097     SRC_EA(env, src, opsize, 0, &addr);
3098     dest = tcg_temp_new();
3099     tcg_gen_xor_i32(dest, src, DREG(insn, 9));
3100     gen_logic_cc(s, dest, opsize);
3101     DEST_EA(env, insn, opsize, dest, &addr);
3102     tcg_temp_free(dest);
3103 }
3104 
3105 static void do_exg(TCGv reg1, TCGv reg2)
3106 {
3107     TCGv temp = tcg_temp_new();
3108     tcg_gen_mov_i32(temp, reg1);
3109     tcg_gen_mov_i32(reg1, reg2);
3110     tcg_gen_mov_i32(reg2, temp);
3111     tcg_temp_free(temp);
3112 }
3113 
3114 DISAS_INSN(exg_dd)
3115 {
3116     /* exchange Dx and Dy */
3117     do_exg(DREG(insn, 9), DREG(insn, 0));
3118 }
3119 
3120 DISAS_INSN(exg_aa)
3121 {
3122     /* exchange Ax and Ay */
3123     do_exg(AREG(insn, 9), AREG(insn, 0));
3124 }
3125 
3126 DISAS_INSN(exg_da)
3127 {
3128     /* exchange Dx and Ay */
3129     do_exg(DREG(insn, 9), AREG(insn, 0));
3130 }
3131 
3132 DISAS_INSN(and)
3133 {
3134     TCGv src;
3135     TCGv reg;
3136     TCGv dest;
3137     TCGv addr;
3138     int opsize;
3139 
3140     dest = tcg_temp_new();
3141 
3142     opsize = insn_opsize(insn);
3143     reg = DREG(insn, 9);
3144     if (insn & 0x100) {
3145         SRC_EA(env, src, opsize, 0, &addr);
3146         tcg_gen_and_i32(dest, src, reg);
3147         DEST_EA(env, insn, opsize, dest, &addr);
3148     } else {
3149         SRC_EA(env, src, opsize, 0, NULL);
3150         tcg_gen_and_i32(dest, src, reg);
3151         gen_partset_reg(opsize, reg, dest);
3152     }
3153     gen_logic_cc(s, dest, opsize);
3154     tcg_temp_free(dest);
3155 }
3156 
3157 DISAS_INSN(adda)
3158 {
3159     TCGv src;
3160     TCGv reg;
3161 
3162     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3163     reg = AREG(insn, 9);
3164     tcg_gen_add_i32(reg, reg, src);
3165 }
3166 
3167 static inline void gen_addx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3168 {
3169     TCGv tmp;
3170 
3171     gen_flush_flags(s); /* compute old Z */
3172 
3173     /* Perform addition with carry.
3174      * (X, N) = src + dest + X;
3175      */
3176 
3177     tmp = tcg_const_i32(0);
3178     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_X, tmp, dest, tmp);
3179     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_N, QREG_CC_X, src, tmp);
3180     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3181 
3182     /* Compute signed-overflow for addition.  */
3183 
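         /* V = (res ^ src) & ~(dest ^ src): the operands had the same sign
          * and the sign of the result differs from it.
          */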
3184     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3185     tcg_gen_xor_i32(tmp, dest, src);
3186     tcg_gen_andc_i32(QREG_CC_V, QREG_CC_V, tmp);
3187     tcg_temp_free(tmp);
3188 
3189     /* Copy the rest of the results into place.  */
3190     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3191     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3192 
3193     set_cc_op(s, CC_OP_FLAGS);
3194 
3195     /* result is in QREG_CC_N */
3196 }
3197 
3198 DISAS_INSN(addx_reg)
3199 {
3200     TCGv dest;
3201     TCGv src;
3202     int opsize;
3203 
3204     opsize = insn_opsize(insn);
3205 
3206     dest = gen_extend(DREG(insn, 9), opsize, 1);
3207     src = gen_extend(DREG(insn, 0), opsize, 1);
3208 
3209     gen_addx(s, src, dest, opsize);
3210 
3211     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3212 }
3213 
3214 DISAS_INSN(addx_mem)
3215 {
3216     TCGv src;
3217     TCGv addr_src;
3218     TCGv dest;
3219     TCGv addr_dest;
3220     int opsize;
3221 
3222     opsize = insn_opsize(insn);
3223 
3224     addr_src = AREG(insn, 0);
3225     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3226     src = gen_load(s, opsize, addr_src, 1);
3227 
3228     addr_dest = AREG(insn, 9);
3229     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3230     dest = gen_load(s, opsize, addr_dest, 1);
3231 
3232     gen_addx(s, src, dest, opsize);
3233 
3234     gen_store(s, opsize, addr_dest, QREG_CC_N);
3235 }
3236 
3237 static inline void shift_im(DisasContext *s, uint16_t insn, int opsize)
3238 {
3239     int count = (insn >> 9) & 7;
3240     int logical = insn & 8;
3241     int left = insn & 0x100;
3242     int bits = opsize_bytes(opsize) * 8;
3243     TCGv reg = gen_extend(DREG(insn, 0), opsize, !logical);
3244 
3245     if (count == 0) {
3246         count = 8;
3247     }
3248 
3249     tcg_gen_movi_i32(QREG_CC_V, 0);
3250     if (left) {
3251         tcg_gen_shri_i32(QREG_CC_C, reg, bits - count);
3252         tcg_gen_shli_i32(QREG_CC_N, reg, count);
3253 
3254         /* Note that ColdFire always clears V (done above),
3255            while M68000 sets it if the most significant bit is changed at
3256            any time during the shift operation */
3257         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3258             /* if shift count >= bits, V is (reg != 0) */
3259             if (count >= bits) {
3260                 tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, reg, QREG_CC_V);
3261             } else {
3262                 TCGv t0 = tcg_temp_new();
3263                 tcg_gen_sari_i32(QREG_CC_V, reg, bits - 1);
3264                 tcg_gen_sari_i32(t0, reg, bits - count - 1);
3265                 tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, t0);
3266                 tcg_temp_free(t0);
3267             }
3268             tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3269         }
3270     } else {
3271         tcg_gen_shri_i32(QREG_CC_C, reg, count - 1);
3272         if (logical) {
3273             tcg_gen_shri_i32(QREG_CC_N, reg, count);
3274         } else {
3275             tcg_gen_sari_i32(QREG_CC_N, reg, count);
3276         }
3277     }
3278 
3279     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3280     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3281     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3282     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3283 
3284     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3285     set_cc_op(s, CC_OP_FLAGS);
3286 }
3287 
3288 static inline void shift_reg(DisasContext *s, uint16_t insn, int opsize)
3289 {
3290     int logical = insn & 8;
3291     int left = insn & 0x100;
3292     int bits = opsize_bytes(opsize) * 8;
3293     TCGv reg = gen_extend(DREG(insn, 0), opsize, !logical);
3294     TCGv s32;
3295     TCGv_i64 t64, s64;
3296 
3297     t64 = tcg_temp_new_i64();
3298     s64 = tcg_temp_new_i64();
3299     s32 = tcg_temp_new();
3300 
3301     /* Note that m68k truncates the shift count modulo 64, not 32.
3302        In addition, a 64-bit shift makes it easy to find "the last
3303        bit shifted out", for the carry flag.  */
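         /* For right shifts the value is first placed in the high half of the
          * 64-bit temporary, so the last bit shifted out ends up in bit 31 of
          * the low half, from where the carry is picked up below.
          */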
3304     tcg_gen_andi_i32(s32, DREG(insn, 9), 63);
3305     tcg_gen_extu_i32_i64(s64, s32);
3306     tcg_gen_extu_i32_i64(t64, reg);
3307 
3308     /* Optimistically set V=0.  Also used as a zero source below.  */
3309     tcg_gen_movi_i32(QREG_CC_V, 0);
3310     if (left) {
3311         tcg_gen_shl_i64(t64, t64, s64);
3312 
3313         if (opsize == OS_LONG) {
3314             tcg_gen_extr_i64_i32(QREG_CC_N, QREG_CC_C, t64);
3315             /* Note that C=0 if shift count is 0, and we get that for free.  */
3316         } else {
3317             TCGv zero = tcg_const_i32(0);
3318             tcg_gen_extrl_i64_i32(QREG_CC_N, t64);
3319             tcg_gen_shri_i32(QREG_CC_C, QREG_CC_N, bits);
3320             tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3321                                 s32, zero, zero, QREG_CC_C);
3322             tcg_temp_free(zero);
3323         }
3324         tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3325 
3326         /* X = C, but only if the shift count was non-zero.  */
3327         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3328                             QREG_CC_C, QREG_CC_X);
3329 
3330         /* M68000 sets V if the most significant bit is changed at
3331          * any time during the shift operation.  Do this via creating
3332          * an extension of the sign bit, comparing, and discarding
3333          * the bits below the sign bit.  I.e.
3334          *     int64_t s = (intN_t)reg;
3335          *     int64_t t = (int64_t)(intN_t)reg << count;
3336          *     V = ((s ^ t) & (-1 << (bits - 1))) != 0
3337          */
3338         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3339             TCGv_i64 tt = tcg_const_i64(32);
3340             /* if shift is greater than 32, use 32 */
3341             tcg_gen_movcond_i64(TCG_COND_GT, s64, s64, tt, tt, s64);
3342             tcg_temp_free_i64(tt);
3343             /* Sign extend the input to 64 bits; re-do the shift.  */
3344             tcg_gen_ext_i32_i64(t64, reg);
3345             tcg_gen_shl_i64(s64, t64, s64);
3346             /* Clear all bits that are unchanged.  */
3347             tcg_gen_xor_i64(t64, t64, s64);
3348             /* Ignore the bits below the sign bit.  */
3349             tcg_gen_andi_i64(t64, t64, -1ULL << (bits - 1));
3350             /* If any bits remain set, we have overflow.  */
3351             tcg_gen_setcondi_i64(TCG_COND_NE, t64, t64, 0);
3352             tcg_gen_extrl_i64_i32(QREG_CC_V, t64);
3353             tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3354         }
3355     } else {
3356         tcg_gen_shli_i64(t64, t64, 32);
3357         if (logical) {
3358             tcg_gen_shr_i64(t64, t64, s64);
3359         } else {
3360             tcg_gen_sar_i64(t64, t64, s64);
3361         }
3362         tcg_gen_extr_i64_i32(QREG_CC_C, QREG_CC_N, t64);
3363 
3364         /* Note that C=0 if shift count is 0, and we get that for free.  */
3365         tcg_gen_shri_i32(QREG_CC_C, QREG_CC_C, 31);
3366 
3367         /* X = C, but only if the shift count was non-zero.  */
3368         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3369                             QREG_CC_C, QREG_CC_X);
3370     }
3371     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3372     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3373 
3374     tcg_temp_free(s32);
3375     tcg_temp_free_i64(s64);
3376     tcg_temp_free_i64(t64);
3377 
3378     /* Write back the result.  */
3379     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3380     set_cc_op(s, CC_OP_FLAGS);
3381 }
3382 
3383 DISAS_INSN(shift8_im)
3384 {
3385     shift_im(s, insn, OS_BYTE);
3386 }
3387 
3388 DISAS_INSN(shift16_im)
3389 {
3390     shift_im(s, insn, OS_WORD);
3391 }
3392 
3393 DISAS_INSN(shift_im)
3394 {
3395     shift_im(s, insn, OS_LONG);
3396 }
3397 
3398 DISAS_INSN(shift8_reg)
3399 {
3400     shift_reg(s, insn, OS_BYTE);
3401 }
3402 
3403 DISAS_INSN(shift16_reg)
3404 {
3405     shift_reg(s, insn, OS_WORD);
3406 }
3407 
3408 DISAS_INSN(shift_reg)
3409 {
3410     shift_reg(s, insn, OS_LONG);
3411 }
3412 
3413 DISAS_INSN(shift_mem)
3414 {
3415     int logical = insn & 8;
3416     int left = insn & 0x100;
3417     TCGv src;
3418     TCGv addr;
3419 
3420     SRC_EA(env, src, OS_WORD, !logical, &addr);
3421     tcg_gen_movi_i32(QREG_CC_V, 0);
3422     if (left) {
3423         tcg_gen_shri_i32(QREG_CC_C, src, 15);
3424         tcg_gen_shli_i32(QREG_CC_N, src, 1);
3425 
3426         /* Note that ColdFire always clears V,
3427            while M68000 sets it if the most significant bit is changed at
3428            any time during the shift operation */
3429         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3430             src = gen_extend(src, OS_WORD, 1);
3431             tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3432         }
3433     } else {
3434         tcg_gen_mov_i32(QREG_CC_C, src);
3435         if (logical) {
3436             tcg_gen_shri_i32(QREG_CC_N, src, 1);
3437         } else {
3438             tcg_gen_sari_i32(QREG_CC_N, src, 1);
3439         }
3440     }
3441 
3442     gen_ext(QREG_CC_N, QREG_CC_N, OS_WORD, 1);
3443     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3444     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3445     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3446 
3447     DEST_EA(env, insn, OS_WORD, QREG_CC_N, &addr);
3448     set_cc_op(s, CC_OP_FLAGS);
3449 }
3450 
3451 static void rotate(TCGv reg, TCGv shift, int left, int size)
3452 {
3453     switch (size) {
3454     case 8:
3455         /* Replicate the 8-bit input so that a 32-bit rotate works.  */
3456         tcg_gen_ext8u_i32(reg, reg);
3457         tcg_gen_muli_i32(reg, reg, 0x01010101);
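             /* e.g. 0x000000ab becomes 0xabababab, so a 32-bit rotate of the
              * replicated value leaves the intended 8-bit rotation in every
              * byte, including the low byte that is kept as the result. */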
3458         goto do_long;
3459     case 16:
3460         /* Replicate the 16-bit input so that a 32-bit rotate works.  */
3461         tcg_gen_deposit_i32(reg, reg, reg, 16, 16);
3462         goto do_long;
3463     do_long:
3464     default:
3465         if (left) {
3466             tcg_gen_rotl_i32(reg, reg, shift);
3467         } else {
3468             tcg_gen_rotr_i32(reg, reg, shift);
3469         }
3470     }
3471 
3472     /* compute flags */
3473 
3474     switch (size) {
3475     case 8:
3476         tcg_gen_ext8s_i32(reg, reg);
3477         break;
3478     case 16:
3479         tcg_gen_ext16s_i32(reg, reg);
3480         break;
3481     default:
3482         break;
3483     }
3484 
3485     /* QREG_CC_X is not affected */
3486 
3487     tcg_gen_mov_i32(QREG_CC_N, reg);
3488     tcg_gen_mov_i32(QREG_CC_Z, reg);
3489 
3490     if (left) {
3491         tcg_gen_andi_i32(QREG_CC_C, reg, 1);
3492     } else {
3493         tcg_gen_shri_i32(QREG_CC_C, reg, 31);
3494     }
3495 
3496     tcg_gen_movi_i32(QREG_CC_V, 0); /* always cleared */
3497 }
3498 
3499 static void rotate_x_flags(TCGv reg, TCGv X, int size)
3500 {
3501     switch (size) {
3502     case 8:
3503         tcg_gen_ext8s_i32(reg, reg);
3504         break;
3505     case 16:
3506         tcg_gen_ext16s_i32(reg, reg);
3507         break;
3508     default:
3509         break;
3510     }
3511     tcg_gen_mov_i32(QREG_CC_N, reg);
3512     tcg_gen_mov_i32(QREG_CC_Z, reg);
3513     tcg_gen_mov_i32(QREG_CC_X, X);
3514     tcg_gen_mov_i32(QREG_CC_C, X);
3515     tcg_gen_movi_i32(QREG_CC_V, 0);
3516 }
3517 
3518 /* Result of rotate_x() is valid if 0 <= shift <= size */
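     /* e.g. an 8-bit ROXL by 3 with reg = 0xa5 and X = 1 rotates the 9-bit
      * value [X:reg] = 0x1a5 left by 3: the code below computes
      * reg = (0xa5 << 3) | (0xa5 >> 6) | (1 << 2) = 0x52e, whose low 8 bits
      * 0x2e are the rotated value, and the new X is taken from bit 8 (here 1).
      */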
3519 static TCGv rotate_x(TCGv reg, TCGv shift, int left, int size)
3520 {
3521     TCGv X, shl, shr, shx, sz, zero;
3522 
3523     sz = tcg_const_i32(size);
3524 
3525     shr = tcg_temp_new();
3526     shl = tcg_temp_new();
3527     shx = tcg_temp_new();
3528     if (left) {
3529         tcg_gen_mov_i32(shl, shift);      /* shl = shift */
3530         tcg_gen_movi_i32(shr, size + 1);
3531         tcg_gen_sub_i32(shr, shr, shift); /* shr = size + 1 - shift */
3532         tcg_gen_subi_i32(shx, shift, 1);  /* shx = shift - 1 */
3533         /* shx = shx < 0 ? size : shx; */
3534         zero = tcg_const_i32(0);
3535         tcg_gen_movcond_i32(TCG_COND_LT, shx, shx, zero, sz, shx);
3536         tcg_temp_free(zero);
3537     } else {
3538         tcg_gen_mov_i32(shr, shift);      /* shr = shift */
3539         tcg_gen_movi_i32(shl, size + 1);
3540         tcg_gen_sub_i32(shl, shl, shift); /* shl = size + 1 - shift */
3541         tcg_gen_sub_i32(shx, sz, shift); /* shx = size - shift */
3542     }
3543 
3544     /* reg = (reg << shl) | (reg >> shr) | (x << shx); */
3545 
3546     tcg_gen_shl_i32(shl, reg, shl);
3547     tcg_gen_shr_i32(shr, reg, shr);
3548     tcg_gen_or_i32(reg, shl, shr);
3549     tcg_temp_free(shl);
3550     tcg_temp_free(shr);
3551     tcg_gen_shl_i32(shx, QREG_CC_X, shx);
3552     tcg_gen_or_i32(reg, reg, shx);
3553     tcg_temp_free(shx);
3554 
3555     /* X = (reg >> size) & 1 */
3556 
3557     X = tcg_temp_new();
3558     tcg_gen_shr_i32(X, reg, sz);
3559     tcg_gen_andi_i32(X, X, 1);
3560     tcg_temp_free(sz);
3561 
3562     return X;
3563 }
3564 
3565 /* Result of rotate32_x() is valid if 0 <= shift < 33 */
3566 static TCGv rotate32_x(TCGv reg, TCGv shift, int left)
3567 {
3568     TCGv_i64 t0, shift64;
3569     TCGv X, lo, hi, zero;
3570 
3571     shift64 = tcg_temp_new_i64();
3572     tcg_gen_extu_i32_i64(shift64, shift);
3573 
3574     t0 = tcg_temp_new_i64();
3575 
3576     X = tcg_temp_new();
3577     lo = tcg_temp_new();
3578     hi = tcg_temp_new();
3579 
3580     if (left) {
3581         /* create [reg:X:..] */
3582 
3583         tcg_gen_shli_i32(lo, QREG_CC_X, 31);
3584         tcg_gen_concat_i32_i64(t0, lo, reg);
3585 
3586         /* rotate */
3587 
3588         tcg_gen_rotl_i64(t0, t0, shift64);
3589         tcg_temp_free_i64(shift64);
3590 
3591         /* result is [reg:..:reg:X] */
3592 
3593         tcg_gen_extr_i64_i32(lo, hi, t0);
3594         tcg_gen_andi_i32(X, lo, 1);
3595 
3596         tcg_gen_shri_i32(lo, lo, 1);
3597     } else {
3598         /* create [..:X:reg] */
3599 
3600         tcg_gen_concat_i32_i64(t0, reg, QREG_CC_X);
3601 
3602         tcg_gen_rotr_i64(t0, t0, shift64);
3603         tcg_temp_free_i64(shift64);
3604 
3605         /* result is value: [X:reg:..:reg] */
3606 
3607         tcg_gen_extr_i64_i32(lo, hi, t0);
3608 
3609         /* extract X */
3610 
3611         tcg_gen_shri_i32(X, hi, 31);
3612 
3613         /* extract result */
3614 
3615         tcg_gen_shli_i32(hi, hi, 1);
3616     }
3617     tcg_temp_free_i64(t0);
3618     tcg_gen_or_i32(lo, lo, hi);
3619     tcg_temp_free(hi);
3620 
3621     /* if shift == 0, register and X are not affected */
3622 
3623     zero = tcg_const_i32(0);
3624     tcg_gen_movcond_i32(TCG_COND_EQ, X, shift, zero, QREG_CC_X, X);
3625     tcg_gen_movcond_i32(TCG_COND_EQ, reg, shift, zero, reg, lo);
3626     tcg_temp_free(zero);
3627     tcg_temp_free(lo);
3628 
3629     return X;
3630 }
3631 
3632 DISAS_INSN(rotate_im)
3633 {
3634     TCGv shift;
3635     int tmp;
3636     int left = (insn & 0x100);
3637 
3638     tmp = (insn >> 9) & 7;
3639     if (tmp == 0) {
3640         tmp = 8;
3641     }
3642 
3643     shift = tcg_const_i32(tmp);
3644     if (insn & 8) {
3645         rotate(DREG(insn, 0), shift, left, 32);
3646     } else {
3647         TCGv X = rotate32_x(DREG(insn, 0), shift, left);
3648         rotate_x_flags(DREG(insn, 0), X, 32);
3649         tcg_temp_free(X);
3650     }
3651     tcg_temp_free(shift);
3652 
3653     set_cc_op(s, CC_OP_FLAGS);
3654 }
3655 
3656 DISAS_INSN(rotate8_im)
3657 {
3658     int left = (insn & 0x100);
3659     TCGv reg;
3660     TCGv shift;
3661     int tmp;
3662 
3663     reg = gen_extend(DREG(insn, 0), OS_BYTE, 0);
3664 
3665     tmp = (insn >> 9) & 7;
3666     if (tmp == 0) {
3667         tmp = 8;
3668     }
3669 
3670     shift = tcg_const_i32(tmp);
3671     if (insn & 8) {
3672         rotate(reg, shift, left, 8);
3673     } else {
3674         TCGv X = rotate_x(reg, shift, left, 8);
3675         rotate_x_flags(reg, X, 8);
3676         tcg_temp_free(X);
3677     }
3678     tcg_temp_free(shift);
3679     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3680     set_cc_op(s, CC_OP_FLAGS);
3681 }
3682 
3683 DISAS_INSN(rotate16_im)
3684 {
3685     int left = (insn & 0x100);
3686     TCGv reg;
3687     TCGv shift;
3688     int tmp;
3689 
3690     reg = gen_extend(DREG(insn, 0), OS_WORD, 0);
3691     tmp = (insn >> 9) & 7;
3692     if (tmp == 0) {
3693         tmp = 8;
3694     }
3695 
3696     shift = tcg_const_i32(tmp);
3697     if (insn & 8) {
3698         rotate(reg, shift, left, 16);
3699     } else {
3700         TCGv X = rotate_x(reg, shift, left, 16);
3701         rotate_x_flags(reg, X, 16);
3702         tcg_temp_free(X);
3703     }
3704     tcg_temp_free(shift);
3705     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3706     set_cc_op(s, CC_OP_FLAGS);
3707 }
3708 
3709 DISAS_INSN(rotate_reg)
3710 {
3711     TCGv reg;
3712     TCGv src;
3713     TCGv t0, t1;
3714     int left = (insn & 0x100);
3715 
3716     reg = DREG(insn, 0);
3717     src = DREG(insn, 9);
3718     /* shift in [0..63] */
3719     t0 = tcg_temp_new();
3720     tcg_gen_andi_i32(t0, src, 63);
3721     t1 = tcg_temp_new_i32();
3722     if (insn & 8) {
3723         tcg_gen_andi_i32(t1, src, 31);
3724         rotate(reg, t1, left, 32);
3725         /* if shift == 0, clear C */
3726         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3727                             t0, QREG_CC_V /* 0 */,
3728                             QREG_CC_V /* 0 */, QREG_CC_C);
3729     } else {
3730         TCGv X;
3731         /* rotate through X spans 33 bits, so reduce the count modulo 33 */
3732         tcg_gen_movi_i32(t1, 33);
3733         tcg_gen_remu_i32(t1, t0, t1);
3734         X = rotate32_x(DREG(insn, 0), t1, left);
3735         rotate_x_flags(DREG(insn, 0), X, 32);
3736         tcg_temp_free(X);
3737     }
3738     tcg_temp_free(t1);
3739     tcg_temp_free(t0);
3740     set_cc_op(s, CC_OP_FLAGS);
3741 }
3742 
3743 DISAS_INSN(rotate8_reg)
3744 {
3745     TCGv reg;
3746     TCGv src;
3747     TCGv t0, t1;
3748     int left = (insn & 0x100);
3749 
3750     reg = gen_extend(DREG(insn, 0), OS_BYTE, 0);
3751     src = DREG(insn, 9);
3752     /* shift in [0..63] */
3753     t0 = tcg_temp_new_i32();
3754     tcg_gen_andi_i32(t0, src, 63);
3755     t1 = tcg_temp_new_i32();
3756     if (insn & 8) {
3757         tcg_gen_andi_i32(t1, src, 7);
3758         rotate(reg, t1, left, 8);
3759         /* if shift == 0, clear C */
3760         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3761                             t0, QREG_CC_V /* 0 */,
3762                             QREG_CC_V /* 0 */, QREG_CC_C);
3763     } else {
3764         TCGv X;
3765         /* modulo 9 */
3766         tcg_gen_movi_i32(t1, 9);
3767         tcg_gen_remu_i32(t1, t0, t1);
3768         X = rotate_x(reg, t1, left, 8);
3769         rotate_x_flags(reg, X, 8);
3770         tcg_temp_free(X);
3771     }
3772     tcg_temp_free(t1);
3773     tcg_temp_free(t0);
3774     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3775     set_cc_op(s, CC_OP_FLAGS);
3776 }
3777 
3778 DISAS_INSN(rotate16_reg)
3779 {
3780     TCGv reg;
3781     TCGv src;
3782     TCGv t0, t1;
3783     int left = (insn & 0x100);
3784 
3785     reg = gen_extend(DREG(insn, 0), OS_WORD, 0);
3786     src = DREG(insn, 9);
3787     /* shift in [0..63] */
3788     t0 = tcg_temp_new_i32();
3789     tcg_gen_andi_i32(t0, src, 63);
3790     t1 = tcg_temp_new_i32();
3791     if (insn & 8) {
3792         tcg_gen_andi_i32(t1, src, 15);
3793         rotate(reg, t1, left, 16);
3794         /* if shift == 0, clear C */
3795         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3796                             t0, QREG_CC_V /* 0 */,
3797                             QREG_CC_V /* 0 */, QREG_CC_C);
3798     } else {
3799         TCGv X;
3800         /* modulo 17 */
3801         tcg_gen_movi_i32(t1, 17);
3802         tcg_gen_remu_i32(t1, t0, t1);
3803         X = rotate_x(reg, t1, left, 16);
3804         rotate_x_flags(reg, X, 16);
3805         tcg_temp_free(X);
3806     }
3807     tcg_temp_free(t1);
3808     tcg_temp_free(t0);
3809     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3810     set_cc_op(s, CC_OP_FLAGS);
3811 }
3812 
3813 DISAS_INSN(rotate_mem)
3814 {
3815     TCGv src;
3816     TCGv addr;
3817     TCGv shift;
3818     int left = (insn & 0x100);
3819 
3820     SRC_EA(env, src, OS_WORD, 0, &addr);
3821 
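         /* Memory rotates are always word-sized and rotate by exactly one bit. */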
3822     shift = tcg_const_i32(1);
3823     if (insn & 0x0200) {
3824         rotate(src, shift, left, 16);
3825     } else {
3826         TCGv X = rotate_x(src, shift, left, 16);
3827         rotate_x_flags(src, X, 16);
3828         tcg_temp_free(X);
3829     }
3830     tcg_temp_free(shift);
3831     DEST_EA(env, insn, OS_WORD, src, &addr);
3832     set_cc_op(s, CC_OP_FLAGS);
3833 }
3834 
3835 DISAS_INSN(bfext_reg)
3836 {
3837     int ext = read_im16(env, s);
3838     int is_sign = insn & 0x200;
3839     TCGv src = DREG(insn, 0);
3840     TCGv dst = DREG(ext, 12);
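         /* A width field of 0 in the extension word encodes a 32-bit field. */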
3841     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
3842     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
3843     int pos = 32 - ofs - len;        /* little bit-endian */
3844     TCGv tmp = tcg_temp_new();
3845     TCGv shift;
3846 
3847     /* In general, we're going to rotate the field so that it's at the
3848        top of the word and then right-shift by the compliment of the
3849        top of the word and then right-shift by the complement of the
3850     if (ext & 0x20) {
3851         /* Variable width.  */
3852         if (ext & 0x800) {
3853             /* Variable offset.  */
3854             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3855             tcg_gen_rotl_i32(tmp, src, tmp);
3856         } else {
3857             tcg_gen_rotli_i32(tmp, src, ofs);
3858         }
3859 
3860         shift = tcg_temp_new();
3861         tcg_gen_neg_i32(shift, DREG(ext, 0));
3862         tcg_gen_andi_i32(shift, shift, 31);
3863         tcg_gen_sar_i32(QREG_CC_N, tmp, shift);
3864         if (is_sign) {
3865             tcg_gen_mov_i32(dst, QREG_CC_N);
3866         } else {
3867             tcg_gen_shr_i32(dst, tmp, shift);
3868         }
3869         tcg_temp_free(shift);
3870     } else {
3871         /* Immediate width.  */
3872         if (ext & 0x800) {
3873             /* Variable offset */
3874             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3875             tcg_gen_rotl_i32(tmp, src, tmp);
3876             src = tmp;
3877             pos = 32 - len;
3878         } else {
3879             /* Immediate offset.  If the field doesn't wrap around the
3880                end of the word, rely on (s)extract completely.  */
3881             if (pos < 0) {
3882                 tcg_gen_rotli_i32(tmp, src, ofs);
3883                 src = tmp;
3884                 pos = 32 - len;
3885             }
3886         }
3887 
3888         tcg_gen_sextract_i32(QREG_CC_N, src, pos, len);
3889         if (is_sign) {
3890             tcg_gen_mov_i32(dst, QREG_CC_N);
3891         } else {
3892             tcg_gen_extract_i32(dst, src, pos, len);
3893         }
3894     }
3895 
3896     tcg_temp_free(tmp);
3897     set_cc_op(s, CC_OP_LOGIC);
3898 }
3899 
3900 DISAS_INSN(bfext_mem)
3901 {
3902     int ext = read_im16(env, s);
3903     int is_sign = insn & 0x200;
3904     TCGv dest = DREG(ext, 12);
3905     TCGv addr, len, ofs;
3906 
3907     addr = gen_lea(env, s, insn, OS_UNSIZED);
3908     if (IS_NULL_QREG(addr)) {
3909         gen_addr_fault(s);
3910         return;
3911     }
3912 
3913     if (ext & 0x20) {
3914         len = DREG(ext, 0);
3915     } else {
3916         len = tcg_const_i32(extract32(ext, 0, 5));
3917     }
3918     if (ext & 0x800) {
3919         ofs = DREG(ext, 6);
3920     } else {
3921         ofs = tcg_const_i32(extract32(ext, 6, 5));
3922     }
3923 
3924     if (is_sign) {
3925         gen_helper_bfexts_mem(dest, cpu_env, addr, ofs, len);
3926         tcg_gen_mov_i32(QREG_CC_N, dest);
3927     } else {
3928         TCGv_i64 tmp = tcg_temp_new_i64();
3929         gen_helper_bfextu_mem(tmp, cpu_env, addr, ofs, len);
3930         tcg_gen_extr_i64_i32(dest, QREG_CC_N, tmp);
3931         tcg_temp_free_i64(tmp);
3932     }
3933     set_cc_op(s, CC_OP_LOGIC);
3934 
3935     if (!(ext & 0x20)) {
3936         tcg_temp_free(len);
3937     }
3938     if (!(ext & 0x800)) {
3939         tcg_temp_free(ofs);
3940     }
3941 }
3942 
3943 DISAS_INSN(bfop_reg)
3944 {
3945     int ext = read_im16(env, s);
3946     TCGv src = DREG(insn, 0);
3947     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
3948     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
3949     TCGv mask, tofs, tlen;
3950 
3951     TCGV_UNUSED(tofs);
3952     TCGV_UNUSED(tlen);
3953     if ((insn & 0x0f00) == 0x0d00) { /* bfffo */
3954         tofs = tcg_temp_new();
3955         tlen = tcg_temp_new();
3956     }
3957 
3958     if ((ext & 0x820) == 0) {
3959         /* Immediate width and offset.  */
3960         uint32_t maski = 0x7fffffffu >> (len - 1);
3961         if (ofs + len <= 32) {
3962             tcg_gen_shli_i32(QREG_CC_N, src, ofs);
3963         } else {
3964             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
3965         }
3966         tcg_gen_andi_i32(QREG_CC_N, QREG_CC_N, ~maski);
3967         mask = tcg_const_i32(ror32(maski, ofs));
3968         if (!TCGV_IS_UNUSED(tofs)) {
3969             tcg_gen_movi_i32(tofs, ofs);
3970             tcg_gen_movi_i32(tlen, len);
3971         }
3972     } else {
3973         TCGv tmp = tcg_temp_new();
3974         if (ext & 0x20) {
3975             /* Variable width */
3976             tcg_gen_subi_i32(tmp, DREG(ext, 0), 1);
3977             tcg_gen_andi_i32(tmp, tmp, 31);
3978             mask = tcg_const_i32(0x7fffffffu);
3979             tcg_gen_shr_i32(mask, mask, tmp);
3980             if (!TCGV_IS_UNUSED(tlen)) {
3981                 tcg_gen_addi_i32(tlen, tmp, 1);
3982             }
3983         } else {
3984             /* Immediate width */
3985             mask = tcg_const_i32(0x7fffffffu >> (len - 1));
3986             if (!TCGV_IS_UNUSED(tlen)) {
3987                 tcg_gen_movi_i32(tlen, len);
3988             }
3989         }
3990         if (ext & 0x800) {
3991             /* Variable offset */
3992             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3993             tcg_gen_rotl_i32(QREG_CC_N, src, tmp);
3994             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
3995             tcg_gen_rotr_i32(mask, mask, tmp);
3996             if (!TCGV_IS_UNUSED(tofs)) {
3997                 tcg_gen_mov_i32(tofs, tmp);
3998             }
3999         } else {
4000             /* Immediate offset (and variable width) */
4001             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4002             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4003             tcg_gen_rotri_i32(mask, mask, ofs);
4004             if (!TCGV_IS_UNUSED(tofs)) {
4005                 tcg_gen_movi_i32(tofs, ofs);
4006             }
4007         }
4008         tcg_temp_free(tmp);
4009     }
4010     set_cc_op(s, CC_OP_LOGIC);
4011 
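         /* At this point 'mask' has zeros over the field and ones elsewhere, so
            AND clears the field (bfclr), ORC sets it (bfset) and EQV (i.e. XOR
            with the complement of the mask) flips it (bfchg). */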
4012     switch (insn & 0x0f00) {
4013     case 0x0a00: /* bfchg */
4014         tcg_gen_eqv_i32(src, src, mask);
4015         break;
4016     case 0x0c00: /* bfclr */
4017         tcg_gen_and_i32(src, src, mask);
4018         break;
4019     case 0x0d00: /* bfffo */
4020         gen_helper_bfffo_reg(DREG(ext, 12), QREG_CC_N, tofs, tlen);
4021         tcg_temp_free(tlen);
4022         tcg_temp_free(tofs);
4023         break;
4024     case 0x0e00: /* bfset */
4025         tcg_gen_orc_i32(src, src, mask);
4026         break;
4027     case 0x0800: /* bftst */
4028         /* flags already set; no other work to do.  */
4029         break;
4030     default:
4031         g_assert_not_reached();
4032     }
4033     tcg_temp_free(mask);
4034 }
4035 
4036 DISAS_INSN(bfop_mem)
4037 {
4038     int ext = read_im16(env, s);
4039     TCGv addr, len, ofs;
4040     TCGv_i64 t64;
4041 
4042     addr = gen_lea(env, s, insn, OS_UNSIZED);
4043     if (IS_NULL_QREG(addr)) {
4044         gen_addr_fault(s);
4045         return;
4046     }
4047 
4048     if (ext & 0x20) {
4049         len = DREG(ext, 0);
4050     } else {
4051         len = tcg_const_i32(extract32(ext, 0, 5));
4052     }
4053     if (ext & 0x800) {
4054         ofs = DREG(ext, 6);
4055     } else {
4056         ofs = tcg_const_i32(extract32(ext, 6, 5));
4057     }
4058 
4059     switch (insn & 0x0f00) {
4060     case 0x0a00: /* bfchg */
4061         gen_helper_bfchg_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4062         break;
4063     case 0x0c00: /* bfclr */
4064         gen_helper_bfclr_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4065         break;
4066     case 0x0d00: /* bfffo */
4067         t64 = tcg_temp_new_i64();
4068         gen_helper_bfffo_mem(t64, cpu_env, addr, ofs, len);
4069         tcg_gen_extr_i64_i32(DREG(ext, 12), QREG_CC_N, t64);
4070         tcg_temp_free_i64(t64);
4071         break;
4072     case 0x0e00: /* bfset */
4073         gen_helper_bfset_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4074         break;
4075     case 0x0800: /* bftst */
4076         gen_helper_bfexts_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4077         break;
4078     default:
4079         g_assert_not_reached();
4080     }
4081     set_cc_op(s, CC_OP_LOGIC);
4082 
4083     if (!(ext & 0x20)) {
4084         tcg_temp_free(len);
4085     }
4086     if (!(ext & 0x800)) {
4087         tcg_temp_free(ofs);
4088     }
4089 }
4090 
4091 DISAS_INSN(bfins_reg)
4092 {
4093     int ext = read_im16(env, s);
4094     TCGv dst = DREG(insn, 0);
4095     TCGv src = DREG(ext, 12);
4096     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4097     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4098     int pos = 32 - ofs - len;        /* little bit-endian */
4099     TCGv tmp;
4100 
4101     tmp = tcg_temp_new();
4102 
4103     if (ext & 0x20) {
4104         /* Variable width */
4105         tcg_gen_neg_i32(tmp, DREG(ext, 0));
4106         tcg_gen_andi_i32(tmp, tmp, 31);
4107         tcg_gen_shl_i32(QREG_CC_N, src, tmp);
4108     } else {
4109         /* Immediate width */
4110         tcg_gen_shli_i32(QREG_CC_N, src, 32 - len);
4111     }
4112     set_cc_op(s, CC_OP_LOGIC);
4113 
4114     /* Immediate width and offset */
4115     if ((ext & 0x820) == 0) {
4116         /* Check for suitability for deposit.  */
4117         if (pos >= 0) {
4118             tcg_gen_deposit_i32(dst, dst, src, pos, len);
4119         } else {
4120             uint32_t maski = -2U << (len - 1);
4121             uint32_t roti = (ofs + len) & 31;
4122             tcg_gen_andi_i32(tmp, src, ~maski);
4123             tcg_gen_rotri_i32(tmp, tmp, roti);
4124             tcg_gen_andi_i32(dst, dst, ror32(maski, roti));
4125             tcg_gen_or_i32(dst, dst, tmp);
4126         }
4127     } else {
4128         TCGv mask = tcg_temp_new();
4129         TCGv rot = tcg_temp_new();
4130 
4131         if (ext & 0x20) {
4132             /* Variable width */
4133             tcg_gen_subi_i32(rot, DREG(ext, 0), 1);
4134             tcg_gen_andi_i32(rot, rot, 31);
4135             tcg_gen_movi_i32(mask, -2);
4136             tcg_gen_shl_i32(mask, mask, rot);
4137             tcg_gen_mov_i32(rot, DREG(ext, 0));
4138             tcg_gen_andc_i32(tmp, src, mask);
4139         } else {
4140             /* Immediate width (variable offset) */
4141             uint32_t maski = -2U << (len - 1);
4142             tcg_gen_andi_i32(tmp, src, ~maski);
4143             tcg_gen_movi_i32(mask, maski);
4144             tcg_gen_movi_i32(rot, len & 31);
4145         }
4146         if (ext & 0x800) {
4147             /* Variable offset */
4148             tcg_gen_add_i32(rot, rot, DREG(ext, 6));
4149         } else {
4150             /* Immediate offset (variable width) */
4151             tcg_gen_addi_i32(rot, rot, ofs);
4152         }
4153         tcg_gen_andi_i32(rot, rot, 31);
4154         tcg_gen_rotr_i32(mask, mask, rot);
4155         tcg_gen_rotr_i32(tmp, tmp, rot);
4156         tcg_gen_and_i32(dst, dst, mask);
4157         tcg_gen_or_i32(dst, dst, tmp);
4158 
4159         tcg_temp_free(rot);
4160         tcg_temp_free(mask);
4161     }
4162     tcg_temp_free(tmp);
4163 }
4164 
4165 DISAS_INSN(bfins_mem)
4166 {
4167     int ext = read_im16(env, s);
4168     TCGv src = DREG(ext, 12);
4169     TCGv addr, len, ofs;
4170 
4171     addr = gen_lea(env, s, insn, OS_UNSIZED);
4172     if (IS_NULL_QREG(addr)) {
4173         gen_addr_fault(s);
4174         return;
4175     }
4176 
4177     if (ext & 0x20) {
4178         len = DREG(ext, 0);
4179     } else {
4180         len = tcg_const_i32(extract32(ext, 0, 5));
4181     }
4182     if (ext & 0x800) {
4183         ofs = DREG(ext, 6);
4184     } else {
4185         ofs = tcg_const_i32(extract32(ext, 6, 5));
4186     }
4187 
4188     gen_helper_bfins_mem(QREG_CC_N, cpu_env, addr, src, ofs, len);
4189     set_cc_op(s, CC_OP_LOGIC);
4190 
4191     if (!(ext & 0x20)) {
4192         tcg_temp_free(len);
4193     }
4194     if (!(ext & 0x800)) {
4195         tcg_temp_free(ofs);
4196     }
4197 }
4198 
4199 DISAS_INSN(ff1)
4200 {
4201     TCGv reg;
4202     reg = DREG(insn, 0);
4203     gen_logic_cc(s, reg, OS_LONG);
4204     gen_helper_ff1(reg, reg);
4205 }
4206 
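     /* Assemble the full SR: keep everything above the condition codes from
        QREG_SR and OR in the lazily evaluated CCR bits. */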
4207 static TCGv gen_get_sr(DisasContext *s)
4208 {
4209     TCGv ccr;
4210     TCGv sr;
4211 
4212     ccr = gen_get_ccr(s);
4213     sr = tcg_temp_new();
4214     tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
4215     tcg_gen_or_i32(sr, sr, ccr);
4216     return sr;
4217 }
4218 
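     /* ColdFire strldsr: push the current SR, then load SR from the immediate
        operand of the move-to-SR instruction (opcode 0x46fc) that must follow. */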
4219 DISAS_INSN(strldsr)
4220 {
4221     uint16_t ext;
4222     uint32_t addr;
4223 
4224     addr = s->pc - 2;
4225     ext = read_im16(env, s);
4226     if (ext != 0x46FC) {
4227         gen_exception(s, addr, EXCP_UNSUPPORTED);
4228         return;
4229     }
4230     ext = read_im16(env, s);
4231     if (IS_USER(s) || (ext & SR_S) == 0) {
4232         gen_exception(s, addr, EXCP_PRIVILEGE);
4233         return;
4234     }
4235     gen_push(s, gen_get_sr(s));
4236     gen_set_sr_im(s, ext, 0);
4237 }
4238 
4239 DISAS_INSN(move_from_sr)
4240 {
4241     TCGv sr;
4242 
4243     if (IS_USER(s) && !m68k_feature(env, M68K_FEATURE_M68000)) {
4244         gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4245         return;
4246     }
4247     sr = gen_get_sr(s);
4248     DEST_EA(env, insn, OS_WORD, sr, NULL);
4249 }
4250 
4251 DISAS_INSN(move_to_sr)
4252 {
4253     if (IS_USER(s)) {
4254         gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4255         return;
4256     }
4257     gen_set_sr(env, s, insn, 0);
4258     gen_lookup_tb(s);
4259 }
4260 
4261 DISAS_INSN(move_from_usp)
4262 {
4263     if (IS_USER(s)) {
4264         gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4265         return;
4266     }
4267     tcg_gen_ld_i32(AREG(insn, 0), cpu_env,
4268                    offsetof(CPUM68KState, sp[M68K_USP]));
4269 }
4270 
4271 DISAS_INSN(move_to_usp)
4272 {
4273     if (IS_USER(s)) {
4274         gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4275         return;
4276     }
4277     tcg_gen_st_i32(AREG(insn, 0), cpu_env,
4278                    offsetof(CPUM68KState, sp[M68K_USP]));
4279 }
4280 
4281 DISAS_INSN(halt)
4282 {
4283     gen_exception(s, s->pc, EXCP_HALT_INSN);
4284 }
4285 
4286 DISAS_INSN(stop)
4287 {
4288     uint16_t ext;
4289 
4290     if (IS_USER(s)) {
4291         gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4292         return;
4293     }
4294 
4295     ext = read_im16(env, s);
4296 
4297     gen_set_sr_im(s, ext, 0);
4298     tcg_gen_movi_i32(cpu_halted, 1);
4299     gen_exception(s, s->pc, EXCP_HLT);
4300 }
4301 
4302 DISAS_INSN(rte)
4303 {
4304     if (IS_USER(s)) {
4305         gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4306         return;
4307     }
4308     gen_exception(s, s->pc - 2, EXCP_RTE);
4309 }
4310 
4311 DISAS_INSN(movec)
4312 {
4313     uint16_t ext;
4314     TCGv reg;
4315 
4316     if (IS_USER(s)) {
4317         gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4318         return;
4319     }
4320 
4321     ext = read_im16(env, s);
4322 
4323     if (ext & 0x8000) {
4324         reg = AREG(ext, 12);
4325     } else {
4326         reg = DREG(ext, 12);
4327     }
4328     gen_helper_movec(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4329     gen_lookup_tb(s);
4330 }
4331 
4332 DISAS_INSN(intouch)
4333 {
4334     if (IS_USER(s)) {
4335         gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4336         return;
4337     }
4338     /* ICache fetch.  Implement as no-op.  */
4339 }
4340 
4341 DISAS_INSN(cpushl)
4342 {
4343     if (IS_USER(s)) {
4344         gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4345         return;
4346     }
4347     /* Cache push/invalidate.  Implement as no-op.  */
4348 }
4349 
4350 DISAS_INSN(wddata)
4351 {
4352     gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4353 }
4354 
4355 DISAS_INSN(wdebug)
4356 {
4357     M68kCPU *cpu = m68k_env_get_cpu(env);
4358 
4359     if (IS_USER(s)) {
4360         gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4361         return;
4362     }
4363     /* TODO: Implement wdebug.  */
4364     cpu_abort(CPU(cpu), "WDEBUG not implemented");
4365 }
4366 
4367 DISAS_INSN(trap)
4368 {
4369     gen_exception(s, s->pc - 2, EXCP_TRAP0 + (insn & 0xf));
4370 }
4371 
4372 static void gen_load_fcr(DisasContext *s, TCGv res, int reg)
4373 {
4374     switch (reg) {
4375     case M68K_FPIAR:
4376         tcg_gen_movi_i32(res, 0);
4377         break;
4378     case M68K_FPSR:
4379         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpsr));
4380         break;
4381     case M68K_FPCR:
4382         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpcr));
4383         break;
4384     }
4385 }
4386 
4387 static void gen_store_fcr(DisasContext *s, TCGv val, int reg)
4388 {
4389     switch (reg) {
4390     case M68K_FPIAR:
4391         break;
4392     case M68K_FPSR:
4393         tcg_gen_st_i32(val, cpu_env, offsetof(CPUM68KState, fpsr));
4394         break;
4395     case M68K_FPCR:
4396         gen_helper_set_fpcr(cpu_env, val);
4397         break;
4398     }
4399 }
4400 
4401 static void gen_qemu_store_fcr(DisasContext *s, TCGv addr, int reg)
4402 {
4403     int index = IS_USER(s);
4404     TCGv tmp;
4405 
4406     tmp = tcg_temp_new();
4407     gen_load_fcr(s, tmp, reg);
4408     tcg_gen_qemu_st32(tmp, addr, index);
4409     tcg_temp_free(tmp);
4410 }
4411 
4412 static void gen_qemu_load_fcr(DisasContext *s, TCGv addr, int reg)
4413 {
4414     int index = IS_USER(s);
4415     TCGv tmp;
4416 
4417     tmp = tcg_temp_new();
4418     tcg_gen_qemu_ld32u(tmp, addr, index);
4419     gen_store_fcr(s, tmp, reg);
4420     tcg_temp_free(tmp);
4421 }
4422 
4423 
4424 static void gen_op_fmove_fcr(CPUM68KState *env, DisasContext *s,
4425                              uint32_t insn, uint32_t ext)
4426 {
4427     int mask = (ext >> 10) & 7;
4428     int is_write = (ext >> 13) & 1;
4429     int mode = extract32(insn, 3, 3);
4430     int i;
4431     TCGv addr, tmp;
4432 
4433     switch (mode) {
4434     case 0: /* Dn */
4435         if (mask != M68K_FPIAR && mask != M68K_FPSR && mask != M68K_FPCR) {
4436             gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4437             return;
4438         }
4439         if (is_write) {
4440             gen_load_fcr(s, DREG(insn, 0), mask);
4441         } else {
4442             gen_store_fcr(s, DREG(insn, 0), mask);
4443         }
4444         return;
4445     case 1: /* An, only with FPIAR */
4446         if (mask != M68K_FPIAR) {
4447             gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4448             return;
4449         }
4450         if (is_write) {
4451             gen_load_fcr(s, AREG(insn, 0), mask);
4452         } else {
4453             gen_store_fcr(s, AREG(insn, 0), mask);
4454         }
4455         return;
4456     default:
4457         break;
4458     }
4459 
4460     tmp = gen_lea(env, s, insn, OS_LONG);
4461     if (IS_NULL_QREG(tmp)) {
4462         gen_addr_fault(s);
4463         return;
4464     }
4465 
4466     addr = tcg_temp_new();
4467     tcg_gen_mov_i32(addr, tmp);
4468 
4469     /* mask:
4470      *
4471      * 0b100 Floating-Point Control Register
4472      * 0b010 Floating-Point Status Register
4473      * 0b001 Floating-Point Instruction Address Register
4474      *
4475      */
4476 
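         /* For -(An) (mode 4) the selected registers are stored at descending
            addresses and the final address is written back to An; the other
            modes walk upwards, with writeback only for (An)+ (mode 3). */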
4477     if (is_write && mode == 4) {
4478         for (i = 2; i >= 0; i--, mask >>= 1) {
4479             if (mask & 1) {
4480                 gen_qemu_store_fcr(s, addr, 1 << i);
4481                 if (mask != 1) {
4482                     tcg_gen_subi_i32(addr, addr, opsize_bytes(OS_LONG));
4483                 }
4484             }
4485         }
4486         tcg_gen_mov_i32(AREG(insn, 0), addr);
4487     } else {
4488         for (i = 0; i < 3; i++, mask >>= 1) {
4489             if (mask & 1) {
4490                 if (is_write) {
4491                     gen_qemu_store_fcr(s, addr, 1 << i);
4492                 } else {
4493                     gen_qemu_load_fcr(s, addr, 1 << i);
4494                 }
4495                 if (mask != 1 || mode == 3) {
4496                     tcg_gen_addi_i32(addr, addr, opsize_bytes(OS_LONG));
4497                 }
4498             }
4499         }
4500         if (mode == 3) {
4501             tcg_gen_mov_i32(AREG(insn, 0), addr);
4502         }
4503     }
4504     tcg_temp_free_i32(addr);
4505 }
4506 
4507 static void gen_op_fmovem(CPUM68KState *env, DisasContext *s,
4508                           uint32_t insn, uint32_t ext)
4509 {
4510     int opsize;
4511     TCGv addr, tmp;
4512     int mode = (ext >> 11) & 0x3;
4513     int is_load = ((ext & 0x2000) == 0);
4514 
4515     if (m68k_feature(s->env, M68K_FEATURE_FPU)) {
4516         opsize = OS_EXTENDED;
4517     } else {
4518         opsize = OS_DOUBLE;  /* FIXME */
4519     }
4520 
4521     addr = gen_lea(env, s, insn, opsize);
4522     if (IS_NULL_QREG(addr)) {
4523         gen_addr_fault(s);
4524         return;
4525     }
4526 
4527     tmp = tcg_temp_new();
4528     if (mode & 0x1) {
4529         /* Dynamic register list */
4530         tcg_gen_ext8u_i32(tmp, DREG(ext, 4));
4531     } else {
4532         /* Static register list */
4533         tcg_gen_movi_i32(tmp, ext & 0xff);
4534     }
4535 
4536     if (!is_load && (mode & 2) == 0) {
4537         /* The predecrement addressing mode is only available
4538          * when storing registers to memory.
4539          */
4540         if (opsize == OS_EXTENDED) {
4541             gen_helper_fmovemx_st_predec(tmp, cpu_env, addr, tmp);
4542         } else {
4543             gen_helper_fmovemd_st_predec(tmp, cpu_env, addr, tmp);
4544         }
4545     } else {
4546         /* postincrement addressing mode */
4547         if (opsize == OS_EXTENDED) {
4548             if (is_load) {
4549                 gen_helper_fmovemx_ld_postinc(tmp, cpu_env, addr, tmp);
4550             } else {
4551                 gen_helper_fmovemx_st_postinc(tmp, cpu_env, addr, tmp);
4552             }
4553         } else {
4554             if (is_load) {
4555                 gen_helper_fmovemd_ld_postinc(tmp, cpu_env, addr, tmp);
4556             } else {
4557                 gen_helper_fmovemd_st_postinc(tmp, cpu_env, addr, tmp);
4558             }
4559         }
4560     }
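         /* Write the updated address back for the (An)+ and -(An) modes. */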
4561     if ((insn & 070) == 030 || (insn & 070) == 040) {
4562         tcg_gen_mov_i32(AREG(insn, 0), tmp);
4563     }
4564     tcg_temp_free(tmp);
4565 }
4566 
4567 /* ??? FP exceptions are not implemented.  Most exceptions are deferred until
4568    immediately before the next FP instruction is executed.  */
4569 DISAS_INSN(fpu)
4570 {
4571     uint16_t ext;
4572     int opmode;
4573     int opsize;
4574     TCGv_ptr cpu_src, cpu_dest;
4575 
4576     ext = read_im16(env, s);
4577     opmode = ext & 0x7f;
4578     switch ((ext >> 13) & 7) {
4579     case 0:
4580         break;
4581     case 1:
4582         goto undef;
4583     case 2:
4584         if (insn == 0xf200 && (ext & 0xfc00) == 0x5c00) {
4585             /* fmovecr */
4586             TCGv rom_offset = tcg_const_i32(opmode);
4587             cpu_dest = gen_fp_ptr(REG(ext, 7));
4588             gen_helper_fconst(cpu_env, cpu_dest, rom_offset);
4589             tcg_temp_free_ptr(cpu_dest);
4590             tcg_temp_free(rom_offset);
4591             return;
4592         }
4593         break;
4594     case 3: /* fmove out */
4595         cpu_src = gen_fp_ptr(REG(ext, 7));
4596         opsize = ext_opsize(ext, 10);
4597         if (gen_ea_fp(env, s, insn, opsize, cpu_src, EA_STORE) == -1) {
4598             gen_addr_fault(s);
4599         }
4600         gen_helper_ftst(cpu_env, cpu_src);
4601         tcg_temp_free_ptr(cpu_src);
4602         return;
4603     case 4: /* fmove to control register.  */
4604     case 5: /* fmove from control register.  */
4605         gen_op_fmove_fcr(env, s, insn, ext);
4606         return;
4607     case 6: /* fmovem */
4608     case 7:
4609         if ((ext & 0x1000) == 0 && !m68k_feature(s->env, M68K_FEATURE_FPU)) {
4610             goto undef;
4611         }
4612         gen_op_fmovem(env, s, insn, ext);
4613         return;
4614     }
4615     if (ext & (1 << 14)) {
4616         /* Source effective address.  */
4617         opsize = ext_opsize(ext, 10);
4618         cpu_src = gen_fp_result_ptr();
4619         if (gen_ea_fp(env, s, insn, opsize, cpu_src, EA_LOADS) == -1) {
4620             gen_addr_fault(s);
4621             return;
4622         }
4623     } else {
4624         /* Source register.  */
4625         opsize = OS_EXTENDED;
4626         cpu_src = gen_fp_ptr(REG(ext, 10));
4627     }
4628     cpu_dest = gen_fp_ptr(REG(ext, 7));
4629     switch (opmode) {
4630     case 0: /* fmove */
4631         gen_fp_move(cpu_dest, cpu_src);
4632         break;
4633     case 0x40: /* fsmove */
4634         gen_helper_fsround(cpu_env, cpu_dest, cpu_src);
4635         break;
4636     case 0x44: /* fdmove */
4637         gen_helper_fdround(cpu_env, cpu_dest, cpu_src);
4638         break;
4639     case 1: /* fint */
4640         gen_helper_firound(cpu_env, cpu_dest, cpu_src);
4641         break;
4642     case 3: /* fintrz */
4643         gen_helper_fitrunc(cpu_env, cpu_dest, cpu_src);
4644         break;
4645     case 4: /* fsqrt */
4646         gen_helper_fsqrt(cpu_env, cpu_dest, cpu_src);
4647         break;
4648     case 0x41: /* fssqrt */
4649         gen_helper_fssqrt(cpu_env, cpu_dest, cpu_src);
4650         break;
4651     case 0x45: /* fdsqrt */
4652         gen_helper_fdsqrt(cpu_env, cpu_dest, cpu_src);
4653         break;
4654     case 0x18: /* fabs */
4655         gen_helper_fabs(cpu_env, cpu_dest, cpu_src);
4656         break;
4657     case 0x58: /* fsabs */
4658         gen_helper_fsabs(cpu_env, cpu_dest, cpu_src);
4659         break;
4660     case 0x5c: /* fdabs */
4661         gen_helper_fdabs(cpu_env, cpu_dest, cpu_src);
4662         break;
4663     case 0x1a: /* fneg */
4664         gen_helper_fneg(cpu_env, cpu_dest, cpu_src);
4665         break;
4666     case 0x5a: /* fsneg */
4667         gen_helper_fsneg(cpu_env, cpu_dest, cpu_src);
4668         break;
4669     case 0x5e: /* fdneg */
4670         gen_helper_fdneg(cpu_env, cpu_dest, cpu_src);
4671         break;
4672     case 0x20: /* fdiv */
4673         gen_helper_fdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
4674         break;
4675     case 0x60: /* fsdiv */
4676         gen_helper_fsdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
4677         break;
4678     case 0x64: /* fddiv */
4679         gen_helper_fddiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
4680         break;
4681     case 0x22: /* fadd */
4682         gen_helper_fadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
4683         break;
4684     case 0x62: /* fsadd */
4685         gen_helper_fsadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
4686         break;
4687     case 0x66: /* fdadd */
4688         gen_helper_fdadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
4689         break;
4690     case 0x23: /* fmul */
4691         gen_helper_fmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
4692         break;
4693     case 0x63: /* fsmul */
4694         gen_helper_fsmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
4695         break;
4696     case 0x67: /* fdmul */
4697         gen_helper_fdmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
4698         break;
4699     case 0x24: /* fsgldiv */
4700         gen_helper_fsgldiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
4701         break;
4702     case 0x27: /* fsglmul */
4703         gen_helper_fsglmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
4704         break;
4705     case 0x28: /* fsub */
4706         gen_helper_fsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
4707         break;
4708     case 0x68: /* fssub */
4709         gen_helper_fssub(cpu_env, cpu_dest, cpu_src, cpu_dest);
4710         break;
4711     case 0x6c: /* fdsub */
4712         gen_helper_fdsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
4713         break;
4714     case 0x38: /* fcmp */
4715         gen_helper_fcmp(cpu_env, cpu_src, cpu_dest);
4716         return;
4717     case 0x3a: /* ftst */
4718         gen_helper_ftst(cpu_env, cpu_src);
4719         return;
4720     default:
4721         goto undef;
4722     }
4723     tcg_temp_free_ptr(cpu_src);
4724     gen_helper_ftst(cpu_env, cpu_dest);
4725     tcg_temp_free_ptr(cpu_dest);
4726     return;
4727 undef:
4728     /* FIXME: Is this right for offset addressing modes?  */
4729     s->pc -= 2;
4730     disas_undef_fpu(env, s, insn);
4731 }
4732 
4733 static void gen_fcc_cond(DisasCompare *c, DisasContext *s, int cond)
4734 {
4735     TCGv fpsr;
4736 
4737     c->g1 = 1;
4738     c->v2 = tcg_const_i32(0);
4739     c->g2 = 0;
4740     /* TODO: Raise BSUN exception.  */
4741     fpsr = tcg_temp_new();
4742     gen_load_fcr(s, fpsr, M68K_FPSR);
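         /* Conditions 0-15 and 16-31 test the same FPSR flag combinations; the
            16-31 ("signaling") forms should additionally raise BSUN on an
            unordered operand, which is not modelled here. */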
4743     switch (cond) {
4744     case 0:  /* False */
4745     case 16: /* Signaling False */
4746         c->v1 = c->v2;
4747         c->tcond = TCG_COND_NEVER;
4748         break;
4749     case 1:  /* EQual Z */
4750     case 17: /* Signaling EQual Z */
4751         c->v1 = tcg_temp_new();
4752         c->g1 = 0;
4753         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
4754         c->tcond = TCG_COND_NE;
4755         break;
4756     case 2:  /* Ordered Greater Than !(A || Z || N) */
4757     case 18: /* Greater Than !(A || Z || N) */
4758         c->v1 = tcg_temp_new();
4759         c->g1 = 0;
4760         tcg_gen_andi_i32(c->v1, fpsr,
4761                          FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
4762         c->tcond = TCG_COND_EQ;
4763         break;
4764     case 3:  /* Ordered Greater than or Equal Z || !(A || N) */
4765     case 19: /* Greater than or Equal Z || !(A || N) */
4766         c->v1 = tcg_temp_new();
4767         c->g1 = 0;
4768         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
4769         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
4770         tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_Z | FPSR_CC_N);
4771         tcg_gen_or_i32(c->v1, c->v1, fpsr);
4772         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
4773         c->tcond = TCG_COND_NE;
4774         break;
4775     case 4:  /* Ordered Less Than !(!N || A || Z); */
4776     case 20: /* Less Than !(!N || A || Z); */
4777         c->v1 = tcg_temp_new();
4778         c->g1 = 0;
4779         tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
4780         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_N | FPSR_CC_A | FPSR_CC_Z);
4781         c->tcond = TCG_COND_EQ;
4782         break;
4783     case 5:  /* Ordered Less than or Equal Z || (N && !A) */
4784     case 21: /* Less than or Equal Z || (N && !A) */
4785         c->v1 = tcg_temp_new();
4786         c->g1 = 0;
4787         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
4788         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
4789         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
4790         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_Z | FPSR_CC_N);
4791         c->tcond = TCG_COND_NE;
4792         break;
4793     case 6:  /* Ordered Greater or Less than !(A || Z) */
4794     case 22: /* Greater or Less than !(A || Z) */
4795         c->v1 = tcg_temp_new();
4796         c->g1 = 0;
4797         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
4798         c->tcond = TCG_COND_EQ;
4799         break;
4800     case 7:  /* Ordered !A */
4801     case 23: /* Greater, Less or Equal !A */
4802         c->v1 = tcg_temp_new();
4803         c->g1 = 0;
4804         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
4805         c->tcond = TCG_COND_EQ;
4806         break;
4807     case 8:  /* Unordered A */
4808     case 24: /* Not Greater, Less or Equal A */
4809         c->v1 = tcg_temp_new();
4810         c->g1 = 0;
4811         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
4812         c->tcond = TCG_COND_NE;
4813         break;
4814     case 9:  /* Unordered or Equal A || Z */
4815     case 25: /* Not Greater or Less than A || Z */
4816         c->v1 = tcg_temp_new();
4817         c->g1 = 0;
4818         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
4819         c->tcond = TCG_COND_NE;
4820         break;
4821     case 10: /* Unordered or Greater Than A || !(N || Z)) */
4822     case 10: /* Unordered or Greater Than A || !(N || Z) */
4823     case 26: /* Not Less or Equal A || !(N || Z) */
4824         c->g1 = 0;
4825         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
4826         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
4827         tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_A | FPSR_CC_N);
4828         tcg_gen_or_i32(c->v1, c->v1, fpsr);
4829         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
4830         c->tcond = TCG_COND_NE;
4831         break;
4832     case 11: /* Unordered or Greater or Equal A || Z || !N */
4833     case 27: /* Not Less Than A || Z || !N */
4834         c->v1 = tcg_temp_new();
4835         c->g1 = 0;
4836         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
4837         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
4838         c->tcond = TCG_COND_NE;
4839         break;
4840     case 12: /* Unordered or Less Than A || (N && !Z) */
4841     case 28: /* Not Greater than or Equal A || (N && !Z) */
4842         c->v1 = tcg_temp_new();
4843         c->g1 = 0;
4844         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
4845         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
4846         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
4847         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_A | FPSR_CC_N);
4848         c->tcond = TCG_COND_NE;
4849         break;
4850     case 13: /* Unordered or Less or Equal A || Z || N */
4851     case 29: /* Not Greater Than A || Z || N */
4852         c->v1 = tcg_temp_new();
4853         c->g1 = 0;
4854         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
4855         c->tcond = TCG_COND_NE;
4856         break;
4857     case 14: /* Not Equal !Z */
4858     case 30: /* Signaling Not Equal !Z */
4859         c->v1 = tcg_temp_new();
4860         c->g1 = 0;
4861         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
4862         c->tcond = TCG_COND_EQ;
4863         break;
4864     case 15: /* True */
4865     case 31: /* Signaling True */
4866         c->v1 = c->v2;
4867         c->tcond = TCG_COND_ALWAYS;
4868         break;
4869     }
4870     tcg_temp_free(fpsr);
4871 }
4872 
4873 static void gen_fjmpcc(DisasContext *s, int cond, TCGLabel *l1)
4874 {
4875     DisasCompare c;
4876 
4877     gen_fcc_cond(&c, s, cond);
4878     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
4879     free_cond(&c);
4880 }
4881 
4882 DISAS_INSN(fbcc)
4883 {
4884     uint32_t offset;
4885     uint32_t base;
4886     TCGLabel *l1;
4887 
4888     base = s->pc;
4889     offset = (int16_t)read_im16(env, s);
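         /* Bit 6 selects a 32-bit displacement; either way it is relative to
            the PC just after the opcode word. */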
4890     if (insn & (1 << 6)) {
4891         offset = (offset << 16) | read_im16(env, s);
4892     }
4893 
4894     l1 = gen_new_label();
4895     update_cc_op(s);
4896     gen_fjmpcc(s, insn & 0x3f, l1);
4897     gen_jmp_tb(s, 0, s->pc);
4898     gen_set_label(l1);
4899     gen_jmp_tb(s, 1, base + offset);
4900 }
4901 
4902 DISAS_INSN(fscc)
4903 {
4904     DisasCompare c;
4905     int cond;
4906     TCGv tmp;
4907     uint16_t ext;
4908 
4909     ext = read_im16(env, s);
4910     cond = ext & 0x3f;
4911     gen_fcc_cond(&c, s, cond);
4912 
4913     tmp = tcg_temp_new();
4914     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
4915     free_cond(&c);
4916 
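         /* FScc writes all-ones when the condition is true, so turn the 0/1
            setcond result into 0/-1 before storing the byte. */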
4917     tcg_gen_neg_i32(tmp, tmp);
4918     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
4919     tcg_temp_free(tmp);
4920 }
4921 
4922 DISAS_INSN(frestore)
4923 {
4924     M68kCPU *cpu = m68k_env_get_cpu(env);
4925 
4926     /* TODO: Implement frestore.  */
4927     cpu_abort(CPU(cpu), "FRESTORE not implemented");
4928 }
4929 
4930 DISAS_INSN(fsave)
4931 {
4932     M68kCPU *cpu = m68k_env_get_cpu(env);
4933 
4934     /* TODO: Implement fsave.  */
4935     cpu_abort(CPU(cpu), "FSAVE not implemented");
4936 }
4937 
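     /* Extract one 16-bit MAC operand half: in fractional mode (MACSR_FI) it is
        kept in the upper half of the word, otherwise it is sign- or zero-extended
        according to MACSR_SU. */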
4938 static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
4939 {
4940     TCGv tmp = tcg_temp_new();
4941     if (s->env->macsr & MACSR_FI) {
4942         if (upper)
4943             tcg_gen_andi_i32(tmp, val, 0xffff0000);
4944         else
4945             tcg_gen_shli_i32(tmp, val, 16);
4946     } else if (s->env->macsr & MACSR_SU) {
4947         if (upper)
4948             tcg_gen_sari_i32(tmp, val, 16);
4949         else
4950             tcg_gen_ext16s_i32(tmp, val);
4951     } else {
4952         if (upper)
4953             tcg_gen_shri_i32(tmp, val, 16);
4954         else
4955             tcg_gen_ext16u_i32(tmp, val);
4956     }
4957     return tmp;
4958 }
4959 
4960 static void gen_mac_clear_flags(void)
4961 {
4962     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
4963                      ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
4964 }
4965 
4966 DISAS_INSN(mac)
4967 {
4968     TCGv rx;
4969     TCGv ry;
4970     uint16_t ext;
4971     int acc;
4972     TCGv tmp;
4973     TCGv addr;
4974     TCGv loadval;
4975     int dual;
4976     TCGv saved_flags;
4977 
4978     if (!s->done_mac) {
4979         s->mactmp = tcg_temp_new_i64();
4980         s->done_mac = 1;
4981     }
4982 
4983     ext = read_im16(env, s);
4984 
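         /* EMAC accumulator number: bit 0 comes from the opcode, bit 1 from the
            extension word. */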
4985     acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
4986     dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
4987     if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
4988         disas_undef(env, s, insn);
4989         return;
4990     }
4991     if (insn & 0x30) {
4992         /* MAC with load.  */
4993         tmp = gen_lea(env, s, insn, OS_LONG);
4994         addr = tcg_temp_new();
4995         tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
4996         /* Load the value now to ensure correct exception behavior.
4997            Perform writeback after reading the MAC inputs.  */
4998         loadval = gen_load(s, OS_LONG, addr, 0);
4999 
5000         acc ^= 1;
5001         rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
5002         ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
5003     } else {
5004         loadval = addr = NULL_QREG;
5005         rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5006         ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5007     }
5008 
5009     gen_mac_clear_flags();
5010 #if 0
5011     l1 = -1;
5012     /* Disabled because conditional branches clobber temporary vars.  */
5013     if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
5014         /* Skip the multiply if we know we will ignore it.  */
5015         l1 = gen_new_label();
5016         tmp = tcg_temp_new();
5017         tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
5018         gen_op_jmp_nz32(tmp, l1);
5019     }
5020 #endif
5021 
5022     if ((ext & 0x0800) == 0) {
5023         /* Word.  */
5024         rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
5025         ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
5026     }
5027     if (s->env->macsr & MACSR_FI) {
5028         gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
5029     } else {
5030         if (s->env->macsr & MACSR_SU)
5031             gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
5032         else
5033             gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
5034         switch ((ext >> 9) & 3) {
5035         case 1:
5036             tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
5037             break;
5038         case 3:
5039             tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
5040             break;
5041         }
5042     }
5043 
5044     if (dual) {
5045         /* Save the overflow flag from the multiply.  */
5046         saved_flags = tcg_temp_new();
5047         tcg_gen_mov_i32(saved_flags, QREG_MACSR);
5048     } else {
5049         saved_flags = NULL_QREG;
5050     }
5051 
5052 #if 0
5053     /* Disabled because conditional branches clobber temporary vars.  */
5054     if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
5055         /* Skip the accumulate if the value is already saturated.  */
5056         l1 = gen_new_label();
5057         tmp = tcg_temp_new();
5058         gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5059         gen_op_jmp_nz32(tmp, l1);
5060     }
5061 #endif
5062 
5063     if (insn & 0x100)
5064         tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5065     else
5066         tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5067 
5068     if (s->env->macsr & MACSR_FI)
5069         gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5070     else if (s->env->macsr & MACSR_SU)
5071         gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5072     else
5073         gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5074 
5075 #if 0
5076     /* Disabled because conditional branches clobber temporary vars.  */
5077     if (l1 != -1)
5078         gen_set_label(l1);
5079 #endif
5080 
5081     if (dual) {
5082         /* Dual accumulate variant.  */
5083         acc = (ext >> 2) & 3;
5084         /* Restore the overflow flag from the multiplier.  */
5085         tcg_gen_mov_i32(QREG_MACSR, saved_flags);
5086 #if 0
5087         /* Disabled because conditional branches clobber temporary vars.  */
5088         if ((s->env->macsr & MACSR_OMC) != 0) {
5089             /* Skip the accumulate if the value is already saturated.  */
5090             l1 = gen_new_label();
5091             tmp = tcg_temp_new();
5092             gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5093             gen_op_jmp_nz32(tmp, l1);
5094         }
5095 #endif
5096         if (ext & 2)
5097             tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5098         else
5099             tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5100         if (s->env->macsr & MACSR_FI)
5101             gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5102         else if (s->env->macsr & MACSR_SU)
5103             gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5104         else
5105             gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5106 #if 0
5107         /* Disabled because conditional branches clobber temporary vars.  */
5108         if (l1 != -1)
5109             gen_set_label(l1);
5110 #endif
5111     }
5112     gen_helper_mac_set_flags(cpu_env, tcg_const_i32(acc));
5113 
5114     if (insn & 0x30) {
5115         TCGv rw;
5116         rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5117         tcg_gen_mov_i32(rw, loadval);
5118         /* FIXME: Should address writeback happen with the masked or
5119            unmasked value?  */
5120         switch ((insn >> 3) & 7) {
5121         case 3: /* Post-increment.  */
5122             tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
5123             break;
5124         case 4: /* Pre-decrement.  */
5125             tcg_gen_mov_i32(AREG(insn, 0), addr);
5126         }
5127     }
5128 }
5129 
5130 DISAS_INSN(from_mac)
5131 {
5132     TCGv rx;
5133     TCGv_i64 acc;
5134     int accnum;
5135 
5136     rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5137     accnum = (insn >> 9) & 3;
5138     acc = MACREG(accnum);
5139     if (s->env->macsr & MACSR_FI) {
5140         gen_helper_get_macf(rx, cpu_env, acc);
5141     } else if ((s->env->macsr & MACSR_OMC) == 0) {
5142         tcg_gen_extrl_i64_i32(rx, acc);
5143     } else if (s->env->macsr & MACSR_SU) {
5144         gen_helper_get_macs(rx, acc);
5145     } else {
5146         gen_helper_get_macu(rx, acc);
5147     }
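         /* Bit 6 set requests the accumulator be cleared after reading (the
            movclr form): zero it and its PAV overflow flag. */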
5148     if (insn & 0x40) {
5149         tcg_gen_movi_i64(acc, 0);
5150         tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5151     }
5152 }
5153 
5154 DISAS_INSN(move_mac)
5155 {
5156     /* FIXME: This can be done without a helper.  */
5157     int src;
5158     TCGv dest;
5159     src = insn & 3;
5160     dest = tcg_const_i32((insn >> 9) & 3);
5161     gen_helper_mac_move(cpu_env, dest, tcg_const_i32(src));
5162     gen_mac_clear_flags();
5163     gen_helper_mac_set_flags(cpu_env, dest);
5164 }
5165 
5166 DISAS_INSN(from_macsr)
5167 {
5168     TCGv reg;
5169 
5170     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5171     tcg_gen_mov_i32(reg, QREG_MACSR);
5172 }
5173 
5174 DISAS_INSN(from_mask)
5175 {
5176     TCGv reg;
5177     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5178     tcg_gen_mov_i32(reg, QREG_MAC_MASK);
5179 }
5180 
5181 DISAS_INSN(from_mext)
5182 {
5183     TCGv reg;
5184     TCGv acc;
5185     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5186     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5187     if (s->env->macsr & MACSR_FI)
5188         gen_helper_get_mac_extf(reg, cpu_env, acc);
5189     else
5190         gen_helper_get_mac_exti(reg, cpu_env, acc);
5191 }
5192 
5193 DISAS_INSN(macsr_to_ccr)
5194 {
5195     TCGv tmp = tcg_temp_new();
5196     tcg_gen_andi_i32(tmp, QREG_MACSR, 0xf);
5197     gen_helper_set_sr(cpu_env, tmp);
5198     tcg_temp_free(tmp);
5199     set_cc_op(s, CC_OP_FLAGS);
5200 }
5201 
5202 DISAS_INSN(to_mac)
5203 {
5204     TCGv_i64 acc;
5205     TCGv val;
5206     int accnum;
5207     accnum = (insn >> 9) & 3;
5208     acc = MACREG(accnum);
5209     SRC_EA(env, val, OS_LONG, 0, NULL);
5210     if (s->env->macsr & MACSR_FI) {
5211         tcg_gen_ext_i32_i64(acc, val);
5212         tcg_gen_shli_i64(acc, acc, 8);
5213     } else if (s->env->macsr & MACSR_SU) {
5214         tcg_gen_ext_i32_i64(acc, val);
5215     } else {
5216         tcg_gen_extu_i32_i64(acc, val);
5217     }
5218     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5219     gen_mac_clear_flags();
5220     gen_helper_mac_set_flags(cpu_env, tcg_const_i32(accnum));
5221 }
5222 
5223 DISAS_INSN(to_macsr)
5224 {
5225     TCGv val;
5226     SRC_EA(env, val, OS_LONG, 0, NULL);
5227     gen_helper_set_macsr(cpu_env, val);
5228     gen_lookup_tb(s);
5229 }
5230 
5231 DISAS_INSN(to_mask)
5232 {
5233     TCGv val;
5234     SRC_EA(env, val, OS_LONG, 0, NULL);
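         /* Only the low half of the MAC address mask is implemented; the upper
            16 bits always read as ones. */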
5235     tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
5236 }
5237 
5238 DISAS_INSN(to_mext)
5239 {
5240     TCGv val;
5241     TCGv acc;
5242     SRC_EA(env, val, OS_LONG, 0, NULL);
5243     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5244     if (s->env->macsr & MACSR_FI)
5245         gen_helper_set_mac_extf(cpu_env, val, acc);
5246     else if (s->env->macsr & MACSR_SU)
5247         gen_helper_set_mac_exts(cpu_env, val, acc);
5248     else
5249         gen_helper_set_mac_extu(cpu_env, val, acc);
5250 }
5251 
5252 static disas_proc opcode_table[65536];
5253 
5254 static void
5255 register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
5256 {
5257   int i;
5258   int from;
5259   int to;
5260 
5261   /* Sanity check.  All set bits must be included in the mask.  */
5262   if (opcode & ~mask) {
5263       fprintf(stderr,
5264               "qemu internal error: bogus opcode definition %04x/%04x\n",
5265               opcode, mask);
5266       abort();
5267   }
5268   /* This could probably be cleverer.  For now just optimize the case where
5269      the top bits are known.  */
5270   /* Find the first zero bit in the mask.  */
5271   i = 0x8000;
5272   while ((i & mask) != 0)
5273       i >>= 1;
5274   /* Iterate over all combinations of this and lower bits.  */
5275   if (i == 0)
5276       i = 1;
5277   else
5278       i <<= 1;
5279   from = opcode & ~(i - 1);
5280   to = from + i;
5281   for (i = from; i < to; i++) {
5282       if ((i & mask) == opcode)
5283           opcode_table[i] = proc;
5284   }
5285 }
5286 
5287 /* Register m68k opcode handlers.  Order is important.
5288    Later insns override earlier ones.  */
5289 void register_m68k_insns (CPUM68KState *env)
5290 {
5291     /* Build the opcode table only once to avoid
5292        multithreading issues. */
5293     if (opcode_table[0] != NULL) {
5294         return;
5295     }
5296 
5297     /* Use BASE() for instructions available
5298      * on both CF_ISA_A and M68000.
5299      */
5300 #define BASE(name, opcode, mask) \
5301     register_opcode(disas_##name, 0x##opcode, 0x##mask)
5302 #define INSN(name, opcode, mask, feature) do { \
5303     if (m68k_feature(env, M68K_FEATURE_##feature)) \
5304         BASE(name, opcode, mask); \
5305     } while(0)
5306     BASE(undef,     0000, 0000);
5307     INSN(arith_im,  0080, fff8, CF_ISA_A);
5308     INSN(arith_im,  0000, ff00, M68000);
5309     INSN(undef,     00c0, ffc0, M68000);
5310     INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
5311     BASE(bitop_reg, 0100, f1c0);
5312     BASE(bitop_reg, 0140, f1c0);
5313     BASE(bitop_reg, 0180, f1c0);
5314     BASE(bitop_reg, 01c0, f1c0);
5315     INSN(arith_im,  0280, fff8, CF_ISA_A);
5316     INSN(arith_im,  0200, ff00, M68000);
5317     INSN(undef,     02c0, ffc0, M68000);
5318     INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
5319     INSN(arith_im,  0480, fff8, CF_ISA_A);
5320     INSN(arith_im,  0400, ff00, M68000);
5321     INSN(undef,     04c0, ffc0, M68000);
5322     INSN(arith_im,  0600, ff00, M68000);
5323     INSN(undef,     06c0, ffc0, M68000);
5324     INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
5325     INSN(arith_im,  0680, fff8, CF_ISA_A);
5326     INSN(arith_im,  0c00, ff38, CF_ISA_A);
5327     INSN(arith_im,  0c00, ff00, M68000);
5328     BASE(bitop_im,  0800, ffc0);
5329     BASE(bitop_im,  0840, ffc0);
5330     BASE(bitop_im,  0880, ffc0);
5331     BASE(bitop_im,  08c0, ffc0);
5332     INSN(arith_im,  0a80, fff8, CF_ISA_A);
5333     INSN(arith_im,  0a00, ff00, M68000);
5334     INSN(cas,       0ac0, ffc0, CAS);
5335     INSN(cas,       0cc0, ffc0, CAS);
5336     INSN(cas,       0ec0, ffc0, CAS);
5337     INSN(cas2w,     0cfc, ffff, CAS);
5338     INSN(cas2l,     0efc, ffff, CAS);
5339     BASE(move,      1000, f000);
5340     BASE(move,      2000, f000);
5341     BASE(move,      3000, f000);
5342     INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
5343     INSN(negx,      4080, fff8, CF_ISA_A);
5344     INSN(negx,      4000, ff00, M68000);
5345     INSN(undef,     40c0, ffc0, M68000);
5346     INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
5347     INSN(move_from_sr, 40c0, ffc0, M68000);
5348     BASE(lea,       41c0, f1c0);
5349     BASE(clr,       4200, ff00);
5350     BASE(undef,     42c0, ffc0);
5351     INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
5352     INSN(move_from_ccr, 42c0, ffc0, M68000);
5353     INSN(neg,       4480, fff8, CF_ISA_A);
5354     INSN(neg,       4400, ff00, M68000);
5355     INSN(undef,     44c0, ffc0, M68000);
5356     BASE(move_to_ccr, 44c0, ffc0);
5357     INSN(not,       4680, fff8, CF_ISA_A);
5358     INSN(not,       4600, ff00, M68000);
5359     INSN(undef,     46c0, ffc0, M68000);
5360     INSN(move_to_sr, 46c0, ffc0, CF_ISA_A);
5361     INSN(nbcd,      4800, ffc0, M68000);
5362     INSN(linkl,     4808, fff8, M68000);
5363     BASE(pea,       4840, ffc0);
5364     BASE(swap,      4840, fff8);
5365     INSN(bkpt,      4848, fff8, BKPT);
5366     INSN(movem,     48d0, fbf8, CF_ISA_A);
5367     INSN(movem,     48e8, fbf8, CF_ISA_A);
5368     INSN(movem,     4880, fb80, M68000);
5369     BASE(ext,       4880, fff8);
5370     BASE(ext,       48c0, fff8);
5371     BASE(ext,       49c0, fff8);
5372     BASE(tst,       4a00, ff00);
5373     INSN(tas,       4ac0, ffc0, CF_ISA_B);
5374     INSN(tas,       4ac0, ffc0, M68000);
5375     INSN(halt,      4ac8, ffff, CF_ISA_A);
5376     INSN(pulse,     4acc, ffff, CF_ISA_A);
5377     BASE(illegal,   4afc, ffff);
5378     INSN(mull,      4c00, ffc0, CF_ISA_A);
5379     INSN(mull,      4c00, ffc0, LONG_MULDIV);
5380     INSN(divl,      4c40, ffc0, CF_ISA_A);
5381     INSN(divl,      4c40, ffc0, LONG_MULDIV);
5382     INSN(sats,      4c80, fff8, CF_ISA_B);
5383     BASE(trap,      4e40, fff0);
5384     BASE(link,      4e50, fff8);
5385     BASE(unlk,      4e58, fff8);
5386     INSN(move_to_usp, 4e60, fff8, USP);
5387     INSN(move_from_usp, 4e68, fff8, USP);
5388     BASE(nop,       4e71, ffff);
5389     BASE(stop,      4e72, ffff);
5390     BASE(rte,       4e73, ffff);
5391     INSN(rtd,       4e74, ffff, RTD);
5392     BASE(rts,       4e75, ffff);
5393     INSN(movec,     4e7b, ffff, CF_ISA_A);
5394     BASE(jump,      4e80, ffc0);
5395     BASE(jump,      4ec0, ffc0);
5396     INSN(addsubq,   5000, f080, M68000);
5397     BASE(addsubq,   5080, f0c0);
5398     INSN(scc,       50c0, f0f8, CF_ISA_A); /* Scc.B Dx   */
5399     INSN(scc,       50c0, f0c0, M68000);   /* Scc.B <EA> */
5400     INSN(dbcc,      50c8, f0f8, M68000);
5401     INSN(tpf,       51f8, fff8, CF_ISA_A);
5402 
5403     /* Branch instructions.  */
5404     BASE(branch,    6000, f000);
5405     /* Disable long branch instructions, then add back the ones we want.  */
5406     BASE(undef,     60ff, f0ff); /* All long branches.  */
5407     INSN(branch,    60ff, f0ff, CF_ISA_B);
5408     INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
5409     INSN(branch,    60ff, ffff, BRAL);
5410     INSN(branch,    60ff, f0ff, BCCL);
5411 
5412     BASE(moveq,     7000, f100);
5413     INSN(mvzs,      7100, f100, CF_ISA_B);
5414     BASE(or,        8000, f000);
5415     BASE(divw,      80c0, f0c0);
5416     INSN(sbcd_reg,  8100, f1f8, M68000);
5417     INSN(sbcd_mem,  8108, f1f8, M68000);
5418     BASE(addsub,    9000, f000);
5419     INSN(undef,     90c0, f0c0, CF_ISA_A);
5420     INSN(subx_reg,  9180, f1f8, CF_ISA_A);
5421     INSN(subx_reg,  9100, f138, M68000);
5422     INSN(subx_mem,  9108, f138, M68000);
5423     INSN(suba,      91c0, f1c0, CF_ISA_A);
5424     INSN(suba,      90c0, f0c0, M68000);
5425 
5426     BASE(undef_mac, a000, f000);
5427     INSN(mac,       a000, f100, CF_EMAC);
5428     INSN(from_mac,  a180, f9b0, CF_EMAC);
5429     INSN(move_mac,  a110, f9fc, CF_EMAC);
5430     INSN(from_macsr,a980, f9f0, CF_EMAC);
5431     INSN(from_mask, ad80, fff0, CF_EMAC);
5432     INSN(from_mext, ab80, fbf0, CF_EMAC);
5433     INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
5434     INSN(to_mac,    a100, f9c0, CF_EMAC);
5435     INSN(to_macsr,  a900, ffc0, CF_EMAC);
5436     INSN(to_mext,   ab00, fbc0, CF_EMAC);
5437     INSN(to_mask,   ad00, ffc0, CF_EMAC);
5438 
5439     INSN(mov3q,     a140, f1c0, CF_ISA_B);
5440     INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
5441     INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
5442     INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
5443     INSN(cmp,       b080, f1c0, CF_ISA_A);
5444     INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
5445     INSN(cmp,       b000, f100, M68000);
5446     INSN(eor,       b100, f100, M68000);
5447     INSN(cmpm,      b108, f138, M68000);
5448     INSN(cmpa,      b0c0, f0c0, M68000);
5449     INSN(eor,       b180, f1c0, CF_ISA_A);
5450     BASE(and,       c000, f000);
5451     INSN(exg_dd,    c140, f1f8, M68000);
5452     INSN(exg_aa,    c148, f1f8, M68000);
5453     INSN(exg_da,    c188, f1f8, M68000);
5454     BASE(mulw,      c0c0, f0c0);
5455     INSN(abcd_reg,  c100, f1f8, M68000);
5456     INSN(abcd_mem,  c108, f1f8, M68000);
5457     BASE(addsub,    d000, f000);
5458     INSN(undef,     d0c0, f0c0, CF_ISA_A);
5459     INSN(addx_reg,  d180, f1f8, CF_ISA_A);
5460     INSN(addx_reg,  d100, f138, M68000);
5461     INSN(addx_mem,  d108, f138, M68000);
5462     INSN(adda,      d1c0, f1c0, CF_ISA_A);
5463     INSN(adda,      d0c0, f0c0, M68000);
5464     INSN(shift_im,  e080, f0f0, CF_ISA_A);
5465     INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
5466     INSN(shift8_im, e000, f0f0, M68000);
5467     INSN(shift16_im, e040, f0f0, M68000);
5468     INSN(shift_im,  e080, f0f0, M68000);
5469     INSN(shift8_reg, e020, f0f0, M68000);
5470     INSN(shift16_reg, e060, f0f0, M68000);
5471     INSN(shift_reg, e0a0, f0f0, M68000);
5472     INSN(shift_mem, e0c0, fcc0, M68000);
5473     INSN(rotate_im, e090, f0f0, M68000);
5474     INSN(rotate8_im, e010, f0f0, M68000);
5475     INSN(rotate16_im, e050, f0f0, M68000);
5476     INSN(rotate_reg, e0b0, f0f0, M68000);
5477     INSN(rotate8_reg, e030, f0f0, M68000);
5478     INSN(rotate16_reg, e070, f0f0, M68000);
5479     INSN(rotate_mem, e4c0, fcc0, M68000);
5480     INSN(bfext_mem, e9c0, fdc0, BITFIELD);  /* bfextu & bfexts */
5481     INSN(bfext_reg, e9c0, fdf8, BITFIELD);
5482     INSN(bfins_mem, efc0, ffc0, BITFIELD);
5483     INSN(bfins_reg, efc0, fff8, BITFIELD);
5484     INSN(bfop_mem, eac0, ffc0, BITFIELD);   /* bfchg */
5485     INSN(bfop_reg, eac0, fff8, BITFIELD);   /* bfchg */
5486     INSN(bfop_mem, ecc0, ffc0, BITFIELD);   /* bfclr */
5487     INSN(bfop_reg, ecc0, fff8, BITFIELD);   /* bfclr */
5488     INSN(bfop_mem, edc0, ffc0, BITFIELD);   /* bfffo */
5489     INSN(bfop_reg, edc0, fff8, BITFIELD);   /* bfffo */
5490     INSN(bfop_mem, eec0, ffc0, BITFIELD);   /* bfset */
5491     INSN(bfop_reg, eec0, fff8, BITFIELD);   /* bfset */
5492     INSN(bfop_mem, e8c0, ffc0, BITFIELD);   /* bftst */
5493     INSN(bfop_reg, e8c0, fff8, BITFIELD);   /* bftst */
5494     BASE(undef_fpu, f000, f000);
5495     INSN(fpu,       f200, ffc0, CF_FPU);
5496     INSN(fbcc,      f280, ffc0, CF_FPU);
5497     INSN(frestore,  f340, ffc0, CF_FPU);
5498     INSN(fsave,     f300, ffc0, CF_FPU);
5499     INSN(fpu,       f200, ffc0, FPU);
5500     INSN(fscc,      f240, ffc0, FPU);
5501     INSN(fbcc,      f280, ff80, FPU);
5502     INSN(frestore,  f340, ffc0, FPU);
5503     INSN(fsave,     f300, ffc0, FPU);
5504     INSN(intouch,   f340, ffc0, CF_ISA_A);
5505     INSN(cpushl,    f428, ff38, CF_ISA_A);
5506     INSN(wddata,    fb00, ff00, CF_ISA_A);
5507     INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
5508 #undef INSN
5509 }
5510 
5511 /* ??? Some of this implementation is not exception safe.  We should always
5512    write back the result to memory before setting the condition codes.  */
5513 static void disas_m68k_insn(CPUM68KState *env, DisasContext *s)
5514 {
5515     uint16_t insn = read_im16(env, s);
5516     opcode_table[insn](env, s, insn);
5517     do_writebacks(s);
5518 }
5519 
5520 /* generate intermediate code for basic block 'tb'.  */
5521 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb)
5522 {
5523     CPUM68KState *env = cs->env_ptr;
5524     DisasContext dc1, *dc = &dc1;
5525     target_ulong pc_start;
5526     int pc_offset;
5527     int num_insns;
5528     int max_insns;
5529 
5530     /* generate intermediate code */
5531     pc_start = tb->pc;
5532 
5533     dc->tb = tb;
5534 
5535     dc->env = env;
5536     dc->is_jmp = DISAS_NEXT;
5537     dc->pc = pc_start;
5538     dc->cc_op = CC_OP_DYNAMIC;
5539     dc->cc_op_synced = 1;
5540     dc->singlestep_enabled = cs->singlestep_enabled;
5541     dc->user = (env->sr & SR_S) == 0;
5542     dc->done_mac = 0;
5543     dc->writeback_mask = 0;
5544     num_insns = 0;
5545     max_insns = tb->cflags & CF_COUNT_MASK;
5546     if (max_insns == 0) {
5547         max_insns = CF_COUNT_MASK;
5548     }
5549     if (max_insns > TCG_MAX_INSNS) {
5550         max_insns = TCG_MAX_INSNS;
5551     }
5552 
5553     gen_tb_start(tb);
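         /* Translate one instruction per iteration.  The loop ends when an
            instruction ends the block (dc->is_jmp), the TCG op buffer fills,
            single-stepping is enabled, the 32-byte margin before the end of
            the page is reached, or max_insns instructions have been
            translated.  */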
5554     do {
5555         pc_offset = dc->pc - pc_start;
5556         gen_throws_exception = NULL;
5557         tcg_gen_insn_start(dc->pc, dc->cc_op);
5558         num_insns++;
5559 
5560         if (unlikely(cpu_breakpoint_test(cs, dc->pc, BP_ANY))) {
5561             gen_exception(dc, dc->pc, EXCP_DEBUG);
5562             dc->is_jmp = DISAS_JUMP;
5563             /* The address covered by the breakpoint must be included in
5564                [tb->pc, tb->pc + tb->size) in order for it to be
5565                properly cleared -- thus we increment the PC here so that
5566                the logic setting tb->size below does the right thing.  */
5567             dc->pc += 2;
5568             break;
5569         }
5570 
5571         if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
5572             gen_io_start();
5573         }
5574 
5575         dc->insn_pc = dc->pc;
5576         disas_m68k_insn(env, dc);
5577     } while (!dc->is_jmp && !tcg_op_buf_full() &&
5578              !cs->singlestep_enabled &&
5579              !singlestep &&
5580              (pc_offset) < (TARGET_PAGE_SIZE - 32) &&
5581              num_insns < max_insns);
5582 
5583     if (tb->cflags & CF_LAST_IO)
5584         gen_io_end();
5585     if (unlikely(cs->singlestep_enabled)) {
5586         /* Make sure the pc is updated, and raise a debug exception.  */
5587         if (!dc->is_jmp) {
5588             update_cc_op(dc);
5589             tcg_gen_movi_i32(QREG_PC, dc->pc);
5590         }
5591         gen_helper_raise_exception(cpu_env, tcg_const_i32(EXCP_DEBUG));
5592     } else {
5593         switch (dc->is_jmp) {
5594         case DISAS_NEXT:
5595             update_cc_op(dc);
5596             gen_jmp_tb(dc, 0, dc->pc);
5597             break;
5598         default:
5599         case DISAS_JUMP:
5600         case DISAS_UPDATE:
5601             update_cc_op(dc);
5602             /* indicate that the hash table must be used to find the next TB */
5603             tcg_gen_exit_tb(0);
5604             break;
5605         case DISAS_TB_JUMP:
5606             /* nothing more to generate */
5607             break;
5608         }
5609     }
5610     gen_tb_end(tb, num_insns);
5611 
5612 #ifdef DEBUG_DISAS
5613     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
5614         && qemu_log_in_addr_range(pc_start)) {
5615         qemu_log_lock();
5616         qemu_log("----------------\n");
5617         qemu_log("IN: %s\n", lookup_symbol(pc_start));
5618         log_target_disas(cs, pc_start, dc->pc - pc_start, 0);
5619         qemu_log("\n");
5620         qemu_log_unlock();
5621     }
5622 #endif
5623     tb->size = dc->pc - pc_start;
5624     tb->icount = num_insns;
5625 }
5626 
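     /* Convert the 80-bit register image (sign/exponent and mantissa halves)
        to a host double, via float64, purely for the register dump below.
        Precision beyond double is lost.  */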
5627 static double floatx80_to_double(CPUM68KState *env, uint16_t high, uint64_t low)
5628 {
5629     floatx80 a = { .high = high, .low = low };
5630     union {
5631         float64 f64;
5632         double d;
5633     } u;
5634 
5635     u.f64 = floatx80_to_float64(a, &env->fp_status);
5636     return u.d;
5637 }
5638 
5639 void m68k_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
5640                          int flags)
5641 {
5642     M68kCPU *cpu = M68K_CPU(cs);
5643     CPUM68KState *env = &cpu->env;
5644     int i;
5645     uint16_t sr;
5646     for (i = 0; i < 8; i++) {
5647         cpu_fprintf(f, "D%d = %08x   A%d = %08x   "
5648                     "F%d = %04x %016"PRIx64"  (%12g)\n",
5649                     i, env->dregs[i], i, env->aregs[i],
5650                     i, env->fregs[i].l.upper, env->fregs[i].l.lower,
5651                     floatx80_to_double(env, env->fregs[i].l.upper,
5652                                        env->fregs[i].l.lower));
5653     }
5654     cpu_fprintf(f, "PC = %08x   ", env->pc);
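         /* The condition codes are evaluated lazily; fold the current CCR
            flags back into SR for display.  */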
5655     sr = env->sr | cpu_m68k_get_ccr(env);
5656     cpu_fprintf(f, "SR = %04x %c%c%c%c%c ", sr, (sr & CCF_X) ? 'X' : '-',
5657                 (sr & CCF_N) ? 'N' : '-', (sr & CCF_Z) ? 'Z' : '-',
5658                 (sr & CCF_V) ? 'V' : '-', (sr & CCF_C) ? 'C' : '-');
5659     cpu_fprintf(f, "FPSR = %08x %c%c%c%c ", env->fpsr,
5660                 (env->fpsr & FPSR_CC_A) ? 'A' : '-',
5661                 (env->fpsr & FPSR_CC_I) ? 'I' : '-',
5662                 (env->fpsr & FPSR_CC_Z) ? 'Z' : '-',
5663                 (env->fpsr & FPSR_CC_N) ? 'N' : '-');
5664     cpu_fprintf(f, "\n                                "
5665                    "FPCR =     %04x ", env->fpcr);
5666     switch (env->fpcr & FPCR_PREC_MASK) {
5667     case FPCR_PREC_X:
5668         cpu_fprintf(f, "X ");
5669         break;
5670     case FPCR_PREC_S:
5671         cpu_fprintf(f, "S ");
5672         break;
5673     case FPCR_PREC_D:
5674         cpu_fprintf(f, "D ");
5675         break;
5676     }
5677     switch (env->fpcr & FPCR_RND_MASK) {
5678     case FPCR_RND_N:
5679         cpu_fprintf(f, "RN ");
5680         break;
5681     case FPCR_RND_Z:
5682         cpu_fprintf(f, "RZ ");
5683         break;
5684     case FPCR_RND_M:
5685         cpu_fprintf(f, "RM ");
5686         break;
5687     case FPCR_RND_P:
5688         cpu_fprintf(f, "RP ");
5689         break;
5690     }
5691 }
5692 
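     /* data[] holds the values recorded by tcg_gen_insn_start() above:
        data[0] is the PC and data[1] the cc_op at the start of the
        instruction.  CC_OP_DYNAMIC means the real value is already live
        in env->cc_op and must not be overwritten.  */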
5693 void restore_state_to_opc(CPUM68KState *env, TranslationBlock *tb,
5694                           target_ulong *data)
5695 {
5696     int cc_op = data[1];
5697     env->pc = data[0];
5698     if (cc_op != CC_OP_DYNAMIC) {
5699         env->cc_op = cc_op;
5700     }
5701 }
5702