xref: /openbmc/qemu/target/m68k/translate.c (revision bb2e0039)
1 /*
2  *  m68k translation
3  *
4  *  Copyright (c) 2005-2007 CodeSourcery
5  *  Written by Paul Brook
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "disas/disas.h"
24 #include "exec/exec-all.h"
25 #include "tcg-op.h"
26 #include "qemu/log.h"
27 #include "exec/cpu_ldst.h"
28 #include "exec/translator.h"
29 
30 #include "exec/helper-proto.h"
31 #include "exec/helper-gen.h"
32 
33 #include "trace-tcg.h"
34 #include "exec/log.h"
35 
36 //#define DEBUG_DISPATCH 1
37 
38 #define DEFO32(name, offset) static TCGv QREG_##name;
39 #define DEFO64(name, offset) static TCGv_i64 QREG_##name;
40 #include "qregs.def"
41 #undef DEFO32
42 #undef DEFO64
43 
44 static TCGv_i32 cpu_halted;
45 static TCGv_i32 cpu_exception_index;
46 
47 static TCGv_env cpu_env;
48 
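/* Storage for the names of the TCG globals registered below: "D0".."D7"
 * and "A0".."A7" take 3 bytes each (2 * 8 * 3) and "ACC0".."ACC3" take
 * 5 bytes each (5 * 4), NUL terminators included.  */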
49 static char cpu_reg_names[2 * 8 * 3 + 5 * 4];
50 static TCGv cpu_dregs[8];
51 static TCGv cpu_aregs[8];
52 static TCGv_i64 cpu_macc[4];
53 
54 #define REG(insn, pos)  (((insn) >> (pos)) & 7)
55 #define DREG(insn, pos) cpu_dregs[REG(insn, pos)]
56 #define AREG(insn, pos) get_areg(s, REG(insn, pos))
57 #define MACREG(acc)     cpu_macc[acc]
58 #define QREG_SP         get_areg(s, 7)
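/* For example, REG(0x0645, 0) == 5 and REG(0x0645, 9) == 3: the usual
 * 3-bit register fields sit at bits 0-2 and 9-11 of an opcode word.  */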
59 
60 static TCGv NULL_QREG;
61 #define IS_NULL_QREG(t) (TCGV_EQUAL(t, NULL_QREG))
62 /* Used to distinguish stores from bad addressing modes.  */
63 static TCGv store_dummy;
64 
65 #include "exec/gen-icount.h"
66 
67 void m68k_tcg_init(void)
68 {
69     char *p;
70     int i;
71 
72     cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
73     tcg_ctx.tcg_env = cpu_env;
74 
75 #define DEFO32(name, offset) \
76     QREG_##name = tcg_global_mem_new_i32(cpu_env, \
77         offsetof(CPUM68KState, offset), #name);
78 #define DEFO64(name, offset) \
79     QREG_##name = tcg_global_mem_new_i64(cpu_env, \
80         offsetof(CPUM68KState, offset), #name);
81 #include "qregs.def"
82 #undef DEFO32
83 #undef DEFO64
84 
85     cpu_halted = tcg_global_mem_new_i32(cpu_env,
86                                         -offsetof(M68kCPU, env) +
87                                         offsetof(CPUState, halted), "HALTED");
88     cpu_exception_index = tcg_global_mem_new_i32(cpu_env,
89                                                  -offsetof(M68kCPU, env) +
90                                                  offsetof(CPUState, exception_index),
91                                                  "EXCEPTION");
92 
93     p = cpu_reg_names;
94     for (i = 0; i < 8; i++) {
95         sprintf(p, "D%d", i);
96         cpu_dregs[i] = tcg_global_mem_new(cpu_env,
97                                           offsetof(CPUM68KState, dregs[i]), p);
98         p += 3;
99         sprintf(p, "A%d", i);
100         cpu_aregs[i] = tcg_global_mem_new(cpu_env,
101                                           offsetof(CPUM68KState, aregs[i]), p);
102         p += 3;
103     }
104     for (i = 0; i < 4; i++) {
105         sprintf(p, "ACC%d", i);
106         cpu_macc[i] = tcg_global_mem_new_i64(cpu_env,
107                                          offsetof(CPUM68KState, macc[i]), p);
108         p += 5;
109     }
110 
111     NULL_QREG = tcg_global_mem_new(cpu_env, -4, "NULL");
112     store_dummy = tcg_global_mem_new(cpu_env, -8, "NULL");
113 }
114 
115 /* internal defines */
116 typedef struct DisasContext {
117     CPUM68KState *env;
118     target_ulong insn_pc; /* Start of the current instruction.  */
119     target_ulong pc;
120     int is_jmp;
121     CCOp cc_op; /* Current CC operation */
122     int cc_op_synced;
123     int user;
124     struct TranslationBlock *tb;
125     int singlestep_enabled;
126     TCGv_i64 mactmp;
127     int done_mac;
128     int writeback_mask;
129     TCGv writeback[8];
130 } DisasContext;
131 
132 static TCGv get_areg(DisasContext *s, unsigned regno)
133 {
134     if (s->writeback_mask & (1 << regno)) {
135         return s->writeback[regno];
136     } else {
137         return cpu_aregs[regno];
138     }
139 }
140 
141 static void delay_set_areg(DisasContext *s, unsigned regno,
142                            TCGv val, bool give_temp)
143 {
144     if (s->writeback_mask & (1 << regno)) {
145         if (give_temp) {
146             tcg_temp_free(s->writeback[regno]);
147             s->writeback[regno] = val;
148         } else {
149             tcg_gen_mov_i32(s->writeback[regno], val);
150         }
151     } else {
152         s->writeback_mask |= 1 << regno;
153         if (give_temp) {
154             s->writeback[regno] = val;
155         } else {
156             TCGv tmp = tcg_temp_new();
157             s->writeback[regno] = tmp;
158             tcg_gen_mov_i32(tmp, val);
159         }
160     }
161 }
162 
163 static void do_writebacks(DisasContext *s)
164 {
165     unsigned mask = s->writeback_mask;
166     if (mask) {
167         s->writeback_mask = 0;
168         do {
169             unsigned regno = ctz32(mask);
170             tcg_gen_mov_i32(cpu_aregs[regno], s->writeback[regno]);
171             tcg_temp_free(s->writeback[regno]);
172             mask &= mask - 1;
173         } while (mask);
174     }
175 }
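/* Note on the writeback machinery above: -(An)/(An)+ style updates are not
 * applied to cpu_aregs[] immediately.  get_areg() returns any pending value,
 * delay_set_areg() records a new one, and do_writebacks() commits them all,
 * presumably only once the instruction can no longer fault, so that an
 * exception never observes half-updated address registers.  */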
176 
177 /* is_jmp field values */
178 #define DISAS_JUMP      DISAS_TARGET_0 /* only pc was modified dynamically */
179 #define DISAS_UPDATE    DISAS_TARGET_1 /* cpu state was modified dynamically */
180 #define DISAS_TB_JUMP   DISAS_TARGET_2 /* only pc was modified statically */
181 #define DISAS_JUMP_NEXT DISAS_TARGET_3
182 
183 #if defined(CONFIG_USER_ONLY)
184 #define IS_USER(s) 1
185 #else
186 #define IS_USER(s) s->user
187 #endif
188 
189 /* XXX: move that elsewhere */
190 /* ??? Fix exceptions.  */
191 static void *gen_throws_exception;
192 #define gen_last_qop NULL
193 
194 typedef void (*disas_proc)(CPUM68KState *env, DisasContext *s, uint16_t insn);
195 
196 #ifdef DEBUG_DISPATCH
197 #define DISAS_INSN(name)                                                \
198     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
199                                   uint16_t insn);                       \
200     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
201                              uint16_t insn)                             \
202     {                                                                   \
203         qemu_log("Dispatch " #name "\n");                               \
204         real_disas_##name(env, s, insn);                                \
205     }                                                                   \
206     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
207                                   uint16_t insn)
208 #else
209 #define DISAS_INSN(name)                                                \
210     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
211                              uint16_t insn)
212 #endif
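/* For example, DISAS_INSN(scc) defines
 *     static void disas_scc(CPUM68KState *env, DisasContext *s, uint16_t insn)
 * and, when DEBUG_DISPATCH is enabled, a wrapper that logs "Dispatch scc"
 * before calling the real body.  */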
213 
214 static const uint8_t cc_op_live[CC_OP_NB] = {
215     [CC_OP_FLAGS] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
216     [CC_OP_ADDB ... CC_OP_ADDL] = CCF_X | CCF_N | CCF_V,
217     [CC_OP_SUBB ... CC_OP_SUBL] = CCF_X | CCF_N | CCF_V,
218     [CC_OP_CMPB ... CC_OP_CMPL] = CCF_X | CCF_N | CCF_V,
219     [CC_OP_LOGIC] = CCF_X | CCF_N
220 };
221 
222 static void set_cc_op(DisasContext *s, CCOp op)
223 {
224     CCOp old_op = s->cc_op;
225     int dead;
226 
227     if (old_op == op) {
228         return;
229     }
230     s->cc_op = op;
231     s->cc_op_synced = 0;
232 
233     /* Discard CC computation that will no longer be used.
234        Note that X and N are never dead.  */
235     dead = cc_op_live[old_op] & ~cc_op_live[op];
236     if (dead & CCF_C) {
237         tcg_gen_discard_i32(QREG_CC_C);
238     }
239     if (dead & CCF_Z) {
240         tcg_gen_discard_i32(QREG_CC_Z);
241     }
242     if (dead & CCF_V) {
243         tcg_gen_discard_i32(QREG_CC_V);
244     }
245 }
246 
247 /* Update the CPU env CC_OP state.  */
248 static void update_cc_op(DisasContext *s)
249 {
250     if (!s->cc_op_synced) {
251         s->cc_op_synced = 1;
252         tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
253     }
254 }
255 
256 /* Generate a jump to an immediate address.  */
257 static void gen_jmp_im(DisasContext *s, uint32_t dest)
258 {
259     update_cc_op(s);
260     tcg_gen_movi_i32(QREG_PC, dest);
261     s->is_jmp = DISAS_JUMP;
262 }
263 
264 /* Generate a jump to the address in qreg DEST.  */
265 static void gen_jmp(DisasContext *s, TCGv dest)
266 {
267     update_cc_op(s);
268     tcg_gen_mov_i32(QREG_PC, dest);
269     s->is_jmp = DISAS_JUMP;
270 }
271 
272 static void gen_raise_exception(int nr)
273 {
274     TCGv_i32 tmp = tcg_const_i32(nr);
275 
276     gen_helper_raise_exception(cpu_env, tmp);
277     tcg_temp_free_i32(tmp);
278 }
279 
280 static void gen_exception(DisasContext *s, uint32_t where, int nr)
281 {
282     update_cc_op(s);
283     gen_jmp_im(s, where);
284     gen_raise_exception(nr);
285 }
286 
287 static inline void gen_addr_fault(DisasContext *s)
288 {
289     gen_exception(s, s->insn_pc, EXCP_ADDRESS);
290 }
291 
292 /* Generate a load from the specified address.  Narrow values are
293    sign- or zero-extended to full register width, as selected by SIGN.  */
294 static inline TCGv gen_load(DisasContext * s, int opsize, TCGv addr, int sign)
295 {
296     TCGv tmp;
297     int index = IS_USER(s);
298     tmp = tcg_temp_new_i32();
299     switch(opsize) {
300     case OS_BYTE:
301         if (sign)
302             tcg_gen_qemu_ld8s(tmp, addr, index);
303         else
304             tcg_gen_qemu_ld8u(tmp, addr, index);
305         break;
306     case OS_WORD:
307         if (sign)
308             tcg_gen_qemu_ld16s(tmp, addr, index);
309         else
310             tcg_gen_qemu_ld16u(tmp, addr, index);
311         break;
312     case OS_LONG:
313         tcg_gen_qemu_ld32u(tmp, addr, index);
314         break;
315     default:
316         g_assert_not_reached();
317     }
318     gen_throws_exception = gen_last_qop;
319     return tmp;
320 }
321 
322 /* Generate a store.  */
323 static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val)
324 {
325     int index = IS_USER(s);
326     switch(opsize) {
327     case OS_BYTE:
328         tcg_gen_qemu_st8(val, addr, index);
329         break;
330     case OS_WORD:
331         tcg_gen_qemu_st16(val, addr, index);
332         break;
333     case OS_LONG:
334         tcg_gen_qemu_st32(val, addr, index);
335         break;
336     default:
337         g_assert_not_reached();
338     }
339     gen_throws_exception = gen_last_qop;
340 }
341 
342 typedef enum {
343     EA_STORE,
344     EA_LOADU,
345     EA_LOADS
346 } ea_what;
347 
348 /* Generate an unsigned load if WHAT is EA_LOADU, a signed load if WHAT
349    is EA_LOADS, otherwise generate a store.  */
350 static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
351                      ea_what what)
352 {
353     if (what == EA_STORE) {
354         gen_store(s, opsize, addr, val);
355         return store_dummy;
356     } else {
357         return gen_load(s, opsize, addr, what == EA_LOADS);
358     }
359 }
360 
361 /* Read a 16-bit immediate constant */
362 static inline uint16_t read_im16(CPUM68KState *env, DisasContext *s)
363 {
364     uint16_t im;
365     im = cpu_lduw_code(env, s->pc);
366     s->pc += 2;
367     return im;
368 }
369 
370 /* Read an 8-bit immediate constant */
371 static inline uint8_t read_im8(CPUM68KState *env, DisasContext *s)
372 {
373     return read_im16(env, s);
374 }
375 
376 /* Read a 32-bit immediate constant.  */
377 static inline uint32_t read_im32(CPUM68KState *env, DisasContext *s)
378 {
379     uint32_t im;
380     im = read_im16(env, s) << 16;
381     im |= 0xffff & read_im16(env, s);
382     return im;
383 }
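/* For example, extension words 0x1234 followed by 0x5678 in the instruction
 * stream yield the 32-bit immediate 0x12345678.  */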
384 
385 /* Read a 64-bit immediate constant.  */
386 static inline uint64_t read_im64(CPUM68KState *env, DisasContext *s)
387 {
388     uint64_t im;
389     im = (uint64_t)read_im32(env, s) << 32;
390     im |= (uint64_t)read_im32(env, s);
391     return im;
392 }
393 
394 /* Calculate an address index.  */
395 static TCGv gen_addr_index(DisasContext *s, uint16_t ext, TCGv tmp)
396 {
397     TCGv add;
398     int scale;
399 
400     add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
401     if ((ext & 0x800) == 0) {
402         tcg_gen_ext16s_i32(tmp, add);
403         add = tmp;
404     }
405     scale = (ext >> 9) & 3;
406     if (scale != 0) {
407         tcg_gen_shli_i32(tmp, add, scale);
408         add = tmp;
409     }
410     return add;
411 }
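/* For example, ext = 0x3600 selects a data register index (bit 15 clear),
 * register D3 (bits 14-12), a sign-extended word index (bit 11 clear) and
 * a scale field of 3 (bits 10-9), i.e. an index of D3.W * 8.  */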
412 
413 /* Handle a base + index + displacement effective address.
414    A NULL_QREG base means pc-relative.  */
415 static TCGv gen_lea_indexed(CPUM68KState *env, DisasContext *s, TCGv base)
416 {
417     uint32_t offset;
418     uint16_t ext;
419     TCGv add;
420     TCGv tmp;
421     uint32_t bd, od;
422 
423     offset = s->pc;
424     ext = read_im16(env, s);
425 
426     if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
427         return NULL_QREG;
428 
429     if (m68k_feature(s->env, M68K_FEATURE_M68000) &&
430         !m68k_feature(s->env, M68K_FEATURE_SCALED_INDEX)) {
431         ext &= ~(3 << 9);
432     }
433 
434     if (ext & 0x100) {
435         /* full extension word format */
436         if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
437             return NULL_QREG;
438 
439         if ((ext & 0x30) > 0x10) {
440             /* base displacement */
441             if ((ext & 0x30) == 0x20) {
442                 bd = (int16_t)read_im16(env, s);
443             } else {
444                 bd = read_im32(env, s);
445             }
446         } else {
447             bd = 0;
448         }
449         tmp = tcg_temp_new();
450         if ((ext & 0x44) == 0) {
451             /* pre-index */
452             add = gen_addr_index(s, ext, tmp);
453         } else {
454             add = NULL_QREG;
455         }
456         if ((ext & 0x80) == 0) {
457             /* base not suppressed */
458             if (IS_NULL_QREG(base)) {
459                 base = tcg_const_i32(offset + bd);
460                 bd = 0;
461             }
462             if (!IS_NULL_QREG(add)) {
463                 tcg_gen_add_i32(tmp, add, base);
464                 add = tmp;
465             } else {
466                 add = base;
467             }
468         }
469         if (!IS_NULL_QREG(add)) {
470             if (bd != 0) {
471                 tcg_gen_addi_i32(tmp, add, bd);
472                 add = tmp;
473             }
474         } else {
475             add = tcg_const_i32(bd);
476         }
477         if ((ext & 3) != 0) {
478             /* memory indirect */
479             base = gen_load(s, OS_LONG, add, 0);
480             if ((ext & 0x44) == 4) {
481                 add = gen_addr_index(s, ext, tmp);
482                 tcg_gen_add_i32(tmp, add, base);
483                 add = tmp;
484             } else {
485                 add = base;
486             }
487             if ((ext & 3) > 1) {
488                 /* outer displacement */
489                 if ((ext & 3) == 2) {
490                     od = (int16_t)read_im16(env, s);
491                 } else {
492                     od = read_im32(env, s);
493                 }
494             } else {
495                 od = 0;
496             }
497             if (od != 0) {
498                 tcg_gen_addi_i32(tmp, add, od);
499                 add = tmp;
500             }
501         }
502     } else {
503         /* brief extension word format */
504         tmp = tcg_temp_new();
505         add = gen_addr_index(s, ext, tmp);
506         if (!IS_NULL_QREG(base)) {
507             tcg_gen_add_i32(tmp, add, base);
508             if ((int8_t)ext)
509                 tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
510         } else {
511             tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
512         }
513         add = tmp;
514     }
515     return add;
516 }
517 
518 /* Sign or zero extend a value.  */
519 
520 static inline void gen_ext(TCGv res, TCGv val, int opsize, int sign)
521 {
522     switch (opsize) {
523     case OS_BYTE:
524         if (sign) {
525             tcg_gen_ext8s_i32(res, val);
526         } else {
527             tcg_gen_ext8u_i32(res, val);
528         }
529         break;
530     case OS_WORD:
531         if (sign) {
532             tcg_gen_ext16s_i32(res, val);
533         } else {
534             tcg_gen_ext16u_i32(res, val);
535         }
536         break;
537     case OS_LONG:
538         tcg_gen_mov_i32(res, val);
539         break;
540     default:
541         g_assert_not_reached();
542     }
543 }
544 
545 /* Evaluate all the CC flags.  */
546 
547 static void gen_flush_flags(DisasContext *s)
548 {
549     TCGv t0, t1;
550 
551     switch (s->cc_op) {
552     case CC_OP_FLAGS:
553         return;
554 
555     case CC_OP_ADDB:
556     case CC_OP_ADDW:
557     case CC_OP_ADDL:
558         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
559         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
560         /* Compute signed overflow for addition.  */
561         t0 = tcg_temp_new();
562         t1 = tcg_temp_new();
563         tcg_gen_sub_i32(t0, QREG_CC_N, QREG_CC_V);
564         gen_ext(t0, t0, s->cc_op - CC_OP_ADDB, 1);
565         tcg_gen_xor_i32(t1, QREG_CC_N, QREG_CC_V);
566         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
567         tcg_temp_free(t0);
568         tcg_gen_andc_i32(QREG_CC_V, t1, QREG_CC_V);
569         tcg_temp_free(t1);
570         break;
571 
572     case CC_OP_SUBB:
573     case CC_OP_SUBW:
574     case CC_OP_SUBL:
575         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
576         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
577         /* Compute signed overflow for subtraction.  */
578         t0 = tcg_temp_new();
579         t1 = tcg_temp_new();
580         tcg_gen_add_i32(t0, QREG_CC_N, QREG_CC_V);
581         gen_ext(t0, t0, s->cc_op - CC_OP_SUBB, 1);
582         tcg_gen_xor_i32(t1, QREG_CC_N, t0);
583         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
584         tcg_temp_free(t0);
585         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t1);
586         tcg_temp_free(t1);
587         break;
588 
589     case CC_OP_CMPB:
590     case CC_OP_CMPW:
591     case CC_OP_CMPL:
592         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_C, QREG_CC_N, QREG_CC_V);
593         tcg_gen_sub_i32(QREG_CC_Z, QREG_CC_N, QREG_CC_V);
594         gen_ext(QREG_CC_Z, QREG_CC_Z, s->cc_op - CC_OP_CMPB, 1);
595         /* Compute signed overflow for subtraction.  */
596         t0 = tcg_temp_new();
597         tcg_gen_xor_i32(t0, QREG_CC_Z, QREG_CC_N);
598         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, QREG_CC_N);
599         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t0);
600         tcg_temp_free(t0);
601         tcg_gen_mov_i32(QREG_CC_N, QREG_CC_Z);
602         break;
603 
604     case CC_OP_LOGIC:
605         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
606         tcg_gen_movi_i32(QREG_CC_C, 0);
607         tcg_gen_movi_i32(QREG_CC_V, 0);
608         break;
609 
610     case CC_OP_DYNAMIC:
611         gen_helper_flush_flags(cpu_env, QREG_CC_OP);
612         s->cc_op_synced = 1;
613         break;
614 
615     default:
616         t0 = tcg_const_i32(s->cc_op);
617         gen_helper_flush_flags(cpu_env, t0);
618         tcg_temp_free(t0);
619         s->cc_op_synced = 1;
620         break;
621     }
622 
623     /* Note that flush_flags has also assigned CC_OP_FLAGS to env->cc_op.  */
624     s->cc_op = CC_OP_FLAGS;
625 }
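/* After gen_flush_flags() the flags are in the CC_OP_FLAGS representation
 * consumed by gen_cc_cond() below: C and X are set iff their registers are
 * nonzero, N and V are tested via their sign bit, and Z is set iff
 * QREG_CC_Z is zero.  */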
626 
627 static inline TCGv gen_extend(TCGv val, int opsize, int sign)
628 {
629     TCGv tmp;
630 
631     if (opsize == OS_LONG) {
632         tmp = val;
633     } else {
634         tmp = tcg_temp_new();
635         gen_ext(tmp, val, opsize, sign);
636     }
637 
638     return tmp;
639 }
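/* Note that for OS_LONG gen_extend() returns VAL itself rather than a fresh
 * temporary, so callers must not free the result unconditionally.  */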
640 
641 static void gen_logic_cc(DisasContext *s, TCGv val, int opsize)
642 {
643     gen_ext(QREG_CC_N, val, opsize, 1);
644     set_cc_op(s, CC_OP_LOGIC);
645 }
646 
647 static void gen_update_cc_cmp(DisasContext *s, TCGv dest, TCGv src, int opsize)
648 {
649     tcg_gen_mov_i32(QREG_CC_N, dest);
650     tcg_gen_mov_i32(QREG_CC_V, src);
651     set_cc_op(s, CC_OP_CMPB + opsize);
652 }
653 
654 static void gen_update_cc_add(TCGv dest, TCGv src, int opsize)
655 {
656     gen_ext(QREG_CC_N, dest, opsize, 1);
657     tcg_gen_mov_i32(QREG_CC_V, src);
658 }
659 
660 static inline int opsize_bytes(int opsize)
661 {
662     switch (opsize) {
663     case OS_BYTE: return 1;
664     case OS_WORD: return 2;
665     case OS_LONG: return 4;
666     case OS_SINGLE: return 4;
667     case OS_DOUBLE: return 8;
668     case OS_EXTENDED: return 12;
669     case OS_PACKED: return 12;
670     default:
671         g_assert_not_reached();
672     }
673 }
674 
675 static inline int insn_opsize(int insn)
676 {
677     switch ((insn >> 6) & 3) {
678     case 0: return OS_BYTE;
679     case 1: return OS_WORD;
680     case 2: return OS_LONG;
681     default:
682         g_assert_not_reached();
683     }
684 }
685 
686 static inline int ext_opsize(int ext, int pos)
687 {
688     switch ((ext >> pos) & 7) {
689     case 0: return OS_LONG;
690     case 1: return OS_SINGLE;
691     case 2: return OS_EXTENDED;
692     case 3: return OS_PACKED;
693     case 4: return OS_WORD;
694     case 5: return OS_DOUBLE;
695     case 6: return OS_BYTE;
696     default:
697         g_assert_not_reached();
698     }
699 }
700 
701 /* Assign value to a register.  If the width is less than the register width
702    only the low part of the register is set.  */
703 static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
704 {
705     TCGv tmp;
706     switch (opsize) {
707     case OS_BYTE:
708         tcg_gen_andi_i32(reg, reg, 0xffffff00);
709         tmp = tcg_temp_new();
710         tcg_gen_ext8u_i32(tmp, val);
711         tcg_gen_or_i32(reg, reg, tmp);
712         tcg_temp_free(tmp);
713         break;
714     case OS_WORD:
715         tcg_gen_andi_i32(reg, reg, 0xffff0000);
716         tmp = tcg_temp_new();
717         tcg_gen_ext16u_i32(tmp, val);
718         tcg_gen_or_i32(reg, reg, tmp);
719         tcg_temp_free(tmp);
720         break;
721     case OS_LONG:
722     case OS_SINGLE:
723         tcg_gen_mov_i32(reg, val);
724         break;
725     default:
726         g_assert_not_reached();
727     }
728 }
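/* For example, with opsize == OS_WORD, reg == 0x12345678 and
 * val == 0xcafeabcd, the register ends up holding 0x1234abcd.  */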
729 
730 /* Generate code for an "effective address".  Does not adjust the base
731    register for autoincrement addressing modes.  */
732 static TCGv gen_lea_mode(CPUM68KState *env, DisasContext *s,
733                          int mode, int reg0, int opsize)
734 {
735     TCGv reg;
736     TCGv tmp;
737     uint16_t ext;
738     uint32_t offset;
739 
740     switch (mode) {
741     case 0: /* Data register direct.  */
742     case 1: /* Address register direct.  */
743         return NULL_QREG;
744     case 3: /* Indirect postincrement.  */
745         if (opsize == OS_UNSIZED) {
746             return NULL_QREG;
747         }
748         /* fallthru */
749     case 2: /* Indirect register */
750         return get_areg(s, reg0);
751     case 4: /* Indirect predecrement.  */
752         if (opsize == OS_UNSIZED) {
753             return NULL_QREG;
754         }
755         reg = get_areg(s, reg0);
756         tmp = tcg_temp_new();
757         if (reg0 == 7 && opsize == OS_BYTE &&
758             m68k_feature(s->env, M68K_FEATURE_M68000)) {
759             tcg_gen_subi_i32(tmp, reg, 2);
760         } else {
761             tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
762         }
763         return tmp;
764     case 5: /* Indirect displacement.  */
765         reg = get_areg(s, reg0);
766         tmp = tcg_temp_new();
767         ext = read_im16(env, s);
768         tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
769         return tmp;
770     case 6: /* Indirect index + displacement.  */
771         reg = get_areg(s, reg0);
772         return gen_lea_indexed(env, s, reg);
773     case 7: /* Other */
774         switch (reg0) {
775         case 0: /* Absolute short.  */
776             offset = (int16_t)read_im16(env, s);
777             return tcg_const_i32(offset);
778         case 1: /* Absolute long.  */
779             offset = read_im32(env, s);
780             return tcg_const_i32(offset);
781         case 2: /* pc displacement  */
782             offset = s->pc;
783             offset += (int16_t)read_im16(env, s);
784             return tcg_const_i32(offset);
785         case 3: /* pc index+displacement.  */
786             return gen_lea_indexed(env, s, NULL_QREG);
787         case 4: /* Immediate.  */
788         default:
789             return NULL_QREG;
790         }
791     }
792     /* Should never happen.  */
793     return NULL_QREG;
794 }
795 
796 static TCGv gen_lea(CPUM68KState *env, DisasContext *s, uint16_t insn,
797                     int opsize)
798 {
799     int mode = extract32(insn, 3, 3);
800     int reg0 = REG(insn, 0);
801     return gen_lea_mode(env, s, mode, reg0, opsize);
802 }
803 
804 /* Generate code to load/store a value from/into an EA.  If WHAT is
805    EA_STORE this is a store, otherwise it is a load (EA_LOADS sign-extends,
806    EA_LOADU zero-extends).  ADDRP is non-null for read-write operands.  */
807 static TCGv gen_ea_mode(CPUM68KState *env, DisasContext *s, int mode, int reg0,
808                         int opsize, TCGv val, TCGv *addrp, ea_what what)
809 {
810     TCGv reg, tmp, result;
811     int32_t offset;
812 
813     switch (mode) {
814     case 0: /* Data register direct.  */
815         reg = cpu_dregs[reg0];
816         if (what == EA_STORE) {
817             gen_partset_reg(opsize, reg, val);
818             return store_dummy;
819         } else {
820             return gen_extend(reg, opsize, what == EA_LOADS);
821         }
822     case 1: /* Address register direct.  */
823         reg = get_areg(s, reg0);
824         if (what == EA_STORE) {
825             tcg_gen_mov_i32(reg, val);
826             return store_dummy;
827         } else {
828             return gen_extend(reg, opsize, what == EA_LOADS);
829         }
830     case 2: /* Indirect register */
831         reg = get_areg(s, reg0);
832         return gen_ldst(s, opsize, reg, val, what);
833     case 3: /* Indirect postincrement.  */
834         reg = get_areg(s, reg0);
835         result = gen_ldst(s, opsize, reg, val, what);
836         if (what == EA_STORE || !addrp) {
837             TCGv tmp = tcg_temp_new();
838             if (reg0 == 7 && opsize == OS_BYTE &&
839                 m68k_feature(s->env, M68K_FEATURE_M68000)) {
840                 tcg_gen_addi_i32(tmp, reg, 2);
841             } else {
842                 tcg_gen_addi_i32(tmp, reg, opsize_bytes(opsize));
843             }
844             delay_set_areg(s, reg0, tmp, true);
845         }
846         return result;
847     case 4: /* Indirect predecrement.  */
848         if (addrp && what == EA_STORE) {
849             tmp = *addrp;
850         } else {
851             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
852             if (IS_NULL_QREG(tmp)) {
853                 return tmp;
854             }
855             if (addrp) {
856                 *addrp = tmp;
857             }
858         }
859         result = gen_ldst(s, opsize, tmp, val, what);
860         if (what == EA_STORE || !addrp) {
861             delay_set_areg(s, reg0, tmp, false);
862         }
863         return result;
864     case 5: /* Indirect displacement.  */
865     case 6: /* Indirect index + displacement.  */
866     do_indirect:
867         if (addrp && what == EA_STORE) {
868             tmp = *addrp;
869         } else {
870             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
871             if (IS_NULL_QREG(tmp)) {
872                 return tmp;
873             }
874             if (addrp) {
875                 *addrp = tmp;
876             }
877         }
878         return gen_ldst(s, opsize, tmp, val, what);
879     case 7: /* Other */
880         switch (reg0) {
881         case 0: /* Absolute short.  */
882         case 1: /* Absolute long.  */
883         case 2: /* pc displacement  */
884         case 3: /* pc index+displacement.  */
885             goto do_indirect;
886         case 4: /* Immediate.  */
887             /* Sign extend values for consistency.  */
888             switch (opsize) {
889             case OS_BYTE:
890                 if (what == EA_LOADS) {
891                     offset = (int8_t)read_im8(env, s);
892                 } else {
893                     offset = read_im8(env, s);
894                 }
895                 break;
896             case OS_WORD:
897                 if (what == EA_LOADS) {
898                     offset = (int16_t)read_im16(env, s);
899                 } else {
900                     offset = read_im16(env, s);
901                 }
902                 break;
903             case OS_LONG:
904                 offset = read_im32(env, s);
905                 break;
906             default:
907                 g_assert_not_reached();
908             }
909             return tcg_const_i32(offset);
910         default:
911             return NULL_QREG;
912         }
913     }
914     /* Should never happen.  */
915     return NULL_QREG;
916 }
917 
918 static TCGv gen_ea(CPUM68KState *env, DisasContext *s, uint16_t insn,
919                    int opsize, TCGv val, TCGv *addrp, ea_what what)
920 {
921     int mode = extract32(insn, 3, 3);
922     int reg0 = REG(insn, 0);
923     return gen_ea_mode(env, s, mode, reg0, opsize, val, addrp, what);
924 }
925 
926 static TCGv_ptr gen_fp_ptr(int freg)
927 {
928     TCGv_ptr fp = tcg_temp_new_ptr();
929     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fregs[freg]));
930     return fp;
931 }
932 
933 static TCGv_ptr gen_fp_result_ptr(void)
934 {
935     TCGv_ptr fp = tcg_temp_new_ptr();
936     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fp_result));
937     return fp;
938 }
939 
940 static void gen_fp_move(TCGv_ptr dest, TCGv_ptr src)
941 {
942     TCGv t32;
943     TCGv_i64 t64;
944 
945     t32 = tcg_temp_new();
946     tcg_gen_ld16u_i32(t32, src, offsetof(FPReg, l.upper));
947     tcg_gen_st16_i32(t32, dest, offsetof(FPReg, l.upper));
948     tcg_temp_free(t32);
949 
950     t64 = tcg_temp_new_i64();
951     tcg_gen_ld_i64(t64, src, offsetof(FPReg, l.lower));
952     tcg_gen_st_i64(t64, dest, offsetof(FPReg, l.lower));
953     tcg_temp_free_i64(t64);
954 }
955 
956 static void gen_load_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp)
957 {
958     TCGv tmp;
959     TCGv_i64 t64;
960     int index = IS_USER(s);
961 
962     t64 = tcg_temp_new_i64();
963     tmp = tcg_temp_new();
964     switch (opsize) {
965     case OS_BYTE:
966         tcg_gen_qemu_ld8s(tmp, addr, index);
967         gen_helper_exts32(cpu_env, fp, tmp);
968         break;
969     case OS_WORD:
970         tcg_gen_qemu_ld16s(tmp, addr, index);
971         gen_helper_exts32(cpu_env, fp, tmp);
972         break;
973     case OS_LONG:
974         tcg_gen_qemu_ld32u(tmp, addr, index);
975         gen_helper_exts32(cpu_env, fp, tmp);
976         break;
977     case OS_SINGLE:
978         tcg_gen_qemu_ld32u(tmp, addr, index);
979         gen_helper_extf32(cpu_env, fp, tmp);
980         break;
981     case OS_DOUBLE:
982         tcg_gen_qemu_ld64(t64, addr, index);
983         gen_helper_extf64(cpu_env, fp, t64);
984         tcg_temp_free_i64(t64);
985         break;
986     case OS_EXTENDED:
987         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
988             gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
989             break;
990         }
991         tcg_gen_qemu_ld32u(tmp, addr, index);
992         tcg_gen_shri_i32(tmp, tmp, 16);
993         tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
994         tcg_gen_addi_i32(tmp, addr, 4);
995         tcg_gen_qemu_ld64(t64, tmp, index);
996         tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
997         break;
998     case OS_PACKED:
999         /* unimplemented data type on 68040/ColdFire
1000          * FIXME if needed for another FPU
1001          */
1002         gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1003         break;
1004     default:
1005         g_assert_not_reached();
1006     }
1007     tcg_temp_free(tmp);
1008     tcg_temp_free_i64(t64);
1009     gen_throws_exception = gen_last_qop;
1010 }
1011 
1012 static void gen_store_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp)
1013 {
1014     TCGv tmp;
1015     TCGv_i64 t64;
1016     int index = IS_USER(s);
1017 
1018     t64 = tcg_temp_new_i64();
1019     tmp = tcg_temp_new();
1020     switch (opsize) {
1021     case OS_BYTE:
1022         gen_helper_reds32(tmp, cpu_env, fp);
1023         tcg_gen_qemu_st8(tmp, addr, index);
1024         break;
1025     case OS_WORD:
1026         gen_helper_reds32(tmp, cpu_env, fp);
1027         tcg_gen_qemu_st16(tmp, addr, index);
1028         break;
1029     case OS_LONG:
1030         gen_helper_reds32(tmp, cpu_env, fp);
1031         tcg_gen_qemu_st32(tmp, addr, index);
1032         break;
1033     case OS_SINGLE:
1034         gen_helper_redf32(tmp, cpu_env, fp);
1035         tcg_gen_qemu_st32(tmp, addr, index);
1036         break;
1037     case OS_DOUBLE:
1038         gen_helper_redf64(t64, cpu_env, fp);
1039         tcg_gen_qemu_st64(t64, addr, index);
1040         break;
1041     case OS_EXTENDED:
1042         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1043             gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1044             break;
1045         }
1046         tcg_gen_ld16u_i32(tmp, fp, offsetof(FPReg, l.upper));
1047         tcg_gen_shli_i32(tmp, tmp, 16);
1048         tcg_gen_qemu_st32(tmp, addr, index);
1049         tcg_gen_addi_i32(tmp, addr, 4);
1050         tcg_gen_ld_i64(t64, fp, offsetof(FPReg, l.lower));
1051         tcg_gen_qemu_st64(t64, tmp, index);
1052         break;
1053     case OS_PACKED:
1054         /* unimplemented data type on 68040/ColdFire
1055          * FIXME if needed for another FPU
1056          */
1057         gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1058         break;
1059     default:
1060         g_assert_not_reached();
1061     }
1062     tcg_temp_free(tmp);
1063     tcg_temp_free_i64(t64);
1064     gen_throws_exception = gen_last_qop;
1065 }
1066 
1067 static void gen_ldst_fp(DisasContext *s, int opsize, TCGv addr,
1068                         TCGv_ptr fp, ea_what what)
1069 {
1070     if (what == EA_STORE) {
1071         gen_store_fp(s, opsize, addr, fp);
1072     } else {
1073         gen_load_fp(s, opsize, addr, fp);
1074     }
1075 }
1076 
1077 static int gen_ea_mode_fp(CPUM68KState *env, DisasContext *s, int mode,
1078                           int reg0, int opsize, TCGv_ptr fp, ea_what what)
1079 {
1080     TCGv reg, addr, tmp;
1081     TCGv_i64 t64;
1082 
1083     switch (mode) {
1084     case 0: /* Data register direct.  */
1085         reg = cpu_dregs[reg0];
1086         if (what == EA_STORE) {
1087             switch (opsize) {
1088             case OS_BYTE:
1089             case OS_WORD:
1090             case OS_LONG:
1091                 gen_helper_reds32(reg, cpu_env, fp);
1092                 break;
1093             case OS_SINGLE:
1094                 gen_helper_redf32(reg, cpu_env, fp);
1095                 break;
1096             default:
1097                 g_assert_not_reached();
1098             }
1099         } else {
1100             tmp = tcg_temp_new();
1101             switch (opsize) {
1102             case OS_BYTE:
1103                 tcg_gen_ext8s_i32(tmp, reg);
1104                 gen_helper_exts32(cpu_env, fp, tmp);
1105                 break;
1106             case OS_WORD:
1107                 tcg_gen_ext16s_i32(tmp, reg);
1108                 gen_helper_exts32(cpu_env, fp, tmp);
1109                 break;
1110             case OS_LONG:
1111                 gen_helper_exts32(cpu_env, fp, reg);
1112                 break;
1113             case OS_SINGLE:
1114                 gen_helper_extf32(cpu_env, fp, reg);
1115                 break;
1116             default:
1117                 g_assert_not_reached();
1118             }
1119             tcg_temp_free(tmp);
1120         }
1121         return 0;
1122     case 1: /* Address register direct.  */
1123         return -1;
1124     case 2: /* Indirect register */
1125         addr = get_areg(s, reg0);
1126         gen_ldst_fp(s, opsize, addr, fp, what);
1127         return 0;
1128     case 3: /* Indirect postincrement.  */
1129         addr = cpu_aregs[reg0];
1130         gen_ldst_fp(s, opsize, addr, fp, what);
1131         tcg_gen_addi_i32(addr, addr, opsize_bytes(opsize));
1132         return 0;
1133     case 4: /* Indirect predecrement.  */
1134         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1135         if (IS_NULL_QREG(addr)) {
1136             return -1;
1137         }
1138         gen_ldst_fp(s, opsize, addr, fp, what);
1139         tcg_gen_mov_i32(cpu_aregs[reg0], addr);
1140         return 0;
1141     case 5: /* Indirect displacement.  */
1142     case 6: /* Indirect index + displacement.  */
1143     do_indirect:
1144         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1145         if (IS_NULL_QREG(addr)) {
1146             return -1;
1147         }
1148         gen_ldst_fp(s, opsize, addr, fp, what);
1149         return 0;
1150     case 7: /* Other */
1151         switch (reg0) {
1152         case 0: /* Absolute short.  */
1153         case 1: /* Absolute long.  */
1154         case 2: /* pc displacement  */
1155         case 3: /* pc index+displacement.  */
1156             goto do_indirect;
1157         case 4: /* Immediate.  */
1158             if (what == EA_STORE) {
1159                 return -1;
1160             }
1161             switch (opsize) {
1162             case OS_BYTE:
1163                 tmp = tcg_const_i32((int8_t)read_im8(env, s));
1164                 gen_helper_exts32(cpu_env, fp, tmp);
1165                 tcg_temp_free(tmp);
1166                 break;
1167             case OS_WORD:
1168                 tmp = tcg_const_i32((int16_t)read_im16(env, s));
1169                 gen_helper_exts32(cpu_env, fp, tmp);
1170                 tcg_temp_free(tmp);
1171                 break;
1172             case OS_LONG:
1173                 tmp = tcg_const_i32(read_im32(env, s));
1174                 gen_helper_exts32(cpu_env, fp, tmp);
1175                 tcg_temp_free(tmp);
1176                 break;
1177             case OS_SINGLE:
1178                 tmp = tcg_const_i32(read_im32(env, s));
1179                 gen_helper_extf32(cpu_env, fp, tmp);
1180                 tcg_temp_free(tmp);
1181                 break;
1182             case OS_DOUBLE:
1183                 t64 = tcg_const_i64(read_im64(env, s));
1184                 gen_helper_extf64(cpu_env, fp, t64);
1185                 tcg_temp_free_i64(t64);
1186                 break;
1187             case OS_EXTENDED:
1188                 if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1189                     gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1190                     break;
1191                 }
1192                 tmp = tcg_const_i32(read_im32(env, s) >> 16);
1193                 tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1194                 tcg_temp_free(tmp);
1195                 t64 = tcg_const_i64(read_im64(env, s));
1196                 tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1197                 tcg_temp_free_i64(t64);
1198                 break;
1199             case OS_PACKED:
1200                 /* unimplemented data type on 68040/ColdFire
1201                  * FIXME if needed for another FPU
1202                  */
1203                 gen_exception(s, s->insn_pc, EXCP_FP_UNIMP);
1204                 break;
1205             default:
1206                 g_assert_not_reached();
1207             }
1208             return 0;
1209         default:
1210             return -1;
1211         }
1212     }
1213     return -1;
1214 }
1215 
1216 static int gen_ea_fp(CPUM68KState *env, DisasContext *s, uint16_t insn,
1217                        int opsize, TCGv_ptr fp, ea_what what)
1218 {
1219     int mode = extract32(insn, 3, 3);
1220     int reg0 = REG(insn, 0);
1221     return gen_ea_mode_fp(env, s, mode, reg0, opsize, fp, what);
1222 }
1223 
1224 typedef struct {
1225     TCGCond tcond;
1226     bool g1;
1227     bool g2;
1228     TCGv v1;
1229     TCGv v2;
1230 } DisasCompare;
1231 
1232 static void gen_cc_cond(DisasCompare *c, DisasContext *s, int cond)
1233 {
1234     TCGv tmp, tmp2;
1235     TCGCond tcond;
1236     CCOp op = s->cc_op;
1237 
1238     /* The CC_OP_CMP form can handle most normal comparisons directly.  */
1239     if (op == CC_OP_CMPB || op == CC_OP_CMPW || op == CC_OP_CMPL) {
1240         c->g1 = c->g2 = 1;
1241         c->v1 = QREG_CC_N;
1242         c->v2 = QREG_CC_V;
1243         switch (cond) {
1244         case 2: /* HI */
1245         case 3: /* LS */
1246             tcond = TCG_COND_LEU;
1247             goto done;
1248         case 4: /* CC */
1249         case 5: /* CS */
1250             tcond = TCG_COND_LTU;
1251             goto done;
1252         case 6: /* NE */
1253         case 7: /* EQ */
1254             tcond = TCG_COND_EQ;
1255             goto done;
1256         case 10: /* PL */
1257         case 11: /* MI */
1258             c->g1 = c->g2 = 0;
1259             c->v2 = tcg_const_i32(0);
1260             c->v1 = tmp = tcg_temp_new();
1261             tcg_gen_sub_i32(tmp, QREG_CC_N, QREG_CC_V);
1262             gen_ext(tmp, tmp, op - CC_OP_CMPB, 1);
1263             /* fallthru */
1264         case 12: /* GE */
1265         case 13: /* LT */
1266             tcond = TCG_COND_LT;
1267             goto done;
1268         case 14: /* GT */
1269         case 15: /* LE */
1270             tcond = TCG_COND_LE;
1271             goto done;
1272         }
1273     }
1274 
1275     c->g1 = 1;
1276     c->g2 = 0;
1277     c->v2 = tcg_const_i32(0);
1278 
1279     switch (cond) {
1280     case 0: /* T */
1281     case 1: /* F */
1282         c->v1 = c->v2;
1283         tcond = TCG_COND_NEVER;
1284         goto done;
1285     case 14: /* GT (!(Z || (N ^ V))) */
1286     case 15: /* LE (Z || (N ^ V)) */
1287         /* Logic operations clear V, which simplifies LE to (Z || N),
1288            and since Z and N are co-located, this becomes a normal
1289            comparison vs N.  */
1290         if (op == CC_OP_LOGIC) {
1291             c->v1 = QREG_CC_N;
1292             tcond = TCG_COND_LE;
1293             goto done;
1294         }
1295         break;
1296     case 12: /* GE (!(N ^ V)) */
1297     case 13: /* LT (N ^ V) */
1298         /* Logic operations clear V, which simplifies this to N.  */
1299         if (op != CC_OP_LOGIC) {
1300             break;
1301         }
1302         /* fallthru */
1303     case 10: /* PL (!N) */
1304     case 11: /* MI (N) */
1305         /* Several cases represent N normally.  */
1306         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1307             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1308             op == CC_OP_LOGIC) {
1309             c->v1 = QREG_CC_N;
1310             tcond = TCG_COND_LT;
1311             goto done;
1312         }
1313         break;
1314     case 6: /* NE (!Z) */
1315     case 7: /* EQ (Z) */
1316         /* Some cases fold Z into N.  */
1317         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1318             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1319             op == CC_OP_LOGIC) {
1320             tcond = TCG_COND_EQ;
1321             c->v1 = QREG_CC_N;
1322             goto done;
1323         }
1324         break;
1325     case 4: /* CC (!C) */
1326     case 5: /* CS (C) */
1327         /* Some cases fold C into X.  */
1328         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1329             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL) {
1330             tcond = TCG_COND_NE;
1331             c->v1 = QREG_CC_X;
1332             goto done;
1333         }
1334         /* fallthru */
1335     case 8: /* VC (!V) */
1336     case 9: /* VS (V) */
1337         /* Logic operations clear V and C.  */
1338         if (op == CC_OP_LOGIC) {
1339             tcond = TCG_COND_NEVER;
1340             c->v1 = c->v2;
1341             goto done;
1342         }
1343         break;
1344     }
1345 
1346     /* Otherwise, flush flag state to CC_OP_FLAGS.  */
1347     gen_flush_flags(s);
1348 
1349     switch (cond) {
1350     case 0: /* T */
1351     case 1: /* F */
1352     default:
1353         /* Invalid, or handled above.  */
1354         abort();
1355     case 2: /* HI (!C && !Z) -> !(C || Z) */
1356     case 3: /* LS (C || Z) */
1357         c->v1 = tmp = tcg_temp_new();
1358         c->g1 = 0;
1359         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1360         tcg_gen_or_i32(tmp, tmp, QREG_CC_C);
1361         tcond = TCG_COND_NE;
1362         break;
1363     case 4: /* CC (!C) */
1364     case 5: /* CS (C) */
1365         c->v1 = QREG_CC_C;
1366         tcond = TCG_COND_NE;
1367         break;
1368     case 6: /* NE (!Z) */
1369     case 7: /* EQ (Z) */
1370         c->v1 = QREG_CC_Z;
1371         tcond = TCG_COND_EQ;
1372         break;
1373     case 8: /* VC (!V) */
1374     case 9: /* VS (V) */
1375         c->v1 = QREG_CC_V;
1376         tcond = TCG_COND_LT;
1377         break;
1378     case 10: /* PL (!N) */
1379     case 11: /* MI (N) */
1380         c->v1 = QREG_CC_N;
1381         tcond = TCG_COND_LT;
1382         break;
1383     case 12: /* GE (!(N ^ V)) */
1384     case 13: /* LT (N ^ V) */
1385         c->v1 = tmp = tcg_temp_new();
1386         c->g1 = 0;
1387         tcg_gen_xor_i32(tmp, QREG_CC_N, QREG_CC_V);
1388         tcond = TCG_COND_LT;
1389         break;
1390     case 14: /* GT (!(Z || (N ^ V))) */
1391     case 15: /* LE (Z || (N ^ V)) */
1392         c->v1 = tmp = tcg_temp_new();
1393         c->g1 = 0;
1394         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1395         tcg_gen_neg_i32(tmp, tmp);
1396         tmp2 = tcg_temp_new();
1397         tcg_gen_xor_i32(tmp2, QREG_CC_N, QREG_CC_V);
1398         tcg_gen_or_i32(tmp, tmp, tmp2);
1399         tcg_temp_free(tmp2);
1400         tcond = TCG_COND_LT;
1401         break;
1402     }
1403 
1404  done:
1405     if ((cond & 1) == 0) {
1406         tcond = tcg_invert_cond(tcond);
1407     }
1408     c->tcond = tcond;
1409 }
1410 
1411 static void free_cond(DisasCompare *c)
1412 {
1413     if (!c->g1) {
1414         tcg_temp_free(c->v1);
1415     }
1416     if (!c->g2) {
1417         tcg_temp_free(c->v2);
1418     }
1419 }
1420 
1421 static void gen_jmpcc(DisasContext *s, int cond, TCGLabel *l1)
1422 {
1423     DisasCompare c;
1424 
1425     gen_cc_cond(&c, s, cond);
1426     update_cc_op(s);
1427     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
1428     free_cond(&c);
1429 }
1430 
1431 /* Force a TB lookup after an instruction that changes the CPU state.  */
1432 static void gen_lookup_tb(DisasContext *s)
1433 {
1434     update_cc_op(s);
1435     tcg_gen_movi_i32(QREG_PC, s->pc);
1436     s->is_jmp = DISAS_UPDATE;
1437 }
1438 
1439 #define SRC_EA(env, result, opsize, op_sign, addrp) do {                \
1440         result = gen_ea(env, s, insn, opsize, NULL_QREG, addrp,         \
1441                         op_sign ? EA_LOADS : EA_LOADU);                 \
1442         if (IS_NULL_QREG(result)) {                                     \
1443             gen_addr_fault(s);                                          \
1444             return;                                                     \
1445         }                                                               \
1446     } while (0)
1447 
1448 #define DEST_EA(env, insn, opsize, val, addrp) do {                     \
1449         TCGv ea_result = gen_ea(env, s, insn, opsize, val, addrp, EA_STORE); \
1450         if (IS_NULL_QREG(ea_result)) {                                  \
1451             gen_addr_fault(s);                                          \
1452             return;                                                     \
1453         }                                                               \
1454     } while (0)
1455 
1456 static inline bool use_goto_tb(DisasContext *s, uint32_t dest)
1457 {
1458 #ifndef CONFIG_USER_ONLY
1459     return (s->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) ||
1460            (s->insn_pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
1461 #else
1462     return true;
1463 #endif
1464 }
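/* Direct block chaining via goto_tb is only used when the destination lies
 * in the same guest page as the TB start or the current instruction;
 * anything else takes the slow path through gen_jmp_im()/tcg_gen_exit_tb(0)
 * in gen_jmp_tb() below, presumably so that page-granular TB invalidation
 * stays safe.  */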
1465 
1466 /* Generate a jump to an immediate address.  */
1467 static void gen_jmp_tb(DisasContext *s, int n, uint32_t dest)
1468 {
1469     if (unlikely(s->singlestep_enabled)) {
1470         gen_exception(s, dest, EXCP_DEBUG);
1471     } else if (use_goto_tb(s, dest)) {
1472         tcg_gen_goto_tb(n);
1473         tcg_gen_movi_i32(QREG_PC, dest);
1474         tcg_gen_exit_tb((uintptr_t)s->tb + n);
1475     } else {
1476         gen_jmp_im(s, dest);
1477         tcg_gen_exit_tb(0);
1478     }
1479     s->is_jmp = DISAS_TB_JUMP;
1480 }
1481 
1482 DISAS_INSN(scc)
1483 {
1484     DisasCompare c;
1485     int cond;
1486     TCGv tmp;
1487 
1488     cond = (insn >> 8) & 0xf;
1489     gen_cc_cond(&c, s, cond);
1490 
1491     tmp = tcg_temp_new();
1492     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
1493     free_cond(&c);
1494 
1495     tcg_gen_neg_i32(tmp, tmp);
1496     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
1497     tcg_temp_free(tmp);
1498 }
1499 
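/* DBcc: if the condition holds, fall through to the next instruction;
 * otherwise decrement the low word of Dn and take the branch unless the
 * counter has wrapped to -1.  */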
1500 DISAS_INSN(dbcc)
1501 {
1502     TCGLabel *l1;
1503     TCGv reg;
1504     TCGv tmp;
1505     int16_t offset;
1506     uint32_t base;
1507 
1508     reg = DREG(insn, 0);
1509     base = s->pc;
1510     offset = (int16_t)read_im16(env, s);
1511     l1 = gen_new_label();
1512     gen_jmpcc(s, (insn >> 8) & 0xf, l1);
1513 
1514     tmp = tcg_temp_new();
1515     tcg_gen_ext16s_i32(tmp, reg);
1516     tcg_gen_addi_i32(tmp, tmp, -1);
1517     gen_partset_reg(OS_WORD, reg, tmp);
1518     tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, -1, l1);
1519     gen_jmp_tb(s, 1, base + offset);
1520     gen_set_label(l1);
1521     gen_jmp_tb(s, 0, s->pc);
1522 }
1523 
1524 DISAS_INSN(undef_mac)
1525 {
1526     gen_exception(s, s->pc - 2, EXCP_LINEA);
1527 }
1528 
1529 DISAS_INSN(undef_fpu)
1530 {
1531     gen_exception(s, s->pc - 2, EXCP_LINEF);
1532 }
1533 
1534 DISAS_INSN(undef)
1535 {
1536     /* ??? This covers both instructions that are as yet unimplemented
1537        for the 680x0 series, and those that are implemented but are
1538        actually illegal for CPU32 or pre-68020.  */
1539     qemu_log_mask(LOG_UNIMP, "Illegal instruction: %04x @ %08x\n",
1540                   insn, s->pc - 2);
1541     gen_exception(s, s->pc - 2, EXCP_UNSUPPORTED);
1542 }
1543 
1544 DISAS_INSN(mulw)
1545 {
1546     TCGv reg;
1547     TCGv tmp;
1548     TCGv src;
1549     int sign;
1550 
1551     sign = (insn & 0x100) != 0;
1552     reg = DREG(insn, 9);
1553     tmp = tcg_temp_new();
1554     if (sign)
1555         tcg_gen_ext16s_i32(tmp, reg);
1556     else
1557         tcg_gen_ext16u_i32(tmp, reg);
1558     SRC_EA(env, src, OS_WORD, sign, NULL);
1559     tcg_gen_mul_i32(tmp, tmp, src);
1560     tcg_gen_mov_i32(reg, tmp);
1561     gen_logic_cc(s, tmp, OS_LONG);
1562     tcg_temp_free(tmp);
1563 }
1564 
1565 DISAS_INSN(divw)
1566 {
1567     int sign;
1568     TCGv src;
1569     TCGv destr;
1570 
1571     /* divX.w <EA>,Dn    32/16 -> 16r:16q */
1572 
1573     sign = (insn & 0x100) != 0;
1574 
1575     /* dest.l / src.w */
1576 
1577     SRC_EA(env, src, OS_WORD, sign, NULL);
1578     destr = tcg_const_i32(REG(insn, 9));
1579     if (sign) {
1580         gen_helper_divsw(cpu_env, destr, src);
1581     } else {
1582         gen_helper_divuw(cpu_env, destr, src);
1583     }
1584     tcg_temp_free(destr);
1585 
1586     set_cc_op(s, CC_OP_FLAGS);
1587 }
1588 
1589 DISAS_INSN(divl)
1590 {
1591     TCGv num, reg, den;
1592     int sign;
1593     uint16_t ext;
1594 
1595     ext = read_im16(env, s);
1596 
1597     sign = (ext & 0x0800) != 0;
1598 
1599     if (ext & 0x400) {
1600         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
1601             gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
1602             return;
1603         }
1604 
1605         /* divX.l <EA>, Dr:Dq    64/32 -> 32r:32q */
1606 
1607         SRC_EA(env, den, OS_LONG, 0, NULL);
1608         num = tcg_const_i32(REG(ext, 12));
1609         reg = tcg_const_i32(REG(ext, 0));
1610         if (sign) {
1611             gen_helper_divsll(cpu_env, num, reg, den);
1612         } else {
1613             gen_helper_divull(cpu_env, num, reg, den);
1614         }
1615         tcg_temp_free(reg);
1616         tcg_temp_free(num);
1617         set_cc_op(s, CC_OP_FLAGS);
1618         return;
1619     }
1620 
1621     /* divX.l <EA>, Dq        32/32 -> 32q     */
1622     /* divXl.l <EA>, Dr:Dq    32/32 -> 32r:32q */
1623 
1624     SRC_EA(env, den, OS_LONG, 0, NULL);
1625     num = tcg_const_i32(REG(ext, 12));
1626     reg = tcg_const_i32(REG(ext, 0));
1627     if (sign) {
1628         gen_helper_divsl(cpu_env, num, reg, den);
1629     } else {
1630         gen_helper_divul(cpu_env, num, reg, den);
1631     }
1632     tcg_temp_free(reg);
1633     tcg_temp_free(num);
1634 
1635     set_cc_op(s, CC_OP_FLAGS);
1636 }
1637 
1638 static void bcd_add(TCGv dest, TCGv src)
1639 {
1640     TCGv t0, t1;
1641 
1642     /*  dest10 = dest10 + src10 + X
1643      *
1644      *        t1 = src
1645      *        t2 = t1 + 0x066
1646      *        t3 = t2 + dest + X
1647      *        t4 = t2 ^ dest
1648      *        t5 = t3 ^ t4
1649      *        t6 = ~t5 & 0x110
1650      *        t7 = (t6 >> 2) | (t6 >> 3)
1651      *        return t3 - t7
1652      */
1653 
1654     /* t1 = (src + 0x066) + dest + X
1655      *    = result with a possible excess 0x6 in each digit
1656      */
1657 
1658     t0 = tcg_const_i32(0x066);
1659     tcg_gen_add_i32(t0, t0, src);
1660 
1661     t1 = tcg_temp_new();
1662     tcg_gen_add_i32(t1, t0, dest);
1663     tcg_gen_add_i32(t1, t1, QREG_CC_X);
1664 
1665     /* we will remove the excess 0x6 where there is no carry */
1666 
1667     /* t0 = (src + 0x0066) ^ dest
1668      *    = t1 without carries
1669      */
1670 
1671     tcg_gen_xor_i32(t0, t0, dest);
1672 
1673     /* extract the carries
1674      * t0 = t0 ^ t1
1675      *    = only the carries
1676      */
1677 
1678     tcg_gen_xor_i32(t0, t0, t1);
1679 
1680     /* for each digit that did not generate a carry, build the 0x6
1681      * (as 3 * 0x2, via the shift/not/mask below) that must be removed again
1682      */
1683 
1684     tcg_gen_shri_i32(t0, t0, 3);
1685     tcg_gen_not_i32(t0, t0);
1686     tcg_gen_andi_i32(t0, t0, 0x22);
1687     tcg_gen_add_i32(dest, t0, t0);
1688     tcg_gen_add_i32(dest, dest, t0);
1689     tcg_temp_free(t0);
1690 
1691     /* remove the excess 0x6
1692      * for digits that have not generated a carry
1693      */
1694 
1695     tcg_gen_sub_i32(dest, t1, dest);
1696     tcg_temp_free(t1);
1697 }
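/* Worked example, assuming X = 0: dest = 0x19, src = 0x28.
 * t0 = 0x28 + 0x66 = 0x8e, t1 = 0x8e + 0x19 = 0xa7, carry pattern = 0x30.
 * The low digit carried (9 + 8 >= 10) so its extra 6 is kept; the high
 * digit did not, so 0x60 is subtracted again: 0xa7 - 0x60 = 0x47.  */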
1698 
1699 static void bcd_sub(TCGv dest, TCGv src)
1700 {
1701     TCGv t0, t1, t2;
1702 
1703     /*  dest10 = dest10 - src10 - X
1704      *         = bcd_add(dest + 1 - X, 0x199 - src)
1705      */
1706 
1707     /* t0 = 0x066 + (0x199 - src) */
1708 
1709     t0 = tcg_temp_new();
1710     tcg_gen_subfi_i32(t0, 0x1ff, src);
1711 
1712     /* t1 = t0 + dest + 1 - X*/
1713 
1714     t1 = tcg_temp_new();
1715     tcg_gen_add_i32(t1, t0, dest);
1716     tcg_gen_addi_i32(t1, t1, 1);
1717     tcg_gen_sub_i32(t1, t1, QREG_CC_X);
1718 
1719     /* t2 = t0 ^ dest */
1720 
1721     t2 = tcg_temp_new();
1722     tcg_gen_xor_i32(t2, t0, dest);
1723 
1724     /* t0 = t1 ^ t2 */
1725 
1726     tcg_gen_xor_i32(t0, t1, t2);
1727 
1728     /* t2 = ~t0 & 0x110
1729      * t0 = (t2 >> 2) | (t2 >> 3)
1730      *
1731      * to fit on 8bit operands, changed in:
1732      *
1733      * t2 = ~(t0 >> 3) & 0x22
1734      * t0 = t2 + t2
1735      * t0 = t0 + t2
1736      */
1737 
1738     tcg_gen_shri_i32(t2, t0, 3);
1739     tcg_gen_not_i32(t2, t2);
1740     tcg_gen_andi_i32(t2, t2, 0x22);
1741     tcg_gen_add_i32(t0, t2, t2);
1742     tcg_gen_add_i32(t0, t0, t2);
1743     tcg_temp_free(t2);
1744 
1745     /* return t1 - t0 */
1746 
1747     tcg_gen_sub_i32(dest, t1, t0);
1748     tcg_temp_free(t0);
1749     tcg_temp_free(t1);
1750 }
1751 
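/* Set CCR flags for a BCD result: C and X come from the carry out of the
 * top digit (bit 8 of VAL), and Z is only ever cleared here, never set,
 * because !Z is sticky across a chain of BCD operations.
 */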
1752 static void bcd_flags(TCGv val)
1753 {
1754     tcg_gen_andi_i32(QREG_CC_C, val, 0x0ff);
1755     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_C);
1756 
1757     tcg_gen_extract_i32(QREG_CC_C, val, 8, 1);
1758 
1759     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
1760 }
1761 
1762 DISAS_INSN(abcd_reg)
1763 {
1764     TCGv src;
1765     TCGv dest;
1766 
1767     gen_flush_flags(s); /* !Z is sticky */
1768 
1769     src = gen_extend(DREG(insn, 0), OS_BYTE, 0);
1770     dest = gen_extend(DREG(insn, 9), OS_BYTE, 0);
1771     bcd_add(dest, src);
1772     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1773 
1774     bcd_flags(dest);
1775 }
1776 
1777 DISAS_INSN(abcd_mem)
1778 {
1779     TCGv src, dest, addr;
1780 
1781     gen_flush_flags(s); /* !Z is sticky */
1782 
1783     /* Indirect pre-decrement load (mode 4) */
1784 
1785     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1786                       NULL_QREG, NULL, EA_LOADU);
1787     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1788                        NULL_QREG, &addr, EA_LOADU);
1789 
1790     bcd_add(dest, src);
1791 
1792     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr, EA_STORE);
1793 
1794     bcd_flags(dest);
1795 }
1796 
1797 DISAS_INSN(sbcd_reg)
1798 {
1799     TCGv src, dest;
1800 
1801     gen_flush_flags(s); /* !Z is sticky */
1802 
1803     src = gen_extend(DREG(insn, 0), OS_BYTE, 0);
1804     dest = gen_extend(DREG(insn, 9), OS_BYTE, 0);
1805 
1806     bcd_sub(dest, src);
1807 
1808     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1809 
1810     bcd_flags(dest);
1811 }
1812 
1813 DISAS_INSN(sbcd_mem)
1814 {
1815     TCGv src, dest, addr;
1816 
1817     gen_flush_flags(s); /* !Z is sticky */
1818 
1819     /* Indirect pre-decrement load (mode 4) */
1820 
1821     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1822                       NULL_QREG, NULL, EA_LOADU);
1823     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1824                        NULL_QREG, &addr, EA_LOADU);
1825 
1826     bcd_sub(dest, src);
1827 
1828     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr, EA_STORE);
1829 
1830     bcd_flags(dest);
1831 }
1832 
1833 DISAS_INSN(nbcd)
1834 {
1835     TCGv src, dest;
1836     TCGv addr;
1837 
1838     gen_flush_flags(s); /* !Z is sticky */
1839 
1840     SRC_EA(env, src, OS_BYTE, 0, &addr);
1841 
1842     dest = tcg_const_i32(0);
1843     bcd_sub(dest, src);
1844 
1845     DEST_EA(env, insn, OS_BYTE, dest, &addr);
1846 
1847     bcd_flags(dest);
1848 
1849     tcg_temp_free(dest);
1850 }
1851 
1852 DISAS_INSN(addsub)
1853 {
1854     TCGv reg;
1855     TCGv dest;
1856     TCGv src;
1857     TCGv tmp;
1858     TCGv addr;
1859     int add;
1860     int opsize;
1861 
1862     add = (insn & 0x4000) != 0;
1863     opsize = insn_opsize(insn);
1864     reg = gen_extend(DREG(insn, 9), opsize, 1);
1865     dest = tcg_temp_new();
1866     if (insn & 0x100) {
1867         SRC_EA(env, tmp, opsize, 1, &addr);
1868         src = reg;
1869     } else {
1870         tmp = reg;
1871         SRC_EA(env, src, opsize, 1, NULL);
1872     }
1873     if (add) {
1874         tcg_gen_add_i32(dest, tmp, src);
1875         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, src);
1876         set_cc_op(s, CC_OP_ADDB + opsize);
1877     } else {
1878         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, tmp, src);
1879         tcg_gen_sub_i32(dest, tmp, src);
1880         set_cc_op(s, CC_OP_SUBB + opsize);
1881     }
1882     gen_update_cc_add(dest, src, opsize);
1883     if (insn & 0x100) {
1884         DEST_EA(env, insn, opsize, dest, &addr);
1885     } else {
1886         gen_partset_reg(opsize, DREG(insn, 9), dest);
1887     }
1888     tcg_temp_free(dest);
1889 }
1890 
1891 /* Reverse the order of the bits in REG.  */
1892 DISAS_INSN(bitrev)
1893 {
1894     TCGv reg;
1895     reg = DREG(insn, 0);
1896     gen_helper_bitrev(reg, reg);
1897 }
1898 
1899 DISAS_INSN(bitop_reg)
1900 {
1901     int opsize;
1902     int op;
1903     TCGv src1;
1904     TCGv src2;
1905     TCGv tmp;
1906     TCGv addr;
1907     TCGv dest;
1908 
1909     if ((insn & 0x38) != 0)
1910         opsize = OS_BYTE;
1911     else
1912         opsize = OS_LONG;
1913     op = (insn >> 6) & 3;
1914     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
1915 
1916     gen_flush_flags(s);
1917     src2 = tcg_temp_new();
1918     if (opsize == OS_BYTE)
1919         tcg_gen_andi_i32(src2, DREG(insn, 9), 7);
1920     else
1921         tcg_gen_andi_i32(src2, DREG(insn, 9), 31);
1922 
1923     tmp = tcg_const_i32(1);
1924     tcg_gen_shl_i32(tmp, tmp, src2);
1925     tcg_temp_free(src2);
1926 
1927     tcg_gen_and_i32(QREG_CC_Z, src1, tmp);
1928 
1929     dest = tcg_temp_new();
1930     switch (op) {
1931     case 1: /* bchg */
1932         tcg_gen_xor_i32(dest, src1, tmp);
1933         break;
1934     case 2: /* bclr */
1935         tcg_gen_andc_i32(dest, src1, tmp);
1936         break;
1937     case 3: /* bset */
1938         tcg_gen_or_i32(dest, src1, tmp);
1939         break;
1940     default: /* btst */
1941         break;
1942     }
1943     tcg_temp_free(tmp);
1944     if (op) {
1945         DEST_EA(env, insn, opsize, dest, &addr);
1946     }
1947     tcg_temp_free(dest);
1948 }
1949 
1950 DISAS_INSN(sats)
1951 {
1952     TCGv reg;
1953     reg = DREG(insn, 0);
1954     gen_flush_flags(s);
1955     gen_helper_sats(reg, reg, QREG_CC_V);
1956     gen_logic_cc(s, reg, OS_LONG);
1957 }
1958 
1959 static void gen_push(DisasContext *s, TCGv val)
1960 {
1961     TCGv tmp;
1962 
1963     tmp = tcg_temp_new();
1964     tcg_gen_subi_i32(tmp, QREG_SP, 4);
1965     gen_store(s, OS_LONG, tmp, val);
1966     tcg_gen_mov_i32(QREG_SP, tmp);
1967     tcg_temp_free(tmp);
1968 }
1969 
1970 static TCGv mreg(int reg)
1971 {
1972     if (reg < 8) {
1973         /* Dx */
1974         return cpu_dregs[reg];
1975     }
1976     /* Ax */
1977     return cpu_aregs[reg & 7];
1978 }
1979 
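/* movem: the extension word is a register mask.  For every mode except
 * the pre-decrement store, bit 0 selects D0 up to bit 15 selecting A7
 * (see mreg() above); for the pre-decrement store the hardware mask is
 * bit-reversed, which the "(mask << i) & 0x8000" test below accounts for.
 */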
1980 DISAS_INSN(movem)
1981 {
1982     TCGv addr, incr, tmp, r[16];
1983     int is_load = (insn & 0x0400) != 0;
1984     int opsize = (insn & 0x40) != 0 ? OS_LONG : OS_WORD;
1985     uint16_t mask = read_im16(env, s);
1986     int mode = extract32(insn, 3, 3);
1987     int reg0 = REG(insn, 0);
1988     int i;
1989 
1990     tmp = cpu_aregs[reg0];
1991 
1992     switch (mode) {
1993     case 0: /* data register direct */
1994     case 1: /* addr register direct */
1995     do_addr_fault:
1996         gen_addr_fault(s);
1997         return;
1998 
1999     case 2: /* indirect */
2000         break;
2001 
2002     case 3: /* indirect post-increment */
2003         if (!is_load) {
2004             /* post-increment is not allowed */
2005             goto do_addr_fault;
2006         }
2007         break;
2008 
2009     case 4: /* indirect pre-decrement */
2010         if (is_load) {
2011             /* pre-decrement is not allowed */
2012             goto do_addr_fault;
2013         }
2014         /* We want a bare copy of the address reg, without the pre-decrement
2015            adjustment that gen_lea would apply.  */
2016         break;
2017 
2018     default:
2019         tmp = gen_lea_mode(env, s, mode, reg0, opsize);
2020         if (IS_NULL_QREG(tmp)) {
2021             goto do_addr_fault;
2022         }
2023         break;
2024     }
2025 
2026     addr = tcg_temp_new();
2027     tcg_gen_mov_i32(addr, tmp);
2028     incr = tcg_const_i32(opsize_bytes(opsize));
2029 
2030     if (is_load) {
2031         /* memory to register */
2032         for (i = 0; i < 16; i++) {
2033             if (mask & (1 << i)) {
2034                 r[i] = gen_load(s, opsize, addr, 1);
2035                 tcg_gen_add_i32(addr, addr, incr);
2036             }
2037         }
2038         for (i = 0; i < 16; i++) {
2039             if (mask & (1 << i)) {
2040                 tcg_gen_mov_i32(mreg(i), r[i]);
2041                 tcg_temp_free(r[i]);
2042             }
2043         }
2044         if (mode == 3) {
2045             /* post-increment: movem (An)+,X */
2046             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2047         }
2048     } else {
2049         /* register to memory */
2050         if (mode == 4) {
2051             /* pre-decrement: movem X,-(An) */
2052             for (i = 15; i >= 0; i--) {
2053                 if ((mask << i) & 0x8000) {
2054                     tcg_gen_sub_i32(addr, addr, incr);
2055                     if (reg0 + 8 == i &&
2056                         m68k_feature(s->env, M68K_FEATURE_EXT_FULL)) {
2057                         /* M68020+: if the addressing register is the
2058                          * register moved to memory, the value written
2059                          * is the initial value decremented by the size of
2060                          * the operation, regardless of how many actual
2061                          * stores have been performed until this point.
2062                          * M68000/M68010: the value is the initial value.
2063                          */
2064                         tmp = tcg_temp_new();
2065                         tcg_gen_sub_i32(tmp, cpu_aregs[reg0], incr);
2066                         gen_store(s, opsize, addr, tmp);
2067                         tcg_temp_free(tmp);
2068                     } else {
2069                         gen_store(s, opsize, addr, mreg(i));
2070                     }
2071                 }
2072             }
2073             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2074         } else {
2075             for (i = 0; i < 16; i++) {
2076                 if (mask & (1 << i)) {
2077                     gen_store(s, opsize, addr, mreg(i));
2078                     tcg_gen_add_i32(addr, addr, incr);
2079                 }
2080             }
2081         }
2082     }
2083 
2084     tcg_temp_free(incr);
2085     tcg_temp_free(addr);
2086 }
2087 
2088 DISAS_INSN(bitop_im)
2089 {
2090     int opsize;
2091     int op;
2092     TCGv src1;
2093     uint32_t mask;
2094     int bitnum;
2095     TCGv tmp;
2096     TCGv addr;
2097 
2098     if ((insn & 0x38) != 0)
2099         opsize = OS_BYTE;
2100     else
2101         opsize = OS_LONG;
2102     op = (insn >> 6) & 3;
2103 
2104     bitnum = read_im16(env, s);
2105     if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
2106         if (bitnum & 0xfe00) {
2107             disas_undef(env, s, insn);
2108             return;
2109         }
2110     } else {
2111         if (bitnum & 0xff00) {
2112             disas_undef(env, s, insn);
2113             return;
2114         }
2115     }
2116 
2117     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
2118 
2119     gen_flush_flags(s);
2120     if (opsize == OS_BYTE)
2121         bitnum &= 7;
2122     else
2123         bitnum &= 31;
2124     mask = 1 << bitnum;
2125 
2126     tcg_gen_andi_i32(QREG_CC_Z, src1, mask);
2127 
2128     if (op) {
2129         tmp = tcg_temp_new();
2130         switch (op) {
2131         case 1: /* bchg */
2132             tcg_gen_xori_i32(tmp, src1, mask);
2133             break;
2134         case 2: /* bclr */
2135             tcg_gen_andi_i32(tmp, src1, ~mask);
2136             break;
2137         case 3: /* bset */
2138             tcg_gen_ori_i32(tmp, src1, mask);
2139             break;
2140         default: /* btst */
2141             break;
2142         }
2143         DEST_EA(env, insn, opsize, tmp, &addr);
2144         tcg_temp_free(tmp);
2145     }
2146 }
2147 
2148 DISAS_INSN(arith_im)
2149 {
2150     int op;
2151     TCGv im;
2152     TCGv src1;
2153     TCGv dest;
2154     TCGv addr;
2155     int opsize;
2156 
2157     op = (insn >> 9) & 7;
2158     opsize = insn_opsize(insn);
2159     switch (opsize) {
2160     case OS_BYTE:
2161         im = tcg_const_i32((int8_t)read_im8(env, s));
2162         break;
2163     case OS_WORD:
2164         im = tcg_const_i32((int16_t)read_im16(env, s));
2165         break;
2166     case OS_LONG:
2167         im = tcg_const_i32(read_im32(env, s));
2168         break;
2169     default:
2170        abort();
2171     }
2172     SRC_EA(env, src1, opsize, 1, (op == 6) ? NULL : &addr);
2173     dest = tcg_temp_new();
2174     switch (op) {
2175     case 0: /* ori */
2176         tcg_gen_or_i32(dest, src1, im);
2177         gen_logic_cc(s, dest, opsize);
2178         break;
2179     case 1: /* andi */
2180         tcg_gen_and_i32(dest, src1, im);
2181         gen_logic_cc(s, dest, opsize);
2182         break;
2183     case 2: /* subi */
2184         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, src1, im);
2185         tcg_gen_sub_i32(dest, src1, im);
2186         gen_update_cc_add(dest, im, opsize);
2187         set_cc_op(s, CC_OP_SUBB + opsize);
2188         break;
2189     case 3: /* addi */
2190         tcg_gen_add_i32(dest, src1, im);
2191         gen_update_cc_add(dest, im, opsize);
2192         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, im);
2193         set_cc_op(s, CC_OP_ADDB + opsize);
2194         break;
2195     case 5: /* eori */
2196         tcg_gen_xor_i32(dest, src1, im);
2197         gen_logic_cc(s, dest, opsize);
2198         break;
2199     case 6: /* cmpi */
2200         gen_update_cc_cmp(s, src1, im, opsize);
2201         break;
2202     default:
2203         abort();
2204     }
2205     tcg_temp_free(im);
2206     if (op != 6) {
2207         DEST_EA(env, insn, opsize, dest, &addr);
2208     }
2209     tcg_temp_free(dest);
2210 }
2211 
2212 DISAS_INSN(cas)
2213 {
2214     int opsize;
2215     TCGv addr;
2216     uint16_t ext;
2217     TCGv load;
2218     TCGv cmp;
2219     TCGMemOp opc;
2220 
2221     switch ((insn >> 9) & 3) {
2222     case 1:
2223         opsize = OS_BYTE;
2224         opc = MO_SB;
2225         break;
2226     case 2:
2227         opsize = OS_WORD;
2228         opc = MO_TESW;
2229         break;
2230     case 3:
2231         opsize = OS_LONG;
2232         opc = MO_TESL;
2233         break;
2234     default:
2235         g_assert_not_reached();
2236     }
2237 
2238     ext = read_im16(env, s);
2239 
2240     /* cas Dc,Du,<EA> */
2241 
2242     addr = gen_lea(env, s, insn, opsize);
2243     if (IS_NULL_QREG(addr)) {
2244         gen_addr_fault(s);
2245         return;
2246     }
2247 
2248     cmp = gen_extend(DREG(ext, 0), opsize, 1);
2249 
2250     /* if  <EA> == Dc then
2251      *     <EA> = Du
2252      *     Dc = <EA> (because <EA> == Dc)
2253      * else
2254      *     Dc = <EA>
2255      */
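    /* A single atomic cmpxchg implements this: it returns the old memory
     * value, which is compared against Dc for the flags and then written
     * back into Dc unconditionally (when the compare succeeded the old
     * value equals Dc anyway).
     */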
2256 
2257     load = tcg_temp_new();
2258     tcg_gen_atomic_cmpxchg_i32(load, addr, cmp, DREG(ext, 6),
2259                                IS_USER(s), opc);
2260     /* update the flags before Dc (cmp) is overwritten with the loaded value */
2261     gen_update_cc_cmp(s, load, cmp, opsize);
2262     gen_partset_reg(opsize, DREG(ext, 0), load);
2263 
2264     tcg_temp_free(load);
2265 
2266     switch (extract32(insn, 3, 3)) {
2267     case 3: /* Indirect postincrement.  */
2268         tcg_gen_addi_i32(AREG(insn, 0), addr, opsize_bytes(opsize));
2269         break;
2270     case 4: /* Indirect predecrement.  */
2271         tcg_gen_mov_i32(AREG(insn, 0), addr);
2272         break;
2273     }
2274 }
2275 
2276 DISAS_INSN(cas2w)
2277 {
2278     uint16_t ext1, ext2;
2279     TCGv addr1, addr2;
2280     TCGv regs;
2281 
2282     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2283 
2284     ext1 = read_im16(env, s);
2285 
2286     if (ext1 & 0x8000) {
2287         /* Address Register */
2288         addr1 = AREG(ext1, 12);
2289     } else {
2290         /* Data Register */
2291         addr1 = DREG(ext1, 12);
2292     }
2293 
2294     ext2 = read_im16(env, s);
2295     if (ext2 & 0x8000) {
2296         /* Address Register */
2297         addr2 = AREG(ext2, 12);
2298     } else {
2299         /* Data Register */
2300         addr2 = DREG(ext2, 12);
2301     }
2302 
2303     /* if (R1) == Dc1 && (R2) == Dc2 then
2304      *     (R1) = Du1
2305      *     (R2) = Du2
2306      * else
2307      *     Dc1 = (R1)
2308      *     Dc2 = (R2)
2309      */
2310 
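    /* Pack the four register numbers, three bits each, into one constant:
     * Du2 in bits 2:0, Du1 in 5:3, Dc2 in 8:6 and Dc1 in 11:9, which is
     * the layout the cas2w helper is expected to decode.
     */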
2311     regs = tcg_const_i32(REG(ext2, 6) |
2312                          (REG(ext1, 6) << 3) |
2313                          (REG(ext2, 0) << 6) |
2314                          (REG(ext1, 0) << 9));
2315     gen_helper_cas2w(cpu_env, regs, addr1, addr2);
2316     tcg_temp_free(regs);
2317 
2318     /* Note that the cas2w helper has also assigned env->cc_op.  */
2319     s->cc_op = CC_OP_CMPW;
2320     s->cc_op_synced = 1;
2321 }
2322 
2323 DISAS_INSN(cas2l)
2324 {
2325     uint16_t ext1, ext2;
2326     TCGv addr1, addr2, regs;
2327 
2328     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2329 
2330     ext1 = read_im16(env, s);
2331 
2332     if (ext1 & 0x8000) {
2333         /* Address Register */
2334         addr1 = AREG(ext1, 12);
2335     } else {
2336         /* Data Register */
2337         addr1 = DREG(ext1, 12);
2338     }
2339 
2340     ext2 = read_im16(env, s);
2341     if (ext2 & 0x8000) {
2342         /* Address Register */
2343         addr2 = AREG(ext2, 12);
2344     } else {
2345         /* Data Register */
2346         addr2 = DREG(ext2, 12);
2347     }
2348 
2349     /* if (R1) == Dc1 && (R2) == Dc2 then
2350      *     (R1) = Du1
2351      *     (R2) = Du2
2352      * else
2353      *     Dc1 = (R1)
2354      *     Dc2 = (R2)
2355      */
2356 
2357     regs = tcg_const_i32(REG(ext2, 6) |
2358                          (REG(ext1, 6) << 3) |
2359                          (REG(ext2, 0) << 6) |
2360                          (REG(ext1, 0) << 9));
2361     gen_helper_cas2l(cpu_env, regs, addr1, addr2);
2362     tcg_temp_free(regs);
2363 
2364     /* Note that the cas2l helper has also assigned env->cc_op.  */
2365     s->cc_op = CC_OP_CMPL;
2366     s->cc_op_synced = 1;
2367 }
2368 
2369 DISAS_INSN(byterev)
2370 {
2371     TCGv reg;
2372 
2373     reg = DREG(insn, 0);
2374     tcg_gen_bswap32_i32(reg, reg);
2375 }
2376 
2377 DISAS_INSN(move)
2378 {
2379     TCGv src;
2380     TCGv dest;
2381     int op;
2382     int opsize;
2383 
2384     switch (insn >> 12) {
2385     case 1: /* move.b */
2386         opsize = OS_BYTE;
2387         break;
2388     case 2: /* move.l */
2389         opsize = OS_LONG;
2390         break;
2391     case 3: /* move.w */
2392         opsize = OS_WORD;
2393         break;
2394     default:
2395         abort();
2396     }
2397     SRC_EA(env, src, opsize, 1, NULL);
2398     op = (insn >> 6) & 7;
2399     if (op == 1) {
2400         /* movea */
2401         /* The value will already have been sign extended.  */
2402         dest = AREG(insn, 9);
2403         tcg_gen_mov_i32(dest, src);
2404     } else {
2405         /* normal move */
2406         uint16_t dest_ea;
2407         dest_ea = ((insn >> 9) & 7) | (op << 3);
2408         DEST_EA(env, dest_ea, opsize, src, NULL);
2409         /* This will be correct because loads sign extend.  */
2410         gen_logic_cc(s, src, opsize);
2411     }
2412 }
2413 
2414 DISAS_INSN(negx)
2415 {
2416     TCGv z;
2417     TCGv src;
2418     TCGv addr;
2419     int opsize;
2420 
2421     opsize = insn_opsize(insn);
2422     SRC_EA(env, src, opsize, 1, &addr);
2423 
2424     gen_flush_flags(s); /* compute old Z */
2425 
2426     /* Perform subtract with borrow.
2427      * (X, N) = -(src + X);
2428      */
2429 
2430     z = tcg_const_i32(0);
2431     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, z, QREG_CC_X, z);
2432     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, z, z, QREG_CC_N, QREG_CC_X);
2433     tcg_temp_free(z);
2434     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
2435 
2436     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
2437 
2438     /* Compute signed-overflow for negation.  The normal formula for
2439      * subtraction is (res ^ dest) & (src ^ dest), but with dest==0
2440      * this simplifies to res & src.
2441      */
2442 
2443     tcg_gen_and_i32(QREG_CC_V, QREG_CC_N, src);
2444 
2445     /* Copy the rest of the results into place.  */
2446     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
2447     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
2448 
2449     set_cc_op(s, CC_OP_FLAGS);
2450 
2451     /* result is in QREG_CC_N */
2452 
2453     DEST_EA(env, insn, opsize, QREG_CC_N, &addr);
2454 }
2455 
2456 DISAS_INSN(lea)
2457 {
2458     TCGv reg;
2459     TCGv tmp;
2460 
2461     reg = AREG(insn, 9);
2462     tmp = gen_lea(env, s, insn, OS_LONG);
2463     if (IS_NULL_QREG(tmp)) {
2464         gen_addr_fault(s);
2465         return;
2466     }
2467     tcg_gen_mov_i32(reg, tmp);
2468 }
2469 
2470 DISAS_INSN(clr)
2471 {
2472     int opsize;
2473     TCGv zero;
2474 
2475     zero = tcg_const_i32(0);
2476 
2477     opsize = insn_opsize(insn);
2478     DEST_EA(env, insn, opsize, zero, NULL);
2479     gen_logic_cc(s, zero, opsize);
2480     tcg_temp_free(zero);
2481 }
2482 
2483 static TCGv gen_get_ccr(DisasContext *s)
2484 {
2485     TCGv dest;
2486 
2487     gen_flush_flags(s);
2488     update_cc_op(s);
2489     dest = tcg_temp_new();
2490     gen_helper_get_ccr(dest, cpu_env);
2491     return dest;
2492 }
2493 
2494 DISAS_INSN(move_from_ccr)
2495 {
2496     TCGv ccr;
2497 
2498     ccr = gen_get_ccr(s);
2499     DEST_EA(env, insn, OS_WORD, ccr, NULL);
2500 }
2501 
2502 DISAS_INSN(neg)
2503 {
2504     TCGv src1;
2505     TCGv dest;
2506     TCGv addr;
2507     int opsize;
2508 
2509     opsize = insn_opsize(insn);
2510     SRC_EA(env, src1, opsize, 1, &addr);
2511     dest = tcg_temp_new();
2512     tcg_gen_neg_i32(dest, src1);
2513     set_cc_op(s, CC_OP_SUBB + opsize);
2514     gen_update_cc_add(dest, src1, opsize);
2515     tcg_gen_setcondi_i32(TCG_COND_NE, QREG_CC_X, dest, 0);
2516     DEST_EA(env, insn, opsize, dest, &addr);
2517     tcg_temp_free(dest);
2518 }
2519 
2520 static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
2521 {
2522     if (ccr_only) {
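        /* The flags are kept in decoded form: C and X as 0/1, N and V as
         * 0/-1, and Z inverted (QREG_CC_Z == 0 means the Z flag is set).
         */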
2523         tcg_gen_movi_i32(QREG_CC_C, val & CCF_C ? 1 : 0);
2524         tcg_gen_movi_i32(QREG_CC_V, val & CCF_V ? -1 : 0);
2525         tcg_gen_movi_i32(QREG_CC_Z, val & CCF_Z ? 0 : 1);
2526         tcg_gen_movi_i32(QREG_CC_N, val & CCF_N ? -1 : 0);
2527         tcg_gen_movi_i32(QREG_CC_X, val & CCF_X ? 1 : 0);
2528     } else {
2529         gen_helper_set_sr(cpu_env, tcg_const_i32(val));
2530     }
2531     set_cc_op(s, CC_OP_FLAGS);
2532 }
2533 
2534 static void gen_set_sr(CPUM68KState *env, DisasContext *s, uint16_t insn,
2535                        int ccr_only)
2536 {
2537     if ((insn & 0x38) == 0) {
2538         if (ccr_only) {
2539             gen_helper_set_ccr(cpu_env, DREG(insn, 0));
2540         } else {
2541             gen_helper_set_sr(cpu_env, DREG(insn, 0));
2542         }
2543         set_cc_op(s, CC_OP_FLAGS);
2544     } else if ((insn & 0x3f) == 0x3c) {
2545         uint16_t val;
2546         val = read_im16(env, s);
2547         gen_set_sr_im(s, val, ccr_only);
2548     } else {
2549         disas_undef(env, s, insn);
2550     }
2551 }
2552 
2553 
2554 DISAS_INSN(move_to_ccr)
2555 {
2556     gen_set_sr(env, s, insn, 1);
2557 }
2558 
2559 DISAS_INSN(not)
2560 {
2561     TCGv src1;
2562     TCGv dest;
2563     TCGv addr;
2564     int opsize;
2565 
2566     opsize = insn_opsize(insn);
2567     SRC_EA(env, src1, opsize, 1, &addr);
2568     dest = tcg_temp_new();
2569     tcg_gen_not_i32(dest, src1);
2570     DEST_EA(env, insn, opsize, dest, &addr);
2571     gen_logic_cc(s, dest, opsize);
2572 }
2573 
2574 DISAS_INSN(swap)
2575 {
2576     TCGv src1;
2577     TCGv src2;
2578     TCGv reg;
2579 
2580     src1 = tcg_temp_new();
2581     src2 = tcg_temp_new();
2582     reg = DREG(insn, 0);
2583     tcg_gen_shli_i32(src1, reg, 16);
2584     tcg_gen_shri_i32(src2, reg, 16);
2585     tcg_gen_or_i32(reg, src1, src2);
2586     tcg_temp_free(src2);
2587     tcg_temp_free(src1);
2588     gen_logic_cc(s, reg, OS_LONG);
2589 }
2590 
2591 DISAS_INSN(bkpt)
2592 {
2593     gen_exception(s, s->pc - 2, EXCP_DEBUG);
2594 }
2595 
2596 DISAS_INSN(pea)
2597 {
2598     TCGv tmp;
2599 
2600     tmp = gen_lea(env, s, insn, OS_LONG);
2601     if (IS_NULL_QREG(tmp)) {
2602         gen_addr_fault(s);
2603         return;
2604     }
2605     gen_push(s, tmp);
2606 }
2607 
2608 DISAS_INSN(ext)
2609 {
2610     int op;
2611     TCGv reg;
2612     TCGv tmp;
2613 
2614     reg = DREG(insn, 0);
2615     op = (insn >> 6) & 7;
2616     tmp = tcg_temp_new();
2617     if (op == 3)
2618         tcg_gen_ext16s_i32(tmp, reg);
2619     else
2620         tcg_gen_ext8s_i32(tmp, reg);
2621     if (op == 2)
2622         gen_partset_reg(OS_WORD, reg, tmp);
2623     else
2624         tcg_gen_mov_i32(reg, tmp);
2625     gen_logic_cc(s, tmp, OS_LONG);
2626     tcg_temp_free(tmp);
2627 }
2628 
2629 DISAS_INSN(tst)
2630 {
2631     int opsize;
2632     TCGv tmp;
2633 
2634     opsize = insn_opsize(insn);
2635     SRC_EA(env, tmp, opsize, 1, NULL);
2636     gen_logic_cc(s, tmp, opsize);
2637 }
2638 
2639 DISAS_INSN(pulse)
2640 {
2641   /* Implemented as a NOP.  */
2642 }
2643 
2644 DISAS_INSN(illegal)
2645 {
2646     gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
2647 }
2648 
2649 /* ??? This should be atomic.  */
2650 DISAS_INSN(tas)
2651 {
2652     TCGv dest;
2653     TCGv src1;
2654     TCGv addr;
2655 
2656     dest = tcg_temp_new();
2657     SRC_EA(env, src1, OS_BYTE, 1, &addr);
2658     gen_logic_cc(s, src1, OS_BYTE);
2659     tcg_gen_ori_i32(dest, src1, 0x80);
2660     DEST_EA(env, insn, OS_BYTE, dest, &addr);
2661     tcg_temp_free(dest);
2662 }
2663 
2664 DISAS_INSN(mull)
2665 {
2666     uint16_t ext;
2667     TCGv src1;
2668     int sign;
2669 
2670     ext = read_im16(env, s);
2671 
2672     sign = ext & 0x800;
2673 
2674     if (ext & 0x400) {
2675         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
2676             gen_exception(s, s->pc - 4, EXCP_UNSUPPORTED);
2677             return;
2678         }
2679 
2680         SRC_EA(env, src1, OS_LONG, 0, NULL);
2681 
2682         if (sign) {
2683             tcg_gen_muls2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2684         } else {
2685             tcg_gen_mulu2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2686         }
2687         /* if Dl == Dh, the 68040 returns the low 32 bits */
2688         tcg_gen_mov_i32(DREG(ext, 0), QREG_CC_N);
2689         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_Z);
2690         tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N);
2691 
2692         tcg_gen_movi_i32(QREG_CC_V, 0);
2693         tcg_gen_movi_i32(QREG_CC_C, 0);
2694 
2695         set_cc_op(s, CC_OP_FLAGS);
2696         return;
2697     }
2698     SRC_EA(env, src1, OS_LONG, 0, NULL);
2699     if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
2700         tcg_gen_movi_i32(QREG_CC_C, 0);
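        /* 32x32->32 multiply: V must be set when the full 64-bit product
         * does not fit in 32 bits, i.e. when the high half is not the
         * sign extension of the low half (muls.l) or is non-zero (mulu.l).
         */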
2701         if (sign) {
2702             tcg_gen_muls2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2703             /* QREG_CC_V is -(QREG_CC_V != (QREG_CC_N >> 31)) */
2704             tcg_gen_sari_i32(QREG_CC_Z, QREG_CC_N, 31);
2705             tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_Z);
2706         } else {
2707             tcg_gen_mulu2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2708             /* QREG_CC_V is -(QREG_CC_V != 0), use QREG_CC_C as 0 */
2709             tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_C);
2710         }
2711         tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
2712         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_N);
2713 
2714         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
2715 
2716         set_cc_op(s, CC_OP_FLAGS);
2717     } else {
2718         /* The upper 32 bits of the product are discarded, so
2719            muls.l and mulu.l are functionally equivalent.  */
2720         tcg_gen_mul_i32(DREG(ext, 12), src1, DREG(ext, 12));
2721         gen_logic_cc(s, DREG(ext, 12), OS_LONG);
2722     }
2723 }
2724 
2725 static void gen_link(DisasContext *s, uint16_t insn, int32_t offset)
2726 {
2727     TCGv reg;
2728     TCGv tmp;
2729 
2730     reg = AREG(insn, 0);
2731     tmp = tcg_temp_new();
2732     tcg_gen_subi_i32(tmp, QREG_SP, 4);
2733     gen_store(s, OS_LONG, tmp, reg);
2734     if ((insn & 7) != 7) {
2735         tcg_gen_mov_i32(reg, tmp);
2736     }
2737     tcg_gen_addi_i32(QREG_SP, tmp, offset);
2738     tcg_temp_free(tmp);
2739 }
2740 
2741 DISAS_INSN(link)
2742 {
2743     int16_t offset;
2744 
2745     offset = read_im16(env, s);
2746     gen_link(s, insn, offset);
2747 }
2748 
2749 DISAS_INSN(linkl)
2750 {
2751     int32_t offset;
2752 
2753     offset = read_im32(env, s);
2754     gen_link(s, insn, offset);
2755 }
2756 
2757 DISAS_INSN(unlk)
2758 {
2759     TCGv src;
2760     TCGv reg;
2761     TCGv tmp;
2762 
2763     src = tcg_temp_new();
2764     reg = AREG(insn, 0);
2765     tcg_gen_mov_i32(src, reg);
2766     tmp = gen_load(s, OS_LONG, src, 0);
2767     tcg_gen_mov_i32(reg, tmp);
2768     tcg_gen_addi_i32(QREG_SP, src, 4);
2769     tcg_temp_free(src);
2770 }
2771 
2772 DISAS_INSN(nop)
2773 {
2774 }
2775 
2776 DISAS_INSN(rtd)
2777 {
2778     TCGv tmp;
2779     int16_t offset = read_im16(env, s);
2780 
2781     tmp = gen_load(s, OS_LONG, QREG_SP, 0);
2782     tcg_gen_addi_i32(QREG_SP, QREG_SP, offset + 4);
2783     gen_jmp(s, tmp);
2784 }
2785 
2786 DISAS_INSN(rts)
2787 {
2788     TCGv tmp;
2789 
2790     tmp = gen_load(s, OS_LONG, QREG_SP, 0);
2791     tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
2792     gen_jmp(s, tmp);
2793 }
2794 
2795 DISAS_INSN(jump)
2796 {
2797     TCGv tmp;
2798 
2799     /* Load the target address first to ensure correct exception
2800        behavior.  */
2801     tmp = gen_lea(env, s, insn, OS_LONG);
2802     if (IS_NULL_QREG(tmp)) {
2803         gen_addr_fault(s);
2804         return;
2805     }
2806     if ((insn & 0x40) == 0) {
2807         /* jsr */
2808         gen_push(s, tcg_const_i32(s->pc));
2809     }
2810     gen_jmp(s, tmp);
2811 }
2812 
2813 DISAS_INSN(addsubq)
2814 {
2815     TCGv src;
2816     TCGv dest;
2817     TCGv val;
2818     int imm;
2819     TCGv addr;
2820     int opsize;
2821 
2822     if ((insn & 070) == 010) {
2823         /* Operation on address register is always long.  */
2824         opsize = OS_LONG;
2825     } else {
2826         opsize = insn_opsize(insn);
2827     }
2828     SRC_EA(env, src, opsize, 1, &addr);
2829     imm = (insn >> 9) & 7;
2830     if (imm == 0) {
2831         imm = 8;
2832     }
2833     val = tcg_const_i32(imm);
2834     dest = tcg_temp_new();
2835     tcg_gen_mov_i32(dest, src);
2836     if ((insn & 0x38) == 0x08) {
2837         /* Don't update condition codes if the destination is an
2838            address register.  */
2839         if (insn & 0x0100) {
2840             tcg_gen_sub_i32(dest, dest, val);
2841         } else {
2842             tcg_gen_add_i32(dest, dest, val);
2843         }
2844     } else {
2845         if (insn & 0x0100) {
2846             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
2847             tcg_gen_sub_i32(dest, dest, val);
2848             set_cc_op(s, CC_OP_SUBB + opsize);
2849         } else {
2850             tcg_gen_add_i32(dest, dest, val);
2851             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
2852             set_cc_op(s, CC_OP_ADDB + opsize);
2853         }
2854         gen_update_cc_add(dest, val, opsize);
2855     }
2856     tcg_temp_free(val);
2857     DEST_EA(env, insn, opsize, dest, &addr);
2858     tcg_temp_free(dest);
2859 }
2860 
2861 DISAS_INSN(tpf)
2862 {
2863     switch (insn & 7) {
2864     case 2: /* One extension word.  */
2865         s->pc += 2;
2866         break;
2867     case 3: /* Two extension words.  */
2868         s->pc += 4;
2869         break;
2870     case 4: /* No extension words.  */
2871         break;
2872     default:
2873         disas_undef(env, s, insn);
2874     }
2875 }
2876 
2877 DISAS_INSN(branch)
2878 {
2879     int32_t offset;
2880     uint32_t base;
2881     int op;
2882     TCGLabel *l1;
2883 
2884     base = s->pc;
2885     op = (insn >> 8) & 0xf;
2886     offset = (int8_t)insn;
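    /* An 8-bit displacement of 0x00 means a 16-bit displacement word
     * follows the opcode; 0xff means a 32-bit displacement follows.
     */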
2887     if (offset == 0) {
2888         offset = (int16_t)read_im16(env, s);
2889     } else if (offset == -1) {
2890         offset = read_im32(env, s);
2891     }
2892     if (op == 1) {
2893         /* bsr */
2894         gen_push(s, tcg_const_i32(s->pc));
2895     }
2896     if (op > 1) {
2897         /* Bcc */
2898         l1 = gen_new_label();
2899         gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
2900         gen_jmp_tb(s, 1, base + offset);
2901         gen_set_label(l1);
2902         gen_jmp_tb(s, 0, s->pc);
2903     } else {
2904         /* Unconditional branch.  */
2905         gen_jmp_tb(s, 0, base + offset);
2906     }
2907 }
2908 
2909 DISAS_INSN(moveq)
2910 {
2911     tcg_gen_movi_i32(DREG(insn, 9), (int8_t)insn);
2912     gen_logic_cc(s, DREG(insn, 9), OS_LONG);
2913 }
2914 
2915 DISAS_INSN(mvzs)
2916 {
2917     int opsize;
2918     TCGv src;
2919     TCGv reg;
2920 
2921     if (insn & 0x40)
2922         opsize = OS_WORD;
2923     else
2924         opsize = OS_BYTE;
2925     SRC_EA(env, src, opsize, (insn & 0x80) == 0, NULL);
2926     reg = DREG(insn, 9);
2927     tcg_gen_mov_i32(reg, src);
2928     gen_logic_cc(s, src, opsize);
2929 }
2930 
2931 DISAS_INSN(or)
2932 {
2933     TCGv reg;
2934     TCGv dest;
2935     TCGv src;
2936     TCGv addr;
2937     int opsize;
2938 
2939     opsize = insn_opsize(insn);
2940     reg = gen_extend(DREG(insn, 9), opsize, 0);
2941     dest = tcg_temp_new();
2942     if (insn & 0x100) {
2943         SRC_EA(env, src, opsize, 0, &addr);
2944         tcg_gen_or_i32(dest, src, reg);
2945         DEST_EA(env, insn, opsize, dest, &addr);
2946     } else {
2947         SRC_EA(env, src, opsize, 0, NULL);
2948         tcg_gen_or_i32(dest, src, reg);
2949         gen_partset_reg(opsize, DREG(insn, 9), dest);
2950     }
2951     gen_logic_cc(s, dest, opsize);
2952     tcg_temp_free(dest);
2953 }
2954 
2955 DISAS_INSN(suba)
2956 {
2957     TCGv src;
2958     TCGv reg;
2959 
2960     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
2961     reg = AREG(insn, 9);
2962     tcg_gen_sub_i32(reg, reg, src);
2963 }
2964 
2965 static inline void gen_subx(DisasContext *s, TCGv src, TCGv dest, int opsize)
2966 {
2967     TCGv tmp;
2968 
2969     gen_flush_flags(s); /* compute old Z */
2970 
2971     /* Perform subtract with borrow.
2972      * (X, N) = dest - (src + X);
2973      */
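    /* add2/sub2 work on 64-bit values split across two 32-bit halves, so
     * after the two steps below the low half (QREG_CC_N) holds the result
     * and the high half (QREG_CC_X) holds the borrow, masked to one bit.
     */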
2974 
2975     tmp = tcg_const_i32(0);
2976     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, tmp, QREG_CC_X, tmp);
2977     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, dest, tmp, QREG_CC_N, QREG_CC_X);
2978     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
2979     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
2980 
2981     /* Compute signed-overflow for subtraction.  */
2982 
2983     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, dest);
2984     tcg_gen_xor_i32(tmp, dest, src);
2985     tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, tmp);
2986     tcg_temp_free(tmp);
2987 
2988     /* Copy the rest of the results into place.  */
2989     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
2990     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
2991 
2992     set_cc_op(s, CC_OP_FLAGS);
2993 
2994     /* result is in QREG_CC_N */
2995 }
2996 
2997 DISAS_INSN(subx_reg)
2998 {
2999     TCGv dest;
3000     TCGv src;
3001     int opsize;
3002 
3003     opsize = insn_opsize(insn);
3004 
3005     src = gen_extend(DREG(insn, 0), opsize, 1);
3006     dest = gen_extend(DREG(insn, 9), opsize, 1);
3007 
3008     gen_subx(s, src, dest, opsize);
3009 
3010     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3011 }
3012 
3013 DISAS_INSN(subx_mem)
3014 {
3015     TCGv src;
3016     TCGv addr_src;
3017     TCGv dest;
3018     TCGv addr_dest;
3019     int opsize;
3020 
3021     opsize = insn_opsize(insn);
3022 
3023     addr_src = AREG(insn, 0);
3024     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3025     src = gen_load(s, opsize, addr_src, 1);
3026 
3027     addr_dest = AREG(insn, 9);
3028     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3029     dest = gen_load(s, opsize, addr_dest, 1);
3030 
3031     gen_subx(s, src, dest, opsize);
3032 
3033     gen_store(s, opsize, addr_dest, QREG_CC_N);
3034 }
3035 
3036 DISAS_INSN(mov3q)
3037 {
3038     TCGv src;
3039     int val;
3040 
3041     val = (insn >> 9) & 7;
3042     if (val == 0)
3043         val = -1;
3044     src = tcg_const_i32(val);
3045     gen_logic_cc(s, src, OS_LONG);
3046     DEST_EA(env, insn, OS_LONG, src, NULL);
3047     tcg_temp_free(src);
3048 }
3049 
3050 DISAS_INSN(cmp)
3051 {
3052     TCGv src;
3053     TCGv reg;
3054     int opsize;
3055 
3056     opsize = insn_opsize(insn);
3057     SRC_EA(env, src, opsize, 1, NULL);
3058     reg = gen_extend(DREG(insn, 9), opsize, 1);
3059     gen_update_cc_cmp(s, reg, src, opsize);
3060 }
3061 
3062 DISAS_INSN(cmpa)
3063 {
3064     int opsize;
3065     TCGv src;
3066     TCGv reg;
3067 
3068     if (insn & 0x100) {
3069         opsize = OS_LONG;
3070     } else {
3071         opsize = OS_WORD;
3072     }
3073     SRC_EA(env, src, opsize, 1, NULL);
3074     reg = AREG(insn, 9);
3075     gen_update_cc_cmp(s, reg, src, OS_LONG);
3076 }
3077 
3078 DISAS_INSN(cmpm)
3079 {
3080     int opsize = insn_opsize(insn);
3081     TCGv src, dst;
3082 
3083     /* Post-increment load (mode 3) from Ay.  */
3084     src = gen_ea_mode(env, s, 3, REG(insn, 0), opsize,
3085                       NULL_QREG, NULL, EA_LOADS);
3086     /* Post-increment load (mode 3) from Ax.  */
3087     dst = gen_ea_mode(env, s, 3, REG(insn, 9), opsize,
3088                       NULL_QREG, NULL, EA_LOADS);
3089 
3090     gen_update_cc_cmp(s, dst, src, opsize);
3091 }
3092 
3093 DISAS_INSN(eor)
3094 {
3095     TCGv src;
3096     TCGv dest;
3097     TCGv addr;
3098     int opsize;
3099 
3100     opsize = insn_opsize(insn);
3101 
3102     SRC_EA(env, src, opsize, 0, &addr);
3103     dest = tcg_temp_new();
3104     tcg_gen_xor_i32(dest, src, DREG(insn, 9));
3105     gen_logic_cc(s, dest, opsize);
3106     DEST_EA(env, insn, opsize, dest, &addr);
3107     tcg_temp_free(dest);
3108 }
3109 
3110 static void do_exg(TCGv reg1, TCGv reg2)
3111 {
3112     TCGv temp = tcg_temp_new();
3113     tcg_gen_mov_i32(temp, reg1);
3114     tcg_gen_mov_i32(reg1, reg2);
3115     tcg_gen_mov_i32(reg2, temp);
3116     tcg_temp_free(temp);
3117 }
3118 
3119 DISAS_INSN(exg_dd)
3120 {
3121     /* exchange Dx and Dy */
3122     do_exg(DREG(insn, 9), DREG(insn, 0));
3123 }
3124 
3125 DISAS_INSN(exg_aa)
3126 {
3127     /* exchange Ax and Ay */
3128     do_exg(AREG(insn, 9), AREG(insn, 0));
3129 }
3130 
3131 DISAS_INSN(exg_da)
3132 {
3133     /* exchange Dx and Ay */
3134     do_exg(DREG(insn, 9), AREG(insn, 0));
3135 }
3136 
3137 DISAS_INSN(and)
3138 {
3139     TCGv src;
3140     TCGv reg;
3141     TCGv dest;
3142     TCGv addr;
3143     int opsize;
3144 
3145     dest = tcg_temp_new();
3146 
3147     opsize = insn_opsize(insn);
3148     reg = DREG(insn, 9);
3149     if (insn & 0x100) {
3150         SRC_EA(env, src, opsize, 0, &addr);
3151         tcg_gen_and_i32(dest, src, reg);
3152         DEST_EA(env, insn, opsize, dest, &addr);
3153     } else {
3154         SRC_EA(env, src, opsize, 0, NULL);
3155         tcg_gen_and_i32(dest, src, reg);
3156         gen_partset_reg(opsize, reg, dest);
3157     }
3158     gen_logic_cc(s, dest, opsize);
3159     tcg_temp_free(dest);
3160 }
3161 
3162 DISAS_INSN(adda)
3163 {
3164     TCGv src;
3165     TCGv reg;
3166 
3167     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3168     reg = AREG(insn, 9);
3169     tcg_gen_add_i32(reg, reg, src);
3170 }
3171 
3172 static inline void gen_addx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3173 {
3174     TCGv tmp;
3175 
3176     gen_flush_flags(s); /* compute old Z */
3177 
3178     /* Perform addition with carry.
3179      * (X, N) = src + dest + X;
3180      */
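    /* Two add2 steps: first X + dest, then + src; the carries out of the
     * 32-bit sums accumulate in the high half (QREG_CC_X).
     */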
3181 
3182     tmp = tcg_const_i32(0);
3183     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_X, tmp, dest, tmp);
3184     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_N, QREG_CC_X, src, tmp);
3185     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3186 
3187     /* Compute signed-overflow for addition.  */
3188 
3189     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3190     tcg_gen_xor_i32(tmp, dest, src);
3191     tcg_gen_andc_i32(QREG_CC_V, QREG_CC_V, tmp);
3192     tcg_temp_free(tmp);
3193 
3194     /* Copy the rest of the results into place.  */
3195     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3196     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3197 
3198     set_cc_op(s, CC_OP_FLAGS);
3199 
3200     /* result is in QREG_CC_N */
3201 }
3202 
3203 DISAS_INSN(addx_reg)
3204 {
3205     TCGv dest;
3206     TCGv src;
3207     int opsize;
3208 
3209     opsize = insn_opsize(insn);
3210 
3211     dest = gen_extend(DREG(insn, 9), opsize, 1);
3212     src = gen_extend(DREG(insn, 0), opsize, 1);
3213 
3214     gen_addx(s, src, dest, opsize);
3215 
3216     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3217 }
3218 
3219 DISAS_INSN(addx_mem)
3220 {
3221     TCGv src;
3222     TCGv addr_src;
3223     TCGv dest;
3224     TCGv addr_dest;
3225     int opsize;
3226 
3227     opsize = insn_opsize(insn);
3228 
3229     addr_src = AREG(insn, 0);
3230     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3231     src = gen_load(s, opsize, addr_src, 1);
3232 
3233     addr_dest = AREG(insn, 9);
3234     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3235     dest = gen_load(s, opsize, addr_dest, 1);
3236 
3237     gen_addx(s, src, dest, opsize);
3238 
3239     gen_store(s, opsize, addr_dest, QREG_CC_N);
3240 }
3241 
3242 static inline void shift_im(DisasContext *s, uint16_t insn, int opsize)
3243 {
3244     int count = (insn >> 9) & 7;
3245     int logical = insn & 8;
3246     int left = insn & 0x100;
3247     int bits = opsize_bytes(opsize) * 8;
3248     TCGv reg = gen_extend(DREG(insn, 0), opsize, !logical);
3249 
3250     if (count == 0) {
3251         count = 8;
3252     }
3253 
3254     tcg_gen_movi_i32(QREG_CC_V, 0);
3255     if (left) {
3256         tcg_gen_shri_i32(QREG_CC_C, reg, bits - count);
3257         tcg_gen_shli_i32(QREG_CC_N, reg, count);
3258 
3259         /* Note that ColdFire always clears V (done above),
3260            while M68000 sets it if the most significant bit is changed at
3261            any time during the shift operation.  */
3262         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3263             /* if shift count >= bits, V is (reg != 0) */
3264             if (count >= bits) {
3265                 tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, reg, QREG_CC_V);
3266             } else {
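                /* V is set iff the top count+1 bits of the value are not
                 * all equal, i.e. the sign bit changes at some point during
                 * the shift; the two arithmetic shifts below compare exactly
                 * those bits against the sign.
                 */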
3267                 TCGv t0 = tcg_temp_new();
3268                 tcg_gen_sari_i32(QREG_CC_V, reg, bits - 1);
3269                 tcg_gen_sari_i32(t0, reg, bits - count - 1);
3270                 tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, t0);
3271                 tcg_temp_free(t0);
3272             }
3273             tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3274         }
3275     } else {
3276         tcg_gen_shri_i32(QREG_CC_C, reg, count - 1);
3277         if (logical) {
3278             tcg_gen_shri_i32(QREG_CC_N, reg, count);
3279         } else {
3280             tcg_gen_sari_i32(QREG_CC_N, reg, count);
3281         }
3282     }
3283 
3284     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3285     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3286     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3287     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3288 
3289     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3290     set_cc_op(s, CC_OP_FLAGS);
3291 }
3292 
3293 static inline void shift_reg(DisasContext *s, uint16_t insn, int opsize)
3294 {
3295     int logical = insn & 8;
3296     int left = insn & 0x100;
3297     int bits = opsize_bytes(opsize) * 8;
3298     TCGv reg = gen_extend(DREG(insn, 0), opsize, !logical);
3299     TCGv s32;
3300     TCGv_i64 t64, s64;
3301 
3302     t64 = tcg_temp_new_i64();
3303     s64 = tcg_temp_new_i64();
3304     s32 = tcg_temp_new();
3305 
3306     /* Note that m68k truncates the shift count modulo 64, not 32.
3307        In addition, a 64-bit shift makes it easy to find "the last
3308        bit shifted out", for the carry flag.  */
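    /* E.g. for a word-sized left shift by 3 the 16-bit value sits in bits
     * 0..15 of t64; after the 64-bit shift the last bit shifted out is
     * bit 16 of the result, which the OS_WORD case below extracts as C.
     */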
3309     tcg_gen_andi_i32(s32, DREG(insn, 9), 63);
3310     tcg_gen_extu_i32_i64(s64, s32);
3311     tcg_gen_extu_i32_i64(t64, reg);
3312 
3313     /* Optimistically set V=0.  Also used as a zero source below.  */
3314     tcg_gen_movi_i32(QREG_CC_V, 0);
3315     if (left) {
3316         tcg_gen_shl_i64(t64, t64, s64);
3317 
3318         if (opsize == OS_LONG) {
3319             tcg_gen_extr_i64_i32(QREG_CC_N, QREG_CC_C, t64);
3320             /* Note that C=0 if shift count is 0, and we get that for free.  */
3321         } else {
3322             TCGv zero = tcg_const_i32(0);
3323             tcg_gen_extrl_i64_i32(QREG_CC_N, t64);
3324             tcg_gen_shri_i32(QREG_CC_C, QREG_CC_N, bits);
3325             tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3326                                 s32, zero, zero, QREG_CC_C);
3327             tcg_temp_free(zero);
3328         }
3329         tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3330 
3331         /* X = C, but only if the shift count was non-zero.  */
3332         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3333                             QREG_CC_C, QREG_CC_X);
3334 
3335         /* M68000 sets V if the most significant bit is changed at
3336          * any time during the shift operation.  Do this via creating
3337          * an extension of the sign bit, comparing, and discarding
3338          * the bits below the sign bit.  I.e.
3339          *     int64_t s = (intN_t)reg;
3340          *     int64_t t = (int64_t)(intN_t)reg << count;
3341          *     V = ((s ^ t) & (-1 << (bits - 1))) != 0
3342          */
3343         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3344             TCGv_i64 tt = tcg_const_i64(32);
3345             /* if shift is greater than 32, use 32 */
3346             tcg_gen_movcond_i64(TCG_COND_GT, s64, s64, tt, tt, s64);
3347             tcg_temp_free_i64(tt);
3348             /* Sign extend the input to 64 bits; re-do the shift.  */
3349             tcg_gen_ext_i32_i64(t64, reg);
3350             tcg_gen_shl_i64(s64, t64, s64);
3351             /* Clear all bits that are unchanged.  */
3352             tcg_gen_xor_i64(t64, t64, s64);
3353             /* Ignore the bits below the sign bit.  */
3354             tcg_gen_andi_i64(t64, t64, -1ULL << (bits - 1));
3355             /* If any bits remain set, we have overflow.  */
3356             tcg_gen_setcondi_i64(TCG_COND_NE, t64, t64, 0);
3357             tcg_gen_extrl_i64_i32(QREG_CC_V, t64);
3358             tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3359         }
3360     } else {
3361         tcg_gen_shli_i64(t64, t64, 32);
3362         if (logical) {
3363             tcg_gen_shr_i64(t64, t64, s64);
3364         } else {
3365             tcg_gen_sar_i64(t64, t64, s64);
3366         }
3367         tcg_gen_extr_i64_i32(QREG_CC_C, QREG_CC_N, t64);
3368 
3369         /* Note that C=0 if shift count is 0, and we get that for free.  */
3370         tcg_gen_shri_i32(QREG_CC_C, QREG_CC_C, 31);
3371 
3372         /* X = C, but only if the shift count was non-zero.  */
3373         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3374                             QREG_CC_C, QREG_CC_X);
3375     }
3376     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3377     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3378 
3379     tcg_temp_free(s32);
3380     tcg_temp_free_i64(s64);
3381     tcg_temp_free_i64(t64);
3382 
3383     /* Write back the result.  */
3384     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3385     set_cc_op(s, CC_OP_FLAGS);
3386 }
3387 
3388 DISAS_INSN(shift8_im)
3389 {
3390     shift_im(s, insn, OS_BYTE);
3391 }
3392 
3393 DISAS_INSN(shift16_im)
3394 {
3395     shift_im(s, insn, OS_WORD);
3396 }
3397 
3398 DISAS_INSN(shift_im)
3399 {
3400     shift_im(s, insn, OS_LONG);
3401 }
3402 
3403 DISAS_INSN(shift8_reg)
3404 {
3405     shift_reg(s, insn, OS_BYTE);
3406 }
3407 
3408 DISAS_INSN(shift16_reg)
3409 {
3410     shift_reg(s, insn, OS_WORD);
3411 }
3412 
3413 DISAS_INSN(shift_reg)
3414 {
3415     shift_reg(s, insn, OS_LONG);
3416 }
3417 
3418 DISAS_INSN(shift_mem)
3419 {
3420     int logical = insn & 8;
3421     int left = insn & 0x100;
3422     TCGv src;
3423     TCGv addr;
3424 
3425     SRC_EA(env, src, OS_WORD, !logical, &addr);
3426     tcg_gen_movi_i32(QREG_CC_V, 0);
3427     if (left) {
3428         tcg_gen_shri_i32(QREG_CC_C, src, 15);
3429         tcg_gen_shli_i32(QREG_CC_N, src, 1);
3430 
3431         /* Note that ColdFire always clears V,
3432            while M68000 sets it if the most significant bit is changed at
3433            any time during the shift operation.  */
3434         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3435             src = gen_extend(src, OS_WORD, 1);
3436             tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3437         }
3438     } else {
3439         tcg_gen_mov_i32(QREG_CC_C, src);
3440         if (logical) {
3441             tcg_gen_shri_i32(QREG_CC_N, src, 1);
3442         } else {
3443             tcg_gen_sari_i32(QREG_CC_N, src, 1);
3444         }
3445     }
3446 
3447     gen_ext(QREG_CC_N, QREG_CC_N, OS_WORD, 1);
3448     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3449     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3450     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3451 
3452     DEST_EA(env, insn, OS_WORD, QREG_CC_N, &addr);
3453     set_cc_op(s, CC_OP_FLAGS);
3454 }
3455 
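/* rol/ror: rotate REG (SIZE bits, already held in a 32-bit temp) by SHIFT
 * and set N, Z and C; V is always cleared and X is not affected.  The 8-
 * and 16-bit cases replicate the value first so a plain 32-bit rotate
 * gives the right result.
 */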
3456 static void rotate(TCGv reg, TCGv shift, int left, int size)
3457 {
3458     switch (size) {
3459     case 8:
3460         /* Replicate the 8-bit input so that a 32-bit rotate works.  */
3461         tcg_gen_ext8u_i32(reg, reg);
3462         tcg_gen_muli_i32(reg, reg, 0x01010101);
3463         goto do_long;
3464     case 16:
3465         /* Replicate the 16-bit input so that a 32-bit rotate works.  */
3466         tcg_gen_deposit_i32(reg, reg, reg, 16, 16);
3467         goto do_long;
3468     do_long:
3469     default:
3470         if (left) {
3471             tcg_gen_rotl_i32(reg, reg, shift);
3472         } else {
3473             tcg_gen_rotr_i32(reg, reg, shift);
3474         }
3475     }
3476 
3477     /* compute flags */
3478 
3479     switch (size) {
3480     case 8:
3481         tcg_gen_ext8s_i32(reg, reg);
3482         break;
3483     case 16:
3484         tcg_gen_ext16s_i32(reg, reg);
3485         break;
3486     default:
3487         break;
3488     }
3489 
3490     /* QREG_CC_X is not affected */
3491 
3492     tcg_gen_mov_i32(QREG_CC_N, reg);
3493     tcg_gen_mov_i32(QREG_CC_Z, reg);
3494 
3495     if (left) {
3496         tcg_gen_andi_i32(QREG_CC_C, reg, 1);
3497     } else {
3498         tcg_gen_shri_i32(QREG_CC_C, reg, 31);
3499     }
3500 
3501     tcg_gen_movi_i32(QREG_CC_V, 0); /* always cleared */
3502 }
3503 
3504 static void rotate_x_flags(TCGv reg, TCGv X, int size)
3505 {
3506     switch (size) {
3507     case 8:
3508         tcg_gen_ext8s_i32(reg, reg);
3509         break;
3510     case 16:
3511         tcg_gen_ext16s_i32(reg, reg);
3512         break;
3513     default:
3514         break;
3515     }
3516     tcg_gen_mov_i32(QREG_CC_N, reg);
3517     tcg_gen_mov_i32(QREG_CC_Z, reg);
3518     tcg_gen_mov_i32(QREG_CC_X, X);
3519     tcg_gen_mov_i32(QREG_CC_C, X);
3520     tcg_gen_movi_i32(QREG_CC_V, 0);
3521 }
3522 
3523 /* Result of rotate_x() is valid if 0 <= shift <= size */
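/* roxl/roxr rotate through the X flag, so the operand is effectively
 * size+1 bits wide with X as the extra bit; the formula below builds the
 * rotated value and the new X in one pass.
 */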
3524 static TCGv rotate_x(TCGv reg, TCGv shift, int left, int size)
3525 {
3526     TCGv X, shl, shr, shx, sz, zero;
3527 
3528     sz = tcg_const_i32(size);
3529 
3530     shr = tcg_temp_new();
3531     shl = tcg_temp_new();
3532     shx = tcg_temp_new();
3533     if (left) {
3534         tcg_gen_mov_i32(shl, shift);      /* shl = shift */
3535         tcg_gen_movi_i32(shr, size + 1);
3536         tcg_gen_sub_i32(shr, shr, shift); /* shr = size + 1 - shift */
3537         tcg_gen_subi_i32(shx, shift, 1);  /* shx = shift - 1 */
3538         /* shx = shx < 0 ? size : shx; */
3539         zero = tcg_const_i32(0);
3540         tcg_gen_movcond_i32(TCG_COND_LT, shx, shx, zero, sz, shx);
3541         tcg_temp_free(zero);
3542     } else {
3543         tcg_gen_mov_i32(shr, shift);      /* shr = shift */
3544         tcg_gen_movi_i32(shl, size + 1);
3545         tcg_gen_sub_i32(shl, shl, shift); /* shl = size + 1 - shift */
3546         tcg_gen_sub_i32(shx, sz, shift); /* shx = size - shift */
3547     }
3548 
3549     /* reg = (reg << shl) | (reg >> shr) | (x << shx); */
3550 
3551     tcg_gen_shl_i32(shl, reg, shl);
3552     tcg_gen_shr_i32(shr, reg, shr);
3553     tcg_gen_or_i32(reg, shl, shr);
3554     tcg_temp_free(shl);
3555     tcg_temp_free(shr);
3556     tcg_gen_shl_i32(shx, QREG_CC_X, shx);
3557     tcg_gen_or_i32(reg, reg, shx);
3558     tcg_temp_free(shx);
3559 
3560     /* X = (reg >> size) & 1 */
3561 
3562     X = tcg_temp_new();
3563     tcg_gen_shr_i32(X, reg, sz);
3564     tcg_gen_andi_i32(X, X, 1);
3565     tcg_temp_free(sz);
3566 
3567     return X;
3568 }
3569 
3570 /* Result of rotate32_x() is valid if 0 <= shift < 33 */
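/* The 32-bit variant rotates a 33-bit quantity [X:reg], which no longer
 * fits in a 32-bit TCGv, so it is done on a 64-bit temporary instead.
 */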
3571 static TCGv rotate32_x(TCGv reg, TCGv shift, int left)
3572 {
3573     TCGv_i64 t0, shift64;
3574     TCGv X, lo, hi, zero;
3575 
3576     shift64 = tcg_temp_new_i64();
3577     tcg_gen_extu_i32_i64(shift64, shift);
3578 
3579     t0 = tcg_temp_new_i64();
3580 
3581     X = tcg_temp_new();
3582     lo = tcg_temp_new();
3583     hi = tcg_temp_new();
3584 
3585     if (left) {
3586         /* create [reg:X:..] */
3587 
3588         tcg_gen_shli_i32(lo, QREG_CC_X, 31);
3589         tcg_gen_concat_i32_i64(t0, lo, reg);
3590 
3591         /* rotate */
3592 
3593         tcg_gen_rotl_i64(t0, t0, shift64);
3594         tcg_temp_free_i64(shift64);
3595 
3596         /* result is [reg:..:reg:X] */
3597 
3598         tcg_gen_extr_i64_i32(lo, hi, t0);
3599         tcg_gen_andi_i32(X, lo, 1);
3600 
3601         tcg_gen_shri_i32(lo, lo, 1);
3602     } else {
3603         /* create [..:X:reg] */
3604 
3605         tcg_gen_concat_i32_i64(t0, reg, QREG_CC_X);
3606 
3607         tcg_gen_rotr_i64(t0, t0, shift64);
3608         tcg_temp_free_i64(shift64);
3609 
3610         /* result is value: [X:reg:..:reg] */
3611 
3612         tcg_gen_extr_i64_i32(lo, hi, t0);
3613 
3614         /* extract X */
3615 
3616         tcg_gen_shri_i32(X, hi, 31);
3617 
3618         /* extract result */
3619 
3620         tcg_gen_shli_i32(hi, hi, 1);
3621     }
3622     tcg_temp_free_i64(t0);
3623     tcg_gen_or_i32(lo, lo, hi);
3624     tcg_temp_free(hi);
3625 
3626     /* if shift == 0, register and X are not affected */
3627 
3628     zero = tcg_const_i32(0);
3629     tcg_gen_movcond_i32(TCG_COND_EQ, X, shift, zero, QREG_CC_X, X);
3630     tcg_gen_movcond_i32(TCG_COND_EQ, reg, shift, zero, reg, lo);
3631     tcg_temp_free(zero);
3632     tcg_temp_free(lo);
3633 
3634     return X;
3635 }
3636 
3637 DISAS_INSN(rotate_im)
3638 {
3639     TCGv shift;
3640     int tmp;
3641     int left = (insn & 0x100);
3642 
3643     tmp = (insn >> 9) & 7;
3644     if (tmp == 0) {
3645         tmp = 8;
3646     }
3647 
3648     shift = tcg_const_i32(tmp);
3649     if (insn & 8) {
3650         rotate(DREG(insn, 0), shift, left, 32);
3651     } else {
3652         TCGv X = rotate32_x(DREG(insn, 0), shift, left);
3653         rotate_x_flags(DREG(insn, 0), X, 32);
3654         tcg_temp_free(X);
3655     }
3656     tcg_temp_free(shift);
3657 
3658     set_cc_op(s, CC_OP_FLAGS);
3659 }
3660 
3661 DISAS_INSN(rotate8_im)
3662 {
3663     int left = (insn & 0x100);
3664     TCGv reg;
3665     TCGv shift;
3666     int tmp;
3667 
3668     reg = gen_extend(DREG(insn, 0), OS_BYTE, 0);
3669 
3670     tmp = (insn >> 9) & 7;
3671     if (tmp == 0) {
3672         tmp = 8;
3673     }
3674 
3675     shift = tcg_const_i32(tmp);
3676     if (insn & 8) {
3677         rotate(reg, shift, left, 8);
3678     } else {
3679         TCGv X = rotate_x(reg, shift, left, 8);
3680         rotate_x_flags(reg, X, 8);
3681         tcg_temp_free(X);
3682     }
3683     tcg_temp_free(shift);
3684     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3685     set_cc_op(s, CC_OP_FLAGS);
3686 }
3687 
3688 DISAS_INSN(rotate16_im)
3689 {
3690     int left = (insn & 0x100);
3691     TCGv reg;
3692     TCGv shift;
3693     int tmp;
3694 
3695     reg = gen_extend(DREG(insn, 0), OS_WORD, 0);
3696     tmp = (insn >> 9) & 7;
3697     if (tmp == 0) {
3698         tmp = 8;
3699     }
3700 
3701     shift = tcg_const_i32(tmp);
3702     if (insn & 8) {
3703         rotate(reg, shift, left, 16);
3704     } else {
3705         TCGv X = rotate_x(reg, shift, left, 16);
3706         rotate_x_flags(reg, X, 16);
3707         tcg_temp_free(X);
3708     }
3709     tcg_temp_free(shift);
3710     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3711     set_cc_op(s, CC_OP_FLAGS);
3712 }
3713 
3714 DISAS_INSN(rotate_reg)
3715 {
3716     TCGv reg;
3717     TCGv src;
3718     TCGv t0, t1;
3719     int left = (insn & 0x100);
3720 
3721     reg = DREG(insn, 0);
3722     src = DREG(insn, 9);
3723     /* shift in [0..63] */
3724     t0 = tcg_temp_new();
3725     tcg_gen_andi_i32(t0, src, 63);
3726     t1 = tcg_temp_new_i32();
3727     if (insn & 8) {
3728         tcg_gen_andi_i32(t1, src, 31);
3729         rotate(reg, t1, left, 32);
3730         /* if shift == 0, clear C */
3731         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3732                             t0, QREG_CC_V /* 0 */,
3733                             QREG_CC_V /* 0 */, QREG_CC_C);
3734     } else {
3735         TCGv X;
3736         /* modulo 33: rotate through X covers the 32 data bits plus X */
3737         tcg_gen_movi_i32(t1, 33);
3738         tcg_gen_remu_i32(t1, t0, t1);
3739         X = rotate32_x(DREG(insn, 0), t1, left);
3740         rotate_x_flags(DREG(insn, 0), X, 32);
3741         tcg_temp_free(X);
3742     }
3743     tcg_temp_free(t1);
3744     tcg_temp_free(t0);
3745     set_cc_op(s, CC_OP_FLAGS);
3746 }
3747 
3748 DISAS_INSN(rotate8_reg)
3749 {
3750     TCGv reg;
3751     TCGv src;
3752     TCGv t0, t1;
3753     int left = (insn & 0x100);
3754 
3755     reg = gen_extend(DREG(insn, 0), OS_BYTE, 0);
3756     src = DREG(insn, 9);
3757     /* shift in [0..63] */
3758     t0 = tcg_temp_new_i32();
3759     tcg_gen_andi_i32(t0, src, 63);
3760     t1 = tcg_temp_new_i32();
3761     if (insn & 8) {
3762         tcg_gen_andi_i32(t1, src, 7);
3763         rotate(reg, t1, left, 8);
3764         /* if shift == 0, clear C */
3765         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3766                             t0, QREG_CC_V /* 0 */,
3767                             QREG_CC_V /* 0 */, QREG_CC_C);
3768     } else {
3769         TCGv X;
3770         /* modulo 9: rotate through X covers the 8 data bits plus X */
3771         tcg_gen_movi_i32(t1, 9);
3772         tcg_gen_remu_i32(t1, t0, t1);
3773         X = rotate_x(reg, t1, left, 8);
3774         rotate_x_flags(reg, X, 8);
3775         tcg_temp_free(X);
3776     }
3777     tcg_temp_free(t1);
3778     tcg_temp_free(t0);
3779     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3780     set_cc_op(s, CC_OP_FLAGS);
3781 }
3782 
3783 DISAS_INSN(rotate16_reg)
3784 {
3785     TCGv reg;
3786     TCGv src;
3787     TCGv t0, t1;
3788     int left = (insn & 0x100);
3789 
3790     reg = gen_extend(DREG(insn, 0), OS_WORD, 0);
3791     src = DREG(insn, 9);
3792     /* shift in [0..63] */
3793     t0 = tcg_temp_new_i32();
3794     tcg_gen_andi_i32(t0, src, 63);
3795     t1 = tcg_temp_new_i32();
3796     if (insn & 8) {
3797         tcg_gen_andi_i32(t1, src, 15);
3798         rotate(reg, t1, left, 16);
3799         /* if shift == 0, clear C */
3800         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3801                             t0, QREG_CC_V /* 0 */,
3802                             QREG_CC_V /* 0 */, QREG_CC_C);
3803     } else {
3804         TCGv X;
3805         /* modulo 17: rotate through X covers the 16 data bits plus X */
3806         tcg_gen_movi_i32(t1, 17);
3807         tcg_gen_remu_i32(t1, t0, t1);
3808         X = rotate_x(reg, t1, left, 16);
3809         rotate_x_flags(reg, X, 16);
3810         tcg_temp_free(X);
3811     }
3812     tcg_temp_free(t1);
3813     tcg_temp_free(t0);
3814     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3815     set_cc_op(s, CC_OP_FLAGS);
3816 }
3817 
3818 DISAS_INSN(rotate_mem)
3819 {
3820     TCGv src;
3821     TCGv addr;
3822     TCGv shift;
3823     int left = (insn & 0x100);
3824 
3825     SRC_EA(env, src, OS_WORD, 0, &addr);
3826 
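         /* Memory rotates always operate on a word and rotate by exactly one bit. */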
3827     shift = tcg_const_i32(1);
3828     if (insn & 0x0200) {
3829         rotate(src, shift, left, 16);
3830     } else {
3831         TCGv X = rotate_x(src, shift, left, 16);
3832         rotate_x_flags(src, X, 16);
3833         tcg_temp_free(X);
3834     }
3835     tcg_temp_free(shift);
3836     DEST_EA(env, insn, OS_WORD, src, &addr);
3837     set_cc_op(s, CC_OP_FLAGS);
3838 }
3839 
3840 DISAS_INSN(bfext_reg)
3841 {
3842     int ext = read_im16(env, s);
3843     int is_sign = insn & 0x200;
3844     TCGv src = DREG(insn, 0);
3845     TCGv dst = DREG(ext, 12);
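         /* A width field of 0 encodes a full 32-bit field: ((0 - 1) & 31) + 1 == 32. */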
3846     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
3847     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
3848     int pos = 32 - ofs - len;        /* little bit-endian */
3849     TCGv tmp = tcg_temp_new();
3850     TCGv shift;
3851 
3852     /* In general, we're going to rotate the field so that it's at the
3853        top of the word and then right-shift by the complement of the
3854        width to extend the field.  */
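         /* Worked example (values chosen purely for illustration): with ofs = 8
            and len = 4 the field occupies bits 23..20 of the source; rotating
            left by 8 moves it to bits 31..28, and an arithmetic right shift by
            32 - 4 = 28 then sign-extends it into the low bits.  */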
3855     if (ext & 0x20) {
3856         /* Variable width.  */
3857         if (ext & 0x800) {
3858             /* Variable offset.  */
3859             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3860             tcg_gen_rotl_i32(tmp, src, tmp);
3861         } else {
3862             tcg_gen_rotli_i32(tmp, src, ofs);
3863         }
3864 
3865         shift = tcg_temp_new();
3866         tcg_gen_neg_i32(shift, DREG(ext, 0));
3867         tcg_gen_andi_i32(shift, shift, 31);
3868         tcg_gen_sar_i32(QREG_CC_N, tmp, shift);
3869         if (is_sign) {
3870             tcg_gen_mov_i32(dst, QREG_CC_N);
3871         } else {
3872             tcg_gen_shr_i32(dst, tmp, shift);
3873         }
3874         tcg_temp_free(shift);
3875     } else {
3876         /* Immediate width.  */
3877         if (ext & 0x800) {
3878             /* Variable offset */
3879             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3880             tcg_gen_rotl_i32(tmp, src, tmp);
3881             src = tmp;
3882             pos = 32 - len;
3883         } else {
3884             /* Immediate offset.  If the field doesn't wrap around the
3885                end of the word, rely on (s)extract completely.  */
3886             if (pos < 0) {
3887                 tcg_gen_rotli_i32(tmp, src, ofs);
3888                 src = tmp;
3889                 pos = 32 - len;
3890             }
3891         }
3892 
3893         tcg_gen_sextract_i32(QREG_CC_N, src, pos, len);
3894         if (is_sign) {
3895             tcg_gen_mov_i32(dst, QREG_CC_N);
3896         } else {
3897             tcg_gen_extract_i32(dst, src, pos, len);
3898         }
3899     }
3900 
3901     tcg_temp_free(tmp);
3902     set_cc_op(s, CC_OP_LOGIC);
3903 }
3904 
3905 DISAS_INSN(bfext_mem)
3906 {
3907     int ext = read_im16(env, s);
3908     int is_sign = insn & 0x200;
3909     TCGv dest = DREG(ext, 12);
3910     TCGv addr, len, ofs;
3911 
3912     addr = gen_lea(env, s, insn, OS_UNSIZED);
3913     if (IS_NULL_QREG(addr)) {
3914         gen_addr_fault(s);
3915         return;
3916     }
3917 
3918     if (ext & 0x20) {
3919         len = DREG(ext, 0);
3920     } else {
3921         len = tcg_const_i32(extract32(ext, 0, 5));
3922     }
3923     if (ext & 0x800) {
3924         ofs = DREG(ext, 6);
3925     } else {
3926         ofs = tcg_const_i32(extract32(ext, 6, 5));
3927     }
3928 
3929     if (is_sign) {
3930         gen_helper_bfexts_mem(dest, cpu_env, addr, ofs, len);
3931         tcg_gen_mov_i32(QREG_CC_N, dest);
3932     } else {
3933         TCGv_i64 tmp = tcg_temp_new_i64();
3934         gen_helper_bfextu_mem(tmp, cpu_env, addr, ofs, len);
3935         tcg_gen_extr_i64_i32(dest, QREG_CC_N, tmp);
3936         tcg_temp_free_i64(tmp);
3937     }
3938     set_cc_op(s, CC_OP_LOGIC);
3939 
3940     if (!(ext & 0x20)) {
3941         tcg_temp_free(len);
3942     }
3943     if (!(ext & 0x800)) {
3944         tcg_temp_free(ofs);
3945     }
3946 }
3947 
3948 DISAS_INSN(bfop_reg)
3949 {
3950     int ext = read_im16(env, s);
3951     TCGv src = DREG(insn, 0);
3952     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
3953     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
3954     TCGv mask, tofs, tlen;
3955 
3956     TCGV_UNUSED(tofs);
3957     TCGV_UNUSED(tlen);
3958     if ((insn & 0x0f00) == 0x0d00) { /* bfffo */
3959         tofs = tcg_temp_new();
3960         tlen = tcg_temp_new();
3961     }
3962 
3963     if ((ext & 0x820) == 0) {
3964         /* Immediate width and offset.  */
3965         uint32_t maski = 0x7fffffffu >> (len - 1);
3966         if (ofs + len <= 32) {
3967             tcg_gen_shli_i32(QREG_CC_N, src, ofs);
3968         } else {
3969             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
3970         }
3971         tcg_gen_andi_i32(QREG_CC_N, QREG_CC_N, ~maski);
3972         mask = tcg_const_i32(ror32(maski, ofs));
3973         if (!TCGV_IS_UNUSED(tofs)) {
3974             tcg_gen_movi_i32(tofs, ofs);
3975             tcg_gen_movi_i32(tlen, len);
3976         }
3977     } else {
3978         TCGv tmp = tcg_temp_new();
3979         if (ext & 0x20) {
3980             /* Variable width */
3981             tcg_gen_subi_i32(tmp, DREG(ext, 0), 1);
3982             tcg_gen_andi_i32(tmp, tmp, 31);
3983             mask = tcg_const_i32(0x7fffffffu);
3984             tcg_gen_shr_i32(mask, mask, tmp);
3985             if (!TCGV_IS_UNUSED(tlen)) {
3986                 tcg_gen_addi_i32(tlen, tmp, 1);
3987             }
3988         } else {
3989             /* Immediate width */
3990             mask = tcg_const_i32(0x7fffffffu >> (len - 1));
3991             if (!TCGV_IS_UNUSED(tlen)) {
3992                 tcg_gen_movi_i32(tlen, len);
3993             }
3994         }
3995         if (ext & 0x800) {
3996             /* Variable offset */
3997             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3998             tcg_gen_rotl_i32(QREG_CC_N, src, tmp);
3999             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4000             tcg_gen_rotr_i32(mask, mask, tmp);
4001             if (!TCGV_IS_UNUSED(tofs)) {
4002                 tcg_gen_mov_i32(tofs, tmp);
4003             }
4004         } else {
4005             /* Immediate offset (and variable width) */
4006             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4007             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4008             tcg_gen_rotri_i32(mask, mask, ofs);
4009             if (!TCGV_IS_UNUSED(tofs)) {
4010                 tcg_gen_movi_i32(tofs, ofs);
4011             }
4012         }
4013         tcg_temp_free(tmp);
4014     }
4015     set_cc_op(s, CC_OP_LOGIC);
4016 
4017     switch (insn & 0x0f00) {
4018     case 0x0a00: /* bfchg */
4019         tcg_gen_eqv_i32(src, src, mask);
4020         break;
4021     case 0x0c00: /* bfclr */
4022         tcg_gen_and_i32(src, src, mask);
4023         break;
4024     case 0x0d00: /* bfffo */
4025         gen_helper_bfffo_reg(DREG(ext, 12), QREG_CC_N, tofs, tlen);
4026         tcg_temp_free(tlen);
4027         tcg_temp_free(tofs);
4028         break;
4029     case 0x0e00: /* bfset */
4030         tcg_gen_orc_i32(src, src, mask);
4031         break;
4032     case 0x0800: /* bftst */
4033         /* flags already set; no other work to do.  */
4034         break;
4035     default:
4036         g_assert_not_reached();
4037     }
4038     tcg_temp_free(mask);
4039 }
4040 
4041 DISAS_INSN(bfop_mem)
4042 {
4043     int ext = read_im16(env, s);
4044     TCGv addr, len, ofs;
4045     TCGv_i64 t64;
4046 
4047     addr = gen_lea(env, s, insn, OS_UNSIZED);
4048     if (IS_NULL_QREG(addr)) {
4049         gen_addr_fault(s);
4050         return;
4051     }
4052 
4053     if (ext & 0x20) {
4054         len = DREG(ext, 0);
4055     } else {
4056         len = tcg_const_i32(extract32(ext, 0, 5));
4057     }
4058     if (ext & 0x800) {
4059         ofs = DREG(ext, 6);
4060     } else {
4061         ofs = tcg_const_i32(extract32(ext, 6, 5));
4062     }
4063 
4064     switch (insn & 0x0f00) {
4065     case 0x0a00: /* bfchg */
4066         gen_helper_bfchg_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4067         break;
4068     case 0x0c00: /* bfclr */
4069         gen_helper_bfclr_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4070         break;
4071     case 0x0d00: /* bfffo */
4072         t64 = tcg_temp_new_i64();
4073         gen_helper_bfffo_mem(t64, cpu_env, addr, ofs, len);
4074         tcg_gen_extr_i64_i32(DREG(ext, 12), QREG_CC_N, t64);
4075         tcg_temp_free_i64(t64);
4076         break;
4077     case 0x0e00: /* bfset */
4078         gen_helper_bfset_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4079         break;
4080     case 0x0800: /* bftst */
4081         gen_helper_bfexts_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4082         break;
4083     default:
4084         g_assert_not_reached();
4085     }
4086     set_cc_op(s, CC_OP_LOGIC);
4087 
4088     if (!(ext & 0x20)) {
4089         tcg_temp_free(len);
4090     }
4091     if (!(ext & 0x800)) {
4092         tcg_temp_free(ofs);
4093     }
4094 }
4095 
4096 DISAS_INSN(bfins_reg)
4097 {
4098     int ext = read_im16(env, s);
4099     TCGv dst = DREG(insn, 0);
4100     TCGv src = DREG(ext, 12);
4101     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4102     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4103     int pos = 32 - ofs - len;        /* little bit-endian */
4104     TCGv tmp;
4105 
4106     tmp = tcg_temp_new();
4107 
4108     if (ext & 0x20) {
4109         /* Variable width */
4110         tcg_gen_neg_i32(tmp, DREG(ext, 0));
4111         tcg_gen_andi_i32(tmp, tmp, 31);
4112         tcg_gen_shl_i32(QREG_CC_N, src, tmp);
4113     } else {
4114         /* Immediate width */
4115         tcg_gen_shli_i32(QREG_CC_N, src, 32 - len);
4116     }
4117     set_cc_op(s, CC_OP_LOGIC);
4118 
4119     /* Immediate width and offset */
4120     if ((ext & 0x820) == 0) {
4121         /* Check for suitability for deposit.  */
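             /* Example (illustrative values): ofs = 28, len = 8 gives pos = -4;
                the field wraps from bits 3..0 into bits 31..28, so a plain
                deposit cannot be used and the rotate/mask path below applies.  */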
4122         if (pos >= 0) {
4123             tcg_gen_deposit_i32(dst, dst, src, pos, len);
4124         } else {
4125             uint32_t maski = -2U << (len - 1);
4126             uint32_t roti = (ofs + len) & 31;
4127             tcg_gen_andi_i32(tmp, src, ~maski);
4128             tcg_gen_rotri_i32(tmp, tmp, roti);
4129             tcg_gen_andi_i32(dst, dst, ror32(maski, roti));
4130             tcg_gen_or_i32(dst, dst, tmp);
4131         }
4132     } else {
4133         TCGv mask = tcg_temp_new();
4134         TCGv rot = tcg_temp_new();
4135 
4136         if (ext & 0x20) {
4137             /* Variable width */
4138             tcg_gen_subi_i32(rot, DREG(ext, 0), 1);
4139             tcg_gen_andi_i32(rot, rot, 31);
4140             tcg_gen_movi_i32(mask, -2);
4141             tcg_gen_shl_i32(mask, mask, rot);
4142             tcg_gen_mov_i32(rot, DREG(ext, 0));
4143             tcg_gen_andc_i32(tmp, src, mask);
4144         } else {
4145             /* Immediate width (variable offset) */
4146             uint32_t maski = -2U << (len - 1);
4147             tcg_gen_andi_i32(tmp, src, ~maski);
4148             tcg_gen_movi_i32(mask, maski);
4149             tcg_gen_movi_i32(rot, len & 31);
4150         }
4151         if (ext & 0x800) {
4152             /* Variable offset */
4153             tcg_gen_add_i32(rot, rot, DREG(ext, 6));
4154         } else {
4155             /* Immediate offset (variable width) */
4156             tcg_gen_addi_i32(rot, rot, ofs);
4157         }
4158         tcg_gen_andi_i32(rot, rot, 31);
4159         tcg_gen_rotr_i32(mask, mask, rot);
4160         tcg_gen_rotr_i32(tmp, tmp, rot);
4161         tcg_gen_and_i32(dst, dst, mask);
4162         tcg_gen_or_i32(dst, dst, tmp);
4163 
4164         tcg_temp_free(rot);
4165         tcg_temp_free(mask);
4166     }
4167     tcg_temp_free(tmp);
4168 }
4169 
4170 DISAS_INSN(bfins_mem)
4171 {
4172     int ext = read_im16(env, s);
4173     TCGv src = DREG(ext, 12);
4174     TCGv addr, len, ofs;
4175 
4176     addr = gen_lea(env, s, insn, OS_UNSIZED);
4177     if (IS_NULL_QREG(addr)) {
4178         gen_addr_fault(s);
4179         return;
4180     }
4181 
4182     if (ext & 0x20) {
4183         len = DREG(ext, 0);
4184     } else {
4185         len = tcg_const_i32(extract32(ext, 0, 5));
4186     }
4187     if (ext & 0x800) {
4188         ofs = DREG(ext, 6);
4189     } else {
4190         ofs = tcg_const_i32(extract32(ext, 6, 5));
4191     }
4192 
4193     gen_helper_bfins_mem(QREG_CC_N, cpu_env, addr, src, ofs, len);
4194     set_cc_op(s, CC_OP_LOGIC);
4195 
4196     if (!(ext & 0x20)) {
4197         tcg_temp_free(len);
4198     }
4199     if (!(ext & 0x800)) {
4200         tcg_temp_free(ofs);
4201     }
4202 }
4203 
4204 DISAS_INSN(ff1)
4205 {
4206     TCGv reg;
4207     reg = DREG(insn, 0);
4208     gen_logic_cc(s, reg, OS_LONG);
4209     gen_helper_ff1(reg, reg);
4210 }
4211 
4212 static TCGv gen_get_sr(DisasContext *s)
4213 {
4214     TCGv ccr;
4215     TCGv sr;
4216 
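         /* Build the full SR from the upper bits held in QREG_SR and the
            freshly computed CCR.  */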
4217     ccr = gen_get_ccr(s);
4218     sr = tcg_temp_new();
4219     tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
4220     tcg_gen_or_i32(sr, sr, ccr);
4221     return sr;
4222 }
4223 
4224 DISAS_INSN(strldsr)
4225 {
4226     uint16_t ext;
4227     uint32_t addr;
4228 
4229     addr = s->pc - 2;
4230     ext = read_im16(env, s);
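         /* 0x46fc is the MOVE-to-SR immediate opcode word: strldsr is the
            two-word sequence "push SR, then load SR from the immediate",
            handled here as a single privileged operation.  */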
4231     if (ext != 0x46FC) {
4232         gen_exception(s, addr, EXCP_UNSUPPORTED);
4233         return;
4234     }
4235     ext = read_im16(env, s);
4236     if (IS_USER(s) || (ext & SR_S) == 0) {
4237         gen_exception(s, addr, EXCP_PRIVILEGE);
4238         return;
4239     }
4240     gen_push(s, gen_get_sr(s));
4241     gen_set_sr_im(s, ext, 0);
4242 }
4243 
4244 DISAS_INSN(move_from_sr)
4245 {
4246     TCGv sr;
4247 
4248     if (IS_USER(s) && !m68k_feature(env, M68K_FEATURE_M68000)) {
4249         gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4250         return;
4251     }
4252     sr = gen_get_sr(s);
4253     DEST_EA(env, insn, OS_WORD, sr, NULL);
4254 }
4255 
4256 DISAS_INSN(move_to_sr)
4257 {
4258     if (IS_USER(s)) {
4259         gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4260         return;
4261     }
4262     gen_set_sr(env, s, insn, 0);
4263     gen_lookup_tb(s);
4264 }
4265 
4266 DISAS_INSN(move_from_usp)
4267 {
4268     if (IS_USER(s)) {
4269         gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4270         return;
4271     }
4272     tcg_gen_ld_i32(AREG(insn, 0), cpu_env,
4273                    offsetof(CPUM68KState, sp[M68K_USP]));
4274 }
4275 
4276 DISAS_INSN(move_to_usp)
4277 {
4278     if (IS_USER(s)) {
4279         gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4280         return;
4281     }
4282     tcg_gen_st_i32(AREG(insn, 0), cpu_env,
4283                    offsetof(CPUM68KState, sp[M68K_USP]));
4284 }
4285 
4286 DISAS_INSN(halt)
4287 {
4288     gen_exception(s, s->pc, EXCP_HALT_INSN);
4289 }
4290 
4291 DISAS_INSN(stop)
4292 {
4293     uint16_t ext;
4294 
4295     if (IS_USER(s)) {
4296         gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4297         return;
4298     }
4299 
4300     ext = read_im16(env, s);
4301 
4302     gen_set_sr_im(s, ext, 0);
4303     tcg_gen_movi_i32(cpu_halted, 1);
4304     gen_exception(s, s->pc, EXCP_HLT);
4305 }
4306 
4307 DISAS_INSN(rte)
4308 {
4309     if (IS_USER(s)) {
4310         gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4311         return;
4312     }
4313     gen_exception(s, s->pc - 2, EXCP_RTE);
4314 }
4315 
4316 DISAS_INSN(movec)
4317 {
4318     uint16_t ext;
4319     TCGv reg;
4320 
4321     if (IS_USER(s)) {
4322         gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4323         return;
4324     }
4325 
4326     ext = read_im16(env, s);
4327 
4328     if (ext & 0x8000) {
4329         reg = AREG(ext, 12);
4330     } else {
4331         reg = DREG(ext, 12);
4332     }
4333     gen_helper_movec(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4334     gen_lookup_tb(s);
4335 }
4336 
4337 DISAS_INSN(intouch)
4338 {
4339     if (IS_USER(s)) {
4340         gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4341         return;
4342     }
4343     /* ICache fetch.  Implement as no-op.  */
4344 }
4345 
4346 DISAS_INSN(cpushl)
4347 {
4348     if (IS_USER(s)) {
4349         gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4350         return;
4351     }
4352     /* Cache push/invalidate.  Implement as no-op.  */
4353 }
4354 
4355 DISAS_INSN(wddata)
4356 {
4357     gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4358 }
4359 
4360 DISAS_INSN(wdebug)
4361 {
4362     M68kCPU *cpu = m68k_env_get_cpu(env);
4363 
4364     if (IS_USER(s)) {
4365         gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
4366         return;
4367     }
4368     /* TODO: Implement wdebug.  */
4369     cpu_abort(CPU(cpu), "WDEBUG not implemented");
4370 }
4371 
4372 DISAS_INSN(trap)
4373 {
4374     gen_exception(s, s->pc - 2, EXCP_TRAP0 + (insn & 0xf));
4375 }
4376 
4377 static void gen_load_fcr(DisasContext *s, TCGv res, int reg)
4378 {
4379     switch (reg) {
4380     case M68K_FPIAR:
4381         tcg_gen_movi_i32(res, 0);
4382         break;
4383     case M68K_FPSR:
4384         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpsr));
4385         break;
4386     case M68K_FPCR:
4387         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpcr));
4388         break;
4389     }
4390 }
4391 
4392 static void gen_store_fcr(DisasContext *s, TCGv val, int reg)
4393 {
4394     switch (reg) {
4395     case M68K_FPIAR:
4396         break;
4397     case M68K_FPSR:
4398         tcg_gen_st_i32(val, cpu_env, offsetof(CPUM68KState, fpsr));
4399         break;
4400     case M68K_FPCR:
4401         gen_helper_set_fpcr(cpu_env, val);
4402         break;
4403     }
4404 }
4405 
4406 static void gen_qemu_store_fcr(DisasContext *s, TCGv addr, int reg)
4407 {
4408     int index = IS_USER(s);
4409     TCGv tmp;
4410 
4411     tmp = tcg_temp_new();
4412     gen_load_fcr(s, tmp, reg);
4413     tcg_gen_qemu_st32(tmp, addr, index);
4414     tcg_temp_free(tmp);
4415 }
4416 
4417 static void gen_qemu_load_fcr(DisasContext *s, TCGv addr, int reg)
4418 {
4419     int index = IS_USER(s);
4420     TCGv tmp;
4421 
4422     tmp = tcg_temp_new();
4423     tcg_gen_qemu_ld32u(tmp, addr, index);
4424     gen_store_fcr(s, tmp, reg);
4425     tcg_temp_free(tmp);
4426 }
4427 
4428 
4429 static void gen_op_fmove_fcr(CPUM68KState *env, DisasContext *s,
4430                              uint32_t insn, uint32_t ext)
4431 {
4432     int mask = (ext >> 10) & 7;
4433     int is_write = (ext >> 13) & 1;
4434     int mode = extract32(insn, 3, 3);
4435     int i;
4436     TCGv addr, tmp;
4437 
4438     switch (mode) {
4439     case 0: /* Dn */
4440         if (mask != M68K_FPIAR && mask != M68K_FPSR && mask != M68K_FPCR) {
4441             gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4442             return;
4443         }
4444         if (is_write) {
4445             gen_load_fcr(s, DREG(insn, 0), mask);
4446         } else {
4447             gen_store_fcr(s, DREG(insn, 0), mask);
4448         }
4449         return;
4450     case 1: /* An, only with FPIAR */
4451         if (mask != M68K_FPIAR) {
4452             gen_exception(s, s->insn_pc, EXCP_ILLEGAL);
4453             return;
4454         }
4455         if (is_write) {
4456             gen_load_fcr(s, AREG(insn, 0), mask);
4457         } else {
4458             gen_store_fcr(s, AREG(insn, 0), mask);
4459         }
4460         return;
4461     default:
4462         break;
4463     }
4464 
4465     tmp = gen_lea(env, s, insn, OS_LONG);
4466     if (IS_NULL_QREG(tmp)) {
4467         gen_addr_fault(s);
4468         return;
4469     }
4470 
4471     addr = tcg_temp_new();
4472     tcg_gen_mov_i32(addr, tmp);
4473 
4474     /* mask:
4475      *
4476      * 0b100 Floating-Point Control Register
4477      * 0b010 Floating-Point Status Register
4478      * 0b001 Floating-Point Instruction Address Register
4479      *
4480      */
4481 
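         /* With -(An) (mode 4) the selected registers are stored from FPCR
            downwards and An is written back afterwards; the other modes walk
            upwards, with (An)+ (mode 3) writing the final address back.  */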
4482     if (is_write && mode == 4) {
4483         for (i = 2; i >= 0; i--, mask >>= 1) {
4484             if (mask & 1) {
4485                 gen_qemu_store_fcr(s, addr, 1 << i);
4486                 if (mask != 1) {
4487                     tcg_gen_subi_i32(addr, addr, opsize_bytes(OS_LONG));
4488                 }
4489             }
4490         }
4491         tcg_gen_mov_i32(AREG(insn, 0), addr);
4492     } else {
4493         for (i = 0; i < 3; i++, mask >>= 1) {
4494             if (mask & 1) {
4495                 if (is_write) {
4496                     gen_qemu_store_fcr(s, addr, 1 << i);
4497                 } else {
4498                     gen_qemu_load_fcr(s, addr, 1 << i);
4499                 }
4500                 if (mask != 1 || mode == 3) {
4501                     tcg_gen_addi_i32(addr, addr, opsize_bytes(OS_LONG));
4502                 }
4503             }
4504         }
4505         if (mode == 3) {
4506             tcg_gen_mov_i32(AREG(insn, 0), addr);
4507         }
4508     }
4509     tcg_temp_free_i32(addr);
4510 }
4511 
4512 static void gen_op_fmovem(CPUM68KState *env, DisasContext *s,
4513                           uint32_t insn, uint32_t ext)
4514 {
4515     int opsize;
4516     TCGv addr, tmp;
4517     int mode = (ext >> 11) & 0x3;
4518     int is_load = ((ext & 0x2000) == 0);
4519 
4520     if (m68k_feature(s->env, M68K_FEATURE_FPU)) {
4521         opsize = OS_EXTENDED;
4522     } else {
4523         opsize = OS_DOUBLE;  /* FIXME */
4524     }
4525 
4526     addr = gen_lea(env, s, insn, opsize);
4527     if (IS_NULL_QREG(addr)) {
4528         gen_addr_fault(s);
4529         return;
4530     }
4531 
4532     tmp = tcg_temp_new();
4533     if (mode & 0x1) {
4534         /* Dynamic register list */
4535         tcg_gen_ext8u_i32(tmp, DREG(ext, 4));
4536     } else {
4537         /* Static register list */
4538         tcg_gen_movi_i32(tmp, ext & 0xff);
4539     }
4540 
4541     if (!is_load && (mode & 2) == 0) {
4542         /* predecrement addressing mode
4543          * is only available for storing registers to memory
4544          */
4545         if (opsize == OS_EXTENDED) {
4546             gen_helper_fmovemx_st_predec(tmp, cpu_env, addr, tmp);
4547         } else {
4548             gen_helper_fmovemd_st_predec(tmp, cpu_env, addr, tmp);
4549         }
4550     } else {
4551         /* postincrement addressing mode */
4552         if (opsize == OS_EXTENDED) {
4553             if (is_load) {
4554                 gen_helper_fmovemx_ld_postinc(tmp, cpu_env, addr, tmp);
4555             } else {
4556                 gen_helper_fmovemx_st_postinc(tmp, cpu_env, addr, tmp);
4557             }
4558         } else {
4559             if (is_load) {
4560                 gen_helper_fmovemd_ld_postinc(tmp, cpu_env, addr, tmp);
4561             } else {
4562                 gen_helper_fmovemd_st_postinc(tmp, cpu_env, addr, tmp);
4563             }
4564         }
4565     }
4566     if ((insn & 070) == 030 || (insn & 070) == 040) {
4567         tcg_gen_mov_i32(AREG(insn, 0), tmp);
4568     }
4569     tcg_temp_free(tmp);
4570 }
4571 
4572 /* ??? FP exceptions are not implemented.  Most exceptions are deferred until
4573    immediately before the next FP instruction is executed.  */
4574 DISAS_INSN(fpu)
4575 {
4576     uint16_t ext;
4577     int opmode;
4578     int opsize;
4579     TCGv_ptr cpu_src, cpu_dest;
4580 
4581     ext = read_im16(env, s);
4582     opmode = ext & 0x7f;
4583     switch ((ext >> 13) & 7) {
4584     case 0:
4585         break;
4586     case 1:
4587         goto undef;
4588     case 2:
4589         if (insn == 0xf200 && (ext & 0xfc00) == 0x5c00) {
4590             /* fmovecr */
4591             TCGv rom_offset = tcg_const_i32(opmode);
4592             cpu_dest = gen_fp_ptr(REG(ext, 7));
4593             gen_helper_fconst(cpu_env, cpu_dest, rom_offset);
4594             tcg_temp_free_ptr(cpu_dest);
4595             tcg_temp_free(rom_offset);
4596             return;
4597         }
4598         break;
4599     case 3: /* fmove out */
4600         cpu_src = gen_fp_ptr(REG(ext, 7));
4601         opsize = ext_opsize(ext, 10);
4602         if (gen_ea_fp(env, s, insn, opsize, cpu_src, EA_STORE) == -1) {
4603             gen_addr_fault(s);
4604         }
4605         gen_helper_ftst(cpu_env, cpu_src);
4606         tcg_temp_free_ptr(cpu_src);
4607         return;
4608     case 4: /* fmove to control register.  */
4609     case 5: /* fmove from control register.  */
4610         gen_op_fmove_fcr(env, s, insn, ext);
4611         return;
4612     case 6: /* fmovem */
4613     case 7:
4614         if ((ext & 0x1000) == 0 && !m68k_feature(s->env, M68K_FEATURE_FPU)) {
4615             goto undef;
4616         }
4617         gen_op_fmovem(env, s, insn, ext);
4618         return;
4619     }
4620     if (ext & (1 << 14)) {
4621         /* Source effective address.  */
4622         opsize = ext_opsize(ext, 10);
4623         cpu_src = gen_fp_result_ptr();
4624         if (gen_ea_fp(env, s, insn, opsize, cpu_src, EA_LOADS) == -1) {
4625             gen_addr_fault(s);
4626             return;
4627         }
4628     } else {
4629         /* Source register.  */
4630         opsize = OS_EXTENDED;
4631         cpu_src = gen_fp_ptr(REG(ext, 10));
4632     }
4633     cpu_dest = gen_fp_ptr(REG(ext, 7));
4634     switch (opmode) {
4635     case 0: /* fmove */
4636         gen_fp_move(cpu_dest, cpu_src);
4637         break;
4638     case 0x40: /* fsmove */
4639         gen_helper_fsround(cpu_env, cpu_dest, cpu_src);
4640         break;
4641     case 0x44: /* fdmove */
4642         gen_helper_fdround(cpu_env, cpu_dest, cpu_src);
4643         break;
4644     case 1: /* fint */
4645         gen_helper_firound(cpu_env, cpu_dest, cpu_src);
4646         break;
4647     case 3: /* fintrz */
4648         gen_helper_fitrunc(cpu_env, cpu_dest, cpu_src);
4649         break;
4650     case 4: /* fsqrt */
4651         gen_helper_fsqrt(cpu_env, cpu_dest, cpu_src);
4652         break;
4653     case 0x41: /* fssqrt */
4654         gen_helper_fssqrt(cpu_env, cpu_dest, cpu_src);
4655         break;
4656     case 0x45: /* fdsqrt */
4657         gen_helper_fdsqrt(cpu_env, cpu_dest, cpu_src);
4658         break;
4659     case 0x18: /* fabs */
4660         gen_helper_fabs(cpu_env, cpu_dest, cpu_src);
4661         break;
4662     case 0x58: /* fsabs */
4663         gen_helper_fsabs(cpu_env, cpu_dest, cpu_src);
4664         break;
4665     case 0x5c: /* fdabs */
4666         gen_helper_fdabs(cpu_env, cpu_dest, cpu_src);
4667         break;
4668     case 0x1a: /* fneg */
4669         gen_helper_fneg(cpu_env, cpu_dest, cpu_src);
4670         break;
4671     case 0x5a: /* fsneg */
4672         gen_helper_fsneg(cpu_env, cpu_dest, cpu_src);
4673         break;
4674     case 0x5e: /* fdneg */
4675         gen_helper_fdneg(cpu_env, cpu_dest, cpu_src);
4676         break;
4677     case 0x20: /* fdiv */
4678         gen_helper_fdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
4679         break;
4680     case 0x60: /* fsdiv */
4681         gen_helper_fsdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
4682         break;
4683     case 0x64: /* fddiv */
4684         gen_helper_fddiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
4685         break;
4686     case 0x22: /* fadd */
4687         gen_helper_fadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
4688         break;
4689     case 0x62: /* fsadd */
4690         gen_helper_fsadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
4691         break;
4692     case 0x66: /* fdadd */
4693         gen_helper_fdadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
4694         break;
4695     case 0x23: /* fmul */
4696         gen_helper_fmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
4697         break;
4698     case 0x63: /* fsmul */
4699         gen_helper_fsmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
4700         break;
4701     case 0x67: /* fdmul */
4702         gen_helper_fdmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
4703         break;
4704     case 0x24: /* fsgldiv */
4705         gen_helper_fsgldiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
4706         break;
4707     case 0x27: /* fsglmul */
4708         gen_helper_fsglmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
4709         break;
4710     case 0x28: /* fsub */
4711         gen_helper_fsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
4712         break;
4713     case 0x68: /* fssub */
4714         gen_helper_fssub(cpu_env, cpu_dest, cpu_src, cpu_dest);
4715         break;
4716     case 0x6c: /* fdsub */
4717         gen_helper_fdsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
4718         break;
4719     case 0x38: /* fcmp */
4720         gen_helper_fcmp(cpu_env, cpu_src, cpu_dest);
4721         return;
4722     case 0x3a: /* ftst */
4723         gen_helper_ftst(cpu_env, cpu_src);
4724         return;
4725     default:
4726         goto undef;
4727     }
4728     tcg_temp_free_ptr(cpu_src);
4729     gen_helper_ftst(cpu_env, cpu_dest);
4730     tcg_temp_free_ptr(cpu_dest);
4731     return;
4732 undef:
4733     /* FIXME: Is this right for offset addressing modes?  */
4734     s->pc -= 2;
4735     disas_undef_fpu(env, s, insn);
4736 }
4737 
4738 static void gen_fcc_cond(DisasCompare *c, DisasContext *s, int cond)
4739 {
4740     TCGv fpsr;
4741 
4742     c->g1 = 1;
4743     c->v2 = tcg_const_i32(0);
4744     c->g2 = 0;
4745     /* TODO: Raise BSUN exception.  */
4746     fpsr = tcg_temp_new();
4747     gen_load_fcr(s, fpsr, M68K_FPSR);
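         /* In the comments below, A is the FPSR NAN (unordered) condition
            bit, used together with N (negative) and Z (zero).  */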
4748     switch (cond) {
4749     case 0:  /* False */
4750     case 16: /* Signaling False */
4751         c->v1 = c->v2;
4752         c->tcond = TCG_COND_NEVER;
4753         break;
4754     case 1:  /* EQual Z */
4755     case 17: /* Signaling EQual Z */
4756         c->v1 = tcg_temp_new();
4757         c->g1 = 0;
4758         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
4759         c->tcond = TCG_COND_NE;
4760         break;
4761     case 2:  /* Ordered Greater Than !(A || Z || N) */
4762     case 18: /* Greater Than !(A || Z || N) */
4763         c->v1 = tcg_temp_new();
4764         c->g1 = 0;
4765         tcg_gen_andi_i32(c->v1, fpsr,
4766                          FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
4767         c->tcond = TCG_COND_EQ;
4768         break;
4769     case 3:  /* Ordered Greater than or Equal Z || !(A || N) */
4770     case 19: /* Greater than or Equal Z || !(A || N) */
4771         c->v1 = tcg_temp_new();
4772         c->g1 = 0;
4773         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
4774         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
4775         tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_Z | FPSR_CC_N);
4776         tcg_gen_or_i32(c->v1, c->v1, fpsr);
4777         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
4778         c->tcond = TCG_COND_NE;
4779         break;
4780     case 4:  /* Ordered Less Than !(!N || A || Z) */
4781     case 20: /* Less Than !(!N || A || Z) */
4782         c->v1 = tcg_temp_new();
4783         c->g1 = 0;
4784         tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
4785         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_N | FPSR_CC_A | FPSR_CC_Z);
4786         c->tcond = TCG_COND_EQ;
4787         break;
4788     case 5:  /* Ordered Less than or Equal Z || (N && !A) */
4789     case 21: /* Less than or Equal Z || (N && !A) */
4790         c->v1 = tcg_temp_new();
4791         c->g1 = 0;
4792         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
4793         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
4794         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
4795         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_Z | FPSR_CC_N);
4796         c->tcond = TCG_COND_NE;
4797         break;
4798     case 6:  /* Ordered Greater or Less than !(A || Z) */
4799     case 22: /* Greater or Less than !(A || Z) */
4800         c->v1 = tcg_temp_new();
4801         c->g1 = 0;
4802         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
4803         c->tcond = TCG_COND_EQ;
4804         break;
4805     case 7:  /* Ordered !A */
4806     case 23: /* Greater, Less or Equal !A */
4807         c->v1 = tcg_temp_new();
4808         c->g1 = 0;
4809         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
4810         c->tcond = TCG_COND_EQ;
4811         break;
4812     case 8:  /* Unordered A */
4813     case 24: /* Not Greater, Less or Equal A */
4814         c->v1 = tcg_temp_new();
4815         c->g1 = 0;
4816         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
4817         c->tcond = TCG_COND_NE;
4818         break;
4819     case 9:  /* Unordered or Equal A || Z */
4820     case 25: /* Not Greater or Less than A || Z */
4821         c->v1 = tcg_temp_new();
4822         c->g1 = 0;
4823         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
4824         c->tcond = TCG_COND_NE;
4825         break;
4826     case 10: /* Unordered or Greater Than A || !(N || Z) */
4827     case 26: /* Not Less or Equal A || !(N || Z) */
4828         c->v1 = tcg_temp_new();
4829         c->g1 = 0;
4830         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
4831         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
4832         tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_A | FPSR_CC_N);
4833         tcg_gen_or_i32(c->v1, c->v1, fpsr);
4834         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
4835         c->tcond = TCG_COND_NE;
4836         break;
4837     case 11: /* Unordered or Greater or Equal A || Z || !N */
4838     case 27: /* Not Less Than A || Z || !N */
4839         c->v1 = tcg_temp_new();
4840         c->g1 = 0;
4841         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
4842         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
4843         c->tcond = TCG_COND_NE;
4844         break;
4845     case 12: /* Unordered or Less Than A || (N && !Z) */
4846     case 28: /* Not Greater than or Equal A || (N && !Z) */
4847         c->v1 = tcg_temp_new();
4848         c->g1 = 0;
4849         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
4850         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
4851         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
4852         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_A | FPSR_CC_N);
4853         c->tcond = TCG_COND_NE;
4854         break;
4855     case 13: /* Unordered or Less or Equal A || Z || N */
4856     case 29: /* Not Greater Than A || Z || N */
4857         c->v1 = tcg_temp_new();
4858         c->g1 = 0;
4859         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
4860         c->tcond = TCG_COND_NE;
4861         break;
4862     case 14: /* Not Equal !Z */
4863     case 30: /* Signaling Not Equal !Z */
4864         c->v1 = tcg_temp_new();
4865         c->g1 = 0;
4866         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
4867         c->tcond = TCG_COND_EQ;
4868         break;
4869     case 15: /* True */
4870     case 31: /* Signaling True */
4871         c->v1 = c->v2;
4872         c->tcond = TCG_COND_ALWAYS;
4873         break;
4874     }
4875     tcg_temp_free(fpsr);
4876 }
4877 
4878 static void gen_fjmpcc(DisasContext *s, int cond, TCGLabel *l1)
4879 {
4880     DisasCompare c;
4881 
4882     gen_fcc_cond(&c, s, cond);
4883     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
4884     free_cond(&c);
4885 }
4886 
4887 DISAS_INSN(fbcc)
4888 {
4889     uint32_t offset;
4890     uint32_t base;
4891     TCGLabel *l1;
4892 
4893     base = s->pc;
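         /* The displacement is a signed 16-bit word by default; bit 6 of
            the opcode selects a 32-bit displacement built from two
            extension words.  */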
4894     offset = (int16_t)read_im16(env, s);
4895     if (insn & (1 << 6)) {
4896         offset = (offset << 16) | read_im16(env, s);
4897     }
4898 
4899     l1 = gen_new_label();
4900     update_cc_op(s);
4901     gen_fjmpcc(s, insn & 0x3f, l1);
4902     gen_jmp_tb(s, 0, s->pc);
4903     gen_set_label(l1);
4904     gen_jmp_tb(s, 1, base + offset);
4905 }
4906 
4907 DISAS_INSN(fscc)
4908 {
4909     DisasCompare c;
4910     int cond;
4911     TCGv tmp;
4912     uint16_t ext;
4913 
4914     ext = read_im16(env, s);
4915     cond = ext & 0x3f;
4916     gen_fcc_cond(&c, s, cond);
4917 
4918     tmp = tcg_temp_new();
4919     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
4920     free_cond(&c);
4921 
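         /* FScc sets every bit of the destination byte, so turn the 0/1
            setcond result into 0/-1 before storing.  */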
4922     tcg_gen_neg_i32(tmp, tmp);
4923     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
4924     tcg_temp_free(tmp);
4925 }
4926 
4927 DISAS_INSN(frestore)
4928 {
4929     M68kCPU *cpu = m68k_env_get_cpu(env);
4930 
4931     /* TODO: Implement frestore.  */
4932     cpu_abort(CPU(cpu), "FRESTORE not implemented");
4933 }
4934 
4935 DISAS_INSN(fsave)
4936 {
4937     M68kCPU *cpu = m68k_env_get_cpu(env);
4938 
4939     /* TODO: Implement fsave.  */
4940     cpu_abort(CPU(cpu), "FSAVE not implemented");
4941 }
4942 
4943 static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
4944 {
4945     TCGv tmp = tcg_temp_new();
4946     if (s->env->macsr & MACSR_FI) {
4947         if (upper)
4948             tcg_gen_andi_i32(tmp, val, 0xffff0000);
4949         else
4950             tcg_gen_shli_i32(tmp, val, 16);
4951     } else if (s->env->macsr & MACSR_SU) {
4952         if (upper)
4953             tcg_gen_sari_i32(tmp, val, 16);
4954         else
4955             tcg_gen_ext16s_i32(tmp, val);
4956     } else {
4957         if (upper)
4958             tcg_gen_shri_i32(tmp, val, 16);
4959         else
4960             tcg_gen_ext16u_i32(tmp, val);
4961     }
4962     return tmp;
4963 }
4964 
4965 static void gen_mac_clear_flags(void)
4966 {
4967     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
4968                      ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
4969 }
4970 
4971 DISAS_INSN(mac)
4972 {
4973     TCGv rx;
4974     TCGv ry;
4975     uint16_t ext;
4976     int acc;
4977     TCGv tmp;
4978     TCGv addr;
4979     TCGv loadval;
4980     int dual;
4981     TCGv saved_flags;
4982 
4983     if (!s->done_mac) {
4984         s->mactmp = tcg_temp_new_i64();
4985         s->done_mac = 1;
4986     }
4987 
4988     ext = read_im16(env, s);
4989 
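         /* Accumulator number 0..3: the low bit comes from the opcode, the
            high bit from the extension word.  */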
4990     acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
4991     dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
4992     if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
4993         disas_undef(env, s, insn);
4994         return;
4995     }
4996     if (insn & 0x30) {
4997         /* MAC with load.  */
4998         tmp = gen_lea(env, s, insn, OS_LONG);
4999         addr = tcg_temp_new();
5000         tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
5001         /* Load the value now to ensure correct exception behavior.
5002            Perform writeback after reading the MAC inputs.  */
5003         loadval = gen_load(s, OS_LONG, addr, 0);
5004 
5005         acc ^= 1;
5006         rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
5007         ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
5008     } else {
5009         loadval = addr = NULL_QREG;
5010         rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5011         ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5012     }
5013 
5014     gen_mac_clear_flags();
5015 #if 0
5016     l1 = -1;
5017     /* Disabled because conditional branches clobber temporary vars.  */
5018     if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
5019         /* Skip the multiply if we know we will ignore it.  */
5020         l1 = gen_new_label();
5021         tmp = tcg_temp_new();
5022         tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
5023         gen_op_jmp_nz32(tmp, l1);
5024     }
5025 #endif
5026 
5027     if ((ext & 0x0800) == 0) {
5028         /* Word.  */
5029         rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
5030         ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
5031     }
5032     if (s->env->macsr & MACSR_FI) {
5033         gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
5034     } else {
5035         if (s->env->macsr & MACSR_SU)
5036             gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
5037         else
5038             gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
5039         switch ((ext >> 9) & 3) {
5040         case 1:
5041             tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
5042             break;
5043         case 3:
5044             tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
5045             break;
5046         }
5047     }
5048 
5049     if (dual) {
5050         /* Save the overflow flag from the multiply.  */
5051         saved_flags = tcg_temp_new();
5052         tcg_gen_mov_i32(saved_flags, QREG_MACSR);
5053     } else {
5054         saved_flags = NULL_QREG;
5055     }
5056 
5057 #if 0
5058     /* Disabled because conditional branches clobber temporary vars.  */
5059     if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
5060         /* Skip the accumulate if the value is already saturated.  */
5061         l1 = gen_new_label();
5062         tmp = tcg_temp_new();
5063         gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5064         gen_op_jmp_nz32(tmp, l1);
5065     }
5066 #endif
5067 
5068     if (insn & 0x100)
5069         tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5070     else
5071         tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5072 
5073     if (s->env->macsr & MACSR_FI)
5074         gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5075     else if (s->env->macsr & MACSR_SU)
5076         gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5077     else
5078         gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5079 
5080 #if 0
5081     /* Disabled because conditional branches clobber temporary vars.  */
5082     if (l1 != -1)
5083         gen_set_label(l1);
5084 #endif
5085 
5086     if (dual) {
5087         /* Dual accumulate variant.  */
5088         acc = (ext >> 2) & 3;
5089         /* Restore the overflow flag from the multiplier.  */
5090         tcg_gen_mov_i32(QREG_MACSR, saved_flags);
5091 #if 0
5092         /* Disabled because conditional branches clobber temporary vars.  */
5093         if ((s->env->macsr & MACSR_OMC) != 0) {
5094             /* Skip the accumulate if the value is already saturated.  */
5095             l1 = gen_new_label();
5096             tmp = tcg_temp_new();
5097             gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5098             gen_op_jmp_nz32(tmp, l1);
5099         }
5100 #endif
5101         if (ext & 2)
5102             tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5103         else
5104             tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5105         if (s->env->macsr & MACSR_FI)
5106             gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5107         else if (s->env->macsr & MACSR_SU)
5108             gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5109         else
5110             gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5111 #if 0
5112         /* Disabled because conditional branches clobber temporary vars.  */
5113         if (l1 != -1)
5114             gen_set_label(l1);
5115 #endif
5116     }
5117     gen_helper_mac_set_flags(cpu_env, tcg_const_i32(acc));
5118 
5119     if (insn & 0x30) {
5120         TCGv rw;
5121         rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5122         tcg_gen_mov_i32(rw, loadval);
5123         /* FIXME: Should address writeback happen with the masked or
5124            unmasked value?  */
5125         switch ((insn >> 3) & 7) {
5126         case 3: /* Post-increment.  */
5127             tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
5128             break;
5129         case 4: /* Pre-decrement.  */
5130             tcg_gen_mov_i32(AREG(insn, 0), addr);
5131         }
5132     }
5133 }
5134 
5135 DISAS_INSN(from_mac)
5136 {
5137     TCGv rx;
5138     TCGv_i64 acc;
5139     int accnum;
5140 
5141     rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5142     accnum = (insn >> 9) & 3;
5143     acc = MACREG(accnum);
5144     if (s->env->macsr & MACSR_FI) {
5145         gen_helper_get_macf(rx, cpu_env, acc);
5146     } else if ((s->env->macsr & MACSR_OMC) == 0) {
5147         tcg_gen_extrl_i64_i32(rx, acc);
5148     } else if (s->env->macsr & MACSR_SU) {
5149         gen_helper_get_macs(rx, acc);
5150     } else {
5151         gen_helper_get_macu(rx, acc);
5152     }
5153     if (insn & 0x40) {
5154         tcg_gen_movi_i64(acc, 0);
5155         tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5156     }
5157 }
5158 
5159 DISAS_INSN(move_mac)
5160 {
5161     /* FIXME: This can be done without a helper.  */
5162     int src;
5163     TCGv dest;
5164     src = insn & 3;
5165     dest = tcg_const_i32((insn >> 9) & 3);
5166     gen_helper_mac_move(cpu_env, dest, tcg_const_i32(src));
5167     gen_mac_clear_flags();
5168     gen_helper_mac_set_flags(cpu_env, dest);
5169 }
5170 
5171 DISAS_INSN(from_macsr)
5172 {
5173     TCGv reg;
5174 
5175     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5176     tcg_gen_mov_i32(reg, QREG_MACSR);
5177 }
5178 
5179 DISAS_INSN(from_mask)
5180 {
5181     TCGv reg;
5182     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5183     tcg_gen_mov_i32(reg, QREG_MAC_MASK);
5184 }
5185 
5186 DISAS_INSN(from_mext)
5187 {
5188     TCGv reg;
5189     TCGv acc;
5190     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5191     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5192     if (s->env->macsr & MACSR_FI)
5193         gen_helper_get_mac_extf(reg, cpu_env, acc);
5194     else
5195         gen_helper_get_mac_exti(reg, cpu_env, acc);
5196 }
5197 
5198 DISAS_INSN(macsr_to_ccr)
5199 {
5200     TCGv tmp = tcg_temp_new();
5201     tcg_gen_andi_i32(tmp, QREG_MACSR, 0xf);
5202     gen_helper_set_sr(cpu_env, tmp);
5203     tcg_temp_free(tmp);
5204     set_cc_op(s, CC_OP_FLAGS);
5205 }
5206 
5207 DISAS_INSN(to_mac)
5208 {
5209     TCGv_i64 acc;
5210     TCGv val;
5211     int accnum;
5212     accnum = (insn >> 9) & 3;
5213     acc = MACREG(accnum);
5214     SRC_EA(env, val, OS_LONG, 0, NULL);
5215     if (s->env->macsr & MACSR_FI) {
5216         tcg_gen_ext_i32_i64(acc, val);
5217         tcg_gen_shli_i64(acc, acc, 8);
5218     } else if (s->env->macsr & MACSR_SU) {
5219         tcg_gen_ext_i32_i64(acc, val);
5220     } else {
5221         tcg_gen_extu_i32_i64(acc, val);
5222     }
5223     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5224     gen_mac_clear_flags();
5225     gen_helper_mac_set_flags(cpu_env, tcg_const_i32(accnum));
5226 }
5227 
5228 DISAS_INSN(to_macsr)
5229 {
5230     TCGv val;
5231     SRC_EA(env, val, OS_LONG, 0, NULL);
5232     gen_helper_set_macsr(cpu_env, val);
5233     gen_lookup_tb(s);
5234 }
5235 
5236 DISAS_INSN(to_mask)
5237 {
5238     TCGv val;
5239     SRC_EA(env, val, OS_LONG, 0, NULL);
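         /* Only the low 16 bits of the MAC address mask are significant;
            the upper half is kept as all ones.  */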
5240     tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
5241 }
5242 
5243 DISAS_INSN(to_mext)
5244 {
5245     TCGv val;
5246     TCGv acc;
5247     SRC_EA(env, val, OS_LONG, 0, NULL);
5248     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5249     if (s->env->macsr & MACSR_FI)
5250         gen_helper_set_mac_extf(cpu_env, val, acc);
5251     else if (s->env->macsr & MACSR_SU)
5252         gen_helper_set_mac_exts(cpu_env, val, acc);
5253     else
5254         gen_helper_set_mac_extu(cpu_env, val, acc);
5255 }
5256 
5257 static disas_proc opcode_table[65536];
5258 
5259 static void
5260 register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
5261 {
5262   int i;
5263   int from;
5264   int to;
5265 
5266   /* Sanity check.  All set bits must be included in the mask.  */
5267   if (opcode & ~mask) {
5268       fprintf(stderr,
5269               "qemu internal error: bogus opcode definition %04x/%04x\n",
5270               opcode, mask);
5271       abort();
5272   }
5273   /* This could probably be cleverer.  For now just optimize the case where
5274      the top bits are known.  */
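       /* Example: registering scc as 0x50c0 with mask 0xf0f8.  Scanning
          from the top, the first clear mask bit is 0x0800, so after the
          shift below the loop walks opcodes 0x5000..0x5fff and installs
          the handler for every entry that matches 0x50c0 under the mask.  */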
5275   /* Find the first zero bit in the mask.  */
5276   i = 0x8000;
5277   while ((i & mask) != 0)
5278       i >>= 1;
5279   /* Iterate over all combinations of this and lower bits.  */
5280   if (i == 0)
5281       i = 1;
5282   else
5283       i <<= 1;
5284   from = opcode & ~(i - 1);
5285   to = from + i;
5286   for (i = from; i < to; i++) {
5287       if ((i & mask) == opcode)
5288           opcode_table[i] = proc;
5289   }
5290 }
5291 
5292 /* Register m68k opcode handlers.  Order is important.
5293    Later insns override earlier ones.  */
5294 void register_m68k_insns (CPUM68KState *env)
5295 {
5296     /* Build the opcode table only once to avoid
5297        multithreading issues. */
5298     if (opcode_table[0] != NULL) {
5299         return;
5300     }
5301 
5302     /* Use BASE() for instructions available
5303      * for both CF_ISA_A and M68000.
5304      */
5305 #define BASE(name, opcode, mask) \
5306     register_opcode(disas_##name, 0x##opcode, 0x##mask)
5307 #define INSN(name, opcode, mask, feature) do { \
5308     if (m68k_feature(env, M68K_FEATURE_##feature)) \
5309         BASE(name, opcode, mask); \
5310     } while(0)
5311     BASE(undef,     0000, 0000);
5312     INSN(arith_im,  0080, fff8, CF_ISA_A);
5313     INSN(arith_im,  0000, ff00, M68000);
5314     INSN(undef,     00c0, ffc0, M68000);
5315     INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
5316     BASE(bitop_reg, 0100, f1c0);
5317     BASE(bitop_reg, 0140, f1c0);
5318     BASE(bitop_reg, 0180, f1c0);
5319     BASE(bitop_reg, 01c0, f1c0);
5320     INSN(arith_im,  0280, fff8, CF_ISA_A);
5321     INSN(arith_im,  0200, ff00, M68000);
5322     INSN(undef,     02c0, ffc0, M68000);
5323     INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
5324     INSN(arith_im,  0480, fff8, CF_ISA_A);
5325     INSN(arith_im,  0400, ff00, M68000);
5326     INSN(undef,     04c0, ffc0, M68000);
5327     INSN(arith_im,  0600, ff00, M68000);
5328     INSN(undef,     06c0, ffc0, M68000);
5329     INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
5330     INSN(arith_im,  0680, fff8, CF_ISA_A);
5331     INSN(arith_im,  0c00, ff38, CF_ISA_A);
5332     INSN(arith_im,  0c00, ff00, M68000);
5333     BASE(bitop_im,  0800, ffc0);
5334     BASE(bitop_im,  0840, ffc0);
5335     BASE(bitop_im,  0880, ffc0);
5336     BASE(bitop_im,  08c0, ffc0);
5337     INSN(arith_im,  0a80, fff8, CF_ISA_A);
5338     INSN(arith_im,  0a00, ff00, M68000);
5339     INSN(cas,       0ac0, ffc0, CAS);
5340     INSN(cas,       0cc0, ffc0, CAS);
5341     INSN(cas,       0ec0, ffc0, CAS);
5342     INSN(cas2w,     0cfc, ffff, CAS);
5343     INSN(cas2l,     0efc, ffff, CAS);
5344     BASE(move,      1000, f000);
5345     BASE(move,      2000, f000);
5346     BASE(move,      3000, f000);
5347     INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
5348     INSN(negx,      4080, fff8, CF_ISA_A);
5349     INSN(negx,      4000, ff00, M68000);
5350     INSN(undef,     40c0, ffc0, M68000);
5351     INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
5352     INSN(move_from_sr, 40c0, ffc0, M68000);
5353     BASE(lea,       41c0, f1c0);
5354     BASE(clr,       4200, ff00);
5355     BASE(undef,     42c0, ffc0);
5356     INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
5357     INSN(move_from_ccr, 42c0, ffc0, M68000);
5358     INSN(neg,       4480, fff8, CF_ISA_A);
5359     INSN(neg,       4400, ff00, M68000);
5360     INSN(undef,     44c0, ffc0, M68000);
5361     BASE(move_to_ccr, 44c0, ffc0);
5362     INSN(not,       4680, fff8, CF_ISA_A);
5363     INSN(not,       4600, ff00, M68000);
5364     INSN(undef,     46c0, ffc0, M68000);
5365     INSN(move_to_sr, 46c0, ffc0, CF_ISA_A);
5366     INSN(nbcd,      4800, ffc0, M68000);
5367     INSN(linkl,     4808, fff8, M68000);
5368     BASE(pea,       4840, ffc0);
5369     BASE(swap,      4840, fff8);
5370     INSN(bkpt,      4848, fff8, BKPT);
5371     INSN(movem,     48d0, fbf8, CF_ISA_A);
5372     INSN(movem,     48e8, fbf8, CF_ISA_A);
5373     INSN(movem,     4880, fb80, M68000);
5374     BASE(ext,       4880, fff8);
5375     BASE(ext,       48c0, fff8);
5376     BASE(ext,       49c0, fff8);
5377     BASE(tst,       4a00, ff00);
5378     INSN(tas,       4ac0, ffc0, CF_ISA_B);
5379     INSN(tas,       4ac0, ffc0, M68000);
5380     INSN(halt,      4ac8, ffff, CF_ISA_A);
5381     INSN(pulse,     4acc, ffff, CF_ISA_A);
5382     BASE(illegal,   4afc, ffff);
5383     INSN(mull,      4c00, ffc0, CF_ISA_A);
5384     INSN(mull,      4c00, ffc0, LONG_MULDIV);
5385     INSN(divl,      4c40, ffc0, CF_ISA_A);
5386     INSN(divl,      4c40, ffc0, LONG_MULDIV);
5387     INSN(sats,      4c80, fff8, CF_ISA_B);
5388     BASE(trap,      4e40, fff0);
5389     BASE(link,      4e50, fff8);
5390     BASE(unlk,      4e58, fff8);
5391     INSN(move_to_usp, 4e60, fff8, USP);
5392     INSN(move_from_usp, 4e68, fff8, USP);
5393     BASE(nop,       4e71, ffff);
5394     BASE(stop,      4e72, ffff);
5395     BASE(rte,       4e73, ffff);
5396     INSN(rtd,       4e74, ffff, RTD);
5397     BASE(rts,       4e75, ffff);
5398     INSN(movec,     4e7b, ffff, CF_ISA_A);
5399     BASE(jump,      4e80, ffc0);
5400     BASE(jump,      4ec0, ffc0);
5401     INSN(addsubq,   5000, f080, M68000);
5402     BASE(addsubq,   5080, f0c0);
5403     INSN(scc,       50c0, f0f8, CF_ISA_A); /* Scc.B Dx   */
5404     INSN(scc,       50c0, f0c0, M68000);   /* Scc.B <EA> */
5405     INSN(dbcc,      50c8, f0f8, M68000);
5406     INSN(tpf,       51f8, fff8, CF_ISA_A);
5407 
5408     /* Branch instructions.  */
5409     BASE(branch,    6000, f000);
5410     /* Disable long branch instructions, then add back the ones we want.  */
5411     BASE(undef,     60ff, f0ff); /* All long branches.  */
5412     INSN(branch,    60ff, f0ff, CF_ISA_B);
5413     INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
5414     INSN(branch,    60ff, ffff, BRAL);
5415     INSN(branch,    60ff, f0ff, BCCL);
5416 
5417     BASE(moveq,     7000, f100);
5418     INSN(mvzs,      7100, f100, CF_ISA_B);
5419     BASE(or,        8000, f000);
5420     BASE(divw,      80c0, f0c0);
5421     INSN(sbcd_reg,  8100, f1f8, M68000);
5422     INSN(sbcd_mem,  8108, f1f8, M68000);
5423     BASE(addsub,    9000, f000);
5424     INSN(undef,     90c0, f0c0, CF_ISA_A);
5425     INSN(subx_reg,  9180, f1f8, CF_ISA_A);
5426     INSN(subx_reg,  9100, f138, M68000);
5427     INSN(subx_mem,  9108, f138, M68000);
5428     INSN(suba,      91c0, f1c0, CF_ISA_A);
5429     INSN(suba,      90c0, f0c0, M68000);
5430 
5431     BASE(undef_mac, a000, f000);
5432     INSN(mac,       a000, f100, CF_EMAC);
5433     INSN(from_mac,  a180, f9b0, CF_EMAC);
5434     INSN(move_mac,  a110, f9fc, CF_EMAC);
5435     INSN(from_macsr, a980, f9f0, CF_EMAC);
5436     INSN(from_mask, ad80, fff0, CF_EMAC);
5437     INSN(from_mext, ab80, fbf0, CF_EMAC);
5438     INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
5439     INSN(to_mac,    a100, f9c0, CF_EMAC);
5440     INSN(to_macsr,  a900, ffc0, CF_EMAC);
5441     INSN(to_mext,   ab00, fbc0, CF_EMAC);
5442     INSN(to_mask,   ad00, ffc0, CF_EMAC);
5443 
5444     INSN(mov3q,     a140, f1c0, CF_ISA_B);
5445     INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
5446     INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
5447     INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
5448     INSN(cmp,       b080, f1c0, CF_ISA_A);
5449     INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
5450     INSN(cmp,       b000, f100, M68000);
5451     INSN(eor,       b100, f100, M68000);
5452     INSN(cmpm,      b108, f138, M68000);
5453     INSN(cmpa,      b0c0, f0c0, M68000);
5454     INSN(eor,       b180, f1c0, CF_ISA_A);
5455     BASE(and,       c000, f000);
5456     INSN(exg_dd,    c140, f1f8, M68000);
5457     INSN(exg_aa,    c148, f1f8, M68000);
5458     INSN(exg_da,    c188, f1f8, M68000);
5459     BASE(mulw,      c0c0, f0c0);
5460     INSN(abcd_reg,  c100, f1f8, M68000);
5461     INSN(abcd_mem,  c108, f1f8, M68000);
5462     BASE(addsub,    d000, f000);
5463     INSN(undef,     d0c0, f0c0, CF_ISA_A);
5464     INSN(addx_reg,  d180, f1f8, CF_ISA_A);
5465     INSN(addx_reg,  d100, f138, M68000);
5466     INSN(addx_mem,  d108, f138, M68000);
5467     INSN(adda,      d1c0, f1c0, CF_ISA_A);
5468     INSN(adda,      d0c0, f0c0, M68000);
5469     INSN(shift_im,  e080, f0f0, CF_ISA_A);
5470     INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
5471     INSN(shift8_im, e000, f0f0, M68000);
5472     INSN(shift16_im, e040, f0f0, M68000);
5473     INSN(shift_im,  e080, f0f0, M68000);
5474     INSN(shift8_reg, e020, f0f0, M68000);
5475     INSN(shift16_reg, e060, f0f0, M68000);
5476     INSN(shift_reg, e0a0, f0f0, M68000);
5477     INSN(shift_mem, e0c0, fcc0, M68000);
5478     INSN(rotate_im, e090, f0f0, M68000);
5479     INSN(rotate8_im, e010, f0f0, M68000);
5480     INSN(rotate16_im, e050, f0f0, M68000);
5481     INSN(rotate_reg, e0b0, f0f0, M68000);
5482     INSN(rotate8_reg, e030, f0f0, M68000);
5483     INSN(rotate16_reg, e070, f0f0, M68000);
5484     INSN(rotate_mem, e4c0, fcc0, M68000);
5485     INSN(bfext_mem, e9c0, fdc0, BITFIELD);  /* bfextu & bfexts */
5486     INSN(bfext_reg, e9c0, fdf8, BITFIELD);
5487     INSN(bfins_mem, efc0, ffc0, BITFIELD);
5488     INSN(bfins_reg, efc0, fff8, BITFIELD);
5489     INSN(bfop_mem, eac0, ffc0, BITFIELD);   /* bfchg */
5490     INSN(bfop_reg, eac0, fff8, BITFIELD);   /* bfchg */
5491     INSN(bfop_mem, ecc0, ffc0, BITFIELD);   /* bfclr */
5492     INSN(bfop_reg, ecc0, fff8, BITFIELD);   /* bfclr */
5493     INSN(bfop_mem, edc0, ffc0, BITFIELD);   /* bfffo */
5494     INSN(bfop_reg, edc0, fff8, BITFIELD);   /* bfffo */
5495     INSN(bfop_mem, eec0, ffc0, BITFIELD);   /* bfset */
5496     INSN(bfop_reg, eec0, fff8, BITFIELD);   /* bfset */
5497     INSN(bfop_mem, e8c0, ffc0, BITFIELD);   /* bftst */
5498     INSN(bfop_reg, e8c0, fff8, BITFIELD);   /* bftst */
5499     BASE(undef_fpu, f000, f000);
5500     INSN(fpu,       f200, ffc0, CF_FPU);
5501     INSN(fbcc,      f280, ffc0, CF_FPU);
5502     INSN(frestore,  f340, ffc0, CF_FPU);
5503     INSN(fsave,     f300, ffc0, CF_FPU);
5504     INSN(fpu,       f200, ffc0, FPU);
5505     INSN(fscc,      f240, ffc0, FPU);
5506     INSN(fbcc,      f280, ff80, FPU);
5507     INSN(frestore,  f340, ffc0, FPU);
5508     INSN(fsave,     f300, ffc0, FPU);
5509     INSN(intouch,   f340, ffc0, CF_ISA_A);
5510     INSN(cpushl,    f428, ff38, CF_ISA_A);
5511     INSN(wddata,    fb00, ff00, CF_ISA_A);
5512     INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
5513 #undef INSN
5514 }
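
/* Illustrative sketch, not authoritative (assumption: BASE(name, opcode, mask)
   registers disas_<name> for every 16-bit instruction word w such that
   (w & 0xmask) == 0xopcode, and INSN() does the same only when the named CPU
   feature is present).  Zero bits in the mask act as wildcards, e.g.:

       BASE(nop, 4e71, ffff)  -- matches exactly one word: 0x4e71
       BASE(lea, 41c0, f1c0)  -- leaves the An field and the source EA field
                                 free, so 0x41fa (lea (d16,PC),A0) also
                                 dispatches here: 0x41fa & 0xf1c0 == 0x41c0  */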
5515 
5516 /* ??? Some of this implementation is not exception safe.  We should always
5517    write back the result to memory before setting the condition codes.  */
5518 static void disas_m68k_insn(CPUM68KState * env, DisasContext *s)
5519 {
5520     uint16_t insn = read_im16(env, s);
5521     opcode_table[insn](env, s, insn);
5522     do_writebacks(s);
5523 }
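
/* Note on the writeback step above (assumption: do_writebacks() flushes the
   address-register updates recorded in dc->writeback_mask, e.g. the
   post-increment of "(A0)+"): those updates are deferred until the whole
   instruction has been translated, which limits the exception-safety issue
   described in the comment before disas_m68k_insn().  */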
5524 
5525 /* generate intermediate code for basic block 'tb'.  */
5526 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb)
5527 {
5528     CPUM68KState *env = cs->env_ptr;
5529     DisasContext dc1, *dc = &dc1;
5530     target_ulong pc_start;
5531     int pc_offset;
5532     int num_insns;
5533     int max_insns;
5534 
5535     /* generate intermediate code */
5536     pc_start = tb->pc;
5537 
5538     dc->tb = tb;
5539 
5540     dc->env = env;
5541     dc->is_jmp = DISAS_NEXT;
5542     dc->pc = pc_start;
5543     dc->cc_op = CC_OP_DYNAMIC;
5544     dc->cc_op_synced = 1;
5545     dc->singlestep_enabled = cs->singlestep_enabled;
5546     dc->user = (env->sr & SR_S) == 0;
5547     dc->done_mac = 0;
5548     dc->writeback_mask = 0;
5549     num_insns = 0;
5550     max_insns = tb->cflags & CF_COUNT_MASK;
5551     if (max_insns == 0) {
5552         max_insns = CF_COUNT_MASK;
5553     }
5554     if (max_insns > TCG_MAX_INSNS) {
5555         max_insns = TCG_MAX_INSNS;
5556     }
5557 
5558     gen_tb_start(tb);
5559     do {
5560         pc_offset = dc->pc - pc_start;
5561         gen_throws_exception = NULL;
5562         tcg_gen_insn_start(dc->pc, dc->cc_op);
5563         num_insns++;
5564 
5565         if (unlikely(cpu_breakpoint_test(cs, dc->pc, BP_ANY))) {
5566             gen_exception(dc, dc->pc, EXCP_DEBUG);
5567             dc->is_jmp = DISAS_JUMP;
5568             /* The address covered by the breakpoint must be included in
5569                [tb->pc, tb->pc + tb->size) in order for it to be
5570                properly cleared -- thus we increment the PC here so that
5571                the logic setting tb->size below does the right thing.  */
5572             dc->pc += 2;
5573             break;
5574         }
5575 
5576         if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
5577             gen_io_start();
5578         }
5579 
5580         dc->insn_pc = dc->pc;
5581         disas_m68k_insn(env, dc);
5582     } while (!dc->is_jmp && !tcg_op_buf_full() &&
5583              !cs->singlestep_enabled &&
5584              !singlestep &&
5585              (pc_offset) < (TARGET_PAGE_SIZE - 32) &&
5586              num_insns < max_insns);
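    /* Translation stops at a jump, a full TCG op buffer, single-stepping,
       the per-TB instruction budget, or once pc_offset nears a guest page
       boundary; the 32-byte margin presumably leaves room for the longest
       m68k instruction encoding.  */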
5587 
5588     if (tb->cflags & CF_LAST_IO)
5589         gen_io_end();
5590     if (unlikely(cs->singlestep_enabled)) {
5591         /* Make sure the pc is updated, and raise a debug exception.  */
5592         if (!dc->is_jmp) {
5593             update_cc_op(dc);
5594             tcg_gen_movi_i32(QREG_PC, dc->pc);
5595         }
5596         gen_helper_raise_exception(cpu_env, tcg_const_i32(EXCP_DEBUG));
5597     } else {
5598         switch (dc->is_jmp) {
5599         case DISAS_NEXT:
5600             update_cc_op(dc);
5601             gen_jmp_tb(dc, 0, dc->pc);
5602             break;
5603         default:
5604         case DISAS_JUMP:
5605         case DISAS_UPDATE:
5606             update_cc_op(dc);
5607             /* Indicate that the hash table must be used to find the next TB.  */
5608             tcg_gen_exit_tb(0);
5609             break;
5610         case DISAS_TB_JUMP:
5611             /* nothing more to generate */
5612             break;
5613         }
5614     }
5615     gen_tb_end(tb, num_insns);
5616 
5617 #ifdef DEBUG_DISAS
5618     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
5619         && qemu_log_in_addr_range(pc_start)) {
5620         qemu_log_lock();
5621         qemu_log("----------------\n");
5622         qemu_log("IN: %s\n", lookup_symbol(pc_start));
5623         log_target_disas(cs, pc_start, dc->pc - pc_start, 0);
5624         qemu_log("\n");
5625         qemu_log_unlock();
5626     }
5627 #endif
5628     tb->size = dc->pc - pc_start;
5629     tb->icount = num_insns;
5630 }
5631 
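/* Display helper for m68k_cpu_dump_state() below: converts a guest 80-bit
   extended value to a host double purely for printing.  The union reinterprets
   the float64 bit pattern as a double without casting between incompatible
   pointer types.  */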
5632 static double floatx80_to_double(CPUM68KState *env, uint16_t high, uint64_t low)
5633 {
5634     floatx80 a = { .high = high, .low = low };
5635     union {
5636         float64 f64;
5637         double d;
5638     } u;
5639 
5640     u.f64 = floatx80_to_float64(a, &env->fp_status);
5641     return u.d;
5642 }
5643 
5644 void m68k_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
5645                          int flags)
5646 {
5647     M68kCPU *cpu = M68K_CPU(cs);
5648     CPUM68KState *env = &cpu->env;
5649     int i;
5650     uint16_t sr;
5651     for (i = 0; i < 8; i++) {
5652         cpu_fprintf(f, "D%d = %08x   A%d = %08x   "
5653                     "F%d = %04x %016"PRIx64"  (%12g)\n",
5654                     i, env->dregs[i], i, env->aregs[i],
5655                     i, env->fregs[i].l.upper, env->fregs[i].l.lower,
5656                     floatx80_to_double(env, env->fregs[i].l.upper,
5657                                        env->fregs[i].l.lower));
5658     }
5659     cpu_fprintf(f, "PC = %08x   ", env->pc);
5660     sr = env->sr | cpu_m68k_get_ccr(env);
5661     cpu_fprintf(f, "SR = %04x %c%c%c%c%c ", sr, (sr & CCF_X) ? 'X' : '-',
5662                 (sr & CCF_N) ? 'N' : '-', (sr & CCF_Z) ? 'Z' : '-',
5663                 (sr & CCF_V) ? 'V' : '-', (sr & CCF_C) ? 'C' : '-');
5664     cpu_fprintf(f, "FPSR = %08x %c%c%c%c ", env->fpsr,
5665                 (env->fpsr & FPSR_CC_A) ? 'A' : '-',
5666                 (env->fpsr & FPSR_CC_I) ? 'I' : '-',
5667                 (env->fpsr & FPSR_CC_Z) ? 'Z' : '-',
5668                 (env->fpsr & FPSR_CC_N) ? 'N' : '-');
5669     cpu_fprintf(f, "\n                                "
5670                    "FPCR =     %04x ", env->fpcr);
5671     switch (env->fpcr & FPCR_PREC_MASK) {
5672     case FPCR_PREC_X:
5673         cpu_fprintf(f, "X ");
5674         break;
5675     case FPCR_PREC_S:
5676         cpu_fprintf(f, "S ");
5677         break;
5678     case FPCR_PREC_D:
5679         cpu_fprintf(f, "D ");
5680         break;
5681     }
5682     switch (env->fpcr & FPCR_RND_MASK) {
5683     case FPCR_RND_N:
5684         cpu_fprintf(f, "RN ");
5685         break;
5686     case FPCR_RND_Z:
5687         cpu_fprintf(f, "RZ ");
5688         break;
5689     case FPCR_RND_M:
5690         cpu_fprintf(f, "RM ");
5691         break;
5692     case FPCR_RND_P:
5693         cpu_fprintf(f, "RP ");
5694         break;
5695     }
5696 }
5697 
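/* The data[] layout mirrors the tcg_gen_insn_start(dc->pc, dc->cc_op) call in
   gen_intermediate_code(): data[0] is the instruction's PC, data[1] its cc_op.
   A CC_OP_DYNAMIC value is skipped because in that case the current cc_op is
   already tracked in env->cc_op at run time.  */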
5698 void restore_state_to_opc(CPUM68KState *env, TranslationBlock *tb,
5699                           target_ulong *data)
5700 {
5701     int cc_op = data[1];
5702     env->pc = data[0];
5703     if (cc_op != CC_OP_DYNAMIC) {
5704         env->cc_op = cc_op;
5705     }
5706 }
5707