xref: /openbmc/qemu/target/sparc/translate.c (revision 51e47cf8)
1 /*
2    SPARC translation
3 
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6 
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2.1 of the License, or (at your option) any later version.
11 
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16 
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg/tcg-op.h"
28 #include "exec/cpu_ldst.h"
29 
30 #include "exec/helper-gen.h"
31 
32 #include "exec/translator.h"
33 #include "exec/log.h"
34 #include "asi.h"
35 
36 
37 #define DEBUG_DISAS
38 
39 #define DYNAMIC_PC  1 /* dynamic pc value */
40 #define JUMP_PC     2 /* dynamic pc value which takes only two values
41                          according to jump_pc[T2] */
42 
43 #define DISAS_EXIT  DISAS_TARGET_0
44 
45 /* global register indexes */
46 static TCGv_ptr cpu_regwptr;
47 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
48 static TCGv_i32 cpu_cc_op;
49 static TCGv_i32 cpu_psr;
50 static TCGv cpu_fsr, cpu_pc, cpu_npc;
51 static TCGv cpu_regs[32];
52 static TCGv cpu_y;
53 #ifndef CONFIG_USER_ONLY
54 static TCGv cpu_tbr;
55 #endif
56 static TCGv cpu_cond;
57 #ifdef TARGET_SPARC64
58 static TCGv_i32 cpu_xcc, cpu_fprs;
59 static TCGv cpu_gsr;
60 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
61 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
62 #else
63 static TCGv cpu_wim;
64 #endif
65 /* Floating point registers */
66 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
67 
68 #include "exec/gen-icount.h"
69 
/* Per-translation-block disassembly state for the SPARC front end. */
70 typedef struct DisasContext {
71     DisasContextBase base;
72     target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
73     target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74     target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
75     int mem_idx;        /* memory-access index; presumably the MMU mode — verify */
76     bool fpu_enabled;   /* NOTE(review): looks like "FP ops allowed" — confirm in callers */
77     bool address_mask_32bit; /* truncate addresses to 32 bits (see AM_CHECK) */
78 #ifndef CONFIG_USER_ONLY
79     bool supervisor;
80 #ifdef TARGET_SPARC64
81     bool hypervisor;
82 #endif
83 #endif
84 
85     uint32_t cc_op;  /* current CC operation */
86     sparc_def_t *def;
87 #ifdef TARGET_SPARC64
88     int fprs_dirty;  /* FPRS dirty bits already set in this TB (gen_update_fprs_dirty) */
89     int asi;
90 #endif
91 } DisasContext;
92 
/*
 * A comparison lowered to a TCG condition: cond(c1, c2).  When is_bool
 * is set, c1 already holds a 0/1 boolean and c2 is the constant zero.
 */
93 typedef struct {
94     TCGCond cond;
95     bool is_bool;
96     TCGv c1, c2;
97 } DisasCompare;
98 
99 // This function uses non-native bit order
100 #define GET_FIELD(X, FROM, TO)                                  \
101     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
102 
103 // This function uses the order in the manuals, i.e. bit 0 is 2^0
104 #define GET_FIELD_SP(X, FROM, TO)               \
105     GET_FIELD(X, 31 - (TO), 31 - (FROM))
106 
107 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
108 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
109 
110 #ifdef TARGET_SPARC64
111 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
112 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
113 #else
114 #define DFPREG(r) (r & 0x1e)
115 #define QFPREG(r) (r & 0x1c)
116 #endif
117 
118 #define UA2005_HTRAP_MASK 0xff
119 #define V8_TRAP_MASK 0x7f
120 
/*
 * Sign-extend the low LEN bits of X to a full int.
 *
 * The previous implementation computed "(x << len) >> len", which is
 * undefined behavior when the left shift overflows a signed int (and
 * the arithmetic right shift of a negative value is only
 * implementation-defined).  Use the well-defined mask/xor/subtract
 * idiom instead; behavior is unchanged for all callers.
 */
static int sign_extend(int x, int len)
{
    if (len <= 0 || len >= 32) {
        /* Matches the old behavior: a shift count of zero leaves x as-is. */
        return x;
    }
    uint32_t field = (uint32_t)x & ((1U << len) - 1);
    uint32_t sign = 1U << (len - 1);
    /* (field ^ sign) clears the sign bit; subtracting sign restores its
       negative weight.  Both operands fit in int, so this is fully
       defined signed arithmetic. */
    return (int)(field ^ sign) - (int)sign;
}
126 
127 #define IS_IMM (insn & (1<<13))
128 
/*
 * Mark the FPRS dirty bit covering FP register RD: bit 1 for the lower
 * bank (rd < 32), bit 2 for the upper bank.  No-op on 32-bit SPARC.
 */
129 static inline void gen_update_fprs_dirty(DisasContext *dc, int rd)
130 {
131 #if defined(TARGET_SPARC64)
132     int bit = (rd < 32) ? 1 : 2;
133     /* If we know we've already set this bit within the TB,
134        we can avoid setting it again.  */
135     if (!(dc->fprs_dirty & bit)) {
136         dc->fprs_dirty |= bit;
137         tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
138     }
139 #endif
140 }
141 
142 /* floating point registers moves */
/*
 * Load single-precision register SRC into a fresh 32-bit temp.  Each
 * i64 element of cpu_fpr[] backs two F registers: odd numbers live in
 * the low half, even numbers in the high half.
 */
143 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
144 {
145     TCGv_i32 ret = tcg_temp_new_i32();
146     if (src & 1) {
147         tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
148     } else {
149         tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
150     }
151     return ret;
152 }
153 
/*
 * Store 32-bit value V into single-precision register DST, depositing
 * it into the correct half of the backing i64 (odd regs: low half,
 * even regs: high half) and marking FPRS dirty.
 */
154 static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
155 {
156     TCGv_i64 t = tcg_temp_new_i64();
157 
158     tcg_gen_extu_i32_i64(t, v);
159     tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
160                         (dst & 1 ? 0 : 32), 32);
161     gen_update_fprs_dirty(dc, dst);
162 }
163 
/* Return a fresh 32-bit temp to receive a single-precision result. */
164 static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
165 {
166     return tcg_temp_new_i32();
167 }
168 
/*
 * Double-precision register accessors.  DFPREG() folds the encoded
 * register number into the 0..63 range; each D register is exactly one
 * i64 element of cpu_fpr[], so loads and dest lookups can return the
 * global TCGv directly.
 */
169 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
170 {
171     src = DFPREG(src);
172     return cpu_fpr[src / 2];
173 }
174 
/* Store V into D register DST and mark FPRS dirty. */
175 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
176 {
177     dst = DFPREG(dst);
178     tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
179     gen_update_fprs_dirty(dc, dst);
180 }
181 
/* Return the global backing a D-register destination (written in place). */
182 static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
183 {
184     return cpu_fpr[DFPREG(dst) / 2];
185 }
186 
/*
 * Copy the i64 pair holding quad register SRC into the env qt0 scratch
 * area (upper word first) — presumably staging a 128-bit operand for a
 * helper call; verify against the FP helper implementations.
 */
187 static void gen_op_load_fpr_QT0(unsigned int src)
188 {
189     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
190                    offsetof(CPU_QuadU, ll.upper));
191     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
192                    offsetof(CPU_QuadU, ll.lower));
193 }
194 
/* Same as above, staging into the second scratch quad, qt1. */
195 static void gen_op_load_fpr_QT1(unsigned int src)
196 {
197     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
198                    offsetof(CPU_QuadU, ll.upper));
199     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
200                    offsetof(CPU_QuadU, ll.lower));
201 }
202 
/* Copy the qt0 scratch quad back into quad register DST's i64 pair. */
203 static void gen_op_store_QT0_fpr(unsigned int dst)
204 {
205     tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
206                    offsetof(CPU_QuadU, ll.upper));
207     tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
208                    offsetof(CPU_QuadU, ll.lower));
209 }
210 
/*
 * Store the (V1, V2) i64 pair into quad register DST and mark FPRS
 * dirty; V1 is the more-significant half (it lands in the lower index).
 */
211 static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
212                             TCGv_i64 v1, TCGv_i64 v2)
213 {
214     dst = QFPREG(dst);
215 
216     tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
217     tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
218     gen_update_fprs_dirty(dc, dst);
219 }
220 
221 #ifdef TARGET_SPARC64
/* Return the global holding the first (upper) i64 half of quad SRC. */
222 static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
223 {
224     src = QFPREG(src);
225     return cpu_fpr[src / 2];
226 }
227 
/* Return the global holding the second (lower) i64 half of quad SRC. */
228 static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
229 {
230     src = QFPREG(src);
231     return cpu_fpr[src / 2 + 1];
232 }
233 
/* Copy quad register RS to quad register RD and mark FPRS dirty. */
234 static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
235 {
236     rd = QFPREG(rd);
237     rs = QFPREG(rs);
238 
239     tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
240     tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
241     gen_update_fprs_dirty(dc, rd);
242 }
243 #endif
244 
245 /* moves */
246 #ifdef CONFIG_USER_ONLY
247 #define supervisor(dc) 0
248 #ifdef TARGET_SPARC64
249 #define hypervisor(dc) 0
250 #endif
251 #else
252 #ifdef TARGET_SPARC64
253 #define hypervisor(dc) (dc->hypervisor)
254 #define supervisor(dc) (dc->supervisor | dc->hypervisor)
255 #else
256 #define supervisor(dc) (dc->supervisor)
257 #endif
258 #endif
259 
260 #ifdef TARGET_SPARC64
261 #ifndef TARGET_ABI32
262 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
263 #else
264 #define AM_CHECK(dc) (1)
265 #endif
266 #endif
267 
268 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
269 {
270 #ifdef TARGET_SPARC64
271     if (AM_CHECK(dc))
272         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
273 #endif
274 }
275 
276 static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
277 {
278     if (reg > 0) {
279         assert(reg < 32);
280         return cpu_regs[reg];
281     } else {
282         TCGv t = tcg_temp_new();
283         tcg_gen_movi_tl(t, 0);
284         return t;
285     }
286 }
287 
288 static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
289 {
290     if (reg > 0) {
291         assert(reg < 32);
292         tcg_gen_mov_tl(cpu_regs[reg], v);
293     }
294 }
295 
296 static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
297 {
298     if (reg > 0) {
299         assert(reg < 32);
300         return cpu_regs[reg];
301     } else {
302         return tcg_temp_new();
303     }
304 }
305 
/*
 * A direct TB link is only usable when both the target pc and npc are
 * reachable via goto_tb from the current TB.
 */
306 static bool use_goto_tb(DisasContext *s, target_ulong pc, target_ulong npc)
307 {
308     return translator_use_goto_tb(&s->base, pc) &&
309            translator_use_goto_tb(&s->base, npc);
310 }
311 
/*
 * End the TB, transferring control to (PC, NPC): chain directly via
 * goto_tb slot TB_NUM when allowed, otherwise fall back to a full exit
 * with the PC state stored.
 */
312 static void gen_goto_tb(DisasContext *s, int tb_num,
313                         target_ulong pc, target_ulong npc)
314 {
315     if (use_goto_tb(s, pc, npc))  {
316         /* jump to same page: we can use a direct jump */
317         tcg_gen_goto_tb(tb_num);
318         tcg_gen_movi_tl(cpu_pc, pc);
319         tcg_gen_movi_tl(cpu_npc, npc);
320         tcg_gen_exit_tb(s->base.tb, tb_num);
321     } else {
322         /* jump to another page: currently not optimized */
323         tcg_gen_movi_tl(cpu_pc, pc);
324         tcg_gen_movi_tl(cpu_npc, npc);
325         tcg_gen_exit_tb(NULL, 0);
326     }
327 }
328 
329 // XXX suboptimal
/*
 * PSR flag extractors: widen the 32-bit condition-code word SRC and
 * leave a single flag bit (0 or 1) in REG.
 */
330 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
331 {
332     tcg_gen_extu_i32_tl(reg, src);
333     tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
334 }
335 
336 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
337 {
338     tcg_gen_extu_i32_tl(reg, src);
339     tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
340 }
341 
342 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
343 {
344     tcg_gen_extu_i32_tl(reg, src);
345     tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
346 }
347 
348 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
349 {
350     tcg_gen_extu_i32_tl(reg, src);
351     tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
352 }
353 
/*
 * dst = src1 + src2, latching the operands and result into the
 * cpu_cc_* globals so the flags can be computed lazily later.
 */
354 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
355 {
356     tcg_gen_mov_tl(cpu_cc_src, src1);
357     tcg_gen_mov_tl(cpu_cc_src2, src2);
358     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
359     tcg_gen_mov_tl(dst, cpu_cc_dst);
360 }
361 
/*
 * Recover the 32-bit carry out of a previously latched addition:
 * carry = (uint32_t)cc_dst < (uint32_t)cc_src.  On 64-bit targets the
 * latched values must be narrowed first.
 */
362 static TCGv_i32 gen_add32_carry32(void)
363 {
364     TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
365 
366     /* Carry is computed from a previous add: (dst < src)  */
367 #if TARGET_LONG_BITS == 64
368     cc_src1_32 = tcg_temp_new_i32();
369     cc_src2_32 = tcg_temp_new_i32();
370     tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
371     tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
372 #else
373     cc_src1_32 = cpu_cc_dst;
374     cc_src2_32 = cpu_cc_src;
375 #endif
376 
377     carry_32 = tcg_temp_new_i32();
378     tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
379 
380     return carry_32;
381 }
382 
/*
 * Recover the 32-bit borrow out of a previously latched subtraction:
 * carry = (uint32_t)cc_src < (uint32_t)cc_src2.
 */
383 static TCGv_i32 gen_sub32_carry32(void)
384 {
385     TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
386 
387     /* Carry is computed from a previous borrow: (src1 < src2)  */
388 #if TARGET_LONG_BITS == 64
389     cc_src1_32 = tcg_temp_new_i32();
390     cc_src2_32 = tcg_temp_new_i32();
391     tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
392     tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
393 #else
394     cc_src1_32 = cpu_cc_src;
395     cc_src2_32 = cpu_cc_src2;
396 #endif
397 
398     carry_32 = tcg_temp_new_i32();
399     tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
400 
401     return carry_32;
402 }
403 
/*
 * dst = src1 + src2 + carry(icc), recovering the carry bit as cheaply
 * as the current lazy-cc state (dc->cc_op) permits; when update_cc is
 * set, latch operands/result and switch the cc state to CC_OP_ADDX.
 */
404 static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
405                             TCGv src2, int update_cc)
406 {
407     TCGv_i32 carry_32;
408     TCGv carry;
409 
410     switch (dc->cc_op) {
411     case CC_OP_DIV:
412     case CC_OP_LOGIC:
413         /* Carry is known to be zero.  Fall back to plain ADD.  */
414         if (update_cc) {
415             gen_op_add_cc(dst, src1, src2);
416         } else {
417             tcg_gen_add_tl(dst, src1, src2);
418         }
419         return;
420 
421     case CC_OP_ADD:
422     case CC_OP_TADD:
423     case CC_OP_TADDTV:
424         if (TARGET_LONG_BITS == 32) {
425             /* We can re-use the host's hardware carry generation by using
426                an ADD2 opcode.  We discard the low part of the output.
427                Ideally we'd combine this operation with the add that
428                generated the carry in the first place.  */
429             carry = tcg_temp_new();
430             tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
431             goto add_done;
432         }
433         carry_32 = gen_add32_carry32();
434         break;
435 
436     case CC_OP_SUB:
437     case CC_OP_TSUB:
438     case CC_OP_TSUBTV:
439         carry_32 = gen_sub32_carry32();
440         break;
441 
442     default:
443         /* We need external help to produce the carry.  */
444         carry_32 = tcg_temp_new_i32();
445         gen_helper_compute_C_icc(carry_32, cpu_env);
446         break;
447     }
448 
/* Widen the 32-bit carry to the target word size before adding it in. */
449 #if TARGET_LONG_BITS == 64
450     carry = tcg_temp_new();
451     tcg_gen_extu_i32_i64(carry, carry_32);
452 #else
453     carry = carry_32;
454 #endif
455 
456     tcg_gen_add_tl(dst, src1, src2);
457     tcg_gen_add_tl(dst, dst, carry);
458 
459  add_done:
460     if (update_cc) {
461         tcg_gen_mov_tl(cpu_cc_src, src1);
462         tcg_gen_mov_tl(cpu_cc_src2, src2);
463         tcg_gen_mov_tl(cpu_cc_dst, dst);
464         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
465         dc->cc_op = CC_OP_ADDX;
466     }
467 }
468 
/*
 * dst = src1 - src2, latching the operands and result into the
 * cpu_cc_* globals for lazy flag computation.
 */
469 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
470 {
471     tcg_gen_mov_tl(cpu_cc_src, src1);
472     tcg_gen_mov_tl(cpu_cc_src2, src2);
473     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
474     tcg_gen_mov_tl(dst, cpu_cc_dst);
475 }
476 
/*
 * dst = src1 - src2 - carry(icc); mirror image of gen_op_addx_int.
 * When update_cc is set, latch operands/result and switch the cc
 * state to CC_OP_SUBX.
 */
477 static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
478                             TCGv src2, int update_cc)
479 {
480     TCGv_i32 carry_32;
481     TCGv carry;
482 
483     switch (dc->cc_op) {
484     case CC_OP_DIV:
485     case CC_OP_LOGIC:
486         /* Carry is known to be zero.  Fall back to plain SUB.  */
487         if (update_cc) {
488             gen_op_sub_cc(dst, src1, src2);
489         } else {
490             tcg_gen_sub_tl(dst, src1, src2);
491         }
492         return;
493 
494     case CC_OP_ADD:
495     case CC_OP_TADD:
496     case CC_OP_TADDTV:
497         carry_32 = gen_add32_carry32();
498         break;
499 
500     case CC_OP_SUB:
501     case CC_OP_TSUB:
502     case CC_OP_TSUBTV:
503         if (TARGET_LONG_BITS == 32) {
504             /* We can re-use the host's hardware carry generation by using
505                a SUB2 opcode.  We discard the low part of the output.
506                Ideally we'd combine this operation with the add that
507                generated the carry in the first place.  */
508             carry = tcg_temp_new();
509             tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
510             goto sub_done;
511         }
512         carry_32 = gen_sub32_carry32();
513         break;
514 
515     default:
516         /* We need external help to produce the carry.  */
517         carry_32 = tcg_temp_new_i32();
518         gen_helper_compute_C_icc(carry_32, cpu_env);
519         break;
520     }
521 
/* Widen the 32-bit carry to the target word size before subtracting. */
522 #if TARGET_LONG_BITS == 64
523     carry = tcg_temp_new();
524     tcg_gen_extu_i32_i64(carry, carry_32);
525 #else
526     carry = carry_32;
527 #endif
528 
529     tcg_gen_sub_tl(dst, src1, src2);
530     tcg_gen_sub_tl(dst, dst, carry);
531 
532  sub_done:
533     if (update_cc) {
534         tcg_gen_mov_tl(cpu_cc_src, src1);
535         tcg_gen_mov_tl(cpu_cc_src2, src2);
536         tcg_gen_mov_tl(cpu_cc_dst, dst);
537         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
538         dc->cc_op = CC_OP_SUBX;
539     }
540 }
541 
/*
 * One SPARC MULScc multiply step: if the low bit of %y is clear the
 * addend (src2) is forced to zero; %y shifts right taking src1's low
 * bit into its top; src1 shifts right taking N^V into its top; then
 * the conditional add is performed and latched into cpu_cc_*.
 */
542 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
543 {
544     TCGv r_temp, zero, t0;
545 
546     r_temp = tcg_temp_new();
547     t0 = tcg_temp_new();
548 
549     /* old op:
550     if (!(env->y & 1))
551         T1 = 0;
552     */
553     zero = tcg_constant_tl(0);
554     tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
555     tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
556     tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
557     tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
558                        zero, cpu_cc_src2);
559 
560     // b2 = T0 & 1;
561     // env->y = (b2 << 31) | (env->y >> 1);
562     tcg_gen_extract_tl(t0, cpu_y, 1, 31);
563     tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);
564 
565     // b1 = N ^ V;
566     gen_mov_reg_N(t0, cpu_psr);
567     gen_mov_reg_V(r_temp, cpu_psr);
568     tcg_gen_xor_tl(t0, t0, r_temp);
569 
570     // T0 = (b1 << 31) | (T0 >> 1);
571     // src1 = T0;
572     tcg_gen_shli_tl(t0, t0, 31);
573     tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
574     tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
575 
576     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
577 
578     tcg_gen_mov_tl(dst, cpu_cc_dst);
579 }
580 
/*
 * 32x32->64 multiply of the truncated operands.  On 32-bit targets the
 * low 32 bits go to DST and the high 32 bits to %y; on 64-bit targets
 * DST receives the full 64-bit product and %y its high 32 bits.
 * SIGN_EXT selects signed vs unsigned extension of the operands.
 */
581 static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
582 {
583 #if TARGET_LONG_BITS == 32
584     if (sign_ext) {
585         tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
586     } else {
587         tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
588     }
589 #else
590     TCGv t0 = tcg_temp_new_i64();
591     TCGv t1 = tcg_temp_new_i64();
592 
593     if (sign_ext) {
594         tcg_gen_ext32s_i64(t0, src1);
595         tcg_gen_ext32s_i64(t1, src2);
596     } else {
597         tcg_gen_ext32u_i64(t0, src1);
598         tcg_gen_ext32u_i64(t1, src2);
599     }
600 
601     tcg_gen_mul_i64(dst, t0, t1);
602     tcg_gen_shri_i64(cpu_y, dst, 32);
603 #endif
604 }
605 
/* UMUL / SMUL wrappers around gen_op_multiply (which also sets %y). */
606 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
607 {
608     /* zero-extend truncated operands before multiplication */
609     gen_op_multiply(dst, src1, src2, 0);
610 }
611 
612 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
613 {
614     /* sign-extend truncated operands before multiplication */
615     gen_op_multiply(dst, src1, src2, 1);
616 }
617 
/*
 * Integer condition evaluators: each gen_op_eval_* leaves 0 or 1 in
 * DST for one branch condition, reading the flag bits out of the
 * 32-bit condition-code word SRC via the gen_mov_reg_* helpers above.
 * The short comment before each function gives the flag formula.
 */
618 // 1
619 static inline void gen_op_eval_ba(TCGv dst)
620 {
621     tcg_gen_movi_tl(dst, 1);
622 }
623 
624 // Z
625 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
626 {
627     gen_mov_reg_Z(dst, src);
628 }
629 
630 // Z | (N ^ V)
631 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
632 {
633     TCGv t0 = tcg_temp_new();
634     gen_mov_reg_N(t0, src);
635     gen_mov_reg_V(dst, src);
636     tcg_gen_xor_tl(dst, dst, t0);
637     gen_mov_reg_Z(t0, src);
638     tcg_gen_or_tl(dst, dst, t0);
639 }
640 
641 // N ^ V
642 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
643 {
644     TCGv t0 = tcg_temp_new();
645     gen_mov_reg_V(t0, src);
646     gen_mov_reg_N(dst, src);
647     tcg_gen_xor_tl(dst, dst, t0);
648 }
649 
650 // C | Z
651 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
652 {
653     TCGv t0 = tcg_temp_new();
654     gen_mov_reg_Z(t0, src);
655     gen_mov_reg_C(dst, src);
656     tcg_gen_or_tl(dst, dst, t0);
657 }
658 
659 // C
660 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
661 {
662     gen_mov_reg_C(dst, src);
663 }
664 
665 // V
666 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
667 {
668     gen_mov_reg_V(dst, src);
669 }
670 
671 // 0
672 static inline void gen_op_eval_bn(TCGv dst)
673 {
674     tcg_gen_movi_tl(dst, 0);
675 }
676 
677 // N
678 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
679 {
680     gen_mov_reg_N(dst, src);
681 }
682 
683 // !Z
684 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
685 {
686     gen_mov_reg_Z(dst, src);
687     tcg_gen_xori_tl(dst, dst, 0x1);
688 }
689 
690 // !(Z | (N ^ V))
691 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
692 {
693     gen_op_eval_ble(dst, src);
694     tcg_gen_xori_tl(dst, dst, 0x1);
695 }
696 
697 // !(N ^ V)
698 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
699 {
700     gen_op_eval_bl(dst, src);
701     tcg_gen_xori_tl(dst, dst, 0x1);
702 }
703 
704 // !(C | Z)
705 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
706 {
707     gen_op_eval_bleu(dst, src);
708     tcg_gen_xori_tl(dst, dst, 0x1);
709 }
710 
711 // !C
712 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
713 {
714     gen_mov_reg_C(dst, src);
715     tcg_gen_xori_tl(dst, dst, 0x1);
716 }
717 
718 // !N
719 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
720 {
721     gen_mov_reg_N(dst, src);
722     tcg_gen_xori_tl(dst, dst, 0x1);
723 }
724 
725 // !V
726 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
727 {
728     gen_mov_reg_V(dst, src);
729     tcg_gen_xori_tl(dst, dst, 0x1);
730 }
731 
732 /*
733   FPSR bit field FCC1 | FCC0:
734    0 =
735    1 <
736    2 >
737    3 unordered
738 */
/* Extract FCC0 (bit 0 of the selected fcc field) from FSR into REG;
   FCC_OFFSET selects which of the fcc fields to read. */
739 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
740                                     unsigned int fcc_offset)
741 {
742     tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
743     tcg_gen_andi_tl(reg, reg, 0x1);
744 }
745 
/* Same for FCC1 (bit 1 of the selected fcc field). */
746 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
747                                     unsigned int fcc_offset)
748 {
749     tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
750     tcg_gen_andi_tl(reg, reg, 0x1);
751 }
752 
/*
 * Floating-point condition evaluators: each gen_op_eval_fb* leaves
 * 0 or 1 in DST according to the fcc value encoded above (0 =, 1 <,
 * 2 >, 3 unordered).  The comment before each function lists the fcc
 * values accepted and the corresponding FCC0/FCC1 boolean formula.
 */
753 // !0: FCC0 | FCC1
754 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
755                                     unsigned int fcc_offset)
756 {
757     TCGv t0 = tcg_temp_new();
758     gen_mov_reg_FCC0(dst, src, fcc_offset);
759     gen_mov_reg_FCC1(t0, src, fcc_offset);
760     tcg_gen_or_tl(dst, dst, t0);
761 }
762 
763 // 1 or 2: FCC0 ^ FCC1
764 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
765                                     unsigned int fcc_offset)
766 {
767     TCGv t0 = tcg_temp_new();
768     gen_mov_reg_FCC0(dst, src, fcc_offset);
769     gen_mov_reg_FCC1(t0, src, fcc_offset);
770     tcg_gen_xor_tl(dst, dst, t0);
771 }
772 
773 // 1 or 3: FCC0
774 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
775                                     unsigned int fcc_offset)
776 {
777     gen_mov_reg_FCC0(dst, src, fcc_offset);
778 }
779 
780 // 1: FCC0 & !FCC1
781 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
782                                     unsigned int fcc_offset)
783 {
784     TCGv t0 = tcg_temp_new();
785     gen_mov_reg_FCC0(dst, src, fcc_offset);
786     gen_mov_reg_FCC1(t0, src, fcc_offset);
787     tcg_gen_andc_tl(dst, dst, t0);
788 }
789 
790 // 2 or 3: FCC1
791 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
792                                     unsigned int fcc_offset)
793 {
794     gen_mov_reg_FCC1(dst, src, fcc_offset);
795 }
796 
797 // 2: !FCC0 & FCC1
798 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
799                                     unsigned int fcc_offset)
800 {
801     TCGv t0 = tcg_temp_new();
802     gen_mov_reg_FCC0(dst, src, fcc_offset);
803     gen_mov_reg_FCC1(t0, src, fcc_offset);
804     tcg_gen_andc_tl(dst, t0, dst);
805 }
806 
807 // 3: FCC0 & FCC1
808 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
809                                     unsigned int fcc_offset)
810 {
811     TCGv t0 = tcg_temp_new();
812     gen_mov_reg_FCC0(dst, src, fcc_offset);
813     gen_mov_reg_FCC1(t0, src, fcc_offset);
814     tcg_gen_and_tl(dst, dst, t0);
815 }
816 
817 // 0: !(FCC0 | FCC1)
818 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
819                                     unsigned int fcc_offset)
820 {
821     TCGv t0 = tcg_temp_new();
822     gen_mov_reg_FCC0(dst, src, fcc_offset);
823     gen_mov_reg_FCC1(t0, src, fcc_offset);
824     tcg_gen_or_tl(dst, dst, t0);
825     tcg_gen_xori_tl(dst, dst, 0x1);
826 }
827 
828 // 0 or 3: !(FCC0 ^ FCC1)
829 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
830                                     unsigned int fcc_offset)
831 {
832     TCGv t0 = tcg_temp_new();
833     gen_mov_reg_FCC0(dst, src, fcc_offset);
834     gen_mov_reg_FCC1(t0, src, fcc_offset);
835     tcg_gen_xor_tl(dst, dst, t0);
836     tcg_gen_xori_tl(dst, dst, 0x1);
837 }
838 
839 // 0 or 2: !FCC0
840 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
841                                     unsigned int fcc_offset)
842 {
843     gen_mov_reg_FCC0(dst, src, fcc_offset);
844     tcg_gen_xori_tl(dst, dst, 0x1);
845 }
846 
847 // !1: !(FCC0 & !FCC1)
848 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
849                                     unsigned int fcc_offset)
850 {
851     TCGv t0 = tcg_temp_new();
852     gen_mov_reg_FCC0(dst, src, fcc_offset);
853     gen_mov_reg_FCC1(t0, src, fcc_offset);
854     tcg_gen_andc_tl(dst, dst, t0);
855     tcg_gen_xori_tl(dst, dst, 0x1);
856 }
857 
858 // 0 or 1: !FCC1
859 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
860                                     unsigned int fcc_offset)
861 {
862     gen_mov_reg_FCC1(dst, src, fcc_offset);
863     tcg_gen_xori_tl(dst, dst, 0x1);
864 }
865 
866 // !2: !(!FCC0 & FCC1)
867 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
868                                     unsigned int fcc_offset)
869 {
870     TCGv t0 = tcg_temp_new();
871     gen_mov_reg_FCC0(dst, src, fcc_offset);
872     gen_mov_reg_FCC1(t0, src, fcc_offset);
873     tcg_gen_andc_tl(dst, t0, dst);
874     tcg_gen_xori_tl(dst, dst, 0x1);
875 }
876 
877 // !3: !(FCC0 & FCC1)
878 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
879                                     unsigned int fcc_offset)
880 {
881     TCGv t0 = tcg_temp_new();
882     gen_mov_reg_FCC0(dst, src, fcc_offset);
883     gen_mov_reg_FCC1(t0, src, fcc_offset);
884     tcg_gen_and_tl(dst, dst, t0);
885     tcg_gen_xori_tl(dst, dst, 0x1);
886 }
887 
/*
 * Two-way static branch: go to (pc1, pc1+4) when R_COND is non-zero,
 * otherwise (pc2, pc2+4).  Both exits end the TB via gen_goto_tb.
 */
888 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
889                                target_ulong pc2, TCGv r_cond)
890 {
891     TCGLabel *l1 = gen_new_label();
892 
893     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
894 
895     gen_goto_tb(dc, 0, pc1, pc1 + 4);
896 
897     gen_set_label(l1);
898     gen_goto_tb(dc, 1, pc2, pc2 + 4);
899 }
900 
/*
 * Annulled conditional branch: when cpu_cond is non-zero go to
 * (npc, pc1); otherwise the delay slot is annulled and execution
 * resumes at (npc+4, npc+8).  Ends the TB.
 */
901 static void gen_branch_a(DisasContext *dc, target_ulong pc1)
902 {
903     TCGLabel *l1 = gen_new_label();
904     target_ulong npc = dc->npc;
905 
906     tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);
907 
908     gen_goto_tb(dc, 0, npc, pc1);
909 
910     gen_set_label(l1);
911     gen_goto_tb(dc, 1, npc + 4, npc + 8);
912 
913     dc->base.is_jmp = DISAS_NORETURN;
914 }
915 
/*
 * Non-annulled conditional branch: the delay slot always executes.
 * With a static npc, defer the choice by recording both candidate
 * targets and marking npc as JUMP_PC; with a dynamic npc, select the
 * next npc at runtime with a movcond on cpu_cond.
 */
916 static void gen_branch_n(DisasContext *dc, target_ulong pc1)
917 {
918     target_ulong npc = dc->npc;
919 
920     if (likely(npc != DYNAMIC_PC)) {
921         dc->pc = npc;
922         dc->jump_pc[0] = pc1;
923         dc->jump_pc[1] = npc + 4;
924         dc->npc = JUMP_PC;
925     } else {
926         TCGv t, z;
927 
928         tcg_gen_mov_tl(cpu_pc, cpu_npc);
929 
930         tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
931         t = tcg_constant_tl(pc1);
932         z = tcg_constant_tl(0);
933         tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, z, t, cpu_npc);
934 
935         dc->pc = DYNAMIC_PC;
936     }
937 }
938 
/*
 * Resolve a pending JUMP_PC: npc = cpu_cond ? jump_pc[0] : jump_pc[1].
 */
939 static inline void gen_generic_branch(DisasContext *dc)
940 {
941     TCGv npc0 = tcg_constant_tl(dc->jump_pc[0]);
942     TCGv npc1 = tcg_constant_tl(dc->jump_pc[1]);
943     TCGv zero = tcg_constant_tl(0);
944 
945     tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
946 }
947 
948 /* call this function before using the condition register as it may
949    have been set for a jump */
/* Materializes a pending JUMP_PC into cpu_npc, downgrading the state
   to DYNAMIC_PC so cpu_cond is free to be reused. */
950 static inline void flush_cond(DisasContext *dc)
951 {
952     if (dc->npc == JUMP_PC) {
953         gen_generic_branch(dc);
954         dc->npc = DYNAMIC_PC;
955     }
956 }
957 
/*
 * Make cpu_npc hold the architectural next-PC: resolve a pending
 * JUMP_PC, or store a static npc; a DYNAMIC_PC value is already there.
 */
958 static inline void save_npc(DisasContext *dc)
959 {
960     if (dc->npc == JUMP_PC) {
961         gen_generic_branch(dc);
962         dc->npc = DYNAMIC_PC;
963     } else if (dc->npc != DYNAMIC_PC) {
964         tcg_gen_movi_tl(cpu_npc, dc->npc);
965     }
966 }
967 
/*
 * Force the lazily-tracked condition codes into the PSR so later code
 * can read the real flag bits; idempotent once cc_op is CC_OP_FLAGS.
 */
968 static inline void update_psr(DisasContext *dc)
969 {
970     if (dc->cc_op != CC_OP_FLAGS) {
971         dc->cc_op = CC_OP_FLAGS;
972         gen_helper_compute_psr(cpu_env);
973     }
974 }
975 
/* Synchronize cpu_pc/cpu_npc with the translator's view, e.g. before
   a helper call that may raise an exception. */
976 static inline void save_state(DisasContext *dc)
977 {
978     tcg_gen_movi_tl(cpu_pc, dc->pc);
979     save_npc(dc);
980 }
981 
/* Raise exception WHICH at the current pc/npc and end the TB. */
982 static void gen_exception(DisasContext *dc, int which)
983 {
984     save_state(dc);
985     gen_helper_raise_exception(cpu_env, tcg_constant_i32(which));
986     dc->base.is_jmp = DISAS_NORETURN;
987 }
988 
/* Emit a runtime alignment check: the helper traps if ADDR & MASK. */
989 static void gen_check_align(TCGv addr, int mask)
990 {
991     gen_helper_check_align(cpu_env, addr, tcg_constant_i32(mask));
992 }
993 
/*
 * Advance pc to npc (used for delayed control transfers), resolving a
 * pending JUMP_PC or copying a dynamic npc as needed; for a static npc
 * only the translator-side value changes.
 */
994 static inline void gen_mov_pc_npc(DisasContext *dc)
995 {
996     if (dc->npc == JUMP_PC) {
997         gen_generic_branch(dc);
998         tcg_gen_mov_tl(cpu_pc, cpu_npc);
999         dc->pc = DYNAMIC_PC;
1000     } else if (dc->npc == DYNAMIC_PC) {
1001         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1002         dc->pc = DYNAMIC_PC;
1003     } else {
1004         dc->pc = dc->npc;
1005     }
1006 }
1007 
/* Advance to the next sequential instruction: pc = npc; npc += 4. */
1008 static inline void gen_op_next_insn(void)
1009 {
1010     tcg_gen_mov_tl(cpu_pc, cpu_npc);
1011     tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1012 }
1013 
1014 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1015                         DisasContext *dc)
1016 {
1017     static int subcc_cond[16] = {
1018         TCG_COND_NEVER,
1019         TCG_COND_EQ,
1020         TCG_COND_LE,
1021         TCG_COND_LT,
1022         TCG_COND_LEU,
1023         TCG_COND_LTU,
1024         -1, /* neg */
1025         -1, /* overflow */
1026         TCG_COND_ALWAYS,
1027         TCG_COND_NE,
1028         TCG_COND_GT,
1029         TCG_COND_GE,
1030         TCG_COND_GTU,
1031         TCG_COND_GEU,
1032         -1, /* pos */
1033         -1, /* no overflow */
1034     };
1035 
1036     static int logic_cond[16] = {
1037         TCG_COND_NEVER,
1038         TCG_COND_EQ,     /* eq:  Z */
1039         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1040         TCG_COND_LT,     /* lt:  N ^ V -> N */
1041         TCG_COND_EQ,     /* leu: C | Z -> Z */
1042         TCG_COND_NEVER,  /* ltu: C -> 0 */
1043         TCG_COND_LT,     /* neg: N */
1044         TCG_COND_NEVER,  /* vs:  V -> 0 */
1045         TCG_COND_ALWAYS,
1046         TCG_COND_NE,     /* ne:  !Z */
1047         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1048         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1049         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1050         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1051         TCG_COND_GE,     /* pos: !N */
1052         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1053     };
1054 
1055     TCGv_i32 r_src;
1056     TCGv r_dst;
1057 
1058 #ifdef TARGET_SPARC64
1059     if (xcc) {
1060         r_src = cpu_xcc;
1061     } else {
1062         r_src = cpu_psr;
1063     }
1064 #else
1065     r_src = cpu_psr;
1066 #endif
1067 
1068     switch (dc->cc_op) {
1069     case CC_OP_LOGIC:
1070         cmp->cond = logic_cond[cond];
1071     do_compare_dst_0:
1072         cmp->is_bool = false;
1073         cmp->c2 = tcg_constant_tl(0);
1074 #ifdef TARGET_SPARC64
1075         if (!xcc) {
1076             cmp->c1 = tcg_temp_new();
1077             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1078             break;
1079         }
1080 #endif
1081         cmp->c1 = cpu_cc_dst;
1082         break;
1083 
1084     case CC_OP_SUB:
1085         switch (cond) {
1086         case 6:  /* neg */
1087         case 14: /* pos */
1088             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1089             goto do_compare_dst_0;
1090 
1091         case 7: /* overflow */
1092         case 15: /* !overflow */
1093             goto do_dynamic;
1094 
1095         default:
1096             cmp->cond = subcc_cond[cond];
1097             cmp->is_bool = false;
1098 #ifdef TARGET_SPARC64
1099             if (!xcc) {
1100                 /* Note that sign-extension works for unsigned compares as
1101                    long as both operands are sign-extended.  */
1102                 cmp->c1 = tcg_temp_new();
1103                 cmp->c2 = tcg_temp_new();
1104                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1105                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1106                 break;
1107             }
1108 #endif
1109             cmp->c1 = cpu_cc_src;
1110             cmp->c2 = cpu_cc_src2;
1111             break;
1112         }
1113         break;
1114 
1115     default:
1116     do_dynamic:
1117         gen_helper_compute_psr(cpu_env);
1118         dc->cc_op = CC_OP_FLAGS;
1119         /* FALLTHRU */
1120 
1121     case CC_OP_FLAGS:
1122         /* We're going to generate a boolean result.  */
1123         cmp->cond = TCG_COND_NE;
1124         cmp->is_bool = true;
1125         cmp->c1 = r_dst = tcg_temp_new();
1126         cmp->c2 = tcg_constant_tl(0);
1127 
1128         switch (cond) {
1129         case 0x0:
1130             gen_op_eval_bn(r_dst);
1131             break;
1132         case 0x1:
1133             gen_op_eval_be(r_dst, r_src);
1134             break;
1135         case 0x2:
1136             gen_op_eval_ble(r_dst, r_src);
1137             break;
1138         case 0x3:
1139             gen_op_eval_bl(r_dst, r_src);
1140             break;
1141         case 0x4:
1142             gen_op_eval_bleu(r_dst, r_src);
1143             break;
1144         case 0x5:
1145             gen_op_eval_bcs(r_dst, r_src);
1146             break;
1147         case 0x6:
1148             gen_op_eval_bneg(r_dst, r_src);
1149             break;
1150         case 0x7:
1151             gen_op_eval_bvs(r_dst, r_src);
1152             break;
1153         case 0x8:
1154             gen_op_eval_ba(r_dst);
1155             break;
1156         case 0x9:
1157             gen_op_eval_bne(r_dst, r_src);
1158             break;
1159         case 0xa:
1160             gen_op_eval_bg(r_dst, r_src);
1161             break;
1162         case 0xb:
1163             gen_op_eval_bge(r_dst, r_src);
1164             break;
1165         case 0xc:
1166             gen_op_eval_bgu(r_dst, r_src);
1167             break;
1168         case 0xd:
1169             gen_op_eval_bcc(r_dst, r_src);
1170             break;
1171         case 0xe:
1172             gen_op_eval_bpos(r_dst, r_src);
1173             break;
1174         case 0xf:
1175             gen_op_eval_bvc(r_dst, r_src);
1176             break;
1177         }
1178         break;
1179     }
1180 }
1181 
1182 static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
1183 {
1184     unsigned int offset;
1185     TCGv r_dst;
1186 
1187     /* For now we still generate a straight boolean result.  */
1188     cmp->cond = TCG_COND_NE;
1189     cmp->is_bool = true;
1190     cmp->c1 = r_dst = tcg_temp_new();
1191     cmp->c2 = tcg_constant_tl(0);
1192 
1193     switch (cc) {
1194     default:
1195     case 0x0:
1196         offset = 0;
1197         break;
1198     case 0x1:
1199         offset = 32 - 10;
1200         break;
1201     case 0x2:
1202         offset = 34 - 10;
1203         break;
1204     case 0x3:
1205         offset = 36 - 10;
1206         break;
1207     }
1208 
1209     switch (cond) {
1210     case 0x0:
1211         gen_op_eval_bn(r_dst);
1212         break;
1213     case 0x1:
1214         gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1215         break;
1216     case 0x2:
1217         gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1218         break;
1219     case 0x3:
1220         gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1221         break;
1222     case 0x4:
1223         gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1224         break;
1225     case 0x5:
1226         gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1227         break;
1228     case 0x6:
1229         gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1230         break;
1231     case 0x7:
1232         gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1233         break;
1234     case 0x8:
1235         gen_op_eval_ba(r_dst);
1236         break;
1237     case 0x9:
1238         gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1239         break;
1240     case 0xa:
1241         gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1242         break;
1243     case 0xb:
1244         gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1245         break;
1246     case 0xc:
1247         gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1248         break;
1249     case 0xd:
1250         gen_op_eval_fble(r_dst, cpu_fsr, offset);
1251         break;
1252     case 0xe:
1253         gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1254         break;
1255     case 0xf:
1256         gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1257         break;
1258     }
1259 }
1260 
1261 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1262                      DisasContext *dc)
1263 {
1264     DisasCompare cmp;
1265     gen_compare(&cmp, cc, cond, dc);
1266 
1267     /* The interface is to return a boolean in r_dst.  */
1268     if (cmp.is_bool) {
1269         tcg_gen_mov_tl(r_dst, cmp.c1);
1270     } else {
1271         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1272     }
1273 }
1274 
1275 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1276 {
1277     DisasCompare cmp;
1278     gen_fcompare(&cmp, cc, cond);
1279 
1280     /* The interface is to return a boolean in r_dst.  */
1281     if (cmp.is_bool) {
1282         tcg_gen_mov_tl(r_dst, cmp.c1);
1283     } else {
1284         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1285     }
1286 }
1287 
1288 #ifdef TARGET_SPARC64
/* Map the 3-bit register-condition field to the *inverse* TCG
   condition (callers apply tcg_invert_cond); -1 marks the two
   invalid encodings.  */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1300 
/* Fill in *cmp as a compare of r_src against zero for the
   register-contents conditions (used by do_branch_reg).  The table
   stores the inverted condition, hence tcg_invert_cond here.  */
static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->c1 = r_src;
    cmp->c2 = tcg_constant_tl(0);
}
1308 
1309 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1310 {
1311     DisasCompare cmp;
1312     gen_compare_reg(&cmp, cond, r_src);
1313 
1314     /* The interface is to return a boolean in r_dst.  */
1315     tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1316 }
1317 #endif
1318 
/* Translate an integer conditional branch: decode the condition and
   annul bit from `insn`, then either fold the branch statically
   (never/always) or emit a dynamic conditional branch.  */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* With the address mask active, branch targets wrap to 32 bits.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: the delay-slot instruction is skipped.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled: branch immediately, no delay slot.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* Truly conditional: evaluate into cpu_cond and branch.  */
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1358 
/* Translate a floating-point conditional branch; same structure as
   do_branch but the condition is evaluated from the FSR fcc field.  */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* With the address mask active, branch targets wrap to 32 bits.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: the delay-slot instruction is skipped.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled: branch immediately, no delay slot.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* Truly conditional: evaluate into cpu_cond and branch.  */
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1398 
1399 #ifdef TARGET_SPARC64
1400 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1401                           TCGv r_reg)
1402 {
1403     unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1404     target_ulong target = dc->pc + offset;
1405 
1406     if (unlikely(AM_CHECK(dc))) {
1407         target &= 0xffffffffULL;
1408     }
1409     flush_cond(dc);
1410     gen_cond_reg(cpu_cond, cond, r_reg);
1411     if (a) {
1412         gen_branch_a(dc, target);
1413     } else {
1414         gen_branch_n(dc, target);
1415     }
1416 }
1417 
1418 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1419 {
1420     switch (fccno) {
1421     case 0:
1422         gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
1423         break;
1424     case 1:
1425         gen_helper_fcmps_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
1426         break;
1427     case 2:
1428         gen_helper_fcmps_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
1429         break;
1430     case 3:
1431         gen_helper_fcmps_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
1432         break;
1433     }
1434 }
1435 
1436 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1437 {
1438     switch (fccno) {
1439     case 0:
1440         gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
1441         break;
1442     case 1:
1443         gen_helper_fcmpd_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
1444         break;
1445     case 2:
1446         gen_helper_fcmpd_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
1447         break;
1448     case 3:
1449         gen_helper_fcmpd_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
1450         break;
1451     }
1452 }
1453 
1454 static inline void gen_op_fcmpq(int fccno)
1455 {
1456     switch (fccno) {
1457     case 0:
1458         gen_helper_fcmpq(cpu_fsr, cpu_env);
1459         break;
1460     case 1:
1461         gen_helper_fcmpq_fcc1(cpu_fsr, cpu_env);
1462         break;
1463     case 2:
1464         gen_helper_fcmpq_fcc2(cpu_fsr, cpu_env);
1465         break;
1466     case 3:
1467         gen_helper_fcmpq_fcc3(cpu_fsr, cpu_env);
1468         break;
1469     }
1470 }
1471 
1472 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1473 {
1474     switch (fccno) {
1475     case 0:
1476         gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
1477         break;
1478     case 1:
1479         gen_helper_fcmpes_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
1480         break;
1481     case 2:
1482         gen_helper_fcmpes_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
1483         break;
1484     case 3:
1485         gen_helper_fcmpes_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
1486         break;
1487     }
1488 }
1489 
1490 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1491 {
1492     switch (fccno) {
1493     case 0:
1494         gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
1495         break;
1496     case 1:
1497         gen_helper_fcmped_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
1498         break;
1499     case 2:
1500         gen_helper_fcmped_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
1501         break;
1502     case 3:
1503         gen_helper_fcmped_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
1504         break;
1505     }
1506 }
1507 
1508 static inline void gen_op_fcmpeq(int fccno)
1509 {
1510     switch (fccno) {
1511     case 0:
1512         gen_helper_fcmpeq(cpu_fsr, cpu_env);
1513         break;
1514     case 1:
1515         gen_helper_fcmpeq_fcc1(cpu_fsr, cpu_env);
1516         break;
1517     case 2:
1518         gen_helper_fcmpeq_fcc2(cpu_fsr, cpu_env);
1519         break;
1520     case 3:
1521         gen_helper_fcmpeq_fcc3(cpu_fsr, cpu_env);
1522         break;
1523     }
1524 }
1525 
1526 #else
1527 
/* Pre-v9 sparc has a single fcc field, so fccno is ignored in all of
   the wrappers below; each helper updates cpu_fsr.  */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
}

/* Double-precision compare; fccno ignored.  */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
}

/* Quad-precision compare; operands are implicit (QT registers).  */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, cpu_env);
}

/* Single-precision compare-and-except; fccno ignored.  */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
}

/* Double-precision compare-and-except; fccno ignored.  */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
}

/* Quad-precision compare-and-except; operands are implicit.  */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, cpu_env);
}
1557 #endif
1558 
/* Raise an FP exception trap with the given FSR trap-type bits.  */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    /* Clear the old ftt field before or-ing in the new cause.  */
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1565 
/* If the FPU is disabled, raise an fp-disabled trap and return 1 so
   the caller abandons the instruction; otherwise return 0.  In
   user-only builds the FPU is always considered enabled.  */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1576 
/* Clear the FSR ftt and current-exception (cexc) fields.  */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1581 
1582 static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1583                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1584 {
1585     TCGv_i32 dst, src;
1586 
1587     src = gen_load_fpr_F(dc, rs);
1588     dst = gen_dest_fpr_F(dc);
1589 
1590     gen(dst, cpu_env, src);
1591     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1592 
1593     gen_store_fpr_F(dc, rd, dst);
1594 }
1595 
1596 static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1597                                  void (*gen)(TCGv_i32, TCGv_i32))
1598 {
1599     TCGv_i32 dst, src;
1600 
1601     src = gen_load_fpr_F(dc, rs);
1602     dst = gen_dest_fpr_F(dc);
1603 
1604     gen(dst, src);
1605 
1606     gen_store_fpr_F(dc, rd, dst);
1607 }
1608 
1609 static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1610                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1611 {
1612     TCGv_i32 dst, src1, src2;
1613 
1614     src1 = gen_load_fpr_F(dc, rs1);
1615     src2 = gen_load_fpr_F(dc, rs2);
1616     dst = gen_dest_fpr_F(dc);
1617 
1618     gen(dst, cpu_env, src1, src2);
1619     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1620 
1621     gen_store_fpr_F(dc, rd, dst);
1622 }
1623 
1624 #ifdef TARGET_SPARC64
1625 static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1626                                   void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1627 {
1628     TCGv_i32 dst, src1, src2;
1629 
1630     src1 = gen_load_fpr_F(dc, rs1);
1631     src2 = gen_load_fpr_F(dc, rs2);
1632     dst = gen_dest_fpr_F(dc);
1633 
1634     gen(dst, src1, src2);
1635 
1636     gen_store_fpr_F(dc, rd, dst);
1637 }
1638 #endif
1639 
1640 static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1641                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1642 {
1643     TCGv_i64 dst, src;
1644 
1645     src = gen_load_fpr_D(dc, rs);
1646     dst = gen_dest_fpr_D(dc, rd);
1647 
1648     gen(dst, cpu_env, src);
1649     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1650 
1651     gen_store_fpr_D(dc, rd, dst);
1652 }
1653 
1654 #ifdef TARGET_SPARC64
1655 static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1656                                  void (*gen)(TCGv_i64, TCGv_i64))
1657 {
1658     TCGv_i64 dst, src;
1659 
1660     src = gen_load_fpr_D(dc, rs);
1661     dst = gen_dest_fpr_D(dc, rd);
1662 
1663     gen(dst, src);
1664 
1665     gen_store_fpr_D(dc, rd, dst);
1666 }
1667 #endif
1668 
1669 static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1670                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1671 {
1672     TCGv_i64 dst, src1, src2;
1673 
1674     src1 = gen_load_fpr_D(dc, rs1);
1675     src2 = gen_load_fpr_D(dc, rs2);
1676     dst = gen_dest_fpr_D(dc, rd);
1677 
1678     gen(dst, cpu_env, src1, src2);
1679     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1680 
1681     gen_store_fpr_D(dc, rd, dst);
1682 }
1683 
1684 #ifdef TARGET_SPARC64
1685 static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1686                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1687 {
1688     TCGv_i64 dst, src1, src2;
1689 
1690     src1 = gen_load_fpr_D(dc, rs1);
1691     src2 = gen_load_fpr_D(dc, rs2);
1692     dst = gen_dest_fpr_D(dc, rd);
1693 
1694     gen(dst, src1, src2);
1695 
1696     gen_store_fpr_D(dc, rd, dst);
1697 }
1698 
1699 static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1700                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1701 {
1702     TCGv_i64 dst, src1, src2;
1703 
1704     src1 = gen_load_fpr_D(dc, rs1);
1705     src2 = gen_load_fpr_D(dc, rs2);
1706     dst = gen_dest_fpr_D(dc, rd);
1707 
1708     gen(dst, cpu_gsr, src1, src2);
1709 
1710     gen_store_fpr_D(dc, rd, dst);
1711 }
1712 
1713 static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1714                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1715 {
1716     TCGv_i64 dst, src0, src1, src2;
1717 
1718     src1 = gen_load_fpr_D(dc, rs1);
1719     src2 = gen_load_fpr_D(dc, rs2);
1720     src0 = gen_load_fpr_D(dc, rd);
1721     dst = gen_dest_fpr_D(dc, rd);
1722 
1723     gen(dst, src0, src1, src2);
1724 
1725     gen_store_fpr_D(dc, rd, dst);
1726 }
1727 #endif
1728 
/* Quad-precision unary op: the source is loaded into QT1, the helper
   leaves its result in QT0, and IEEE exceptions are checked after.  */
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1740 
1741 #ifdef TARGET_SPARC64
/* Quad-precision unary op via QT1 -> QT0; no IEEE exception check.  */
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1752 #endif
1753 
/* Quad-precision binary op: sources in QT0/QT1, result left in QT0
   by the helper; IEEE exceptions are checked afterwards.  */
static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1766 
1767 static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1768                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1769 {
1770     TCGv_i64 dst;
1771     TCGv_i32 src1, src2;
1772 
1773     src1 = gen_load_fpr_F(dc, rs1);
1774     src2 = gen_load_fpr_F(dc, rs2);
1775     dst = gen_dest_fpr_D(dc, rd);
1776 
1777     gen(dst, cpu_env, src1, src2);
1778     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1779 
1780     gen_store_fpr_D(dc, rd, dst);
1781 }
1782 
1783 static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1784                                void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1785 {
1786     TCGv_i64 src1, src2;
1787 
1788     src1 = gen_load_fpr_D(dc, rs1);
1789     src2 = gen_load_fpr_D(dc, rs2);
1790 
1791     gen(cpu_env, src1, src2);
1792     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1793 
1794     gen_op_store_QT0_fpr(QFPREG(rd));
1795     gen_update_fprs_dirty(dc, QFPREG(rd));
1796 }
1797 
1798 #ifdef TARGET_SPARC64
1799 static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1800                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1801 {
1802     TCGv_i64 dst;
1803     TCGv_i32 src;
1804 
1805     src = gen_load_fpr_F(dc, rs);
1806     dst = gen_dest_fpr_D(dc, rd);
1807 
1808     gen(dst, cpu_env, src);
1809     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1810 
1811     gen_store_fpr_D(dc, rd, dst);
1812 }
1813 #endif
1814 
1815 static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1816                                  void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1817 {
1818     TCGv_i64 dst;
1819     TCGv_i32 src;
1820 
1821     src = gen_load_fpr_F(dc, rs);
1822     dst = gen_dest_fpr_D(dc, rd);
1823 
1824     gen(dst, cpu_env, src);
1825 
1826     gen_store_fpr_D(dc, rd, dst);
1827 }
1828 
1829 static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1830                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1831 {
1832     TCGv_i32 dst;
1833     TCGv_i64 src;
1834 
1835     src = gen_load_fpr_D(dc, rs);
1836     dst = gen_dest_fpr_F(dc);
1837 
1838     gen(dst, cpu_env, src);
1839     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1840 
1841     gen_store_fpr_F(dc, rd, dst);
1842 }
1843 
1844 static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1845                               void (*gen)(TCGv_i32, TCGv_ptr))
1846 {
1847     TCGv_i32 dst;
1848 
1849     gen_op_load_fpr_QT1(QFPREG(rs));
1850     dst = gen_dest_fpr_F(dc);
1851 
1852     gen(dst, cpu_env);
1853     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1854 
1855     gen_store_fpr_F(dc, rd, dst);
1856 }
1857 
1858 static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1859                               void (*gen)(TCGv_i64, TCGv_ptr))
1860 {
1861     TCGv_i64 dst;
1862 
1863     gen_op_load_fpr_QT1(QFPREG(rs));
1864     dst = gen_dest_fpr_D(dc, rd);
1865 
1866     gen(dst, cpu_env);
1867     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1868 
1869     gen_store_fpr_D(dc, rd, dst);
1870 }
1871 
1872 static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1873                                  void (*gen)(TCGv_ptr, TCGv_i32))
1874 {
1875     TCGv_i32 src;
1876 
1877     src = gen_load_fpr_F(dc, rs);
1878 
1879     gen(cpu_env, src);
1880 
1881     gen_op_store_QT0_fpr(QFPREG(rd));
1882     gen_update_fprs_dirty(dc, QFPREG(rd));
1883 }
1884 
1885 static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1886                                  void (*gen)(TCGv_ptr, TCGv_i64))
1887 {
1888     TCGv_i64 src;
1889 
1890     src = gen_load_fpr_D(dc, rs);
1891 
1892     gen(cpu_env, src);
1893 
1894     gen_op_store_QT0_fpr(QFPREG(rd));
1895     gen_update_fprs_dirty(dc, QFPREG(rd));
1896 }
1897 
/* Atomic exchange: dst = *addr; *addr = src (SWAP instruction).  */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, MemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop | MO_ALIGN)
;
}
1904 
/* LDSTUB: atomically dst = *addr (byte) and *addr = 0xff.  */
static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
{
    TCGv m1 = tcg_constant_tl(0xff);
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
}
1911 
1912 /* asi moves */
1913 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* Classification of an ASI access, chosen by get_asi() to select a
   code generation strategy.  */
typedef enum {
    GET_ASI_HELPER,  /* fall back to the generic ld/st_asi helpers */
    GET_ASI_EXCP,    /* an exception was raised; generate no access */
    GET_ASI_DIRECT,  /* plain qemu_ld/st with the chosen mmu_idx */
    GET_ASI_DTWINX,  /* twin/quad-register load-store ASIs */
    GET_ASI_BLOCK,   /* block-transfer ASIs */
    GET_ASI_SHORT,   /* 8/16-bit FP short load/store ASIs */
    GET_ASI_BCOPY,   /* sparc32 ASI_M_BCOPY block copy */
    GET_ASI_BFILL,   /* sparc32 ASI_M_BFILL block fill */
} ASIType;
1924 
/* Decoded ASI operand for a memory access instruction.  */
typedef struct {
    ASIType type;  /* code generation strategy */
    int asi;       /* resolved ASI number */
    int mem_idx;   /* mmu index to use for the access */
    MemOp memop;   /* access size/endianness (possibly adjusted) */
} DisasASI;
1931 
/*
 * Decode the ASI of a memory instruction into a DisasASI descriptor:
 * access category, resolved ASI number, mmu index and (possibly
 * adjusted) memop.  May generate an exception for illegal/privileged
 * ASIs, in which case the returned type is GET_ASI_EXCP.
 */
static DisasASI get_asi(DisasContext *dc, int insn, MemOp memop)
{
    int asi = GET_FIELD(insn, 19, 26);
    ASIType type = GET_ASI_HELPER;
    int mem_idx = dc->mem_idx;

#ifndef TARGET_SPARC64
    /* Before v9, all asis are immediate and privileged.  */
    if (IS_IMM) {
        gen_exception(dc, TT_ILL_INSN);
        type = GET_ASI_EXCP;
    } else if (supervisor(dc)
               /* Note that LEON accepts ASI_USERDATA in user mode, for
                  use with CASA.  Also note that previous versions of
                  QEMU allowed (and old versions of gcc emitted) ASI_P
                  for LEON, which is incorrect.  */
               || (asi == ASI_USERDATA
                   && (dc->def->features & CPU_FEATURE_CASA))) {
        switch (asi) {
        case ASI_USERDATA:   /* User data access */
            mem_idx = MMU_USER_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_KERNELDATA: /* Supervisor data access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BYPASS:    /* MMU passthrough */
        case ASI_LEON_BYPASS: /* LEON MMU passthrough */
            mem_idx = MMU_PHYS_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BCOPY: /* Block copy, sta access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BCOPY;
            break;
        case ASI_M_BFILL: /* Block fill, stda access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BFILL;
            break;
        }

        /* MMU_PHYS_IDX is used when the MMU is disabled to passthrough the
         * permissions check in get_physical_address(..).
         */
        mem_idx = (dc->mem_idx == MMU_PHYS_IDX) ? MMU_PHYS_IDX : mem_idx;
    } else {
        gen_exception(dc, TT_PRIV_INSN);
        type = GET_ASI_EXCP;
    }
#else
    if (IS_IMM) {
        asi = dc->asi;
    }
    /* With v9, all asis below 0x80 are privileged.  */
    /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
       down that bit into DisasContext.  For the moment that's ok,
       since the direct implementations below doesn't have any ASIs
       in the restricted [0x30, 0x7f] range, and the check will be
       done properly in the helper.  */
    if (!supervisor(dc) && asi < 0x80) {
        gen_exception(dc, TT_PRIV_ACT);
        type = GET_ASI_EXCP;
    } else {
        /* First pass: choose the mmu index from the address space.  */
        switch (asi) {
        case ASI_REAL:      /* Bypass */
        case ASI_REAL_IO:   /* Bypass, non-cacheable */
        case ASI_REAL_L:    /* Bypass LE */
        case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
        case ASI_TWINX_REAL:   /* Real address, twinx */
        case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
            mem_idx = MMU_PHYS_IDX;
            break;
        case ASI_N:  /* Nucleus */
        case ASI_NL: /* Nucleus LE */
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            if (hypervisor(dc)) {
                mem_idx = MMU_PHYS_IDX;
            } else {
                mem_idx = MMU_NUCLEUS_IDX;
            }
            break;
        case ASI_AIUP:  /* As if user primary */
        case ASI_AIUPL: /* As if user primary LE */
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
            mem_idx = MMU_USER_IDX;
            break;
        case ASI_AIUS:  /* As if user secondary */
        case ASI_AIUSL: /* As if user secondary LE */
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
            mem_idx = MMU_USER_SECONDARY_IDX;
            break;
        case ASI_S:  /* Secondary */
        case ASI_SL: /* Secondary LE */
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL16_S:
        case ASI_FL16_SL:
            if (mem_idx == MMU_USER_IDX) {
                mem_idx = MMU_USER_SECONDARY_IDX;
            } else if (mem_idx == MMU_KERNEL_IDX) {
                mem_idx = MMU_KERNEL_SECONDARY_IDX;
            }
            break;
        case ASI_P:  /* Primary */
        case ASI_PL: /* Primary LE */
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_P:
        case ASI_BLK_PL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            break;
        }
        /* Second pass: choose the access category.  */
        switch (asi) {
        case ASI_REAL:
        case ASI_REAL_IO:
        case ASI_REAL_L:
        case ASI_REAL_IO_L:
        case ASI_N:
        case ASI_NL:
        case ASI_AIUP:
        case ASI_AIUPL:
        case ASI_AIUS:
        case ASI_AIUSL:
        case ASI_S:
        case ASI_SL:
        case ASI_P:
        case ASI_PL:
            type = GET_ASI_DIRECT;
            break;
        case ASI_TWINX_REAL:
        case ASI_TWINX_REAL_L:
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            type = GET_ASI_DTWINX;
            break;
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_BLK_P:
        case ASI_BLK_PL:
            type = GET_ASI_BLOCK;
            break;
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
            memop = MO_UB;
            type = GET_ASI_SHORT;
            break;
        case ASI_FL16_S:
        case ASI_FL16_SL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            memop = MO_TEUW;
            type = GET_ASI_SHORT;
            break;
        }
        /* The little-endian asis all have bit 3 set.  */
        if (asi & 8) {
            memop ^= MO_BSWAP;
        }
    }
#endif

    return (DisasASI){ type, asi, mem_idx, memop };
}
2144 
/*
 * Emit code for an integer load-alternate (LDA and friends).
 * DST receives the loaded value, ADDR is the guest virtual address,
 * INSN is the raw instruction word (the ASI is decoded from it by
 * get_asi), and MEMOP gives the size/sign/endianness of the access.
 */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing more.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        /* The ASI maps onto an ordinary MMU index: inline qemu load.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        /* Anything else goes through the out-of-line ld_asi helper.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_mop);
#else
            {
                /* The helper always returns 64 bits; truncate the
                   result down to the 32-bit target register width.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
            }
#endif
        }
        break;
    }
}
2179 
/*
 * Emit code for an integer store-alternate (STA and friends).
 * SRC is the value to store, ADDR the guest virtual address, INSN the
 * raw instruction word (ASI decoded via get_asi), and MEMOP the
 * size/endianness of the access.
 */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing more.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
#endif
        /* fall through */
    case GET_ASI_DIRECT:
        /* Ordinary MMU index: inline qemu store.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_constant_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            /* Force 4-byte alignment on both source and destination.  */
            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }
        }
        break;
#endif
    default:
        /* Everything else goes through the out-of-line st_asi helper.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(cpu_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes a 64-bit value; zero-extend SRC.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            }
#endif

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2255 
2256 static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2257                          TCGv addr, int insn)
2258 {
2259     DisasASI da = get_asi(dc, insn, MO_TEUL);
2260 
2261     switch (da.type) {
2262     case GET_ASI_EXCP:
2263         break;
2264     case GET_ASI_DIRECT:
2265         gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2266         break;
2267     default:
2268         /* ??? Should be DAE_invalid_asi.  */
2269         gen_exception(dc, TT_DATA_ACCESS);
2270         break;
2271     }
2272 }
2273 
2274 static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2275                         int insn, int rd)
2276 {
2277     DisasASI da = get_asi(dc, insn, MO_TEUL);
2278     TCGv oldv;
2279 
2280     switch (da.type) {
2281     case GET_ASI_EXCP:
2282         return;
2283     case GET_ASI_DIRECT:
2284         oldv = tcg_temp_new();
2285         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2286                                   da.mem_idx, da.memop | MO_ALIGN);
2287         gen_store_gpr(dc, rd, oldv);
2288         break;
2289     default:
2290         /* ??? Should be DAE_invalid_asi.  */
2291         gen_exception(dc, TT_DATA_ACCESS);
2292         break;
2293     }
2294 }
2295 
/*
 * Emit code for LDSTUBA: atomically load the byte at ADDR into DST and
 * store 0xff to it, using the ASI encoded in INSN.
 */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing more.  */
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->base.tb) & CF_PARALLEL) {
            /* The non-atomic load+store fallback below is unsound with
               parallel CPUs; exit to the slow path instead.  */
            gen_helper_exit_atomic(cpu_env);
        } else {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UB);
            TCGv_i64 s64, t64;

            save_state(dc);
            /* Emulate ldstub via a separate load and store of 0xff.  */
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);

            s64 = tcg_constant_i64(0xff);
            gen_helper_st_asi(cpu_env, addr, s64, r_asi, r_mop);

            tcg_gen_trunc_i64_tl(dst, t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2331 #endif
2332 
2333 #ifdef TARGET_SPARC64
/*
 * Emit code for a floating-point load-alternate (ldfa/lddfa/ldqfa).
 * SIZE is 4, 8 or 16 bytes and selects single/double/quad; RD is the
 * destination FP register number; the ASI is decoded from INSN.
 */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing more.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Load into a temporary first so a fault on the second
               half does not leave cpu_fpr[rd/2] partially updated.  */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            /* Load a 64-byte block into eight consecutive FP registers.  */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop | MO_ALIGN);

            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], cpu_env, addr, r_asi, r_mop);
                break;
            case 16:
                /* As in the DIRECT case: keep the first half in a
                   temporary until both loads have succeeded.  */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], cpu_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                break;
            default:
                g_assert_not_reached();
            }
        }
        break;
    }
}
2442 
/*
 * Emit code for a floating-point store-alternate (stfa/stdfa/stqfa).
 * SIZE is 4, 8 or 16 bytes and selects single/double/quad; RD is the
 * source FP register number; the ASI is decoded from INSN.
 */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing more.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            /* Store a 64-byte block from eight consecutive FP registers.  */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2525 
/*
 * Emit code for LDDA (64-bit version): load a doubleword into the even/
 * odd register pair RD/RD+1, using the ASI encoded in INSN.  For TWINX
 * ASIs this loads two full 64-bit values instead.
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing more.  */
        return;

    case GET_ASI_DTWINX:
        /* Two full 64-bit loads; the first enforces 16-byte alignment.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop | MO_ALIGN);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            save_state(dc);
            gen_helper_ld_asi(tmp, cpu_env, addr, r_asi, r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2587 
/*
 * Emit code for STDA (64-bit version): store the register pair RD/RD+1
 * as a doubleword, using the ASI encoded in INSN.  HI is the value of
 * register RD; the odd half is loaded here.  TWINX ASIs store two full
 * 64-bit values instead.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing more.  */
        break;

    case GET_ASI_DTWINX:
        /* Two full 64-bit stores; the first enforces 16-byte alignment.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2643 
2644 static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2645                          int insn, int rd)
2646 {
2647     DisasASI da = get_asi(dc, insn, MO_TEUQ);
2648     TCGv oldv;
2649 
2650     switch (da.type) {
2651     case GET_ASI_EXCP:
2652         return;
2653     case GET_ASI_DIRECT:
2654         oldv = tcg_temp_new();
2655         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2656                                   da.mem_idx, da.memop | MO_ALIGN);
2657         gen_store_gpr(dc, rd, oldv);
2658         break;
2659     default:
2660         /* ??? Should be DAE_invalid_asi.  */
2661         gen_exception(dc, TT_DATA_ACCESS);
2662         break;
2663     }
2664 }
2665 
2666 #elif !defined(CONFIG_USER_ONLY)
/*
 * Emit code for LDDA (32-bit version): load a 64-bit doubleword and
 * split it across the even/odd register pair RD/RD+1.
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEUQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing more.  */
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        /* Fall back to the out-of-line ld_asi helper.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            save_state(dc);
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
        }
        break;
    }

    /* High word goes to the even register, low word to the odd one.  */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2700 
/*
 * Emit code for STDA (32-bit version): store the even/odd register pair
 * RD/RD+1 as one 64-bit doubleword.  HI is the value of register RD.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    /* Assemble the doubleword: even register in the high half.  */
    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing more.  */
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_constant_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }
        }
        break;
    default:
        /* Fall back to the out-of-line st_asi helper.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2746 #endif
2747 
2748 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2749 {
2750     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2751     return gen_load_gpr(dc, rs1);
2752 }
2753 
2754 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2755 {
2756     if (IS_IMM) { /* immediate */
2757         target_long simm = GET_FIELDs(insn, 19, 31);
2758         TCGv t = tcg_temp_new();
2759         tcg_gen_movi_tl(t, simm);
2760         return t;
2761     } else {      /* register */
2762         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2763         return gen_load_gpr(dc, rs2);
2764     }
2765 }
2766 
2767 #ifdef TARGET_SPARC64
2768 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2769 {
2770     TCGv_i32 c32, zero, dst, s1, s2;
2771 
2772     /* We have two choices here: extend the 32 bit data and use movcond_i64,
2773        or fold the comparison down to 32 bits and use movcond_i32.  Choose
2774        the later.  */
2775     c32 = tcg_temp_new_i32();
2776     if (cmp->is_bool) {
2777         tcg_gen_extrl_i64_i32(c32, cmp->c1);
2778     } else {
2779         TCGv_i64 c64 = tcg_temp_new_i64();
2780         tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2781         tcg_gen_extrl_i64_i32(c32, c64);
2782     }
2783 
2784     s1 = gen_load_fpr_F(dc, rs);
2785     s2 = gen_load_fpr_F(dc, rd);
2786     dst = gen_dest_fpr_F(dc);
2787     zero = tcg_constant_i32(0);
2788 
2789     tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2790 
2791     gen_store_fpr_F(dc, rd, dst);
2792 }
2793 
2794 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2795 {
2796     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2797     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2798                         gen_load_fpr_D(dc, rs),
2799                         gen_load_fpr_D(dc, rd));
2800     gen_store_fpr_D(dc, rd, dst);
2801 }
2802 
2803 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2804 {
2805     int qd = QFPREG(rd);
2806     int qs = QFPREG(rs);
2807 
2808     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2809                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2810     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2811                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2812 
2813     gen_update_fprs_dirty(dc, qd);
2814 }
2815 
2816 #ifndef CONFIG_USER_ONLY
/*
 * Compute a pointer to the current trap-state entry:
 * R_TSPTR = &env->ts[env->tl & MAXTL_MASK].
 */
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        /* Widen the 32-bit byte offset to pointer width before adding.  */
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
    }
}
2838 #endif
2839 
/*
 * Emit code for the VIS EDGE8/16/32[L][CC] instructions.
 * DST receives the edge mask computed from addresses S1 and S2; WIDTH
 * selects the element size (8/16/32), CC requests condition-code
 * generation for S1 - S2, and LEFT selects the left-edge variant.
 * Note: S1 and S2 are clobbered (masked) in the process.
 */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        /* The CC variants set the integer condition codes as a
           subtraction s1 - s2 would.  */
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    /* lo1/lo2 = table lookup of the low bits of s1/s2.  */
    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    tcg_gen_shr_tl(lo1, tcg_constant_tl(tabl), lo1);
    tcg_gen_shr_tl(lo2, tcg_constant_tl(tabr), lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Compare s1 and s2 with the low bits dropped; in 32-bit address
       mode also drop the high 32 bits.  */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* We want to compute
        dst = (s1 == s2 ? lo1 : lo1 & lo2).
       We've already done dst = lo1, so this reduces to
        dst &= (s1 == s2 ? -1 : lo2)
       Which we perform by
        lo2 |= -(s1 == s2)
        dst &= lo2
    */
    tcg_gen_setcond_tl(TCG_COND_EQ, lo1, s1, s2);
    tcg_gen_neg_tl(lo1, lo1);
    tcg_gen_or_tl(lo2, lo2, lo1);
    tcg_gen_and_tl(dst, dst, lo2);
}
2935 
2936 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2937 {
2938     TCGv tmp = tcg_temp_new();
2939 
2940     tcg_gen_add_tl(tmp, s1, s2);
2941     tcg_gen_andi_tl(dst, tmp, -8);
2942     if (left) {
2943         tcg_gen_neg_tl(tmp, tmp);
2944     }
2945     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2946 }
2947 
2948 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2949 {
2950     TCGv t1, t2, shift;
2951 
2952     t1 = tcg_temp_new();
2953     t2 = tcg_temp_new();
2954     shift = tcg_temp_new();
2955 
2956     tcg_gen_andi_tl(shift, gsr, 7);
2957     tcg_gen_shli_tl(shift, shift, 3);
2958     tcg_gen_shl_tl(t1, s1, shift);
2959 
2960     /* A shift of 64 does not produce 0 in TCG.  Divide this into a
2961        shift of (up to 63) followed by a constant shift of 1.  */
2962     tcg_gen_xori_tl(shift, shift, 63);
2963     tcg_gen_shr_tl(t2, s2, shift);
2964     tcg_gen_shri_tl(t2, t2, 1);
2965 
2966     tcg_gen_or_tl(dst, t1, t2);
2967 }
2968 #endif
2969 
2970 #define CHECK_IU_FEATURE(dc, FEATURE)                      \
2971     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2972         goto illegal_insn;
2973 #define CHECK_FPU_FEATURE(dc, FEATURE)                     \
2974     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2975         goto nfpu_insn;
2976 
2977 /* before an instruction, dc->pc must be static */
2978 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2979 {
2980     unsigned int opc, rs1, rs2, rd;
2981     TCGv cpu_src1, cpu_src2;
2982     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2983     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2984     target_long simm;
2985 
2986     opc = GET_FIELD(insn, 0, 1);
2987     rd = GET_FIELD(insn, 2, 6);
2988 
2989     switch (opc) {
2990     case 0:                     /* branches/sethi */
2991         {
2992             unsigned int xop = GET_FIELD(insn, 7, 9);
2993             int32_t target;
2994             switch (xop) {
2995 #ifdef TARGET_SPARC64
2996             case 0x1:           /* V9 BPcc */
2997                 {
2998                     int cc;
2999 
3000                     target = GET_FIELD_SP(insn, 0, 18);
3001                     target = sign_extend(target, 19);
3002                     target <<= 2;
3003                     cc = GET_FIELD_SP(insn, 20, 21);
3004                     if (cc == 0)
3005                         do_branch(dc, target, insn, 0);
3006                     else if (cc == 2)
3007                         do_branch(dc, target, insn, 1);
3008                     else
3009                         goto illegal_insn;
3010                     goto jmp_insn;
3011                 }
3012             case 0x3:           /* V9 BPr */
3013                 {
3014                     target = GET_FIELD_SP(insn, 0, 13) |
3015                         (GET_FIELD_SP(insn, 20, 21) << 14);
3016                     target = sign_extend(target, 16);
3017                     target <<= 2;
3018                     cpu_src1 = get_src1(dc, insn);
3019                     do_branch_reg(dc, target, insn, cpu_src1);
3020                     goto jmp_insn;
3021                 }
3022             case 0x5:           /* V9 FBPcc */
3023                 {
3024                     int cc = GET_FIELD_SP(insn, 20, 21);
3025                     if (gen_trap_ifnofpu(dc)) {
3026                         goto jmp_insn;
3027                     }
3028                     target = GET_FIELD_SP(insn, 0, 18);
3029                     target = sign_extend(target, 19);
3030                     target <<= 2;
3031                     do_fbranch(dc, target, insn, cc);
3032                     goto jmp_insn;
3033                 }
3034 #else
3035             case 0x7:           /* CBN+x */
3036                 {
3037                     goto ncp_insn;
3038                 }
3039 #endif
3040             case 0x2:           /* BN+x */
3041                 {
3042                     target = GET_FIELD(insn, 10, 31);
3043                     target = sign_extend(target, 22);
3044                     target <<= 2;
3045                     do_branch(dc, target, insn, 0);
3046                     goto jmp_insn;
3047                 }
3048             case 0x6:           /* FBN+x */
3049                 {
3050                     if (gen_trap_ifnofpu(dc)) {
3051                         goto jmp_insn;
3052                     }
3053                     target = GET_FIELD(insn, 10, 31);
3054                     target = sign_extend(target, 22);
3055                     target <<= 2;
3056                     do_fbranch(dc, target, insn, 0);
3057                     goto jmp_insn;
3058                 }
3059             case 0x4:           /* SETHI */
3060                 /* Special-case %g0 because that's the canonical nop.  */
3061                 if (rd) {
3062                     uint32_t value = GET_FIELD(insn, 10, 31);
3063                     TCGv t = gen_dest_gpr(dc, rd);
3064                     tcg_gen_movi_tl(t, value << 10);
3065                     gen_store_gpr(dc, rd, t);
3066                 }
3067                 break;
3068             case 0x0:           /* UNIMPL */
3069             default:
3070                 goto illegal_insn;
3071             }
3072             break;
3073         }
3074         break;
3075     case 1:                     /*CALL*/
3076         {
3077             target_long target = GET_FIELDs(insn, 2, 31) << 2;
3078             TCGv o7 = gen_dest_gpr(dc, 15);
3079 
3080             tcg_gen_movi_tl(o7, dc->pc);
3081             gen_store_gpr(dc, 15, o7);
3082             target += dc->pc;
3083             gen_mov_pc_npc(dc);
3084 #ifdef TARGET_SPARC64
3085             if (unlikely(AM_CHECK(dc))) {
3086                 target &= 0xffffffffULL;
3087             }
3088 #endif
3089             dc->npc = target;
3090         }
3091         goto jmp_insn;
3092     case 2:                     /* FPU & Logical Operations */
3093         {
3094             unsigned int xop = GET_FIELD(insn, 7, 12);
3095             TCGv cpu_dst = tcg_temp_new();
3096             TCGv cpu_tmp0;
3097 
3098             if (xop == 0x3a) {  /* generate trap */
3099                 int cond = GET_FIELD(insn, 3, 6);
3100                 TCGv_i32 trap;
3101                 TCGLabel *l1 = NULL;
3102                 int mask;
3103 
3104                 if (cond == 0) {
3105                     /* Trap never.  */
3106                     break;
3107                 }
3108 
3109                 save_state(dc);
3110 
3111                 if (cond != 8) {
3112                     /* Conditional trap.  */
3113                     DisasCompare cmp;
3114 #ifdef TARGET_SPARC64
3115                     /* V9 icc/xcc */
3116                     int cc = GET_FIELD_SP(insn, 11, 12);
3117                     if (cc == 0) {
3118                         gen_compare(&cmp, 0, cond, dc);
3119                     } else if (cc == 2) {
3120                         gen_compare(&cmp, 1, cond, dc);
3121                     } else {
3122                         goto illegal_insn;
3123                     }
3124 #else
3125                     gen_compare(&cmp, 0, cond, dc);
3126 #endif
3127                     l1 = gen_new_label();
3128                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3129                                       cmp.c1, cmp.c2, l1);
3130                 }
3131 
3132                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3133                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3134 
3135                 /* Don't use the normal temporaries, as they may well have
3136                    gone out of scope with the branch above.  While we're
3137                    doing that we might as well pre-truncate to 32-bit.  */
3138                 trap = tcg_temp_new_i32();
3139 
3140                 rs1 = GET_FIELD_SP(insn, 14, 18);
3141                 if (IS_IMM) {
3142                     rs2 = GET_FIELD_SP(insn, 0, 7);
3143                     if (rs1 == 0) {
3144                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3145                         /* Signal that the trap value is fully constant.  */
3146                         mask = 0;
3147                     } else {
3148                         TCGv t1 = gen_load_gpr(dc, rs1);
3149                         tcg_gen_trunc_tl_i32(trap, t1);
3150                         tcg_gen_addi_i32(trap, trap, rs2);
3151                     }
3152                 } else {
3153                     TCGv t1, t2;
3154                     rs2 = GET_FIELD_SP(insn, 0, 4);
3155                     t1 = gen_load_gpr(dc, rs1);
3156                     t2 = gen_load_gpr(dc, rs2);
3157                     tcg_gen_add_tl(t1, t1, t2);
3158                     tcg_gen_trunc_tl_i32(trap, t1);
3159                 }
3160                 if (mask != 0) {
3161                     tcg_gen_andi_i32(trap, trap, mask);
3162                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
3163                 }
3164 
3165                 gen_helper_raise_exception(cpu_env, trap);
3166 
3167                 if (cond == 8) {
3168                     /* An unconditional trap ends the TB.  */
3169                     dc->base.is_jmp = DISAS_NORETURN;
3170                     goto jmp_insn;
3171                 } else {
3172                     /* A conditional trap falls through to the next insn.  */
3173                     gen_set_label(l1);
3174                     break;
3175                 }
3176             } else if (xop == 0x28) {
3177                 rs1 = GET_FIELD(insn, 13, 17);
3178                 switch(rs1) {
3179                 case 0: /* rdy */
3180 #ifndef TARGET_SPARC64
3181                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
3182                                        manual, rdy on the microSPARC
3183                                        II */
3184                 case 0x0f:          /* stbar in the SPARCv8 manual,
3185                                        rdy on the microSPARC II */
3186                 case 0x10 ... 0x1f: /* implementation-dependent in the
3187                                        SPARCv8 manual, rdy on the
3188                                        microSPARC II */
3189                     /* Read Asr17 */
3190                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3191                         TCGv t = gen_dest_gpr(dc, rd);
3192                         /* Read Asr17 for a Leon3 monoprocessor */
3193                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3194                         gen_store_gpr(dc, rd, t);
3195                         break;
3196                     }
3197 #endif
3198                     gen_store_gpr(dc, rd, cpu_y);
3199                     break;
3200 #ifdef TARGET_SPARC64
3201                 case 0x2: /* V9 rdccr */
3202                     update_psr(dc);
3203                     gen_helper_rdccr(cpu_dst, cpu_env);
3204                     gen_store_gpr(dc, rd, cpu_dst);
3205                     break;
3206                 case 0x3: /* V9 rdasi */
3207                     tcg_gen_movi_tl(cpu_dst, dc->asi);
3208                     gen_store_gpr(dc, rd, cpu_dst);
3209                     break;
3210                 case 0x4: /* V9 rdtick */
3211                     {
3212                         TCGv_ptr r_tickptr;
3213                         TCGv_i32 r_const;
3214 
3215                         r_tickptr = tcg_temp_new_ptr();
3216                         r_const = tcg_constant_i32(dc->mem_idx);
3217                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3218                                        offsetof(CPUSPARCState, tick));
3219                         if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3220                             gen_io_start();
3221                         }
3222                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3223                                                   r_const);
3224                         gen_store_gpr(dc, rd, cpu_dst);
3225                         if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3226                             /* I/O operations in icount mode must end the TB */
3227                             dc->base.is_jmp = DISAS_EXIT;
3228                         }
3229                     }
3230                     break;
3231                 case 0x5: /* V9 rdpc */
3232                     {
3233                         TCGv t = gen_dest_gpr(dc, rd);
3234                         if (unlikely(AM_CHECK(dc))) {
3235                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3236                         } else {
3237                             tcg_gen_movi_tl(t, dc->pc);
3238                         }
3239                         gen_store_gpr(dc, rd, t);
3240                     }
3241                     break;
3242                 case 0x6: /* V9 rdfprs */
3243                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3244                     gen_store_gpr(dc, rd, cpu_dst);
3245                     break;
3246                 case 0xf: /* V9 membar */
3247                     break; /* no effect */
3248                 case 0x13: /* Graphics Status */
3249                     if (gen_trap_ifnofpu(dc)) {
3250                         goto jmp_insn;
3251                     }
3252                     gen_store_gpr(dc, rd, cpu_gsr);
3253                     break;
3254                 case 0x16: /* Softint */
3255                     tcg_gen_ld32s_tl(cpu_dst, cpu_env,
3256                                      offsetof(CPUSPARCState, softint));
3257                     gen_store_gpr(dc, rd, cpu_dst);
3258                     break;
3259                 case 0x17: /* Tick compare */
3260                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
3261                     break;
3262                 case 0x18: /* System tick */
3263                     {
3264                         TCGv_ptr r_tickptr;
3265                         TCGv_i32 r_const;
3266 
3267                         r_tickptr = tcg_temp_new_ptr();
3268                         r_const = tcg_constant_i32(dc->mem_idx);
3269                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3270                                        offsetof(CPUSPARCState, stick));
3271                         if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3272                             gen_io_start();
3273                         }
3274                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3275                                                   r_const);
3276                         gen_store_gpr(dc, rd, cpu_dst);
3277                         if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3278                             /* I/O operations in icount mode must end the TB */
3279                             dc->base.is_jmp = DISAS_EXIT;
3280                         }
3281                     }
3282                     break;
3283                 case 0x19: /* System tick compare */
3284                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
3285                     break;
3286                 case 0x1a: /* UltraSPARC-T1 Strand status */
3287                     /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3288                      * this ASR as impl. dep
3289                      */
3290                     CHECK_IU_FEATURE(dc, HYPV);
3291                     {
3292                         TCGv t = gen_dest_gpr(dc, rd);
3293                         tcg_gen_movi_tl(t, 1UL);
3294                         gen_store_gpr(dc, rd, t);
3295                     }
3296                     break;
3297                 case 0x10: /* Performance Control */
3298                 case 0x11: /* Performance Instrumentation Counter */
3299                 case 0x12: /* Dispatch Control */
3300                 case 0x14: /* Softint set, WO */
3301                 case 0x15: /* Softint clear, WO */
3302 #endif
3303                 default:
3304                     goto illegal_insn;
3305                 }
3306 #if !defined(CONFIG_USER_ONLY)
3307             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3308 #ifndef TARGET_SPARC64
3309                 if (!supervisor(dc)) {
3310                     goto priv_insn;
3311                 }
3312                 update_psr(dc);
3313                 gen_helper_rdpsr(cpu_dst, cpu_env);
3314 #else
3315                 CHECK_IU_FEATURE(dc, HYPV);
3316                 if (!hypervisor(dc))
3317                     goto priv_insn;
3318                 rs1 = GET_FIELD(insn, 13, 17);
3319                 switch (rs1) {
3320                 case 0: // hpstate
3321                     tcg_gen_ld_i64(cpu_dst, cpu_env,
3322                                    offsetof(CPUSPARCState, hpstate));
3323                     break;
3324                 case 1: // htstate
3325                     // gen_op_rdhtstate();
3326                     break;
3327                 case 3: // hintp
3328                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3329                     break;
3330                 case 5: // htba
3331                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
3332                     break;
3333                 case 6: // hver
3334                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
3335                     break;
3336                 case 31: // hstick_cmpr
3337                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3338                     break;
3339                 default:
3340                     goto illegal_insn;
3341                 }
3342 #endif
3343                 gen_store_gpr(dc, rd, cpu_dst);
3344                 break;
3345             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3346                 if (!supervisor(dc)) {
3347                     goto priv_insn;
3348                 }
3349                 cpu_tmp0 = tcg_temp_new();
3350 #ifdef TARGET_SPARC64
3351                 rs1 = GET_FIELD(insn, 13, 17);
3352                 switch (rs1) {
3353                 case 0: // tpc
3354                     {
3355                         TCGv_ptr r_tsptr;
3356 
3357                         r_tsptr = tcg_temp_new_ptr();
3358                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3359                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3360                                       offsetof(trap_state, tpc));
3361                     }
3362                     break;
3363                 case 1: // tnpc
3364                     {
3365                         TCGv_ptr r_tsptr;
3366 
3367                         r_tsptr = tcg_temp_new_ptr();
3368                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3369                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3370                                       offsetof(trap_state, tnpc));
3371                     }
3372                     break;
3373                 case 2: // tstate
3374                     {
3375                         TCGv_ptr r_tsptr;
3376 
3377                         r_tsptr = tcg_temp_new_ptr();
3378                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3379                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3380                                       offsetof(trap_state, tstate));
3381                     }
3382                     break;
3383                 case 3: // tt
3384                     {
3385                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3386 
3387                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3388                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3389                                          offsetof(trap_state, tt));
3390                     }
3391                     break;
3392                 case 4: // tick
3393                     {
3394                         TCGv_ptr r_tickptr;
3395                         TCGv_i32 r_const;
3396 
3397                         r_tickptr = tcg_temp_new_ptr();
3398                         r_const = tcg_constant_i32(dc->mem_idx);
3399                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3400                                        offsetof(CPUSPARCState, tick));
3401                         if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3402                             gen_io_start();
3403                         }
3404                         gen_helper_tick_get_count(cpu_tmp0, cpu_env,
3405                                                   r_tickptr, r_const);
3406                         if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3407                             /* I/O operations in icount mode must end the TB */
3408                             dc->base.is_jmp = DISAS_EXIT;
3409                         }
3410                     }
3411                     break;
3412                 case 5: // tba
3413                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3414                     break;
3415                 case 6: // pstate
3416                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3417                                      offsetof(CPUSPARCState, pstate));
3418                     break;
3419                 case 7: // tl
3420                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3421                                      offsetof(CPUSPARCState, tl));
3422                     break;
3423                 case 8: // pil
3424                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3425                                      offsetof(CPUSPARCState, psrpil));
3426                     break;
3427                 case 9: // cwp
3428                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
3429                     break;
3430                 case 10: // cansave
3431                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3432                                      offsetof(CPUSPARCState, cansave));
3433                     break;
3434                 case 11: // canrestore
3435                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3436                                      offsetof(CPUSPARCState, canrestore));
3437                     break;
3438                 case 12: // cleanwin
3439                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3440                                      offsetof(CPUSPARCState, cleanwin));
3441                     break;
3442                 case 13: // otherwin
3443                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3444                                      offsetof(CPUSPARCState, otherwin));
3445                     break;
3446                 case 14: // wstate
3447                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3448                                      offsetof(CPUSPARCState, wstate));
3449                     break;
3450                 case 16: // UA2005 gl
3451                     CHECK_IU_FEATURE(dc, GL);
3452                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3453                                      offsetof(CPUSPARCState, gl));
3454                     break;
3455                 case 26: // UA2005 strand status
3456                     CHECK_IU_FEATURE(dc, HYPV);
3457                     if (!hypervisor(dc))
3458                         goto priv_insn;
3459                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3460                     break;
3461                 case 31: // ver
3462                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3463                     break;
3464                 case 15: // fq
3465                 default:
3466                     goto illegal_insn;
3467                 }
3468 #else
3469                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3470 #endif
3471                 gen_store_gpr(dc, rd, cpu_tmp0);
3472                 break;
3473 #endif
3474 #if defined(TARGET_SPARC64) || !defined(CONFIG_USER_ONLY)
3475             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3476 #ifdef TARGET_SPARC64
3477                 gen_helper_flushw(cpu_env);
3478 #else
3479                 if (!supervisor(dc))
3480                     goto priv_insn;
3481                 gen_store_gpr(dc, rd, cpu_tbr);
3482 #endif
3483                 break;
3484 #endif
3485             } else if (xop == 0x34) {   /* FPU Operations */
3486                 if (gen_trap_ifnofpu(dc)) {
3487                     goto jmp_insn;
3488                 }
3489                 gen_op_clear_ieee_excp_and_FTT();
3490                 rs1 = GET_FIELD(insn, 13, 17);
3491                 rs2 = GET_FIELD(insn, 27, 31);
3492                 xop = GET_FIELD(insn, 18, 26);
3493 
3494                 switch (xop) {
3495                 case 0x1: /* fmovs */
3496                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3497                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3498                     break;
3499                 case 0x5: /* fnegs */
3500                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3501                     break;
3502                 case 0x9: /* fabss */
3503                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3504                     break;
3505                 case 0x29: /* fsqrts */
3506                     CHECK_FPU_FEATURE(dc, FSQRT);
3507                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3508                     break;
3509                 case 0x2a: /* fsqrtd */
3510                     CHECK_FPU_FEATURE(dc, FSQRT);
3511                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3512                     break;
3513                 case 0x2b: /* fsqrtq */
3514                     CHECK_FPU_FEATURE(dc, FLOAT128);
3515                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3516                     break;
3517                 case 0x41: /* fadds */
3518                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3519                     break;
3520                 case 0x42: /* faddd */
3521                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3522                     break;
3523                 case 0x43: /* faddq */
3524                     CHECK_FPU_FEATURE(dc, FLOAT128);
3525                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3526                     break;
3527                 case 0x45: /* fsubs */
3528                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3529                     break;
3530                 case 0x46: /* fsubd */
3531                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3532                     break;
3533                 case 0x47: /* fsubq */
3534                     CHECK_FPU_FEATURE(dc, FLOAT128);
3535                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3536                     break;
3537                 case 0x49: /* fmuls */
3538                     CHECK_FPU_FEATURE(dc, FMUL);
3539                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3540                     break;
3541                 case 0x4a: /* fmuld */
3542                     CHECK_FPU_FEATURE(dc, FMUL);
3543                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3544                     break;
3545                 case 0x4b: /* fmulq */
3546                     CHECK_FPU_FEATURE(dc, FLOAT128);
3547                     CHECK_FPU_FEATURE(dc, FMUL);
3548                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3549                     break;
3550                 case 0x4d: /* fdivs */
3551                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3552                     break;
3553                 case 0x4e: /* fdivd */
3554                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3555                     break;
3556                 case 0x4f: /* fdivq */
3557                     CHECK_FPU_FEATURE(dc, FLOAT128);
3558                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3559                     break;
3560                 case 0x69: /* fsmuld */
3561                     CHECK_FPU_FEATURE(dc, FSMULD);
3562                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3563                     break;
3564                 case 0x6e: /* fdmulq */
3565                     CHECK_FPU_FEATURE(dc, FLOAT128);
3566                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3567                     break;
3568                 case 0xc4: /* fitos */
3569                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3570                     break;
3571                 case 0xc6: /* fdtos */
3572                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3573                     break;
3574                 case 0xc7: /* fqtos */
3575                     CHECK_FPU_FEATURE(dc, FLOAT128);
3576                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3577                     break;
3578                 case 0xc8: /* fitod */
3579                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3580                     break;
3581                 case 0xc9: /* fstod */
3582                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3583                     break;
3584                 case 0xcb: /* fqtod */
3585                     CHECK_FPU_FEATURE(dc, FLOAT128);
3586                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3587                     break;
3588                 case 0xcc: /* fitoq */
3589                     CHECK_FPU_FEATURE(dc, FLOAT128);
3590                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3591                     break;
3592                 case 0xcd: /* fstoq */
3593                     CHECK_FPU_FEATURE(dc, FLOAT128);
3594                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3595                     break;
3596                 case 0xce: /* fdtoq */
3597                     CHECK_FPU_FEATURE(dc, FLOAT128);
3598                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3599                     break;
3600                 case 0xd1: /* fstoi */
3601                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3602                     break;
3603                 case 0xd2: /* fdtoi */
3604                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3605                     break;
3606                 case 0xd3: /* fqtoi */
3607                     CHECK_FPU_FEATURE(dc, FLOAT128);
3608                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3609                     break;
3610 #ifdef TARGET_SPARC64
3611                 case 0x2: /* V9 fmovd */
3612                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3613                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3614                     break;
3615                 case 0x3: /* V9 fmovq */
3616                     CHECK_FPU_FEATURE(dc, FLOAT128);
3617                     gen_move_Q(dc, rd, rs2);
3618                     break;
3619                 case 0x6: /* V9 fnegd */
3620                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3621                     break;
3622                 case 0x7: /* V9 fnegq */
3623                     CHECK_FPU_FEATURE(dc, FLOAT128);
3624                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3625                     break;
3626                 case 0xa: /* V9 fabsd */
3627                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3628                     break;
3629                 case 0xb: /* V9 fabsq */
3630                     CHECK_FPU_FEATURE(dc, FLOAT128);
3631                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3632                     break;
3633                 case 0x81: /* V9 fstox */
3634                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3635                     break;
3636                 case 0x82: /* V9 fdtox */
3637                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3638                     break;
3639                 case 0x83: /* V9 fqtox */
3640                     CHECK_FPU_FEATURE(dc, FLOAT128);
3641                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3642                     break;
3643                 case 0x84: /* V9 fxtos */
3644                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3645                     break;
3646                 case 0x88: /* V9 fxtod */
3647                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3648                     break;
3649                 case 0x8c: /* V9 fxtoq */
3650                     CHECK_FPU_FEATURE(dc, FLOAT128);
3651                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3652                     break;
3653 #endif
3654                 default:
3655                     goto illegal_insn;
3656                 }
3657             } else if (xop == 0x35) {   /* FPU Operations */
3658 #ifdef TARGET_SPARC64
3659                 int cond;
3660 #endif
3661                 if (gen_trap_ifnofpu(dc)) {
3662                     goto jmp_insn;
3663                 }
3664                 gen_op_clear_ieee_excp_and_FTT();
3665                 rs1 = GET_FIELD(insn, 13, 17);
3666                 rs2 = GET_FIELD(insn, 27, 31);
3667                 xop = GET_FIELD(insn, 18, 26);
3668 
3669 #ifdef TARGET_SPARC64
3670 #define FMOVR(sz)                                                  \
3671                 do {                                               \
3672                     DisasCompare cmp;                              \
3673                     cond = GET_FIELD_SP(insn, 10, 12);             \
3674                     cpu_src1 = get_src1(dc, insn);                 \
3675                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3676                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3677                 } while (0)
3678 
3679                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3680                     FMOVR(s);
3681                     break;
3682                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3683                     FMOVR(d);
3684                     break;
3685                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3686                     CHECK_FPU_FEATURE(dc, FLOAT128);
3687                     FMOVR(q);
3688                     break;
3689                 }
3690 #undef FMOVR
3691 #endif
                switch (xop) {
#ifdef TARGET_SPARC64
                /* FMOVCC, first form: conditional FP move judged on the
                   floating-point condition field %fcc<fcc>.  */
#define FMOVCC(fcc, sz)                                                 \
                    do {                                                \
                        DisasCompare cmp;                               \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcompare(&cmp, fcc, cond);                  \
                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
                    } while (0)

                    case 0x001: /* V9 fmovscc %fcc0 */
                        FMOVCC(0, s);
                        break;
                    case 0x002: /* V9 fmovdcc %fcc0 */
                        FMOVCC(0, d);
                        break;
                    case 0x003: /* V9 fmovqcc %fcc0 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(0, q);
                        break;
                    case 0x041: /* V9 fmovscc %fcc1 */
                        FMOVCC(1, s);
                        break;
                    case 0x042: /* V9 fmovdcc %fcc1 */
                        FMOVCC(1, d);
                        break;
                    case 0x043: /* V9 fmovqcc %fcc1 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(1, q);
                        break;
                    case 0x081: /* V9 fmovscc %fcc2 */
                        FMOVCC(2, s);
                        break;
                    case 0x082: /* V9 fmovdcc %fcc2 */
                        FMOVCC(2, d);
                        break;
                    case 0x083: /* V9 fmovqcc %fcc2 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(2, q);
                        break;
                    case 0x0c1: /* V9 fmovscc %fcc3 */
                        FMOVCC(3, s);
                        break;
                    case 0x0c2: /* V9 fmovdcc %fcc3 */
                        FMOVCC(3, d);
                        break;
                    case 0x0c3: /* V9 fmovqcc %fcc3 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(3, q);
                        break;
#undef FMOVCC
                /* FMOVCC, second form: same move, but judged on the integer
                   condition codes -- xcc=0 selects %icc, xcc=1 selects %xcc.  */
#define FMOVCC(xcc, sz)                                                 \
                    do {                                                \
                        DisasCompare cmp;                               \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_compare(&cmp, xcc, cond, dc);               \
                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
                    } while (0)

                    case 0x101: /* V9 fmovscc %icc */
                        FMOVCC(0, s);
                        break;
                    case 0x102: /* V9 fmovdcc %icc */
                        FMOVCC(0, d);
                        break;
                    case 0x103: /* V9 fmovqcc %icc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(0, q);
                        break;
                    case 0x181: /* V9 fmovscc %xcc */
                        FMOVCC(1, s);
                        break;
                    case 0x182: /* V9 fmovdcc %xcc */
                        FMOVCC(1, d);
                        break;
                    case 0x183: /* V9 fmovqcc %xcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(1, q);
                        break;
#undef FMOVCC
#endif
                    /* FP compares.  rd & 3 selects the destination %fcc
                       field; the fcmpe* ("exception") variants go through
                       separate helpers from the plain fcmp* ones.  */
                    case 0x51: /* fcmps, V9 %fcc */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x52: /* fcmpd, V9 %fcc */
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                        gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
                        break;
                    case 0x53: /* fcmpq, V9 %fcc */
                        /* Quad operands travel via the QT0/QT1 staging
                           slots rather than TCG temporaries.  */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpq(rd & 3);
                        break;
                    case 0x55: /* fcmpes, V9 %fcc */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x56: /* fcmped, V9 %fcc */
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                        gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
                        break;
                    case 0x57: /* fcmpeq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpeq(rd & 3);
                        break;
                    default:
                        goto illegal_insn;
                }
            } else if (xop == 0x2) {
                /* or: any %g0 operand degenerates into clr or mov, so
                   those shapes are special-cased to avoid emitting a
                   real OR op.  */
                TCGv dst = gen_dest_gpr(dc, rd);
                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    /* clr/mov shortcut : or %g0, x, y -> mov x, y */
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_movi_tl(dst, simm);
                        gen_store_gpr(dc, rd, dst);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 == 0) {
                            /* or %g0, %g0, y -> clr y */
                            tcg_gen_movi_tl(dst, 0);
                            gen_store_gpr(dc, rd, dst);
                        } else {
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            gen_store_gpr(dc, rd, cpu_src2);
                        }
                    }
                } else {
                    cpu_src1 = get_src1(dc, insn);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(dst, cpu_src1, simm);
                        gen_store_gpr(dc, rd, dst);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 == 0) {
                            /* mov shortcut:  or x, %g0, y -> mov x, y */
                            gen_store_gpr(dc, rd, cpu_src1);
                        } else {
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
                            gen_store_gpr(dc, rd, dst);
                        }
                    }
                }
#ifdef TARGET_SPARC64
            /* V9 shifts: insn bit 12 selects the 64-bit "x" form (6-bit
               shift count, mask 0x3f) versus the 32-bit form (5-bit
               count, mask 0x1f).  */
            } else if (xop == 0x25) { /* sll, V9 sllx */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = tcg_temp_new();
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_store_gpr(dc, rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        /* 32-bit srl shifts only the zero-extended
                           low 32 bits of the source.  */
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = tcg_temp_new();
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_store_gpr(dc, rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        /* 32-bit sra shifts the sign-extended
                           low 32 bits of the source.  */
                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = tcg_temp_new();
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_store_gpr(dc, rd, cpu_dst);
#endif
            } else if (xop < 0x36) {
                if (xop < 0x20) {
                    /* Basic integer ALU ops.  Bit 4 of xop (0x10) selects
                       the cc-setting variant of each op, hence the decode
                       on xop & ~0x10.  Logic ops track flags lazily via
                       CC_OP_LOGIC on the result in cpu_cc_dst.  */
                    cpu_src1 = get_src1(dc, insn);
                    cpu_src2 = get_src2(dc, insn);
                    switch (xop & ~0x10) {
                    case 0x0: /* add */
                        if (xop & 0x10) {
                            gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                            dc->cc_op = CC_OP_ADD;
                        } else {
                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
                    case 0x1: /* and */
                        tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x2: /* or */
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x3: /* xor */
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x4: /* sub */
                        if (xop & 0x10) {
                            gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                            dc->cc_op = CC_OP_SUB;
                        } else {
                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
                    case 0x5: /* andn */
                        tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x6: /* orn */
                        tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x7: /* xorn */
                        tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x8: /* addx, V9 addc */
                        /* Add-with-carry; the helper also sets cc when
                           the 0x10 variant bit is set.  */
                        gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0x9: /* V9 mulx */
                        tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                        break;
#endif
                    case 0xa: /* umul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xb: /* smul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xc: /* subx, V9 subc */
                        /* Subtract-with-carry; the helper also sets cc
                           when the 0x10 variant bit is set.  */
                        gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0xd: /* V9 udivx */
                        gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                        break;
#endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            /* NOTE(review): unlike add/sub above there is no
                               explicit cpu_cc_op store here -- presumably the
                               _cc helper updates it; confirm in the helper.  */
                            gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    default:
                        goto illegal_insn;
                    }
                    gen_store_gpr(dc, rd, cpu_dst);
                } else {
                    /* xop 0x20..0x35: tagged arithmetic, multiply step,
                       pre-V9 shifts and state-register writes.  */
                    cpu_src1 = get_src1(dc, insn);
                    cpu_src2 = get_src2(dc, insn);
                    switch (xop) {
                    case 0x20: /* taddcc */
                        gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                        dc->cc_op = CC_OP_TADD;
                        break;
                    case 0x21: /* tsubcc */
                        gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                        dc->cc_op = CC_OP_TSUB;
                        break;
                    case 0x22: /* taddcctv */
                        /* Trap-on-overflow variants go through helpers
                           that can raise an exception.  */
                        gen_helper_taddcctv(cpu_dst, cpu_env,
                                            cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        dc->cc_op = CC_OP_TADDTV;
                        break;
                    case 0x23: /* tsubcctv */
                        gen_helper_tsubcctv(cpu_dst, cpu_env,
                                            cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        dc->cc_op = CC_OP_TSUBTV;
                        break;
                    case 0x24: /* mulscc */
                        /* Multiply step appears to consume the live PSR,
                           so flush the lazy condition codes first.  */
                        update_psr(dc);
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                        dc->cc_op = CC_OP_ADD;
                        break;
#ifndef TARGET_SPARC64
                    /* Pre-V9 shifts: 32-bit only, 5-bit shift count.  */
                    case 0x25:  /* sll */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x26:  /* srl */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x27:  /* sra */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
#endif
                    case 0x30:
                        {
                            /* wr: write the rd-selected (ancillary) state
                               register.  The written value is rs1 XOR
                               reg_or_imm, as every case below shows.  */
                            cpu_tmp0 = tcg_temp_new();
                            switch(rd) {
                            case 0: /* wry */
                                /* %y is architecturally 32 bits wide.  */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                                break;
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC
                                                   II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8
                                                   manual, nop on the
                                                   microSPARC II */
                                if ((rd == 0x13) && (dc->def->features &
                                                     CPU_FEATURE_POWERDOWN)) {
                                    /* LEON3 power-down */
                                    save_state(dc);
                                    gen_helper_power_down(cpu_env);
                                }
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_wrccr(cpu_env, cpu_tmp0);
                                /* Flags now live in the register file, not
                                   in a lazy cc computation.  */
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                                dc->cc_op = CC_OP_FLAGS;
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState, asi));
                                /* End TB to notice changed ASI.  */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(NULL, 0);
                                dc->base.is_jmp = DISAS_NORETURN;
                                break;
                            case 0x6: /* V9 wrfprs */
                                /* FPRS changed: drop the cached dirty state
                                   and end the TB so translation re-checks it.  */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
                                dc->fprs_dirty = 0;
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(NULL, 0);
                                dc->base.is_jmp = DISAS_NORETURN;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc)) {
                                    ; // XXX
                                }
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc)) {
                                    goto jmp_insn;
                                }
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                                break;
                            case 0x14: /* Softint set */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_set_softint(cpu_env, cpu_tmp0);
                                break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(cpu_env, cpu_tmp0);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_write_softint(cpu_env, cpu_tmp0);
                                break;
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, tick));
                                    /* icount mode: timer access counts as I/O.  */
                                    if (tb_cflags(dc->base.tb) &
                                           CF_USE_ICOUNT) {
                                        gen_io_start();
                                    }
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, stick));
                                    if (tb_cflags(dc->base.tb) &
                                           CF_USE_ICOUNT) {
                                        gen_io_start();
                                    }
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, stick));
                                    if (tb_cflags(dc->base.tb) &
                                           CF_USE_ICOUNT) {
                                        gen_io_start();
                                    }
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;

                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
4281 #if !defined(CONFIG_USER_ONLY)
4282                     case 0x31: /* wrpsr, V9 saved, restored */
4283                         {
                            /* Privileged on all targets */
4284                             if (!supervisor(dc))
4285                                 goto priv_insn;
4286 #ifdef TARGET_SPARC64
                            /* V9: this opcode is SAVED/RESTORED, selected by rd */
4287                             switch (rd) {
4288                             case 0:
4289                                 gen_helper_saved(cpu_env);
4290                                 break;
4291                             case 1:
4292                                 gen_helper_restored(cpu_env);
4293                                 break;
4294                             case 2: /* UA2005 allclean */
4295                             case 3: /* UA2005 otherw */
4296                             case 4: /* UA2005 normalw */
4297                             case 5: /* UA2005 invalw */
4298                                 // XXX
4299                             default:
4300                                 goto illegal_insn;
4301                             }
4302 #else
                            /* V8 wrpsr: new PSR value is rs1 XOR rs2/imm */
4303                             cpu_tmp0 = tcg_temp_new();
4304                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4305                             gen_helper_wrpsr(cpu_env, cpu_tmp0);
                            /* The helper stored the icc bits into env, so the
                               condition codes are now tracked as CC_OP_FLAGS */
4306                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4307                             dc->cc_op = CC_OP_FLAGS;
                            /* A PSR write may change privilege/interrupt state:
                               finish this instruction and end the TB */
4308                             save_state(dc);
4309                             gen_op_next_insn();
4310                             tcg_gen_exit_tb(NULL, 0);
4311                             dc->base.is_jmp = DISAS_NORETURN;
4312 #endif
4313                         }
4314                         break;
4315                     case 0x32: /* wrwim, V9 wrpr */
4316                         {
                            /* Privileged register write: supervisor only */
4317                             if (!supervisor(dc))
4318                                 goto priv_insn;
                            /* WR semantics: value is rs1 XOR rs2/imm */
4319                             cpu_tmp0 = tcg_temp_new();
4320                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4321 #ifdef TARGET_SPARC64
                            /* V9 wrpr: rd selects the privileged register */
4322                             switch (rd) {
4323                             case 0: // tpc
4324                                 {
4325                                     TCGv_ptr r_tsptr;

                                    /* Store into the trap state for current TL */
4327                                     r_tsptr = tcg_temp_new_ptr();
4328                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4329                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4330                                                   offsetof(trap_state, tpc));
4331                                 }
4332                                 break;
4333                             case 1: // tnpc
4334                                 {
4335                                     TCGv_ptr r_tsptr;

4337                                     r_tsptr = tcg_temp_new_ptr();
4338                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4339                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4340                                                   offsetof(trap_state, tnpc));
4341                                 }
4342                                 break;
4343                             case 2: // tstate
4344                                 {
4345                                     TCGv_ptr r_tsptr;

4347                                     r_tsptr = tcg_temp_new_ptr();
4348                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4349                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4350                                                   offsetof(trap_state,
4351                                                            tstate));
4352                                 }
4353                                 break;
4354                             case 3: // tt
4355                                 {
4356                                     TCGv_ptr r_tsptr;

4358                                     r_tsptr = tcg_temp_new_ptr();
4359                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    /* tt is a 32-bit field: truncating store */
4360                                     tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
4361                                                     offsetof(trap_state, tt));
4362                                 }
4363                                 break;
4364                             case 4: // tick
4365                                 {
4366                                     TCGv_ptr r_tickptr;

4368                                     r_tickptr = tcg_temp_new_ptr();
4369                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4370                                                    offsetof(CPUSPARCState, tick));
                                    /* Timer access counts as I/O under icount */
4371                                     if (tb_cflags(dc->base.tb) &
4372                                            CF_USE_ICOUNT) {
4373                                         gen_io_start();
4374                                     }
4375                                     gen_helper_tick_set_count(r_tickptr,
4376                                                               cpu_tmp0);
4377                                     /* End TB to handle timer interrupt */
4378                                     dc->base.is_jmp = DISAS_EXIT;
4379                                 }
4380                                 break;
4381                             case 5: // tba
4382                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
4383                                 break;
4384                             case 6: // pstate
4385                                 save_state(dc);
4386                                 if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
4387                                     gen_io_start();
4388                                 }
4389                                 gen_helper_wrpstate(cpu_env, cpu_tmp0);
4390                                 if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
4391                                     /* I/O ops in icount mode must end the TB */
4392                                     dc->base.is_jmp = DISAS_EXIT;
4393                                 }
                                /* PSTATE affects further translation: force a
                                   dynamic npc so the TB chain is broken */
4394                                 dc->npc = DYNAMIC_PC;
4395                                 break;
4396                             case 7: // tl
4397                                 save_state(dc);
                                /* tl is a 32-bit field: truncating store */
4398                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4399                                                offsetof(CPUSPARCState, tl));
4400                                 dc->npc = DYNAMIC_PC;
4401                                 break;
4402                             case 8: // pil
4403                                 if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
4404                                     gen_io_start();
4405                                 }
4406                                 gen_helper_wrpil(cpu_env, cpu_tmp0);
4407                                 if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
4408                                     /* I/O ops in icount mode must end the TB */
4409                                     dc->base.is_jmp = DISAS_EXIT;
4410                                 }
4411                                 break;
4412                             case 9: // cwp
4413                                 gen_helper_wrcwp(cpu_env, cpu_tmp0);
4414                                 break;
4415                             case 10: // cansave
4416                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4417                                                 offsetof(CPUSPARCState,
4418                                                          cansave));
4419                                 break;
4420                             case 11: // canrestore
4421                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4422                                                 offsetof(CPUSPARCState,
4423                                                          canrestore));
4424                                 break;
4425                             case 12: // cleanwin
4426                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4427                                                 offsetof(CPUSPARCState,
4428                                                          cleanwin));
4429                                 break;
4430                             case 13: // otherwin
4431                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4432                                                 offsetof(CPUSPARCState,
4433                                                          otherwin));
4434                                 break;
4435                             case 14: // wstate
4436                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4437                                                 offsetof(CPUSPARCState,
4438                                                          wstate));
4439                                 break;
4440                             case 16: // UA2005 gl
4441                                 CHECK_IU_FEATURE(dc, GL);
4442                                 gen_helper_wrgl(cpu_env, cpu_tmp0);
4443                                 break;
4444                             case 26: // UA2005 strand status
4445                                 CHECK_IU_FEATURE(dc, HYPV);
                                /* Strand status is additionally hyperprivileged */
4446                                 if (!hypervisor(dc))
4447                                     goto priv_insn;
4448                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
4449                                 break;
4450                             default:
4451                                 goto illegal_insn;
4452                             }
4453 #else
                            /* V8 wrwim: keep only bits for implemented windows */
4454                             tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
4455                             if (dc->def->nwindows != 32) {
4456                                 tcg_gen_andi_tl(cpu_wim, cpu_wim,
4457                                                 (1 << dc->def->nwindows) - 1);
4458                             }
4459 #endif
4460                         }
4461                         break;
4462                     case 0x33: /* wrtbr, UA2005 wrhpr */
4463                         {
4464 #ifndef TARGET_SPARC64
                            /* V8 wrtbr: TBR = rs1 XOR rs2/imm, supervisor only */
4465                             if (!supervisor(dc))
4466                                 goto priv_insn;
4467                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
4468 #else
                            /* UA2005 wrhpr: hyperprivileged register write,
                               rd selects the register */
4469                             CHECK_IU_FEATURE(dc, HYPV);
4470                             if (!hypervisor(dc))
4471                                 goto priv_insn;
4472                             cpu_tmp0 = tcg_temp_new();
4473                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4474                             switch (rd) {
4475                             case 0: // hpstate
4476                                 tcg_gen_st_i64(cpu_tmp0, cpu_env,
4477                                                offsetof(CPUSPARCState,
4478                                                         hpstate));
                                /* HPSTATE change may alter execution mode:
                                   finish this instruction and end the TB */
4479                                 save_state(dc);
4480                                 gen_op_next_insn();
4481                                 tcg_gen_exit_tb(NULL, 0);
4482                                 dc->base.is_jmp = DISAS_NORETURN;
4483                                 break;
4484                             case 1: // htstate
4485                                 // XXX gen_op_wrhtstate();
4486                                 break;
4487                             case 3: // hintp
4488                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
4489                                 break;
4490                             case 5: // htba
4491                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
4492                                 break;
4493                             case 31: // hstick_cmpr
4494                                 {
4495                                     TCGv_ptr r_tickptr;

4497                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
4498                                     r_tickptr = tcg_temp_new_ptr();
4499                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4500                                                    offsetof(CPUSPARCState, hstick));
                                    /* Timer access counts as I/O under icount */
4501                                     if (tb_cflags(dc->base.tb) &
4502                                            CF_USE_ICOUNT) {
4503                                         gen_io_start();
4504                                     }
4505                                     gen_helper_tick_set_limit(r_tickptr,
4506                                                               cpu_hstick_cmpr);
4507                                     /* End TB to handle timer interrupt */
4508                                     dc->base.is_jmp = DISAS_EXIT;
4509                                 }
4510                                 break;
4511                             case 6: // hver readonly
4512                             default:
4513                                 goto illegal_insn;
4514                             }
4515 #endif
4516                         }
4517                         break;
4518 #endif
4519 #ifdef TARGET_SPARC64
4520                     case 0x2c: /* V9 movcc */
4521                         {
4522                             int cc = GET_FIELD_SP(insn, 11, 12);
4523                             int cond = GET_FIELD_SP(insn, 14, 17);
4524                             DisasCompare cmp;
4525                             TCGv dst;

                            /* Bit 18 set: integer condition codes, cc picks
                               icc (0) or xcc (2); clear: fp condition codes */
4527                             if (insn & (1 << 18)) {
4528                                 if (cc == 0) {
4529                                     gen_compare(&cmp, 0, cond, dc);
4530                                 } else if (cc == 2) {
4531                                     gen_compare(&cmp, 1, cond, dc);
4532                                 } else {
4533                                     goto illegal_insn;
4534                                 }
4535                             } else {
4536                                 gen_fcompare(&cmp, cc, cond);
4537                             }

4539                             /* The get_src2 above loaded the normal 13-bit
4540                                immediate field, not the 11-bit field we have
4541                                in movcc.  But it did handle the reg case.  */
4542                             if (IS_IMM) {
4543                                 simm = GET_FIELD_SPs(insn, 0, 10);
4544                                 tcg_gen_movi_tl(cpu_src2, simm);
4545                             }

                            /* rd = cond ? src2 : rd (old value preserved) */
4547                             dst = gen_load_gpr(dc, rd);
4548                             tcg_gen_movcond_tl(cmp.cond, dst,
4549                                                cmp.c1, cmp.c2,
4550                                                cpu_src2, dst);
4551                             gen_store_gpr(dc, rd, dst);
4552                             break;
4553                         }
4554                     case 0x2d: /* V9 sdivx */
                        /* 64-bit signed divide via helper (env needed so the
                           helper can raise the division trap) */
4555                         gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4556                         gen_store_gpr(dc, rd, cpu_dst);
4557                         break;
4558                     case 0x2e: /* V9 popc */
                        /* Population count of rs2 only; rs1 is not used */
4559                         tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
4560                         gen_store_gpr(dc, rd, cpu_dst);
4561                         break;
4562                     case 0x2f: /* V9 movr */
4563                         {
4564                             int cond = GET_FIELD_SP(insn, 10, 12);
4565                             DisasCompare cmp;
4566                             TCGv dst;

                            /* movr tests the contents of rs1 itself, not the
                               condition codes */
4568                             gen_compare_reg(&cmp, cond, cpu_src1);

4570                             /* The get_src2 above loaded the normal 13-bit
4571                                immediate field, not the 10-bit field we have
4572                                in movr.  But it did handle the reg case.  */
4573                             if (IS_IMM) {
4574                                 simm = GET_FIELD_SPs(insn, 0, 9);
4575                                 tcg_gen_movi_tl(cpu_src2, simm);
4576                             }

                            /* rd = cond ? src2 : rd (old value preserved) */
4578                             dst = gen_load_gpr(dc, rd);
4579                             tcg_gen_movcond_tl(cmp.cond, dst,
4580                                                cmp.c1, cmp.c2,
4581                                                cpu_src2, dst);
4582                             gen_store_gpr(dc, rd, dst);
4583                             break;
4584                         }
4585 #endif
4586                     default:
4587                         goto illegal_insn;
4588                     }
4589                 }
4590             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4591 #ifdef TARGET_SPARC64
                /* VIS: sub-opcode in opf; all of these ops use the FPU
                   register file, so trap first if the FPU is disabled */
4592                 int opf = GET_FIELD_SP(insn, 5, 13);
4593                 rs1 = GET_FIELD(insn, 13, 17);
4594                 rs2 = GET_FIELD(insn, 27, 31);
4595                 if (gen_trap_ifnofpu(dc)) {
4596                     goto jmp_insn;
4597                 }
4599                 switch (opf) {
4600                 case 0x000: /* VIS I edge8cc */
4601                     CHECK_FPU_FEATURE(dc, VIS1);
4602                     cpu_src1 = gen_load_gpr(dc, rs1);
4603                     cpu_src2 = gen_load_gpr(dc, rs2);
4604                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4605                     gen_store_gpr(dc, rd, cpu_dst);
4606                     break;
4607                 case 0x001: /* VIS II edge8n */
4608                     CHECK_FPU_FEATURE(dc, VIS2);
4609                     cpu_src1 = gen_load_gpr(dc, rs1);
4610                     cpu_src2 = gen_load_gpr(dc, rs2);
4611                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4612                     gen_store_gpr(dc, rd, cpu_dst);
4613                     break;
4614                 case 0x002: /* VIS I edge8lcc */
4615                     CHECK_FPU_FEATURE(dc, VIS1);
4616                     cpu_src1 = gen_load_gpr(dc, rs1);
4617                     cpu_src2 = gen_load_gpr(dc, rs2);
4618                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4619                     gen_store_gpr(dc, rd, cpu_dst);
4620                     break;
4621                 case 0x003: /* VIS II edge8ln */
4622                     CHECK_FPU_FEATURE(dc, VIS2);
4623                     cpu_src1 = gen_load_gpr(dc, rs1);
4624                     cpu_src2 = gen_load_gpr(dc, rs2);
4625                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4626                     gen_store_gpr(dc, rd, cpu_dst);
4627                     break;
4628                 case 0x004: /* VIS I edge16cc */
4629                     CHECK_FPU_FEATURE(dc, VIS1);
4630                     cpu_src1 = gen_load_gpr(dc, rs1);
4631                     cpu_src2 = gen_load_gpr(dc, rs2);
4632                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4633                     gen_store_gpr(dc, rd, cpu_dst);
4634                     break;
4635                 case 0x005: /* VIS II edge16n */
4636                     CHECK_FPU_FEATURE(dc, VIS2);
4637                     cpu_src1 = gen_load_gpr(dc, rs1);
4638                     cpu_src2 = gen_load_gpr(dc, rs2);
4639                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4640                     gen_store_gpr(dc, rd, cpu_dst);
4641                     break;
4642                 case 0x006: /* VIS I edge16lcc */
4643                     CHECK_FPU_FEATURE(dc, VIS1);
4644                     cpu_src1 = gen_load_gpr(dc, rs1);
4645                     cpu_src2 = gen_load_gpr(dc, rs2);
4646                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4647                     gen_store_gpr(dc, rd, cpu_dst);
4648                     break;
4649                 case 0x007: /* VIS II edge16ln */
4650                     CHECK_FPU_FEATURE(dc, VIS2);
4651                     cpu_src1 = gen_load_gpr(dc, rs1);
4652                     cpu_src2 = gen_load_gpr(dc, rs2);
4653                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4654                     gen_store_gpr(dc, rd, cpu_dst);
4655                     break;
4656                 case 0x008: /* VIS I edge32cc */
4657                     CHECK_FPU_FEATURE(dc, VIS1);
4658                     cpu_src1 = gen_load_gpr(dc, rs1);
4659                     cpu_src2 = gen_load_gpr(dc, rs2);
4660                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4661                     gen_store_gpr(dc, rd, cpu_dst);
4662                     break;
4663                 case 0x009: /* VIS II edge32n */
4664                     CHECK_FPU_FEATURE(dc, VIS2);
4665                     cpu_src1 = gen_load_gpr(dc, rs1);
4666                     cpu_src2 = gen_load_gpr(dc, rs2);
4667                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4668                     gen_store_gpr(dc, rd, cpu_dst);
4669                     break;
4670                 case 0x00a: /* VIS I edge32lcc */
4671                     CHECK_FPU_FEATURE(dc, VIS1);
4672                     cpu_src1 = gen_load_gpr(dc, rs1);
4673                     cpu_src2 = gen_load_gpr(dc, rs2);
4674                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4675                     gen_store_gpr(dc, rd, cpu_dst);
4676                     break;
4677                 case 0x00b: /* VIS II edge32ln */
4678                     CHECK_FPU_FEATURE(dc, VIS2);
4679                     cpu_src1 = gen_load_gpr(dc, rs1);
4680                     cpu_src2 = gen_load_gpr(dc, rs2);
4681                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4682                     gen_store_gpr(dc, rd, cpu_dst);
4683                     break;
4684                 case 0x010: /* VIS I array8 */
4685                     CHECK_FPU_FEATURE(dc, VIS1);
4686                     cpu_src1 = gen_load_gpr(dc, rs1);
4687                     cpu_src2 = gen_load_gpr(dc, rs2);
4688                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4689                     gen_store_gpr(dc, rd, cpu_dst);
4690                     break;
4691                 case 0x012: /* VIS I array16 */
4692                     CHECK_FPU_FEATURE(dc, VIS1);
4693                     cpu_src1 = gen_load_gpr(dc, rs1);
4694                     cpu_src2 = gen_load_gpr(dc, rs2);
                    /* array16 is the array8 address scaled by 2 */
4695                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4696                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4697                     gen_store_gpr(dc, rd, cpu_dst);
4698                     break;
4699                 case 0x014: /* VIS I array32 */
4700                     CHECK_FPU_FEATURE(dc, VIS1);
4701                     cpu_src1 = gen_load_gpr(dc, rs1);
4702                     cpu_src2 = gen_load_gpr(dc, rs2);
                    /* array32 is the array8 address scaled by 4 */
4703                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4704                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4705                     gen_store_gpr(dc, rd, cpu_dst);
4706                     break;
4707                 case 0x018: /* VIS I alignaddr */
4708                     CHECK_FPU_FEATURE(dc, VIS1);
4709                     cpu_src1 = gen_load_gpr(dc, rs1);
4710                     cpu_src2 = gen_load_gpr(dc, rs2);
4711                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4712                     gen_store_gpr(dc, rd, cpu_dst);
4713                     break;
4714                 case 0x01a: /* VIS I alignaddrl */
4715                     CHECK_FPU_FEATURE(dc, VIS1);
4716                     cpu_src1 = gen_load_gpr(dc, rs1);
4717                     cpu_src2 = gen_load_gpr(dc, rs2);
                    /* Last argument selects the "little-endian" variant */
4718                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4719                     gen_store_gpr(dc, rd, cpu_dst);
4720                     break;
4721                 case 0x019: /* VIS II bmask */
4722                     CHECK_FPU_FEATURE(dc, VIS2);
4723                     cpu_src1 = gen_load_gpr(dc, rs1);
4724                     cpu_src2 = gen_load_gpr(dc, rs2);
                    /* bmask: rd = rs1 + rs2, and the sum is also deposited
                       into the upper 32 bits of GSR (the mask field) */
4725                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4726                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4727                     gen_store_gpr(dc, rd, cpu_dst);
4728                     break;
4729                 case 0x020: /* VIS I fcmple16 */
4730                     CHECK_FPU_FEATURE(dc, VIS1);
4731                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4732                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4733                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4734                     gen_store_gpr(dc, rd, cpu_dst);
4735                     break;
4736                 case 0x022: /* VIS I fcmpne16 */
4737                     CHECK_FPU_FEATURE(dc, VIS1);
4738                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4739                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4740                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4741                     gen_store_gpr(dc, rd, cpu_dst);
4742                     break;
4743                 case 0x024: /* VIS I fcmple32 */
4744                     CHECK_FPU_FEATURE(dc, VIS1);
4745                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4746                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4747                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4748                     gen_store_gpr(dc, rd, cpu_dst);
4749                     break;
4750                 case 0x026: /* VIS I fcmpne32 */
4751                     CHECK_FPU_FEATURE(dc, VIS1);
4752                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4753                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4754                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4755                     gen_store_gpr(dc, rd, cpu_dst);
4756                     break;
4757                 case 0x028: /* VIS I fcmpgt16 */
4758                     CHECK_FPU_FEATURE(dc, VIS1);
4759                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4760                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4761                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4762                     gen_store_gpr(dc, rd, cpu_dst);
4763                     break;
4764                 case 0x02a: /* VIS I fcmpeq16 */
4765                     CHECK_FPU_FEATURE(dc, VIS1);
4766                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4767                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4768                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4769                     gen_store_gpr(dc, rd, cpu_dst);
4770                     break;
4771                 case 0x02c: /* VIS I fcmpgt32 */
4772                     CHECK_FPU_FEATURE(dc, VIS1);
4773                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4774                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4775                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4776                     gen_store_gpr(dc, rd, cpu_dst);
4777                     break;
4778                 case 0x02e: /* VIS I fcmpeq32 */
4779                     CHECK_FPU_FEATURE(dc, VIS1);
4780                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4781                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4782                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4783                     gen_store_gpr(dc, rd, cpu_dst);
4784                     break;
4785                 case 0x031: /* VIS I fmul8x16 */
4786                     CHECK_FPU_FEATURE(dc, VIS1);
4787                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4788                     break;
4789                 case 0x033: /* VIS I fmul8x16au */
4790                     CHECK_FPU_FEATURE(dc, VIS1);
4791                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4792                     break;
4793                 case 0x035: /* VIS I fmul8x16al */
4794                     CHECK_FPU_FEATURE(dc, VIS1);
4795                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4796                     break;
4797                 case 0x036: /* VIS I fmul8sux16 */
4798                     CHECK_FPU_FEATURE(dc, VIS1);
4799                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4800                     break;
4801                 case 0x037: /* VIS I fmul8ulx16 */
4802                     CHECK_FPU_FEATURE(dc, VIS1);
4803                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4804                     break;
4805                 case 0x038: /* VIS I fmuld8sux16 */
4806                     CHECK_FPU_FEATURE(dc, VIS1);
4807                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4808                     break;
4809                 case 0x039: /* VIS I fmuld8ulx16 */
4810                     CHECK_FPU_FEATURE(dc, VIS1);
4811                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4812                     break;
4813                 case 0x03a: /* VIS I fpack32 */
4814                     CHECK_FPU_FEATURE(dc, VIS1);
4815                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4816                     break;
4817                 case 0x03b: /* VIS I fpack16 */
4818                     CHECK_FPU_FEATURE(dc, VIS1);
4819                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4820                     cpu_dst_32 = gen_dest_fpr_F(dc);
4821                     gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4822                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4823                     break;
4824                 case 0x03d: /* VIS I fpackfix */
4825                     CHECK_FPU_FEATURE(dc, VIS1);
4826                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4827                     cpu_dst_32 = gen_dest_fpr_F(dc);
4828                     gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4829                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4830                     break;
4831                 case 0x03e: /* VIS I pdist */
4832                     CHECK_FPU_FEATURE(dc, VIS1);
4833                     gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4834                     break;
4835                 case 0x048: /* VIS I faligndata */
4836                     CHECK_FPU_FEATURE(dc, VIS1);
4837                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4838                     break;
4839                 case 0x04b: /* VIS I fpmerge */
4840                     CHECK_FPU_FEATURE(dc, VIS1);
4841                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4842                     break;
4843                 case 0x04c: /* VIS II bshuffle */
4844                     CHECK_FPU_FEATURE(dc, VIS2);
4845                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4846                     break;
4847                 case 0x04d: /* VIS I fexpand */
4848                     CHECK_FPU_FEATURE(dc, VIS1);
4849                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4850                     break;
4851                 case 0x050: /* VIS I fpadd16 */
4852                     CHECK_FPU_FEATURE(dc, VIS1);
4853                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4854                     break;
4855                 case 0x051: /* VIS I fpadd16s */
4856                     CHECK_FPU_FEATURE(dc, VIS1);
4857                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4858                     break;
4859                 case 0x052: /* VIS I fpadd32 */
4860                     CHECK_FPU_FEATURE(dc, VIS1);
4861                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4862                     break;
4863                 case 0x053: /* VIS I fpadd32s */
4864                     CHECK_FPU_FEATURE(dc, VIS1);
4865                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4866                     break;
4867                 case 0x054: /* VIS I fpsub16 */
4868                     CHECK_FPU_FEATURE(dc, VIS1);
4869                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4870                     break;
4871                 case 0x055: /* VIS I fpsub16s */
4872                     CHECK_FPU_FEATURE(dc, VIS1);
4873                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4874                     break;
4875                 case 0x056: /* VIS I fpsub32 */
4876                     CHECK_FPU_FEATURE(dc, VIS1);
4877                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4878                     break;
4879                 case 0x057: /* VIS I fpsub32s */
4880                     CHECK_FPU_FEATURE(dc, VIS1);
4881                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4882                     break;
4883                 case 0x060: /* VIS I fzero */
4884                     CHECK_FPU_FEATURE(dc, VIS1);
4885                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4886                     tcg_gen_movi_i64(cpu_dst_64, 0);
4887                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4888                     break;
4889                 case 0x061: /* VIS I fzeros */
4890                     CHECK_FPU_FEATURE(dc, VIS1);
4891                     cpu_dst_32 = gen_dest_fpr_F(dc);
4892                     tcg_gen_movi_i32(cpu_dst_32, 0);
4893                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4894                     break;
4895                 case 0x062: /* VIS I fnor */
4896                     CHECK_FPU_FEATURE(dc, VIS1);
4897                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4898                     break;
4899                 case 0x063: /* VIS I fnors */
4900                     CHECK_FPU_FEATURE(dc, VIS1);
4901                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4902                     break;
4903                 case 0x064: /* VIS I fandnot2 */
4904                     CHECK_FPU_FEATURE(dc, VIS1);
4905                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4906                     break;
4907                 case 0x065: /* VIS I fandnot2s */
4908                     CHECK_FPU_FEATURE(dc, VIS1);
4909                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4910                     break;
4911                 case 0x066: /* VIS I fnot2 */
4912                     CHECK_FPU_FEATURE(dc, VIS1);
4913                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4914                     break;
4915                 case 0x067: /* VIS I fnot2s */
4916                     CHECK_FPU_FEATURE(dc, VIS1);
4917                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4918                     break;
4919                 case 0x068: /* VIS I fandnot1 */
4920                     CHECK_FPU_FEATURE(dc, VIS1);
4921                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4922                     break;
4923                 case 0x069: /* VIS I fandnot1s */
4924                     CHECK_FPU_FEATURE(dc, VIS1);
4925                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4926                     break;
4927                 case 0x06a: /* VIS I fnot1 */
4928                     CHECK_FPU_FEATURE(dc, VIS1);
4929                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4930                     break;
4931                 case 0x06b: /* VIS I fnot1s */
4932                     CHECK_FPU_FEATURE(dc, VIS1);
4933                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4934                     break;
4935                 case 0x06c: /* VIS I fxor */
4936                     CHECK_FPU_FEATURE(dc, VIS1);
4937                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4938                     break;
4939                 case 0x06d: /* VIS I fxors */
4940                     CHECK_FPU_FEATURE(dc, VIS1);
4941                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4942                     break;
4943                 case 0x06e: /* VIS I fnand */
4944                     CHECK_FPU_FEATURE(dc, VIS1);
4945                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4946                     break;
4947                 case 0x06f: /* VIS I fnands */
4948                     CHECK_FPU_FEATURE(dc, VIS1);
4949                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4950                     break;
4951                 case 0x070: /* VIS I fand */
4952                     CHECK_FPU_FEATURE(dc, VIS1);
4953                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4954                     break;
4955                 case 0x071: /* VIS I fands */
4956                     CHECK_FPU_FEATURE(dc, VIS1);
4957                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4958                     break;
4959                 case 0x072: /* VIS I fxnor */
4960                     CHECK_FPU_FEATURE(dc, VIS1);
4961                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4962                     break;
4963                 case 0x073: /* VIS I fxnors */
4964                     CHECK_FPU_FEATURE(dc, VIS1);
4965                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4966                     break;
4967                 case 0x074: /* VIS I fsrc1 */
4968                     CHECK_FPU_FEATURE(dc, VIS1);
4969                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4970                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4971                     break;
4972                 case 0x075: /* VIS I fsrc1s */
4973                     CHECK_FPU_FEATURE(dc, VIS1);
4974                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4975                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4976                     break;
4977                 case 0x076: /* VIS I fornot2 */
4978                     CHECK_FPU_FEATURE(dc, VIS1);
4979                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4980                     break;
4981                 case 0x077: /* VIS I fornot2s */
4982                     CHECK_FPU_FEATURE(dc, VIS1);
4983                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4984                     break;
4985                 case 0x078: /* VIS I fsrc2 */
4986                     CHECK_FPU_FEATURE(dc, VIS1);
4987                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4988                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4989                     break;
4990                 case 0x079: /* VIS I fsrc2s */
4991                     CHECK_FPU_FEATURE(dc, VIS1);
4992                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4993                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4994                     break;
4995                 case 0x07a: /* VIS I fornot1 */
4996                     CHECK_FPU_FEATURE(dc, VIS1);
4997                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4998                     break;
4999                 case 0x07b: /* VIS I fornot1s */
5000                     CHECK_FPU_FEATURE(dc, VIS1);
5001                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
5002                     break;
5003                 case 0x07c: /* VIS I for */
5004                     CHECK_FPU_FEATURE(dc, VIS1);
5005                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
5006                     break;
5007                 case 0x07d: /* VIS I fors */
5008                     CHECK_FPU_FEATURE(dc, VIS1);
5009                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
5010                     break;
5011                 case 0x07e: /* VIS I fone */
5012                     CHECK_FPU_FEATURE(dc, VIS1);
5013                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5014                     tcg_gen_movi_i64(cpu_dst_64, -1);
5015                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5016                     break;
5017                 case 0x07f: /* VIS I fones */
5018                     CHECK_FPU_FEATURE(dc, VIS1);
5019                     cpu_dst_32 = gen_dest_fpr_F(dc);
5020                     tcg_gen_movi_i32(cpu_dst_32, -1);
5021                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5022                     break;
5023                 case 0x080: /* VIS I shutdown */
5024                 case 0x081: /* VIS II siam */
5025                     // XXX
5026                     goto illegal_insn;
5027                 default:
5028                     goto illegal_insn;
5029                 }
5030 #else
5031                 goto ncp_insn;
5032 #endif
5033             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
5034 #ifdef TARGET_SPARC64
5035                 goto illegal_insn;
5036 #else
5037                 goto ncp_insn;
5038 #endif
5039 #ifdef TARGET_SPARC64
5040             } else if (xop == 0x39) { /* V9 return */
5041                 save_state(dc);
5042                 cpu_src1 = get_src1(dc, insn);
5043                 cpu_tmp0 = tcg_temp_new();
5044                 if (IS_IMM) {   /* immediate */
5045                     simm = GET_FIELDs(insn, 19, 31);
5046                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5047                 } else {                /* register */
5048                     rs2 = GET_FIELD(insn, 27, 31);
5049                     if (rs2) {
5050                         cpu_src2 = gen_load_gpr(dc, rs2);
5051                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5052                     } else {
5053                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5054                     }
5055                 }
5056                 gen_helper_restore(cpu_env);
5057                 gen_mov_pc_npc(dc);
5058                 gen_check_align(cpu_tmp0, 3);
5059                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5060                 dc->npc = DYNAMIC_PC;
5061                 goto jmp_insn;
5062 #endif
5063             } else {
5064                 cpu_src1 = get_src1(dc, insn);
5065                 cpu_tmp0 = tcg_temp_new();
5066                 if (IS_IMM) {   /* immediate */
5067                     simm = GET_FIELDs(insn, 19, 31);
5068                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5069                 } else {                /* register */
5070                     rs2 = GET_FIELD(insn, 27, 31);
5071                     if (rs2) {
5072                         cpu_src2 = gen_load_gpr(dc, rs2);
5073                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5074                     } else {
5075                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5076                     }
5077                 }
5078                 switch (xop) {
5079                 case 0x38:      /* jmpl */
5080                     {
5081                         TCGv t = gen_dest_gpr(dc, rd);
5082                         tcg_gen_movi_tl(t, dc->pc);
5083                         gen_store_gpr(dc, rd, t);
5084 
5085                         gen_mov_pc_npc(dc);
5086                         gen_check_align(cpu_tmp0, 3);
5087                         gen_address_mask(dc, cpu_tmp0);
5088                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5089                         dc->npc = DYNAMIC_PC;
5090                     }
5091                     goto jmp_insn;
5092 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5093                 case 0x39:      /* rett, V9 return */
5094                     {
5095                         if (!supervisor(dc))
5096                             goto priv_insn;
5097                         gen_mov_pc_npc(dc);
5098                         gen_check_align(cpu_tmp0, 3);
5099                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5100                         dc->npc = DYNAMIC_PC;
5101                         gen_helper_rett(cpu_env);
5102                     }
5103                     goto jmp_insn;
5104 #endif
5105                 case 0x3b: /* flush */
5106                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
5107                         goto unimp_flush;
5108                     /* nop */
5109                     break;
5110                 case 0x3c:      /* save */
5111                     gen_helper_save(cpu_env);
5112                     gen_store_gpr(dc, rd, cpu_tmp0);
5113                     break;
5114                 case 0x3d:      /* restore */
5115                     gen_helper_restore(cpu_env);
5116                     gen_store_gpr(dc, rd, cpu_tmp0);
5117                     break;
5118 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5119                 case 0x3e:      /* V9 done/retry */
5120                     {
5121                         switch (rd) {
5122                         case 0:
5123                             if (!supervisor(dc))
5124                                 goto priv_insn;
5125                             dc->npc = DYNAMIC_PC;
5126                             dc->pc = DYNAMIC_PC;
5127                             if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
5128                                 gen_io_start();
5129                             }
5130                             gen_helper_done(cpu_env);
5131                             goto jmp_insn;
5132                         case 1:
5133                             if (!supervisor(dc))
5134                                 goto priv_insn;
5135                             dc->npc = DYNAMIC_PC;
5136                             dc->pc = DYNAMIC_PC;
5137                             if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
5138                                 gen_io_start();
5139                             }
5140                             gen_helper_retry(cpu_env);
5141                             goto jmp_insn;
5142                         default:
5143                             goto illegal_insn;
5144                         }
5145                     }
5146                     break;
5147 #endif
5148                 default:
5149                     goto illegal_insn;
5150                 }
5151             }
5152             break;
5153         }
5154         break;
5155     case 3:                     /* load/store instructions */
5156         {
5157             unsigned int xop = GET_FIELD(insn, 7, 12);
5158             /* ??? gen_address_mask prevents us from using a source
5159                register directly.  Always generate a temporary.  */
5160             TCGv cpu_addr = tcg_temp_new();
5161 
5162             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5163             if (xop == 0x3c || xop == 0x3e) {
5164                 /* V9 casa/casxa : no offset */
5165             } else if (IS_IMM) {     /* immediate */
5166                 simm = GET_FIELDs(insn, 19, 31);
5167                 if (simm != 0) {
5168                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5169                 }
5170             } else {            /* register */
5171                 rs2 = GET_FIELD(insn, 27, 31);
5172                 if (rs2 != 0) {
5173                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5174                 }
5175             }
5176             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5177                 (xop > 0x17 && xop <= 0x1d ) ||
5178                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5179                 TCGv cpu_val = gen_dest_gpr(dc, rd);
5180 
5181                 switch (xop) {
5182                 case 0x0:       /* ld, V9 lduw, load unsigned word */
5183                     gen_address_mask(dc, cpu_addr);
5184                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5185                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5186                     break;
5187                 case 0x1:       /* ldub, load unsigned byte */
5188                     gen_address_mask(dc, cpu_addr);
5189                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5190                                        dc->mem_idx, MO_UB);
5191                     break;
5192                 case 0x2:       /* lduh, load unsigned halfword */
5193                     gen_address_mask(dc, cpu_addr);
5194                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5195                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5196                     break;
5197                 case 0x3:       /* ldd, load double word */
5198                     if (rd & 1)
5199                         goto illegal_insn;
5200                     else {
5201                         TCGv_i64 t64;
5202 
5203                         gen_address_mask(dc, cpu_addr);
5204                         t64 = tcg_temp_new_i64();
5205                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5206                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5207                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5208                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5209                         gen_store_gpr(dc, rd + 1, cpu_val);
5210                         tcg_gen_shri_i64(t64, t64, 32);
5211                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5212                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5213                     }
5214                     break;
5215                 case 0x9:       /* ldsb, load signed byte */
5216                     gen_address_mask(dc, cpu_addr);
5217                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr, dc->mem_idx, MO_SB);
5218                     break;
5219                 case 0xa:       /* ldsh, load signed halfword */
5220                     gen_address_mask(dc, cpu_addr);
5221                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5222                                        dc->mem_idx, MO_TESW | MO_ALIGN);
5223                     break;
5224                 case 0xd:       /* ldstub */
5225                     gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5226                     break;
5227                 case 0x0f:
5228                     /* swap, swap register with memory. Also atomically */
5229                     CHECK_IU_FEATURE(dc, SWAP);
5230                     cpu_src1 = gen_load_gpr(dc, rd);
5231                     gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5232                              dc->mem_idx, MO_TEUL);
5233                     break;
5234 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5235                 case 0x10:      /* lda, V9 lduwa, load word alternate */
5236                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5237                     break;
5238                 case 0x11:      /* lduba, load unsigned byte alternate */
5239                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5240                     break;
5241                 case 0x12:      /* lduha, load unsigned halfword alternate */
5242                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5243                     break;
5244                 case 0x13:      /* ldda, load double word alternate */
5245                     if (rd & 1) {
5246                         goto illegal_insn;
5247                     }
5248                     gen_ldda_asi(dc, cpu_addr, insn, rd);
5249                     goto skip_move;
5250                 case 0x19:      /* ldsba, load signed byte alternate */
5251                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5252                     break;
5253                 case 0x1a:      /* ldsha, load signed halfword alternate */
5254                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5255                     break;
5256                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
5257                     gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5258                     break;
5259                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
5260                                    atomically */
5261                     CHECK_IU_FEATURE(dc, SWAP);
5262                     cpu_src1 = gen_load_gpr(dc, rd);
5263                     gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5264                     break;
5265 
5266 #ifndef TARGET_SPARC64
5267                 case 0x30: /* ldc */
5268                 case 0x31: /* ldcsr */
5269                 case 0x33: /* lddc */
5270                     goto ncp_insn;
5271 #endif
5272 #endif
5273 #ifdef TARGET_SPARC64
5274                 case 0x08: /* V9 ldsw */
5275                     gen_address_mask(dc, cpu_addr);
5276                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5277                                        dc->mem_idx, MO_TESL | MO_ALIGN);
5278                     break;
5279                 case 0x0b: /* V9 ldx */
5280                     gen_address_mask(dc, cpu_addr);
5281                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5282                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5283                     break;
5284                 case 0x18: /* V9 ldswa */
5285                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5286                     break;
5287                 case 0x1b: /* V9 ldxa */
5288                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5289                     break;
5290                 case 0x2d: /* V9 prefetch, no effect */
5291                     goto skip_move;
5292                 case 0x30: /* V9 ldfa */
5293                     if (gen_trap_ifnofpu(dc)) {
5294                         goto jmp_insn;
5295                     }
5296                     gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5297                     gen_update_fprs_dirty(dc, rd);
5298                     goto skip_move;
5299                 case 0x33: /* V9 lddfa */
5300                     if (gen_trap_ifnofpu(dc)) {
5301                         goto jmp_insn;
5302                     }
5303                     gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5304                     gen_update_fprs_dirty(dc, DFPREG(rd));
5305                     goto skip_move;
5306                 case 0x3d: /* V9 prefetcha, no effect */
5307                     goto skip_move;
5308                 case 0x32: /* V9 ldqfa */
5309                     CHECK_FPU_FEATURE(dc, FLOAT128);
5310                     if (gen_trap_ifnofpu(dc)) {
5311                         goto jmp_insn;
5312                     }
5313                     gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5314                     gen_update_fprs_dirty(dc, QFPREG(rd));
5315                     goto skip_move;
5316 #endif
5317                 default:
5318                     goto illegal_insn;
5319                 }
5320                 gen_store_gpr(dc, rd, cpu_val);
5321 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5322             skip_move: ;
5323 #endif
5324             } else if (xop >= 0x20 && xop < 0x24) {
5325                 if (gen_trap_ifnofpu(dc)) {
5326                     goto jmp_insn;
5327                 }
5328                 switch (xop) {
5329                 case 0x20:      /* ldf, load fpreg */
5330                     gen_address_mask(dc, cpu_addr);
5331                     cpu_dst_32 = gen_dest_fpr_F(dc);
5332                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5333                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5334                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5335                     break;
5336                 case 0x21:      /* ldfsr, V9 ldxfsr */
5337 #ifdef TARGET_SPARC64
5338                     gen_address_mask(dc, cpu_addr);
5339                     if (rd == 1) {
5340                         TCGv_i64 t64 = tcg_temp_new_i64();
5341                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5342                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5343                         gen_helper_ldxfsr(cpu_fsr, cpu_env, cpu_fsr, t64);
5344                         break;
5345                     }
5346 #endif
5347                     cpu_dst_32 = tcg_temp_new_i32();
5348                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5349                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5350                     gen_helper_ldfsr(cpu_fsr, cpu_env, cpu_fsr, cpu_dst_32);
5351                     break;
5352                 case 0x22:      /* ldqf, load quad fpreg */
5353                     CHECK_FPU_FEATURE(dc, FLOAT128);
5354                     gen_address_mask(dc, cpu_addr);
5355                     cpu_src1_64 = tcg_temp_new_i64();
5356                     tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5357                                         MO_TEUQ | MO_ALIGN_4);
5358                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5359                     cpu_src2_64 = tcg_temp_new_i64();
5360                     tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5361                                         MO_TEUQ | MO_ALIGN_4);
5362                     gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5363                     break;
5364                 case 0x23:      /* lddf, load double fpreg */
5365                     gen_address_mask(dc, cpu_addr);
5366                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5367                     tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5368                                         MO_TEUQ | MO_ALIGN_4);
5369                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5370                     break;
5371                 default:
5372                     goto illegal_insn;
5373                 }
5374             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5375                        xop == 0xe || xop == 0x1e) {
5376                 TCGv cpu_val = gen_load_gpr(dc, rd);
5377 
5378                 switch (xop) {
5379                 case 0x4: /* st, store word */
5380                     gen_address_mask(dc, cpu_addr);
5381                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5382                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5383                     break;
5384                 case 0x5: /* stb, store byte */
5385                     gen_address_mask(dc, cpu_addr);
5386                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr, dc->mem_idx, MO_UB);
5387                     break;
5388                 case 0x6: /* sth, store halfword */
5389                     gen_address_mask(dc, cpu_addr);
5390                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5391                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5392                     break;
5393                 case 0x7: /* std, store double word */
5394                     if (rd & 1)
5395                         goto illegal_insn;
5396                     else {
5397                         TCGv_i64 t64;
5398                         TCGv lo;
5399 
5400                         gen_address_mask(dc, cpu_addr);
5401                         lo = gen_load_gpr(dc, rd + 1);
5402                         t64 = tcg_temp_new_i64();
5403                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5404                         tcg_gen_qemu_st_i64(t64, cpu_addr,
5405                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5406                     }
5407                     break;
5408 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5409                 case 0x14: /* sta, V9 stwa, store word alternate */
5410                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5411                     break;
5412                 case 0x15: /* stba, store byte alternate */
5413                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5414                     break;
5415                 case 0x16: /* stha, store halfword alternate */
5416                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5417                     break;
5418                 case 0x17: /* stda, store double word alternate */
5419                     if (rd & 1) {
5420                         goto illegal_insn;
5421                     }
5422                     gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5423                     break;
5424 #endif
5425 #ifdef TARGET_SPARC64
5426                 case 0x0e: /* V9 stx */
5427                     gen_address_mask(dc, cpu_addr);
5428                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5429                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5430                     break;
5431                 case 0x1e: /* V9 stxa */
5432                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5433                     break;
5434 #endif
5435                 default:
5436                     goto illegal_insn;
5437                 }
5438             } else if (xop > 0x23 && xop < 0x28) {
5439                 if (gen_trap_ifnofpu(dc)) {
5440                     goto jmp_insn;
5441                 }
5442                 switch (xop) {
5443                 case 0x24: /* stf, store fpreg */
5444                     gen_address_mask(dc, cpu_addr);
5445                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
5446                     tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5447                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5448                     break;
5449                 case 0x25: /* stfsr, V9 stxfsr */
5450                     {
5451 #ifdef TARGET_SPARC64
5452                         gen_address_mask(dc, cpu_addr);
5453                         if (rd == 1) {
5454                             tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5455                                                dc->mem_idx, MO_TEUQ | MO_ALIGN);
5456                             break;
5457                         }
5458 #endif
5459                         tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5460                                            dc->mem_idx, MO_TEUL | MO_ALIGN);
5461                     }
5462                     break;
5463                 case 0x26:
5464 #ifdef TARGET_SPARC64
5465                     /* V9 stqf, store quad fpreg */
5466                     CHECK_FPU_FEATURE(dc, FLOAT128);
5467                     gen_address_mask(dc, cpu_addr);
5468                     /* ??? While stqf only requires 4-byte alignment, it is
5469                        legal for the cpu to signal the unaligned exception.
5470                        The OS trap handler is then required to fix it up.
5471                        For qemu, this avoids having to probe the second page
5472                        before performing the first write.  */
5473                     cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5474                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5475                                         dc->mem_idx, MO_TEUQ | MO_ALIGN_16);
5476                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5477                     cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5478                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5479                                         dc->mem_idx, MO_TEUQ);
5480                     break;
5481 #else /* !TARGET_SPARC64 */
5482                     /* stdfq, store floating point queue */
5483 #if defined(CONFIG_USER_ONLY)
5484                     goto illegal_insn;
5485 #else
5486                     if (!supervisor(dc))
5487                         goto priv_insn;
5488                     if (gen_trap_ifnofpu(dc)) {
5489                         goto jmp_insn;
5490                     }
5491                     goto nfq_insn;
5492 #endif
5493 #endif
5494                 case 0x27: /* stdf, store double fpreg */
5495                     gen_address_mask(dc, cpu_addr);
5496                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5497                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5498                                         MO_TEUQ | MO_ALIGN_4);
5499                     break;
5500                 default:
5501                     goto illegal_insn;
5502                 }
5503             } else if (xop > 0x33 && xop < 0x3f) {
5504                 switch (xop) {
5505 #ifdef TARGET_SPARC64
5506                 case 0x34: /* V9 stfa */
5507                     if (gen_trap_ifnofpu(dc)) {
5508                         goto jmp_insn;
5509                     }
5510                     gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5511                     break;
5512                 case 0x36: /* V9 stqfa */
5513                     {
5514                         CHECK_FPU_FEATURE(dc, FLOAT128);
5515                         if (gen_trap_ifnofpu(dc)) {
5516                             goto jmp_insn;
5517                         }
5518                         gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5519                     }
5520                     break;
5521                 case 0x37: /* V9 stdfa */
5522                     if (gen_trap_ifnofpu(dc)) {
5523                         goto jmp_insn;
5524                     }
5525                     gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5526                     break;
5527                 case 0x3e: /* V9 casxa */
5528                     rs2 = GET_FIELD(insn, 27, 31);
5529                     cpu_src2 = gen_load_gpr(dc, rs2);
5530                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5531                     break;
5532 #else
5533                 case 0x34: /* stc */
5534                 case 0x35: /* stcsr */
5535                 case 0x36: /* stdcq */
5536                 case 0x37: /* stdc */
5537                     goto ncp_insn;
5538 #endif
5539 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5540                 case 0x3c: /* V9 or LEON3 casa */
5541 #ifndef TARGET_SPARC64
5542                     CHECK_IU_FEATURE(dc, CASA);
5543 #endif
5544                     rs2 = GET_FIELD(insn, 27, 31);
5545                     cpu_src2 = gen_load_gpr(dc, rs2);
5546                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5547                     break;
5548 #endif
5549                 default:
5550                     goto illegal_insn;
5551                 }
5552             } else {
5553                 goto illegal_insn;
5554             }
5555         }
5556         break;
5557     }
5558     /* default case for non jump instructions */
5559     if (dc->npc == DYNAMIC_PC) {
5560         dc->pc = DYNAMIC_PC;
5561         gen_op_next_insn();
5562     } else if (dc->npc == JUMP_PC) {
5563         /* we can do a static jump */
5564         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5565         dc->base.is_jmp = DISAS_NORETURN;
5566     } else {
5567         dc->pc = dc->npc;
5568         dc->npc = dc->npc + 4;
5569     }
5570  jmp_insn:
5571     return;
5572  illegal_insn:
5573     gen_exception(dc, TT_ILL_INSN);
5574     return;
5575  unimp_flush:
5576     gen_exception(dc, TT_UNIMP_FLUSH);
5577     return;
5578 #if !defined(CONFIG_USER_ONLY)
5579  priv_insn:
5580     gen_exception(dc, TT_PRIV_INSN);
5581     return;
5582 #endif
5583  nfpu_insn:
5584     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5585     return;
5586 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5587  nfq_insn:
5588     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5589     return;
5590 #endif
5591 #ifndef TARGET_SPARC64
5592  ncp_insn:
5593     gen_exception(dc, TT_NCP_INSN);
5594     return;
5595 #endif
5596 }
5597 
5598 static void sparc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
5599 {
5600     DisasContext *dc = container_of(dcbase, DisasContext, base);
5601     CPUSPARCState *env = cs->env_ptr;
5602     int bound;
5603 
5604     dc->pc = dc->base.pc_first;
5605     dc->npc = (target_ulong)dc->base.tb->cs_base;
5606     dc->cc_op = CC_OP_DYNAMIC;
5607     dc->mem_idx = dc->base.tb->flags & TB_FLAG_MMU_MASK;
5608     dc->def = &env->def;
5609     dc->fpu_enabled = tb_fpu_enabled(dc->base.tb->flags);
5610     dc->address_mask_32bit = tb_am_enabled(dc->base.tb->flags);
5611 #ifndef CONFIG_USER_ONLY
5612     dc->supervisor = (dc->base.tb->flags & TB_FLAG_SUPER) != 0;
5613 #endif
5614 #ifdef TARGET_SPARC64
5615     dc->fprs_dirty = 0;
5616     dc->asi = (dc->base.tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
5617 #ifndef CONFIG_USER_ONLY
5618     dc->hypervisor = (dc->base.tb->flags & TB_FLAG_HYPER) != 0;
5619 #endif
5620 #endif
5621     /*
5622      * if we reach a page boundary, we stop generation so that the
5623      * PC of a TT_TFAULT exception is always in the right page
5624      */
5625     bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
5626     dc->base.max_insns = MIN(dc->base.max_insns, bound);
5627 }
5628 
static void sparc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
    /* Intentionally empty: SPARC needs no per-TB setup before the
       first instruction is translated. */
}
5632 
5633 static void sparc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
5634 {
5635     DisasContext *dc = container_of(dcbase, DisasContext, base);
5636 
5637     if (dc->npc & JUMP_PC) {
5638         assert(dc->jump_pc[1] == dc->pc + 4);
5639         tcg_gen_insn_start(dc->pc, dc->jump_pc[0] | JUMP_PC);
5640     } else {
5641         tcg_gen_insn_start(dc->pc, dc->npc);
5642     }
5643 }
5644 
5645 static void sparc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
5646 {
5647     DisasContext *dc = container_of(dcbase, DisasContext, base);
5648     CPUSPARCState *env = cs->env_ptr;
5649     unsigned int insn;
5650 
5651     insn = translator_ldl(env, &dc->base, dc->pc);
5652     dc->base.pc_next += 4;
5653     disas_sparc_insn(dc, insn);
5654 
5655     if (dc->base.is_jmp == DISAS_NORETURN) {
5656         return;
5657     }
5658     if (dc->pc != dc->base.pc_next) {
5659         dc->base.is_jmp = DISAS_TOO_MANY;
5660     }
5661 }
5662 
5663 static void sparc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
5664 {
5665     DisasContext *dc = container_of(dcbase, DisasContext, base);
5666 
5667     switch (dc->base.is_jmp) {
5668     case DISAS_NEXT:
5669     case DISAS_TOO_MANY:
5670         if (dc->pc != DYNAMIC_PC &&
5671             (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5672             /* static PC and NPC: we can use direct chaining */
5673             gen_goto_tb(dc, 0, dc->pc, dc->npc);
5674         } else {
5675             if (dc->pc != DYNAMIC_PC) {
5676                 tcg_gen_movi_tl(cpu_pc, dc->pc);
5677             }
5678             save_npc(dc);
5679             tcg_gen_exit_tb(NULL, 0);
5680         }
5681         break;
5682 
5683     case DISAS_NORETURN:
5684        break;
5685 
5686     case DISAS_EXIT:
5687         /* Exit TB */
5688         save_state(dc);
5689         tcg_gen_exit_tb(NULL, 0);
5690         break;
5691 
5692     default:
5693         g_assert_not_reached();
5694     }
5695 }
5696 
5697 static void sparc_tr_disas_log(const DisasContextBase *dcbase,
5698                                CPUState *cpu, FILE *logfile)
5699 {
5700     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
5701     target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
5702 }
5703 
/* Hooks wiring the SPARC front end into the generic translator loop. */
static const TranslatorOps sparc_tr_ops = {
    .init_disas_context = sparc_tr_init_disas_context,
    .tb_start           = sparc_tr_tb_start,
    .insn_start         = sparc_tr_insn_start,
    .translate_insn     = sparc_tr_translate_insn,
    .tb_stop            = sparc_tr_tb_stop,
    .disas_log          = sparc_tr_disas_log,
};
5712 
5713 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
5714                            target_ulong pc, void *host_pc)
5715 {
5716     DisasContext dc = {};
5717 
5718     translator_loop(cs, tb, max_insns, pc, host_pc, &sparc_tr_ops, &dc.base);
5719 }
5720 
5721 void sparc_tcg_init(void)
5722 {
5723     static const char gregnames[32][4] = {
5724         "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
5725         "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
5726         "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
5727         "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
5728     };
5729     static const char fregnames[32][4] = {
5730         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5731         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5732         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5733         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5734     };
5735 
5736     static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
5737 #ifdef TARGET_SPARC64
5738         { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
5739         { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
5740 #else
5741         { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
5742 #endif
5743         { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
5744         { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
5745     };
5746 
5747     static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
5748 #ifdef TARGET_SPARC64
5749         { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
5750         { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
5751         { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
5752         { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
5753           "hstick_cmpr" },
5754         { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
5755         { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
5756         { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
5757         { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
5758         { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
5759 #endif
5760         { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
5761         { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
5762         { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
5763         { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
5764         { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
5765         { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
5766         { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
5767         { &cpu_y, offsetof(CPUSPARCState, y), "y" },
5768 #ifndef CONFIG_USER_ONLY
5769         { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
5770 #endif
5771     };
5772 
5773     unsigned int i;
5774 
5775     cpu_regwptr = tcg_global_mem_new_ptr(cpu_env,
5776                                          offsetof(CPUSPARCState, regwptr),
5777                                          "regwptr");
5778 
5779     for (i = 0; i < ARRAY_SIZE(r32); ++i) {
5780         *r32[i].ptr = tcg_global_mem_new_i32(cpu_env, r32[i].off, r32[i].name);
5781     }
5782 
5783     for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
5784         *rtl[i].ptr = tcg_global_mem_new(cpu_env, rtl[i].off, rtl[i].name);
5785     }
5786 
5787     cpu_regs[0] = NULL;
5788     for (i = 1; i < 8; ++i) {
5789         cpu_regs[i] = tcg_global_mem_new(cpu_env,
5790                                          offsetof(CPUSPARCState, gregs[i]),
5791                                          gregnames[i]);
5792     }
5793 
5794     for (i = 8; i < 32; ++i) {
5795         cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
5796                                          (i - 8) * sizeof(target_ulong),
5797                                          gregnames[i]);
5798     }
5799 
5800     for (i = 0; i < TARGET_DPREGS; i++) {
5801         cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
5802                                             offsetof(CPUSPARCState, fpr[i]),
5803                                             fregnames[i]);
5804     }
5805 }
5806 
5807 void sparc_restore_state_to_opc(CPUState *cs,
5808                                 const TranslationBlock *tb,
5809                                 const uint64_t *data)
5810 {
5811     SPARCCPU *cpu = SPARC_CPU(cs);
5812     CPUSPARCState *env = &cpu->env;
5813     target_ulong pc = data[0];
5814     target_ulong npc = data[1];
5815 
5816     env->pc = pc;
5817     if (npc == DYNAMIC_PC) {
5818         /* dynamic NPC: already stored */
5819     } else if (npc & JUMP_PC) {
5820         /* jump PC: use 'cond' and the jump targets of the translation */
5821         if (env->cond) {
5822             env->npc = npc & ~3;
5823         } else {
5824             env->npc = pc + 4;
5825         }
5826     } else {
5827         env->npc = npc;
5828     }
5829 }
5830