xref: /openbmc/qemu/target/sparc/translate.c (revision 5f88dd43)
1 /*
2    SPARC translation
3 
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6 
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2.1 of the License, or (at your option) any later version.
11 
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16 
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg/tcg-op.h"
28 
29 #include "exec/helper-gen.h"
30 
31 #include "exec/translator.h"
32 #include "exec/log.h"
33 #include "asi.h"
34 
35 #define HELPER_H "helper.h"
36 #include "exec/helper-info.c.inc"
37 #undef  HELPER_H
38 
/* Symbolic values stored in dc->pc / dc->npc when the address is not
   known at translation time.  Real instruction addresses are 4-aligned,
   so these low-bit tags can never collide with a concrete PC. */
/* Dynamic PC, must exit to main loop. */
#define DYNAMIC_PC         1
/* Dynamic PC, one of two values according to jump_pc[T2]. */
#define JUMP_PC            2
/* Dynamic PC, may lookup next TB. */
#define DYNAMIC_PC_LOOKUP  3

/* DisasJumpType value: force an exit from the TB back to the main loop. */
#define DISAS_EXIT  DISAS_TARGET_0
47 
/* global register indexes (TCG globals backed by fields of CPUSPARCState) */
static TCGv_ptr cpu_regwptr;
/* Operands/result of the last flag-setting op, for lazy CC evaluation. */
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc;
static TCGv cpu_regs[32];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
/* Scratch global holding the most recently evaluated branch condition. */
static TCGv cpu_cond;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
#else
static TCGv cpu_wim;
#endif
/* Floating point registers: two 32-bit regs packed per 64-bit element,
   even-numbered single reg in the high half (see gen_load_fpr_F). */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];
70 
/* Per-translation-block disassembly state. */
typedef struct DisasContext {
    DisasContextBase base;
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int mem_idx;             /* index selecting the MMU mode for accesses */
    bool fpu_enabled;        /* FPU usable; checked before FP insns */
    bool address_mask_32bit; /* truncate addresses to 32 bits (AM_CHECK) */
#ifndef CONFIG_USER_ONLY
    bool supervisor;         /* code being translated runs privileged */
#ifdef TARGET_SPARC64
    bool hypervisor;         /* code being translated runs in hypervisor mode */
#endif
#endif

    uint32_t cc_op;  /* current CC operation */
    sparc_def_t *def;        /* CPU model definition */
#ifdef TARGET_SPARC64
    int fprs_dirty;          /* FPRS dirty bits already set within this TB */
    int asi;                 /* ASI field from the TB flags */
#endif
} DisasContext;
93 
/* A comparison to be emitted later: COND applied to (c1, c2).
   When is_bool is set, c1 already holds a 0/1 truth value and
   c2 is the constant zero (cond is TCG_COND_NE). */
typedef struct {
    TCGCond cond;
    bool is_bool;
    TCGv c1, c2;
} DisasCompare;
99 
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants of the two field extractors above. */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
/* V9 double/quad FP register numbering: bit 0 of the 5-bit insn field
   supplies bit 5 of the architectural register number. */
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
121 
/*
 * Sign-extend the low LEN bits of X (1 <= LEN <= 32) into a full int,
 * e.g. sign_extend(0xff, 8) == -1 and sign_extend(0x7f, 8) == 127.
 *
 * The classic "(x << n) >> n" idiom left-shifts a signed value into or
 * through the sign bit, which is undefined behaviour in C; use the
 * well-defined mask/xor/subtract formulation instead.
 */
static int sign_extend(int x, int len)
{
    uint32_t sign = 1u << (len - 1);
    /* Mask of the low LEN bits; the two-step shift makes LEN == 32 work. */
    uint32_t mask = (sign << 1) - 1;

    return (int)((((uint32_t)x & mask) ^ sign) - sign);
}
127 
/* Bit 13 ("i" field) of an instruction selects the immediate operand form. */
#define IS_IMM (insn & (1<<13))
129 
130 static void gen_update_fprs_dirty(DisasContext *dc, int rd)
131 {
132 #if defined(TARGET_SPARC64)
133     int bit = (rd < 32) ? 1 : 2;
134     /* If we know we've already set this bit within the TB,
135        we can avoid setting it again.  */
136     if (!(dc->fprs_dirty & bit)) {
137         dc->fprs_dirty |= bit;
138         tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
139     }
140 #endif
141 }
142 
143 /* floating point registers moves */
144 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
145 {
146     TCGv_i32 ret = tcg_temp_new_i32();
147     if (src & 1) {
148         tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
149     } else {
150         tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
151     }
152     return ret;
153 }
154 
155 static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
156 {
157     TCGv_i64 t = tcg_temp_new_i64();
158 
159     tcg_gen_extu_i32_i64(t, v);
160     tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
161                         (dst & 1 ? 0 : 32), 32);
162     gen_update_fprs_dirty(dc, dst);
163 }
164 
/* Return a fresh temp to hold a single-precision result; the value is
   only committed to the register file via gen_store_fpr_F(). */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return tcg_temp_new_i32();
}
169 
170 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
171 {
172     src = DFPREG(src);
173     return cpu_fpr[src / 2];
174 }
175 
176 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
177 {
178     dst = DFPREG(dst);
179     tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
180     gen_update_fprs_dirty(dc, dst);
181 }
182 
/* Double-precision results may be generated directly into the backing
   register; the caller is expected to mark FPRS dirty afterwards. */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
187 
188 static void gen_op_load_fpr_QT0(unsigned int src)
189 {
190     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
191                    offsetof(CPU_QuadU, ll.upper));
192     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
193                    offsetof(CPU_QuadU, ll.lower));
194 }
195 
196 static void gen_op_load_fpr_QT1(unsigned int src)
197 {
198     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
199                    offsetof(CPU_QuadU, ll.upper));
200     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
201                    offsetof(CPU_QuadU, ll.lower));
202 }
203 
204 static void gen_op_store_QT0_fpr(unsigned int dst)
205 {
206     tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
207                    offsetof(CPU_QuadU, ll.upper));
208     tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
209                    offsetof(CPU_QuadU, ll.lower));
210 }
211 
212 static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
213                             TCGv_i64 v1, TCGv_i64 v2)
214 {
215     dst = QFPREG(dst);
216 
217     tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
218     tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
219     gen_update_fprs_dirty(dc, dst);
220 }
221 
222 #ifdef TARGET_SPARC64
223 static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
224 {
225     src = QFPREG(src);
226     return cpu_fpr[src / 2];
227 }
228 
229 static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
230 {
231     src = QFPREG(src);
232     return cpu_fpr[src / 2 + 1];
233 }
234 
235 static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
236 {
237     rd = QFPREG(rd);
238     rs = QFPREG(rs);
239 
240     tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
241     tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
242     gen_update_fprs_dirty(dc, rd);
243 }
244 #endif
245 
/* moves */
#ifdef CONFIG_USER_ONLY
/* User-mode emulation never executes privileged code. */
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->hypervisor)
/* Hypervisor mode implies supervisor privilege as well. */
#define supervisor(dc) (dc->supervisor | dc->hypervisor)
#else
#define supervisor(dc) (dc->supervisor)
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
/* True when addresses must be truncated to 32 bits (see gen_address_mask). */
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
/* A 32-bit ABI process always uses 32-bit addresses. */
#define AM_CHECK(dc) (1)
#endif
#endif
268 
269 static void gen_address_mask(DisasContext *dc, TCGv addr)
270 {
271 #ifdef TARGET_SPARC64
272     if (AM_CHECK(dc))
273         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
274 #endif
275 }
276 
277 static TCGv gen_load_gpr(DisasContext *dc, int reg)
278 {
279     if (reg > 0) {
280         assert(reg < 32);
281         return cpu_regs[reg];
282     } else {
283         TCGv t = tcg_temp_new();
284         tcg_gen_movi_tl(t, 0);
285         return t;
286     }
287 }
288 
289 static void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
290 {
291     if (reg > 0) {
292         assert(reg < 32);
293         tcg_gen_mov_tl(cpu_regs[reg], v);
294     }
295 }
296 
297 static TCGv gen_dest_gpr(DisasContext *dc, int reg)
298 {
299     if (reg > 0) {
300         assert(reg < 32);
301         return cpu_regs[reg];
302     } else {
303         return tcg_temp_new();
304     }
305 }
306 
307 static bool use_goto_tb(DisasContext *s, target_ulong pc, target_ulong npc)
308 {
309     return translator_use_goto_tb(&s->base, pc) &&
310            translator_use_goto_tb(&s->base, npc);
311 }
312 
/*
 * End the TB with a jump to PC (with delay-slot NPC).  When both
 * targets qualify, chain directly via goto_tb/exit_tb — tb_num (0 or 1)
 * selects which of the TB's two chain slots is used, and the pc/npc
 * stores sit between goto_tb and exit_tb per the standard pattern.
 * Otherwise store pc/npc and do a dynamic lookup of the next TB.
 */
static void gen_goto_tb(DisasContext *s, int tb_num,
                        target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(s->base.tb, tb_num);
    } else {
        /* jump to another page: we can use an indirect jump */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_lookup_and_goto_ptr();
    }
}
329 
// XXX suboptimal
/* Extract one PSR flag from the 32-bit PSR image SRC into bit 0 of REG. */

/* N (negative) flag. */
static void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}

/* Z (zero) flag. */
static void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}

/* V (overflow) flag. */
static void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}

/* C (carry) flag. */
static void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
354 
/*
 * dst = src1 + src2, latching the operands and result into the
 * cpu_cc_* globals so the condition codes can be evaluated lazily
 * later; callers update cc_op accordingly.
 */
static void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
362 
/*
 * Recover the 32-bit carry out of a previous add whose operands and
 * result are still in cpu_cc_src/cpu_cc_dst: carry = (u32)dst < (u32)src.
 * Returns a new i32 temp holding 0 or 1.
 */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    /* Compare only the low 32 bits of the 64-bit cc values. */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
383 
/*
 * Recover the 32-bit borrow out of a previous subtract whose operands
 * are still in cpu_cc_src/cpu_cc_src2: borrow = (u32)src1 < (u32)src2.
 * Returns a new i32 temp holding 0 or 1.
 */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    /* Compare only the low 32 bits of the 64-bit cc values. */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
404 
/*
 * ADDX/ADDC: dst = src1 + src2 + C, obtaining the carry as cheaply as
 * the current lazy-cc state allows:
 *  - after logic/div ops the carry is known to be zero;
 *  - after an add the carry can be recomputed from the saved operands
 *    (on 32-bit targets it is folded into a single add2);
 *  - after a subtract the borrow is recomputed similarly;
 *  - otherwise fall back to the compute_C_icc helper.
 * When update_cc is set, operands/result are latched into cpu_cc_*
 * and cc_op becomes CC_OP_ADDX.
 */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 0/1 carry to a target-long value and add it in. */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
469 
/*
 * dst = src1 - src2, latching the operands and result into the
 * cpu_cc_* globals for lazy condition-code evaluation; callers
 * update cc_op accordingly.
 */
static void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
477 
/*
 * SUBX/SUBC: dst = src1 - src2 - C, the mirror image of
 * gen_op_addx_int: the borrow is zero after logic/div ops, can be
 * recomputed after an add, folded into a single sub2 after a subtract
 * on 32-bit targets, and otherwise comes from the compute_C_icc
 * helper.  When update_cc is set, cc_op becomes CC_OP_SUBX.
 */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 0/1 borrow to a target-long value and subtract it. */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
542 
/*
 * MULScc: one step of the V8 multiply-step algorithm.  The addend
 * (src2) is zeroed when bit 0 of Y is clear; Y is shifted right with
 * bit 0 of src1 inserted at bit 31; src1 is shifted right with the
 * old (N ^ V) inserted at bit 31; the two are added with the result
 * latched into cpu_cc_* for CC_OP_ADD-style flag evaluation.
 */
static void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_constant_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
581 
/*
 * UMUL/SMUL core: 32 x 32 -> 64 bit multiply of the low words of
 * src1/src2 (sign- or zero-extended per sign_ext).  Y receives the
 * high 32 bits of the product; dst receives the low 32 bits on a
 * 32-bit target, or the full 64-bit product on a 64-bit target
 * (where TCGv is an i64).
 */
static void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    /* The double-width multiply yields the high word directly into Y. */
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    /* Extend the truncated 32-bit operands to 64 bits. */
    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
606 
/* UMUL: unsigned 32x32 -> 64 multiply; Y receives the high word. */
static void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

/* SMUL: signed 32x32 -> 64 multiply; Y receives the high word. */
static void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
618 
/* Integer condition evaluators: each computes the 0/1 truth value of
   one V8 condition from the PSR image SRC and leaves it in DST. */

// 1: always
static void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z: equal
static void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V): less or equal, signed
static void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
}

// N ^ V: less, signed
static void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
}

// C | Z: less or equal, unsigned
static void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
}

// C: carry set (less, unsigned)
static void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V: overflow set
static void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0: never
static void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N: negative
static void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z: not equal
static void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V)): greater, signed
static void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V): greater or equal, signed
static void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z): greater, unsigned
static void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C: carry clear (greater or equal, unsigned)
static void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N: positive or zero
static void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V: overflow clear
static void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
732 
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered

  The evaluators below compute the 0/1 truth value of one FP condition
  from the FSR image SRC (fcc_offset selects the fcc field) into DST.
*/
/* Extract FCC0 of the selected fcc field into bit 0 of REG. */
static void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Extract FCC1 of the selected fcc field into bit 0 of REG. */
static void gen_mov_reg_FCC1(TCGv reg, TCGv src, unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

// !0: FCC0 | FCC1
static void gen_op_eval_fbne(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
}

// 1 or 2: FCC0 ^ FCC1
static void gen_op_eval_fblg(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
}

// 1 or 3: FCC0
static void gen_op_eval_fbul(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static void gen_op_eval_fbl(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
}

// 2 or 3: FCC1
static void gen_op_eval_fbug(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static void gen_op_eval_fbg(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
}

// 3: FCC0 & FCC1
static void gen_op_eval_fbu(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
}

// 0: !(FCC0 | FCC1)
static void gen_op_eval_fbe(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static void gen_op_eval_fbue(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static void gen_op_eval_fbge(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static void gen_op_eval_fbuge(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static void gen_op_eval_fble(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static void gen_op_eval_fbule(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static void gen_op_eval_fbo(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
873 
/*
 * Two-way conditional exit: continue at PC1 when R_COND is non-zero,
 * otherwise at PC2.  Each arm chains to its successor TB through its
 * own goto_tb slot (0 for taken, 1 for not taken).
 */
static void gen_branch2(DisasContext *dc, target_ulong pc1,
                        target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
886 
/*
 * Conditional branch with the annul bit set: when cpu_cond is non-zero
 * the delay slot at npc executes and control continues at PC1; when it
 * is zero the delay slot is annulled and execution resumes at npc + 4.
 * Ends the TB.
 */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->base.is_jmp = DISAS_NORETURN;
}
901 
/*
 * Conditional branch without annul: the delay slot always executes,
 * and the npc that follows it is PC1 if cpu_cond is non-zero, npc + 4
 * otherwise.  With a concrete npc the decision is deferred via the
 * JUMP_PC tag and dc->jump_pc[]; with a dynamic npc the select is
 * emitted immediately.
 */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (npc & 3) {
        switch (npc) {
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            /* npc is only known at run time: advance pc and select
               the new npc with a movcond. */
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
            tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc,
                               cpu_cond, tcg_constant_tl(0),
                               tcg_constant_tl(pc1), cpu_npc);
            dc->pc = npc;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        dc->pc = npc;
        dc->jump_pc[0] = pc1;
        dc->jump_pc[1] = npc + 4;
        dc->npc = JUMP_PC;
    }
}
927 
928 static void gen_generic_branch(DisasContext *dc)
929 {
930     TCGv npc0 = tcg_constant_tl(dc->jump_pc[0]);
931     TCGv npc1 = tcg_constant_tl(dc->jump_pc[1]);
932     TCGv zero = tcg_constant_tl(0);
933 
934     tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
935 }
936 
937 /* call this function before using the condition register as it may
938    have been set for a jump */
939 static void flush_cond(DisasContext *dc)
940 {
941     if (dc->npc == JUMP_PC) {
942         gen_generic_branch(dc);
943         dc->npc = DYNAMIC_PC_LOOKUP;
944     }
945 }
946 
/*
 * Make the cpu_npc global match dc->npc.  A pending JUMP_PC is
 * resolved with gen_generic_branch; for DYNAMIC_PC values cpu_npc is
 * already up to date; a concrete npc is simply stored.
 */
static void save_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            gen_generic_branch(dc);
            dc->npc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
965 
966 static void update_psr(DisasContext *dc)
967 {
968     if (dc->cc_op != CC_OP_FLAGS) {
969         dc->cc_op = CC_OP_FLAGS;
970         gen_helper_compute_psr(cpu_env);
971     }
972 }
973 
/* Flush the translation-time pc/npc into the CPU state globals. */
static void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
979 
/* Raise exception WHICH at the current pc/npc and terminate the TB. */
static void gen_exception(DisasContext *dc, int which)
{
    save_state(dc);
    gen_helper_raise_exception(cpu_env, tcg_constant_i32(which));
    dc->base.is_jmp = DISAS_NORETURN;
}
986 
/* Emit a runtime alignment check of ADDR against MASK; the helper
   presumably traps when any masked bit is set — see helper_check_align. */
static void gen_check_align(TCGv addr, int mask)
{
    gen_helper_check_align(cpu_env, addr, tcg_constant_i32(mask));
}
991 
/*
 * Advance pc into the delay slot: copy npc (symbolic or concrete)
 * into pc, resolving a pending JUMP_PC selection first.
 */
static void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            gen_generic_branch(dc);
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = dc->npc;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        dc->pc = dc->npc;
    }
}
1013 
/* Advance to the next sequential instruction: pc = npc, npc += 4. */
static void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1019 
1020 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1021                         DisasContext *dc)
1022 {
1023     static int subcc_cond[16] = {
1024         TCG_COND_NEVER,
1025         TCG_COND_EQ,
1026         TCG_COND_LE,
1027         TCG_COND_LT,
1028         TCG_COND_LEU,
1029         TCG_COND_LTU,
1030         -1, /* neg */
1031         -1, /* overflow */
1032         TCG_COND_ALWAYS,
1033         TCG_COND_NE,
1034         TCG_COND_GT,
1035         TCG_COND_GE,
1036         TCG_COND_GTU,
1037         TCG_COND_GEU,
1038         -1, /* pos */
1039         -1, /* no overflow */
1040     };
1041 
1042     static int logic_cond[16] = {
1043         TCG_COND_NEVER,
1044         TCG_COND_EQ,     /* eq:  Z */
1045         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1046         TCG_COND_LT,     /* lt:  N ^ V -> N */
1047         TCG_COND_EQ,     /* leu: C | Z -> Z */
1048         TCG_COND_NEVER,  /* ltu: C -> 0 */
1049         TCG_COND_LT,     /* neg: N */
1050         TCG_COND_NEVER,  /* vs:  V -> 0 */
1051         TCG_COND_ALWAYS,
1052         TCG_COND_NE,     /* ne:  !Z */
1053         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1054         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1055         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1056         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1057         TCG_COND_GE,     /* pos: !N */
1058         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1059     };
1060 
1061     TCGv_i32 r_src;
1062     TCGv r_dst;
1063 
1064 #ifdef TARGET_SPARC64
1065     if (xcc) {
1066         r_src = cpu_xcc;
1067     } else {
1068         r_src = cpu_psr;
1069     }
1070 #else
1071     r_src = cpu_psr;
1072 #endif
1073 
1074     switch (dc->cc_op) {
1075     case CC_OP_LOGIC:
1076         cmp->cond = logic_cond[cond];
1077     do_compare_dst_0:
1078         cmp->is_bool = false;
1079         cmp->c2 = tcg_constant_tl(0);
1080 #ifdef TARGET_SPARC64
1081         if (!xcc) {
1082             cmp->c1 = tcg_temp_new();
1083             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1084             break;
1085         }
1086 #endif
1087         cmp->c1 = cpu_cc_dst;
1088         break;
1089 
1090     case CC_OP_SUB:
1091         switch (cond) {
1092         case 6:  /* neg */
1093         case 14: /* pos */
1094             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1095             goto do_compare_dst_0;
1096 
1097         case 7: /* overflow */
1098         case 15: /* !overflow */
1099             goto do_dynamic;
1100 
1101         default:
1102             cmp->cond = subcc_cond[cond];
1103             cmp->is_bool = false;
1104 #ifdef TARGET_SPARC64
1105             if (!xcc) {
1106                 /* Note that sign-extension works for unsigned compares as
1107                    long as both operands are sign-extended.  */
1108                 cmp->c1 = tcg_temp_new();
1109                 cmp->c2 = tcg_temp_new();
1110                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1111                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1112                 break;
1113             }
1114 #endif
1115             cmp->c1 = cpu_cc_src;
1116             cmp->c2 = cpu_cc_src2;
1117             break;
1118         }
1119         break;
1120 
1121     default:
1122     do_dynamic:
1123         gen_helper_compute_psr(cpu_env);
1124         dc->cc_op = CC_OP_FLAGS;
1125         /* FALLTHRU */
1126 
1127     case CC_OP_FLAGS:
1128         /* We're going to generate a boolean result.  */
1129         cmp->cond = TCG_COND_NE;
1130         cmp->is_bool = true;
1131         cmp->c1 = r_dst = tcg_temp_new();
1132         cmp->c2 = tcg_constant_tl(0);
1133 
1134         switch (cond) {
1135         case 0x0:
1136             gen_op_eval_bn(r_dst);
1137             break;
1138         case 0x1:
1139             gen_op_eval_be(r_dst, r_src);
1140             break;
1141         case 0x2:
1142             gen_op_eval_ble(r_dst, r_src);
1143             break;
1144         case 0x3:
1145             gen_op_eval_bl(r_dst, r_src);
1146             break;
1147         case 0x4:
1148             gen_op_eval_bleu(r_dst, r_src);
1149             break;
1150         case 0x5:
1151             gen_op_eval_bcs(r_dst, r_src);
1152             break;
1153         case 0x6:
1154             gen_op_eval_bneg(r_dst, r_src);
1155             break;
1156         case 0x7:
1157             gen_op_eval_bvs(r_dst, r_src);
1158             break;
1159         case 0x8:
1160             gen_op_eval_ba(r_dst);
1161             break;
1162         case 0x9:
1163             gen_op_eval_bne(r_dst, r_src);
1164             break;
1165         case 0xa:
1166             gen_op_eval_bg(r_dst, r_src);
1167             break;
1168         case 0xb:
1169             gen_op_eval_bge(r_dst, r_src);
1170             break;
1171         case 0xc:
1172             gen_op_eval_bgu(r_dst, r_src);
1173             break;
1174         case 0xd:
1175             gen_op_eval_bcc(r_dst, r_src);
1176             break;
1177         case 0xe:
1178             gen_op_eval_bpos(r_dst, r_src);
1179             break;
1180         case 0xf:
1181             gen_op_eval_bvc(r_dst, r_src);
1182             break;
1183         }
1184         break;
1185     }
1186 }
1187 
/*
 * Build a DisasCompare for floating-point condition COND evaluated on
 * condition-code field %fcc<CC>.  The eval helpers deposit a boolean
 * into a fresh temp, which the caller then tests against zero.
 */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_constant_tl(0);

    /* Bit offset of the selected fcc field inside FSR, relative to
       %fcc0; the eval helpers add %fcc0's base position (bit 10),
       hence the "- 10" terms below.  */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;   /* %fcc1 */
        break;
    case 0x2:
        offset = 34 - 10;   /* %fcc2 */
        break;
    case 0x3:
        offset = 36 - 10;   /* %fcc3 */
        break;
    }

    /* Dispatch on the FBfcc condition encoding.  */
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1266 
1267 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1268                      DisasContext *dc)
1269 {
1270     DisasCompare cmp;
1271     gen_compare(&cmp, cc, cond, dc);
1272 
1273     /* The interface is to return a boolean in r_dst.  */
1274     if (cmp.is_bool) {
1275         tcg_gen_mov_tl(r_dst, cmp.c1);
1276     } else {
1277         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1278     }
1279 }
1280 
1281 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1282 {
1283     DisasCompare cmp;
1284     gen_fcompare(&cmp, cc, cond);
1285 
1286     /* The interface is to return a boolean in r_dst.  */
1287     if (cmp.is_bool) {
1288         tcg_gen_mov_tl(r_dst, cmp.c1);
1289     } else {
1290         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1291     }
1292 }
1293 
1294 #ifdef TARGET_SPARC64
/*
 * Register-based (rcond) branch conditions, stored INVERTED;
 * gen_compare_reg() applies tcg_invert_cond() to recover the real
 * test against zero.  Encodings 0 and 4 are unused (-1).
 */
static const int gen_tcg_cond_reg[8] = {
    -1,             /* 0: unused encoding */
    TCG_COND_NE,    /* 1: brz   (== 0), inverted */
    TCG_COND_GT,    /* 2: brlez (<= 0), inverted */
    TCG_COND_GE,    /* 3: brlz  (<  0), inverted */
    -1,             /* 4: unused encoding */
    TCG_COND_EQ,    /* 5: brnz  (!= 0), inverted */
    TCG_COND_LE,    /* 6: brgz  (>  0), inverted */
    TCG_COND_LT,    /* 7: brgez (>= 0), inverted */
};
1306 
1307 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1308 {
1309     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1310     cmp->is_bool = false;
1311     cmp->c1 = r_src;
1312     cmp->c2 = tcg_constant_tl(0);
1313 }
1314 
1315 static void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1316 {
1317     DisasCompare cmp;
1318     gen_compare_reg(&cmp, cond, r_src);
1319 
1320     /* The interface is to return a boolean in r_dst.  */
1321     tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1322 }
1323 #endif
1324 
1325 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1326 {
1327     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1328     target_ulong target = dc->pc + offset;
1329 
1330 #ifdef TARGET_SPARC64
1331     if (unlikely(AM_CHECK(dc))) {
1332         target &= 0xffffffffULL;
1333     }
1334 #endif
1335     if (cond == 0x0) {
1336         /* unconditional not taken */
1337         if (a) {
1338             dc->pc = dc->npc + 4;
1339             dc->npc = dc->pc + 4;
1340         } else {
1341             dc->pc = dc->npc;
1342             dc->npc = dc->pc + 4;
1343         }
1344     } else if (cond == 0x8) {
1345         /* unconditional taken */
1346         if (a) {
1347             dc->pc = target;
1348             dc->npc = dc->pc + 4;
1349         } else {
1350             dc->pc = dc->npc;
1351             dc->npc = target;
1352             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1353         }
1354     } else {
1355         flush_cond(dc);
1356         gen_cond(cpu_cond, cc, cond, dc);
1357         if (a) {
1358             gen_branch_a(dc, target);
1359         } else {
1360             gen_branch_n(dc, target);
1361         }
1362     }
1363 }
1364 
1365 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1366 {
1367     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1368     target_ulong target = dc->pc + offset;
1369 
1370 #ifdef TARGET_SPARC64
1371     if (unlikely(AM_CHECK(dc))) {
1372         target &= 0xffffffffULL;
1373     }
1374 #endif
1375     if (cond == 0x0) {
1376         /* unconditional not taken */
1377         if (a) {
1378             dc->pc = dc->npc + 4;
1379             dc->npc = dc->pc + 4;
1380         } else {
1381             dc->pc = dc->npc;
1382             dc->npc = dc->pc + 4;
1383         }
1384     } else if (cond == 0x8) {
1385         /* unconditional taken */
1386         if (a) {
1387             dc->pc = target;
1388             dc->npc = dc->pc + 4;
1389         } else {
1390             dc->pc = dc->npc;
1391             dc->npc = target;
1392             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1393         }
1394     } else {
1395         flush_cond(dc);
1396         gen_fcond(cpu_cond, cc, cond);
1397         if (a) {
1398             gen_branch_a(dc, target);
1399         } else {
1400             gen_branch_n(dc, target);
1401         }
1402     }
1403 }
1404 
1405 #ifdef TARGET_SPARC64
1406 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1407                           TCGv r_reg)
1408 {
1409     unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1410     target_ulong target = dc->pc + offset;
1411 
1412     if (unlikely(AM_CHECK(dc))) {
1413         target &= 0xffffffffULL;
1414     }
1415     flush_cond(dc);
1416     gen_cond_reg(cpu_cond, cond, r_reg);
1417     if (a) {
1418         gen_branch_a(dc, target);
1419     } else {
1420         gen_branch_n(dc, target);
1421     }
1422 }
1423 
1424 static void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1425 {
1426     switch (fccno) {
1427     case 0:
1428         gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
1429         break;
1430     case 1:
1431         gen_helper_fcmps_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
1432         break;
1433     case 2:
1434         gen_helper_fcmps_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
1435         break;
1436     case 3:
1437         gen_helper_fcmps_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
1438         break;
1439     }
1440 }
1441 
1442 static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1443 {
1444     switch (fccno) {
1445     case 0:
1446         gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
1447         break;
1448     case 1:
1449         gen_helper_fcmpd_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
1450         break;
1451     case 2:
1452         gen_helper_fcmpd_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
1453         break;
1454     case 3:
1455         gen_helper_fcmpd_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
1456         break;
1457     }
1458 }
1459 
1460 static void gen_op_fcmpq(int fccno)
1461 {
1462     switch (fccno) {
1463     case 0:
1464         gen_helper_fcmpq(cpu_fsr, cpu_env);
1465         break;
1466     case 1:
1467         gen_helper_fcmpq_fcc1(cpu_fsr, cpu_env);
1468         break;
1469     case 2:
1470         gen_helper_fcmpq_fcc2(cpu_fsr, cpu_env);
1471         break;
1472     case 3:
1473         gen_helper_fcmpq_fcc3(cpu_fsr, cpu_env);
1474         break;
1475     }
1476 }
1477 
1478 static void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1479 {
1480     switch (fccno) {
1481     case 0:
1482         gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
1483         break;
1484     case 1:
1485         gen_helper_fcmpes_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
1486         break;
1487     case 2:
1488         gen_helper_fcmpes_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
1489         break;
1490     case 3:
1491         gen_helper_fcmpes_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
1492         break;
1493     }
1494 }
1495 
1496 static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1497 {
1498     switch (fccno) {
1499     case 0:
1500         gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
1501         break;
1502     case 1:
1503         gen_helper_fcmped_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
1504         break;
1505     case 2:
1506         gen_helper_fcmped_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
1507         break;
1508     case 3:
1509         gen_helper_fcmped_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
1510         break;
1511     }
1512 }
1513 
1514 static void gen_op_fcmpeq(int fccno)
1515 {
1516     switch (fccno) {
1517     case 0:
1518         gen_helper_fcmpeq(cpu_fsr, cpu_env);
1519         break;
1520     case 1:
1521         gen_helper_fcmpeq_fcc1(cpu_fsr, cpu_env);
1522         break;
1523     case 2:
1524         gen_helper_fcmpeq_fcc2(cpu_fsr, cpu_env);
1525         break;
1526     case 3:
1527         gen_helper_fcmpeq_fcc3(cpu_fsr, cpu_env);
1528         break;
1529     }
1530 }
1531 
1532 #else
1533 
/* fccno is unused: without TARGET_SPARC64 there is a single fcc field.  */
static void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1538 
/* fccno is unused: without TARGET_SPARC64 there is a single fcc field.  */
static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1543 
/* fccno is unused: without TARGET_SPARC64 there is a single fcc field.  */
static void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, cpu_env);
}
1548 
/* fccno is unused: without TARGET_SPARC64 there is a single fcc field.  */
static void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1553 
/* fccno is unused: without TARGET_SPARC64 there is a single fcc field.  */
static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1558 
/* fccno is unused: without TARGET_SPARC64 there is a single fcc field.  */
static void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, cpu_env);
}
1563 #endif
1564 
/* Raise a floating-point exception: replace the FSR.ftt field with
   fsr_flags (the trap-type cause), then trap to TT_FP_EXCP.  */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);  /* clear old ftt */
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1571 
/*
 * If the FPU is disabled, raise a TT_NFPU_INSN trap and return 1 so the
 * caller can abandon translation of the current insn; return 0 when the
 * insn may proceed.  In user-only builds the FPU is always available.
 */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1582 
/* Clear FSR.ftt and the current IEEE exception (cexc) bits, as done
   before launching an FP operation.  */
static void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1587 
1588 static void gen_fop_FF(DisasContext *dc, int rd, int rs,
1589                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1590 {
1591     TCGv_i32 dst, src;
1592 
1593     src = gen_load_fpr_F(dc, rs);
1594     dst = gen_dest_fpr_F(dc);
1595 
1596     gen(dst, cpu_env, src);
1597     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1598 
1599     gen_store_fpr_F(dc, rd, dst);
1600 }
1601 
1602 static void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1603                           void (*gen)(TCGv_i32, TCGv_i32))
1604 {
1605     TCGv_i32 dst, src;
1606 
1607     src = gen_load_fpr_F(dc, rs);
1608     dst = gen_dest_fpr_F(dc);
1609 
1610     gen(dst, src);
1611 
1612     gen_store_fpr_F(dc, rd, dst);
1613 }
1614 
1615 static void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1616                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1617 {
1618     TCGv_i32 dst, src1, src2;
1619 
1620     src1 = gen_load_fpr_F(dc, rs1);
1621     src2 = gen_load_fpr_F(dc, rs2);
1622     dst = gen_dest_fpr_F(dc);
1623 
1624     gen(dst, cpu_env, src1, src2);
1625     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1626 
1627     gen_store_fpr_F(dc, rd, dst);
1628 }
1629 
1630 #ifdef TARGET_SPARC64
1631 static void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1632                            void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1633 {
1634     TCGv_i32 dst, src1, src2;
1635 
1636     src1 = gen_load_fpr_F(dc, rs1);
1637     src2 = gen_load_fpr_F(dc, rs2);
1638     dst = gen_dest_fpr_F(dc);
1639 
1640     gen(dst, src1, src2);
1641 
1642     gen_store_fpr_F(dc, rd, dst);
1643 }
1644 #endif
1645 
1646 static void gen_fop_DD(DisasContext *dc, int rd, int rs,
1647                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1648 {
1649     TCGv_i64 dst, src;
1650 
1651     src = gen_load_fpr_D(dc, rs);
1652     dst = gen_dest_fpr_D(dc, rd);
1653 
1654     gen(dst, cpu_env, src);
1655     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1656 
1657     gen_store_fpr_D(dc, rd, dst);
1658 }
1659 
1660 #ifdef TARGET_SPARC64
1661 static void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1662                           void (*gen)(TCGv_i64, TCGv_i64))
1663 {
1664     TCGv_i64 dst, src;
1665 
1666     src = gen_load_fpr_D(dc, rs);
1667     dst = gen_dest_fpr_D(dc, rd);
1668 
1669     gen(dst, src);
1670 
1671     gen_store_fpr_D(dc, rd, dst);
1672 }
1673 #endif
1674 
1675 static void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1676                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1677 {
1678     TCGv_i64 dst, src1, src2;
1679 
1680     src1 = gen_load_fpr_D(dc, rs1);
1681     src2 = gen_load_fpr_D(dc, rs2);
1682     dst = gen_dest_fpr_D(dc, rd);
1683 
1684     gen(dst, cpu_env, src1, src2);
1685     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1686 
1687     gen_store_fpr_D(dc, rd, dst);
1688 }
1689 
1690 #ifdef TARGET_SPARC64
1691 static void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1692                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1693 {
1694     TCGv_i64 dst, src1, src2;
1695 
1696     src1 = gen_load_fpr_D(dc, rs1);
1697     src2 = gen_load_fpr_D(dc, rs2);
1698     dst = gen_dest_fpr_D(dc, rd);
1699 
1700     gen(dst, src1, src2);
1701 
1702     gen_store_fpr_D(dc, rd, dst);
1703 }
1704 
1705 static void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1706                             void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1707 {
1708     TCGv_i64 dst, src1, src2;
1709 
1710     src1 = gen_load_fpr_D(dc, rs1);
1711     src2 = gen_load_fpr_D(dc, rs2);
1712     dst = gen_dest_fpr_D(dc, rd);
1713 
1714     gen(dst, cpu_gsr, src1, src2);
1715 
1716     gen_store_fpr_D(dc, rd, dst);
1717 }
1718 
1719 static void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1720                             void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1721 {
1722     TCGv_i64 dst, src0, src1, src2;
1723 
1724     src1 = gen_load_fpr_D(dc, rs1);
1725     src2 = gen_load_fpr_D(dc, rs2);
1726     src0 = gen_load_fpr_D(dc, rd);
1727     dst = gen_dest_fpr_D(dc, rd);
1728 
1729     gen(dst, src0, src1, src2);
1730 
1731     gen_store_fpr_D(dc, rd, dst);
1732 }
1733 #endif
1734 
1735 static void gen_fop_QQ(DisasContext *dc, int rd, int rs,
1736                        void (*gen)(TCGv_ptr))
1737 {
1738     gen_op_load_fpr_QT1(QFPREG(rs));
1739 
1740     gen(cpu_env);
1741     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1742 
1743     gen_op_store_QT0_fpr(QFPREG(rd));
1744     gen_update_fprs_dirty(dc, QFPREG(rd));
1745 }
1746 
1747 #ifdef TARGET_SPARC64
1748 static void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
1749                           void (*gen)(TCGv_ptr))
1750 {
1751     gen_op_load_fpr_QT1(QFPREG(rs));
1752 
1753     gen(cpu_env);
1754 
1755     gen_op_store_QT0_fpr(QFPREG(rd));
1756     gen_update_fprs_dirty(dc, QFPREG(rd));
1757 }
1758 #endif
1759 
1760 static void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
1761                         void (*gen)(TCGv_ptr))
1762 {
1763     gen_op_load_fpr_QT0(QFPREG(rs1));
1764     gen_op_load_fpr_QT1(QFPREG(rs2));
1765 
1766     gen(cpu_env);
1767     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1768 
1769     gen_op_store_QT0_fpr(QFPREG(rd));
1770     gen_update_fprs_dirty(dc, QFPREG(rd));
1771 }
1772 
1773 static void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1774                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1775 {
1776     TCGv_i64 dst;
1777     TCGv_i32 src1, src2;
1778 
1779     src1 = gen_load_fpr_F(dc, rs1);
1780     src2 = gen_load_fpr_F(dc, rs2);
1781     dst = gen_dest_fpr_D(dc, rd);
1782 
1783     gen(dst, cpu_env, src1, src2);
1784     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1785 
1786     gen_store_fpr_D(dc, rd, dst);
1787 }
1788 
1789 static void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1790                         void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1791 {
1792     TCGv_i64 src1, src2;
1793 
1794     src1 = gen_load_fpr_D(dc, rs1);
1795     src2 = gen_load_fpr_D(dc, rs2);
1796 
1797     gen(cpu_env, src1, src2);
1798     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1799 
1800     gen_op_store_QT0_fpr(QFPREG(rd));
1801     gen_update_fprs_dirty(dc, QFPREG(rd));
1802 }
1803 
1804 #ifdef TARGET_SPARC64
1805 static void gen_fop_DF(DisasContext *dc, int rd, int rs,
1806                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1807 {
1808     TCGv_i64 dst;
1809     TCGv_i32 src;
1810 
1811     src = gen_load_fpr_F(dc, rs);
1812     dst = gen_dest_fpr_D(dc, rd);
1813 
1814     gen(dst, cpu_env, src);
1815     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1816 
1817     gen_store_fpr_D(dc, rd, dst);
1818 }
1819 #endif
1820 
1821 static void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1822                           void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1823 {
1824     TCGv_i64 dst;
1825     TCGv_i32 src;
1826 
1827     src = gen_load_fpr_F(dc, rs);
1828     dst = gen_dest_fpr_D(dc, rd);
1829 
1830     gen(dst, cpu_env, src);
1831 
1832     gen_store_fpr_D(dc, rd, dst);
1833 }
1834 
1835 static void gen_fop_FD(DisasContext *dc, int rd, int rs,
1836                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1837 {
1838     TCGv_i32 dst;
1839     TCGv_i64 src;
1840 
1841     src = gen_load_fpr_D(dc, rs);
1842     dst = gen_dest_fpr_F(dc);
1843 
1844     gen(dst, cpu_env, src);
1845     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1846 
1847     gen_store_fpr_F(dc, rd, dst);
1848 }
1849 
1850 static void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1851                        void (*gen)(TCGv_i32, TCGv_ptr))
1852 {
1853     TCGv_i32 dst;
1854 
1855     gen_op_load_fpr_QT1(QFPREG(rs));
1856     dst = gen_dest_fpr_F(dc);
1857 
1858     gen(dst, cpu_env);
1859     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1860 
1861     gen_store_fpr_F(dc, rd, dst);
1862 }
1863 
1864 static void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1865                        void (*gen)(TCGv_i64, TCGv_ptr))
1866 {
1867     TCGv_i64 dst;
1868 
1869     gen_op_load_fpr_QT1(QFPREG(rs));
1870     dst = gen_dest_fpr_D(dc, rd);
1871 
1872     gen(dst, cpu_env);
1873     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1874 
1875     gen_store_fpr_D(dc, rd, dst);
1876 }
1877 
1878 static void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1879                           void (*gen)(TCGv_ptr, TCGv_i32))
1880 {
1881     TCGv_i32 src;
1882 
1883     src = gen_load_fpr_F(dc, rs);
1884 
1885     gen(cpu_env, src);
1886 
1887     gen_op_store_QT0_fpr(QFPREG(rd));
1888     gen_update_fprs_dirty(dc, QFPREG(rd));
1889 }
1890 
1891 static void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1892                           void (*gen)(TCGv_ptr, TCGv_i64))
1893 {
1894     TCGv_i64 src;
1895 
1896     src = gen_load_fpr_D(dc, rs);
1897 
1898     gen(cpu_env, src);
1899 
1900     gen_op_store_QT0_fpr(QFPREG(rd));
1901     gen_update_fprs_dirty(dc, QFPREG(rd));
1902 }
1903 
/* Atomically exchange SRC with memory at ADDR, returning the old memory
   value in DST (used by the swap insns).  The access is aligned.  */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, MemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop | MO_ALIGN)
;
}
1910 
1911 static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
1912 {
1913     TCGv m1 = tcg_constant_tl(0xff);
1914     gen_address_mask(dc, addr);
1915     tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
1916 }
1917 
1918 /* asi moves */
1919 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* How an ASI access should be generated, as decided by get_asi().  */
typedef enum {
    GET_ASI_HELPER,   /* fall back to the generic ld/st_asi helpers */
    GET_ASI_EXCP,     /* an exception was raised; generate nothing */
    GET_ASI_DIRECT,   /* inline qemu_ld/st with the chosen mmu_idx */
    GET_ASI_DTWINX,   /* twin/quad (128-bit) load-store asis */
    GET_ASI_BLOCK,    /* ASI_BLK_* block transfer asis */
    GET_ASI_SHORT,    /* ASI_FL8_*/ /* and ASI_FL16_* 8/16-bit fp asis */
    GET_ASI_BCOPY,    /* sparc32 ASI_M_BCOPY block copy */
    GET_ASI_BFILL,    /* sparc32 ASI_M_BFILL block fill */
} ASIType;
1930 
/* Result of get_asi(): everything needed to emit one ASI access.  */
typedef struct {
    ASIType type;   /* dispatch strategy; see ASIType */
    int asi;        /* resolved ASI number */
    int mem_idx;    /* MMU index to use for the access */
    MemOp memop;    /* size/endianness, possibly adjusted from input */
} DisasASI;
1937 
/*
 * Decode the ASI of a memory instruction into a DisasASI descriptor:
 * how to generate the access (type), the resolved ASI number, the MMU
 * index to use, and the (possibly adjusted) memop.  May raise an
 * illegal-instruction or privilege exception, in which case the type
 * is GET_ASI_EXCP and the caller must emit nothing.
 */
static DisasASI get_asi(DisasContext *dc, int insn, MemOp memop)
{
    int asi = GET_FIELD(insn, 19, 26);
    ASIType type = GET_ASI_HELPER;
    int mem_idx = dc->mem_idx;

#ifndef TARGET_SPARC64
    /* Before v9, all asis are immediate and privileged.  */
    if (IS_IMM) {
        gen_exception(dc, TT_ILL_INSN);
        type = GET_ASI_EXCP;
    } else if (supervisor(dc)
               /* Note that LEON accepts ASI_USERDATA in user mode, for
                  use with CASA.  Also note that previous versions of
                  QEMU allowed (and old versions of gcc emitted) ASI_P
                  for LEON, which is incorrect.  */
               || (asi == ASI_USERDATA
                   && (dc->def->features & CPU_FEATURE_CASA))) {
        switch (asi) {
        case ASI_USERDATA:   /* User data access */
            mem_idx = MMU_USER_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_KERNELDATA: /* Supervisor data access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BYPASS:    /* MMU passthrough */
        case ASI_LEON_BYPASS: /* LEON MMU passthrough */
            mem_idx = MMU_PHYS_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BCOPY: /* Block copy, sta access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BCOPY;
            break;
        case ASI_M_BFILL: /* Block fill, stda access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BFILL;
            break;
        }

        /* MMU_PHYS_IDX is used when the MMU is disabled to passthrough the
         * permissions check in get_physical_address(..).
         */
        mem_idx = (dc->mem_idx == MMU_PHYS_IDX) ? MMU_PHYS_IDX : mem_idx;
    } else {
        gen_exception(dc, TT_PRIV_INSN);
        type = GET_ASI_EXCP;
    }
#else
    /* Immediate-form accesses use the %asi value saved in dc->asi.  */
    if (IS_IMM) {
        asi = dc->asi;
    }
    /* With v9, all asis below 0x80 are privileged.  */
    /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
       down that bit into DisasContext.  For the moment that's ok,
       since the direct implementations below doesn't have any ASIs
       in the restricted [0x30, 0x7f] range, and the check will be
       done properly in the helper.  */
    if (!supervisor(dc) && asi < 0x80) {
        gen_exception(dc, TT_PRIV_ACT);
        type = GET_ASI_EXCP;
    } else {
        /* First switch: choose the MMU index from the ASI.  */
        switch (asi) {
        case ASI_REAL:      /* Bypass */
        case ASI_REAL_IO:   /* Bypass, non-cacheable */
        case ASI_REAL_L:    /* Bypass LE */
        case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
        case ASI_TWINX_REAL:   /* Real address, twinx */
        case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
            mem_idx = MMU_PHYS_IDX;
            break;
        case ASI_N:  /* Nucleus */
        case ASI_NL: /* Nucleus LE */
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            if (hypervisor(dc)) {
                mem_idx = MMU_PHYS_IDX;
            } else {
                mem_idx = MMU_NUCLEUS_IDX;
            }
            break;
        case ASI_AIUP:  /* As if user primary */
        case ASI_AIUPL: /* As if user primary LE */
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
            mem_idx = MMU_USER_IDX;
            break;
        case ASI_AIUS:  /* As if user secondary */
        case ASI_AIUSL: /* As if user secondary LE */
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
            mem_idx = MMU_USER_SECONDARY_IDX;
            break;
        case ASI_S:  /* Secondary */
        case ASI_SL: /* Secondary LE */
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL16_S:
        case ASI_FL16_SL:
            if (mem_idx == MMU_USER_IDX) {
                mem_idx = MMU_USER_SECONDARY_IDX;
            } else if (mem_idx == MMU_KERNEL_IDX) {
                mem_idx = MMU_KERNEL_SECONDARY_IDX;
            }
            break;
        case ASI_P:  /* Primary */
        case ASI_PL: /* Primary LE */
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_P:
        case ASI_BLK_PL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            break;
        }
        /* Second switch: choose the generation strategy and adjust
           the memop for the short fp asis.  */
        switch (asi) {
        case ASI_REAL:
        case ASI_REAL_IO:
        case ASI_REAL_L:
        case ASI_REAL_IO_L:
        case ASI_N:
        case ASI_NL:
        case ASI_AIUP:
        case ASI_AIUPL:
        case ASI_AIUS:
        case ASI_AIUSL:
        case ASI_S:
        case ASI_SL:
        case ASI_P:
        case ASI_PL:
            type = GET_ASI_DIRECT;
            break;
        case ASI_TWINX_REAL:
        case ASI_TWINX_REAL_L:
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            type = GET_ASI_DTWINX;
            break;
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_BLK_P:
        case ASI_BLK_PL:
            type = GET_ASI_BLOCK;
            break;
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
            memop = MO_UB;
            type = GET_ASI_SHORT;
            break;
        case ASI_FL16_S:
        case ASI_FL16_SL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            memop = MO_TEUW;
            type = GET_ASI_SHORT;
            break;
        }
        /* The little-endian asis all have bit 3 set.  */
        if (asi & 8) {
            memop ^= MO_BSWAP;
        }
    }
#endif

    return (DisasASI){ type, asi, mem_idx, memop };
}
2150 
/*
 * Emit code for an integer load through an alternate address space.
 * DST receives the loaded value, ADDR is the virtual address, INSN
 * supplies the ASI field, MEMOP gives access size and endianness.
 */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing more.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        /* ASI maps to an ordinary access in mmu index da.mem_idx.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            /* The helper may raise an exception: sync pc/npc first.  */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_mop);
#else
            {
                /* The helper returns 64 bits; narrow to the 32-bit target.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
            }
#endif
        }
        break;
    }
}
2185 
/*
 * Emit code for an integer store through an alternate address space.
 * SRC is the value to store, ADDR the virtual address, INSN supplies
 * the ASI field, MEMOP gives access size and endianness.
 */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing more.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
#endif
        /* fall through */
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_constant_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }
        }
        break;
#endif
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            /* The helper may raise an exception: sync pc/npc first.  */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(cpu_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes 64 bits; widen the 32-bit source.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            }
#endif

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2261 
2262 static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2263                          TCGv addr, int insn)
2264 {
2265     DisasASI da = get_asi(dc, insn, MO_TEUL);
2266 
2267     switch (da.type) {
2268     case GET_ASI_EXCP:
2269         break;
2270     case GET_ASI_DIRECT:
2271         gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2272         break;
2273     default:
2274         /* ??? Should be DAE_invalid_asi.  */
2275         gen_exception(dc, TT_DATA_ACCESS);
2276         break;
2277     }
2278 }
2279 
2280 static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2281                         int insn, int rd)
2282 {
2283     DisasASI da = get_asi(dc, insn, MO_TEUL);
2284     TCGv oldv;
2285 
2286     switch (da.type) {
2287     case GET_ASI_EXCP:
2288         return;
2289     case GET_ASI_DIRECT:
2290         oldv = tcg_temp_new();
2291         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2292                                   da.mem_idx, da.memop | MO_ALIGN);
2293         gen_store_gpr(dc, rd, oldv);
2294         break;
2295     default:
2296         /* ??? Should be DAE_invalid_asi.  */
2297         gen_exception(dc, TT_DATA_ACCESS);
2298         break;
2299     }
2300 }
2301 
/*
 * Emit code for LDSTUBA: atomically load the byte at ADDR and store
 * 0xff to it.  DST receives the original byte.
 */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->base.tb) & CF_PARALLEL) {
            /* The separate load+store below is not atomic; with a
               parallel context, punt to the serializing slow path.  */
            gen_helper_exit_atomic(cpu_env);
        } else {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UB);
            TCGv_i64 s64, t64;

            /* The helpers may raise exceptions: sync pc/npc first.  */
            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);

            s64 = tcg_constant_i64(0xff);
            gen_helper_st_asi(cpu_env, addr, s64, r_asi, r_mop);

            tcg_gen_trunc_i64_tl(dst, t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2337 #endif
2338 
2339 #ifdef TARGET_SPARC64
/*
 * Emit code for a floating-point load through an alternate address
 * space: LDFA (size 4), LDDFA (size 8) or LDQFA (size 16).  RD is the
 * destination FP register number as encoded in the instruction.
 */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Load the first half into a temp so that a fault on the
               second access leaves the destination pair unmodified.  */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            /* 64-byte block load: eight consecutive doublewords.  */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop | MO_ALIGN);

            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], cpu_env, addr, r_asi, r_mop);
                break;
            case 16:
                /* As in the direct case: temp first, so a fault on the
                   second access leaves the pair unmodified.  */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], cpu_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                break;
            default:
                g_assert_not_reached();
            }
        }
        break;
    }
}
2448 
/*
 * Emit code for a floating-point store through an alternate address
 * space: STFA (size 4), STDFA (size 8) or STQFA (size 16).  RD is the
 * source FP register number as encoded in the instruction.
 */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            /* 64-byte block store: eight consecutive doublewords.  */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2531 
/*
 * Emit code for the sparc64 LDDA: load a 64-bit doubleword into the
 * even/odd register pair RD/RD+1, or a 128-bit twin load for the
 * TWINX-class ASIs.
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        return;

    case GET_ASI_DTWINX:
        /* Twin load: two adjacent 64-bit words, 16-byte aligned.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop | MO_ALIGN);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            /* The helper may raise an exception: sync pc/npc first.  */
            save_state(dc);
            gen_helper_ld_asi(tmp, cpu_env, addr, r_asi, r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2593 
/*
 * Emit code for the sparc64 STDA: store the even/odd register pair
 * RD/RD+1 as one 64-bit doubleword, or a 128-bit twin store for the
 * TWINX-class ASIs.  HI is the value of register RD.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DTWINX:
        /* Twin store: two adjacent 64-bit words, 16-byte aligned.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            /* The helper may raise an exception: sync pc/npc first.  */
            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2649 
2650 static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2651                          int insn, int rd)
2652 {
2653     DisasASI da = get_asi(dc, insn, MO_TEUQ);
2654     TCGv oldv;
2655 
2656     switch (da.type) {
2657     case GET_ASI_EXCP:
2658         return;
2659     case GET_ASI_DIRECT:
2660         oldv = tcg_temp_new();
2661         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2662                                   da.mem_idx, da.memop | MO_ALIGN);
2663         gen_store_gpr(dc, rd, oldv);
2664         break;
2665     default:
2666         /* ??? Should be DAE_invalid_asi.  */
2667         gen_exception(dc, TT_DATA_ACCESS);
2668         break;
2669     }
2670 }
2671 
2672 #elif !defined(CONFIG_USER_ONLY)
/*
 * Emit code for the sparc32 LDDA: load a 64-bit doubleword and split
 * it across the even/odd register pair RD/RD+1.
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEUQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            /* The helper may raise an exception: sync pc/npc first.  */
            save_state(dc);
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
        }
        break;
    }

    /* High word of the doubleword goes to the even register.  */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2706 
/*
 * Emit code for the sparc32 STDA: store the even/odd register pair
 * RD/RD+1 as one 64-bit doubleword.  HI is the value of register RD.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    /* Build the doubleword; even register supplies the high word.  */
    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_constant_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }
        }
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            /* The helper may raise an exception: sync pc/npc first.  */
            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2752 #endif
2753 
2754 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2755 {
2756     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2757     return gen_load_gpr(dc, rs1);
2758 }
2759 
2760 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2761 {
2762     if (IS_IMM) { /* immediate */
2763         target_long simm = GET_FIELDs(insn, 19, 31);
2764         TCGv t = tcg_temp_new();
2765         tcg_gen_movi_tl(t, simm);
2766         return t;
2767     } else {      /* register */
2768         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2769         return gen_load_gpr(dc, rs2);
2770     }
2771 }
2772 
2773 #ifdef TARGET_SPARC64
/*
 * Conditional move of a single-precision FP register:
 * rd = CMP ? rs : rd.
 */
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the later.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        /* c1 already holds a 0/1 boolean; just narrow it.  */
        tcg_gen_extrl_i64_i32(c32, cmp->c1);
    } else {
        /* Evaluate the comparison to a boolean, then narrow.  */
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_extrl_i64_i32(c32, c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F(dc);
    zero = tcg_constant_i32(0);

    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    gen_store_fpr_F(dc, rd, dst);
}
2799 
2800 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2801 {
2802     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2803     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2804                         gen_load_fpr_D(dc, rs),
2805                         gen_load_fpr_D(dc, rd));
2806     gen_store_fpr_D(dc, rd, dst);
2807 }
2808 
/*
 * Conditional move of a quad-precision FP register (two 64-bit
 * halves): rd = CMP ? rs : rd.
 */
static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    int qd = QFPREG(rd);
    int qs = QFPREG(rs);

    /* Apply the same condition to both 64-bit halves of the quad.  */
    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);

    /* Mark the destination registers as modified.  */
    gen_update_fprs_dirty(dc, qd);
}
2821 
2822 #ifndef CONFIG_USER_ONLY
/* Set R_TSPTR to point at the trap state for the current trap level:
   r_tsptr = &env->ts[env->tl & MAXTL_MASK].  */
static void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        /* Widen the 32-bit byte offset to pointer width before adding.  */
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
    }
}
2844 #endif
2845 
/*
 * Emit code for the VIS EDGE* instructions.  DST receives the edge
 * mask computed from addresses S1 and S2; WIDTH is the element width
 * in bits (8, 16 or 32), CC selects the variants that also set the
 * condition codes (as for subcc s1, s2), LEFT selects the left-edge
 * variant.  NOTE: S1 and S2 are clobbered (masked in place) below.
 */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        /* Set up the lazy condition-code state as for subcc.  */
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    /* Extract the per-input table lookups described above.  */
    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    tcg_gen_shr_tl(lo1, tcg_constant_tl(tabl), lo1);
    tcg_gen_shr_tl(lo2, tcg_constant_tl(tabr), lo2);
    tcg_gen_andi_tl(lo1, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Mask both addresses down to their aligned doubleword (and, in
       32-bit address mode, to 32 bits) for the equality test below.  */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* Compute dst = (s1 == s2 ? lo1 : lo1 & lo2). */
    tcg_gen_and_tl(lo2, lo2, lo1);
    tcg_gen_movcond_tl(TCG_COND_EQ, dst, s1, s2, lo1, lo2);
}
2932 
/*
 * Emit code for ALIGNADDRESS (and the _LITTLE variant when LEFT):
 * DST = (S1 + S2) & ~7, and the low 3 bits of the sum (negated for
 * the little-endian variant) are deposited into GSR.align.
 */
static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
{
    TCGv tmp = tcg_temp_new();

    tcg_gen_add_tl(tmp, s1, s2);
    tcg_gen_andi_tl(dst, tmp, -8);
    if (left) {
        /* Little-endian variant records the negated offset.  */
        tcg_gen_neg_tl(tmp, tmp);
    }
    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
}
2944 
/*
 * Emit code for FALIGNDATA: concatenate S1:S2 and extract the 64-bit
 * window starting at the byte offset held in GSR.align (low 3 bits
 * of GSR).
 */
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
{
    TCGv t1, t2, shift;

    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    shift = tcg_temp_new();

    /* Bit shift = GSR.align * 8.  */
    tcg_gen_andi_tl(shift, gsr, 7);
    tcg_gen_shli_tl(shift, shift, 3);
    tcg_gen_shl_tl(t1, s1, shift);

    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
       shift of (up to 63) followed by a constant shift of 1.  */
    tcg_gen_xori_tl(shift, shift, 63);
    tcg_gen_shr_tl(t2, s2, shift);
    tcg_gen_shri_tl(t2, t2, 1);

    tcg_gen_or_tl(dst, t1, t2);
}
2965 #endif
2966 
/* Abort decoding of the current instruction when the CPU model lacks
   FEATURE.  These macros expand inside disas_sparc_insn, which
   provides the illegal_insn and nfpu_insn labels they jump to.  */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
2973 
2974 /* before an instruction, dc->pc must be static */
2975 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2976 {
2977     unsigned int opc, rs1, rs2, rd;
2978     TCGv cpu_src1, cpu_src2;
2979     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2980     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2981     target_long simm;
2982 
2983     opc = GET_FIELD(insn, 0, 1);
2984     rd = GET_FIELD(insn, 2, 6);
2985 
2986     switch (opc) {
2987     case 0:                     /* branches/sethi */
2988         {
2989             unsigned int xop = GET_FIELD(insn, 7, 9);
2990             int32_t target;
2991             switch (xop) {
2992 #ifdef TARGET_SPARC64
2993             case 0x1:           /* V9 BPcc */
2994                 {
2995                     int cc;
2996 
2997                     target = GET_FIELD_SP(insn, 0, 18);
2998                     target = sign_extend(target, 19);
2999                     target <<= 2;
3000                     cc = GET_FIELD_SP(insn, 20, 21);
3001                     if (cc == 0)
3002                         do_branch(dc, target, insn, 0);
3003                     else if (cc == 2)
3004                         do_branch(dc, target, insn, 1);
3005                     else
3006                         goto illegal_insn;
3007                     goto jmp_insn;
3008                 }
3009             case 0x3:           /* V9 BPr */
3010                 {
3011                     target = GET_FIELD_SP(insn, 0, 13) |
3012                         (GET_FIELD_SP(insn, 20, 21) << 14);
3013                     target = sign_extend(target, 16);
3014                     target <<= 2;
3015                     cpu_src1 = get_src1(dc, insn);
3016                     do_branch_reg(dc, target, insn, cpu_src1);
3017                     goto jmp_insn;
3018                 }
3019             case 0x5:           /* V9 FBPcc */
3020                 {
3021                     int cc = GET_FIELD_SP(insn, 20, 21);
3022                     if (gen_trap_ifnofpu(dc)) {
3023                         goto jmp_insn;
3024                     }
3025                     target = GET_FIELD_SP(insn, 0, 18);
3026                     target = sign_extend(target, 19);
3027                     target <<= 2;
3028                     do_fbranch(dc, target, insn, cc);
3029                     goto jmp_insn;
3030                 }
3031 #else
3032             case 0x7:           /* CBN+x */
3033                 {
3034                     goto ncp_insn;
3035                 }
3036 #endif
3037             case 0x2:           /* BN+x */
3038                 {
3039                     target = GET_FIELD(insn, 10, 31);
3040                     target = sign_extend(target, 22);
3041                     target <<= 2;
3042                     do_branch(dc, target, insn, 0);
3043                     goto jmp_insn;
3044                 }
3045             case 0x6:           /* FBN+x */
3046                 {
3047                     if (gen_trap_ifnofpu(dc)) {
3048                         goto jmp_insn;
3049                     }
3050                     target = GET_FIELD(insn, 10, 31);
3051                     target = sign_extend(target, 22);
3052                     target <<= 2;
3053                     do_fbranch(dc, target, insn, 0);
3054                     goto jmp_insn;
3055                 }
3056             case 0x4:           /* SETHI */
3057                 /* Special-case %g0 because that's the canonical nop.  */
3058                 if (rd) {
3059                     uint32_t value = GET_FIELD(insn, 10, 31);
3060                     TCGv t = gen_dest_gpr(dc, rd);
3061                     tcg_gen_movi_tl(t, value << 10);
3062                     gen_store_gpr(dc, rd, t);
3063                 }
3064                 break;
3065             case 0x0:           /* UNIMPL */
3066             default:
3067                 goto illegal_insn;
3068             }
3069             break;
3070         }
3071         break;
3072     case 1:                     /*CALL*/
3073         {
3074             target_long target = GET_FIELDs(insn, 2, 31) << 2;
3075             TCGv o7 = gen_dest_gpr(dc, 15);
3076 
3077             tcg_gen_movi_tl(o7, dc->pc);
3078             gen_store_gpr(dc, 15, o7);
3079             target += dc->pc;
3080             gen_mov_pc_npc(dc);
3081 #ifdef TARGET_SPARC64
3082             if (unlikely(AM_CHECK(dc))) {
3083                 target &= 0xffffffffULL;
3084             }
3085 #endif
3086             dc->npc = target;
3087         }
3088         goto jmp_insn;
3089     case 2:                     /* FPU & Logical Operations */
3090         {
3091             unsigned int xop = GET_FIELD(insn, 7, 12);
3092             TCGv cpu_dst = tcg_temp_new();
3093             TCGv cpu_tmp0;
3094 
3095             if (xop == 0x3a) {  /* generate trap */
3096                 int cond = GET_FIELD(insn, 3, 6);
3097                 TCGv_i32 trap;
3098                 TCGLabel *l1 = NULL;
3099                 int mask;
3100 
3101                 if (cond == 0) {
3102                     /* Trap never.  */
3103                     break;
3104                 }
3105 
3106                 save_state(dc);
3107 
3108                 if (cond != 8) {
3109                     /* Conditional trap.  */
3110                     DisasCompare cmp;
3111 #ifdef TARGET_SPARC64
3112                     /* V9 icc/xcc */
3113                     int cc = GET_FIELD_SP(insn, 11, 12);
3114                     if (cc == 0) {
3115                         gen_compare(&cmp, 0, cond, dc);
3116                     } else if (cc == 2) {
3117                         gen_compare(&cmp, 1, cond, dc);
3118                     } else {
3119                         goto illegal_insn;
3120                     }
3121 #else
3122                     gen_compare(&cmp, 0, cond, dc);
3123 #endif
3124                     l1 = gen_new_label();
3125                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3126                                       cmp.c1, cmp.c2, l1);
3127                 }
3128 
3129                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3130                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3131 
3132                 /* Don't use the normal temporaries, as they may well have
3133                    gone out of scope with the branch above.  While we're
3134                    doing that we might as well pre-truncate to 32-bit.  */
3135                 trap = tcg_temp_new_i32();
3136 
3137                 rs1 = GET_FIELD_SP(insn, 14, 18);
3138                 if (IS_IMM) {
3139                     rs2 = GET_FIELD_SP(insn, 0, 7);
3140                     if (rs1 == 0) {
3141                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3142                         /* Signal that the trap value is fully constant.  */
3143                         mask = 0;
3144                     } else {
3145                         TCGv t1 = gen_load_gpr(dc, rs1);
3146                         tcg_gen_trunc_tl_i32(trap, t1);
3147                         tcg_gen_addi_i32(trap, trap, rs2);
3148                     }
3149                 } else {
3150                     TCGv t1, t2;
3151                     rs2 = GET_FIELD_SP(insn, 0, 4);
3152                     t1 = gen_load_gpr(dc, rs1);
3153                     t2 = gen_load_gpr(dc, rs2);
3154                     tcg_gen_add_tl(t1, t1, t2);
3155                     tcg_gen_trunc_tl_i32(trap, t1);
3156                 }
3157                 if (mask != 0) {
3158                     tcg_gen_andi_i32(trap, trap, mask);
3159                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
3160                 }
3161 
3162                 gen_helper_raise_exception(cpu_env, trap);
3163 
3164                 if (cond == 8) {
3165                     /* An unconditional trap ends the TB.  */
3166                     dc->base.is_jmp = DISAS_NORETURN;
3167                     goto jmp_insn;
3168                 } else {
3169                     /* A conditional trap falls through to the next insn.  */
3170                     gen_set_label(l1);
3171                     break;
3172                 }
3173             } else if (xop == 0x28) {
3174                 rs1 = GET_FIELD(insn, 13, 17);
3175                 switch(rs1) {
3176                 case 0: /* rdy */
3177 #ifndef TARGET_SPARC64
3178                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
3179                                        manual, rdy on the microSPARC
3180                                        II */
3181                 case 0x0f:          /* stbar in the SPARCv8 manual,
3182                                        rdy on the microSPARC II */
3183                 case 0x10 ... 0x1f: /* implementation-dependent in the
3184                                        SPARCv8 manual, rdy on the
3185                                        microSPARC II */
3186                     /* Read Asr17 */
3187                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3188                         TCGv t = gen_dest_gpr(dc, rd);
3189                         /* Read Asr17 for a Leon3 monoprocessor */
3190                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3191                         gen_store_gpr(dc, rd, t);
3192                         break;
3193                     }
3194 #endif
3195                     gen_store_gpr(dc, rd, cpu_y);
3196                     break;
3197 #ifdef TARGET_SPARC64
3198                 case 0x2: /* V9 rdccr */
3199                     update_psr(dc);
3200                     gen_helper_rdccr(cpu_dst, cpu_env);
3201                     gen_store_gpr(dc, rd, cpu_dst);
3202                     break;
3203                 case 0x3: /* V9 rdasi */
3204                     tcg_gen_movi_tl(cpu_dst, dc->asi);
3205                     gen_store_gpr(dc, rd, cpu_dst);
3206                     break;
3207                 case 0x4: /* V9 rdtick */
3208                     {
3209                         TCGv_ptr r_tickptr;
3210                         TCGv_i32 r_const;
3211 
3212                         r_tickptr = tcg_temp_new_ptr();
3213                         r_const = tcg_constant_i32(dc->mem_idx);
3214                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3215                                        offsetof(CPUSPARCState, tick));
3216                         if (translator_io_start(&dc->base)) {
3217                             dc->base.is_jmp = DISAS_EXIT;
3218                         }
3219                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3220                                                   r_const);
3221                         gen_store_gpr(dc, rd, cpu_dst);
3222                     }
3223                     break;
3224                 case 0x5: /* V9 rdpc */
3225                     {
3226                         TCGv t = gen_dest_gpr(dc, rd);
3227                         if (unlikely(AM_CHECK(dc))) {
3228                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3229                         } else {
3230                             tcg_gen_movi_tl(t, dc->pc);
3231                         }
3232                         gen_store_gpr(dc, rd, t);
3233                     }
3234                     break;
3235                 case 0x6: /* V9 rdfprs */
3236                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3237                     gen_store_gpr(dc, rd, cpu_dst);
3238                     break;
3239                 case 0xf: /* V9 membar */
3240                     break; /* no effect */
3241                 case 0x13: /* Graphics Status */
3242                     if (gen_trap_ifnofpu(dc)) {
3243                         goto jmp_insn;
3244                     }
3245                     gen_store_gpr(dc, rd, cpu_gsr);
3246                     break;
3247                 case 0x16: /* Softint */
3248                     tcg_gen_ld32s_tl(cpu_dst, cpu_env,
3249                                      offsetof(CPUSPARCState, softint));
3250                     gen_store_gpr(dc, rd, cpu_dst);
3251                     break;
3252                 case 0x17: /* Tick compare */
3253                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
3254                     break;
3255                 case 0x18: /* System tick */
3256                     {
3257                         TCGv_ptr r_tickptr;
3258                         TCGv_i32 r_const;
3259 
3260                         r_tickptr = tcg_temp_new_ptr();
3261                         r_const = tcg_constant_i32(dc->mem_idx);
3262                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3263                                        offsetof(CPUSPARCState, stick));
3264                         if (translator_io_start(&dc->base)) {
3265                             dc->base.is_jmp = DISAS_EXIT;
3266                         }
3267                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3268                                                   r_const);
3269                         gen_store_gpr(dc, rd, cpu_dst);
3270                     }
3271                     break;
3272                 case 0x19: /* System tick compare */
3273                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
3274                     break;
3275                 case 0x1a: /* UltraSPARC-T1 Strand status */
3276                     /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3277                      * this ASR as impl. dep
3278                      */
3279                     CHECK_IU_FEATURE(dc, HYPV);
3280                     {
3281                         TCGv t = gen_dest_gpr(dc, rd);
3282                         tcg_gen_movi_tl(t, 1UL);
3283                         gen_store_gpr(dc, rd, t);
3284                     }
3285                     break;
3286                 case 0x10: /* Performance Control */
3287                 case 0x11: /* Performance Instrumentation Counter */
3288                 case 0x12: /* Dispatch Control */
3289                 case 0x14: /* Softint set, WO */
3290                 case 0x15: /* Softint clear, WO */
3291 #endif
3292                 default:
3293                     goto illegal_insn;
3294                 }
3295 #if !defined(CONFIG_USER_ONLY)
3296             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3297 #ifndef TARGET_SPARC64
3298                 if (!supervisor(dc)) {
3299                     goto priv_insn;
3300                 }
3301                 update_psr(dc);
3302                 gen_helper_rdpsr(cpu_dst, cpu_env);
3303 #else
3304                 CHECK_IU_FEATURE(dc, HYPV);
3305                 if (!hypervisor(dc))
3306                     goto priv_insn;
3307                 rs1 = GET_FIELD(insn, 13, 17);
3308                 switch (rs1) {
3309                 case 0: // hpstate
3310                     tcg_gen_ld_i64(cpu_dst, cpu_env,
3311                                    offsetof(CPUSPARCState, hpstate));
3312                     break;
3313                 case 1: // htstate
3314                     // gen_op_rdhtstate();
3315                     break;
3316                 case 3: // hintp
3317                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3318                     break;
3319                 case 5: // htba
3320                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
3321                     break;
3322                 case 6: // hver
3323                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
3324                     break;
3325                 case 31: // hstick_cmpr
3326                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3327                     break;
3328                 default:
3329                     goto illegal_insn;
3330                 }
3331 #endif
3332                 gen_store_gpr(dc, rd, cpu_dst);
3333                 break;
3334             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3335                 if (!supervisor(dc)) {
3336                     goto priv_insn;
3337                 }
3338                 cpu_tmp0 = tcg_temp_new();
3339 #ifdef TARGET_SPARC64
3340                 rs1 = GET_FIELD(insn, 13, 17);
3341                 switch (rs1) {
3342                 case 0: // tpc
3343                     {
3344                         TCGv_ptr r_tsptr;
3345 
3346                         r_tsptr = tcg_temp_new_ptr();
3347                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3348                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3349                                       offsetof(trap_state, tpc));
3350                     }
3351                     break;
3352                 case 1: // tnpc
3353                     {
3354                         TCGv_ptr r_tsptr;
3355 
3356                         r_tsptr = tcg_temp_new_ptr();
3357                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3358                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3359                                       offsetof(trap_state, tnpc));
3360                     }
3361                     break;
3362                 case 2: // tstate
3363                     {
3364                         TCGv_ptr r_tsptr;
3365 
3366                         r_tsptr = tcg_temp_new_ptr();
3367                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3368                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3369                                       offsetof(trap_state, tstate));
3370                     }
3371                     break;
3372                 case 3: // tt
3373                     {
3374                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3375 
3376                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3377                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3378                                          offsetof(trap_state, tt));
3379                     }
3380                     break;
3381                 case 4: // tick
3382                     {
3383                         TCGv_ptr r_tickptr;
3384                         TCGv_i32 r_const;
3385 
3386                         r_tickptr = tcg_temp_new_ptr();
3387                         r_const = tcg_constant_i32(dc->mem_idx);
3388                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3389                                        offsetof(CPUSPARCState, tick));
3390                         if (translator_io_start(&dc->base)) {
3391                             dc->base.is_jmp = DISAS_EXIT;
3392                         }
3393                         gen_helper_tick_get_count(cpu_tmp0, cpu_env,
3394                                                   r_tickptr, r_const);
3395                     }
3396                     break;
3397                 case 5: // tba
3398                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3399                     break;
3400                 case 6: // pstate
3401                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3402                                      offsetof(CPUSPARCState, pstate));
3403                     break;
3404                 case 7: // tl
3405                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3406                                      offsetof(CPUSPARCState, tl));
3407                     break;
3408                 case 8: // pil
3409                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3410                                      offsetof(CPUSPARCState, psrpil));
3411                     break;
3412                 case 9: // cwp
3413                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
3414                     break;
3415                 case 10: // cansave
3416                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3417                                      offsetof(CPUSPARCState, cansave));
3418                     break;
3419                 case 11: // canrestore
3420                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3421                                      offsetof(CPUSPARCState, canrestore));
3422                     break;
3423                 case 12: // cleanwin
3424                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3425                                      offsetof(CPUSPARCState, cleanwin));
3426                     break;
3427                 case 13: // otherwin
3428                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3429                                      offsetof(CPUSPARCState, otherwin));
3430                     break;
3431                 case 14: // wstate
3432                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3433                                      offsetof(CPUSPARCState, wstate));
3434                     break;
3435                 case 16: // UA2005 gl
3436                     CHECK_IU_FEATURE(dc, GL);
3437                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3438                                      offsetof(CPUSPARCState, gl));
3439                     break;
3440                 case 26: // UA2005 strand status
3441                     CHECK_IU_FEATURE(dc, HYPV);
3442                     if (!hypervisor(dc))
3443                         goto priv_insn;
3444                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3445                     break;
3446                 case 31: // ver
3447                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3448                     break;
3449                 case 15: // fq
3450                 default:
3451                     goto illegal_insn;
3452                 }
3453 #else
3454                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3455 #endif
3456                 gen_store_gpr(dc, rd, cpu_tmp0);
3457                 break;
3458 #endif
3459 #if defined(TARGET_SPARC64) || !defined(CONFIG_USER_ONLY)
3460             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3461 #ifdef TARGET_SPARC64
3462                 gen_helper_flushw(cpu_env);
3463 #else
3464                 if (!supervisor(dc))
3465                     goto priv_insn;
3466                 gen_store_gpr(dc, rd, cpu_tbr);
3467 #endif
3468                 break;
3469 #endif
3470             } else if (xop == 0x34) {   /* FPU Operations */
3471                 if (gen_trap_ifnofpu(dc)) {
3472                     goto jmp_insn;
3473                 }
3474                 gen_op_clear_ieee_excp_and_FTT();
3475                 rs1 = GET_FIELD(insn, 13, 17);
3476                 rs2 = GET_FIELD(insn, 27, 31);
3477                 xop = GET_FIELD(insn, 18, 26);
3478 
3479                 switch (xop) {
3480                 case 0x1: /* fmovs */
3481                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3482                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3483                     break;
3484                 case 0x5: /* fnegs */
3485                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3486                     break;
3487                 case 0x9: /* fabss */
3488                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3489                     break;
3490                 case 0x29: /* fsqrts */
3491                     CHECK_FPU_FEATURE(dc, FSQRT);
3492                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3493                     break;
3494                 case 0x2a: /* fsqrtd */
3495                     CHECK_FPU_FEATURE(dc, FSQRT);
3496                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3497                     break;
3498                 case 0x2b: /* fsqrtq */
3499                     CHECK_FPU_FEATURE(dc, FLOAT128);
3500                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3501                     break;
3502                 case 0x41: /* fadds */
3503                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3504                     break;
3505                 case 0x42: /* faddd */
3506                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3507                     break;
3508                 case 0x43: /* faddq */
3509                     CHECK_FPU_FEATURE(dc, FLOAT128);
3510                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3511                     break;
3512                 case 0x45: /* fsubs */
3513                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3514                     break;
3515                 case 0x46: /* fsubd */
3516                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3517                     break;
3518                 case 0x47: /* fsubq */
3519                     CHECK_FPU_FEATURE(dc, FLOAT128);
3520                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3521                     break;
3522                 case 0x49: /* fmuls */
3523                     CHECK_FPU_FEATURE(dc, FMUL);
3524                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3525                     break;
3526                 case 0x4a: /* fmuld */
3527                     CHECK_FPU_FEATURE(dc, FMUL);
3528                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3529                     break;
3530                 case 0x4b: /* fmulq */
3531                     CHECK_FPU_FEATURE(dc, FLOAT128);
3532                     CHECK_FPU_FEATURE(dc, FMUL);
3533                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3534                     break;
3535                 case 0x4d: /* fdivs */
3536                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3537                     break;
3538                 case 0x4e: /* fdivd */
3539                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3540                     break;
3541                 case 0x4f: /* fdivq */
3542                     CHECK_FPU_FEATURE(dc, FLOAT128);
3543                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3544                     break;
3545                 case 0x69: /* fsmuld */
3546                     CHECK_FPU_FEATURE(dc, FSMULD);
3547                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3548                     break;
3549                 case 0x6e: /* fdmulq */
3550                     CHECK_FPU_FEATURE(dc, FLOAT128);
3551                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3552                     break;
3553                 case 0xc4: /* fitos */
3554                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3555                     break;
3556                 case 0xc6: /* fdtos */
3557                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3558                     break;
3559                 case 0xc7: /* fqtos */
3560                     CHECK_FPU_FEATURE(dc, FLOAT128);
3561                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3562                     break;
3563                 case 0xc8: /* fitod */
3564                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3565                     break;
3566                 case 0xc9: /* fstod */
3567                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3568                     break;
3569                 case 0xcb: /* fqtod */
3570                     CHECK_FPU_FEATURE(dc, FLOAT128);
3571                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3572                     break;
3573                 case 0xcc: /* fitoq */
3574                     CHECK_FPU_FEATURE(dc, FLOAT128);
3575                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3576                     break;
3577                 case 0xcd: /* fstoq */
3578                     CHECK_FPU_FEATURE(dc, FLOAT128);
3579                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3580                     break;
3581                 case 0xce: /* fdtoq */
3582                     CHECK_FPU_FEATURE(dc, FLOAT128);
3583                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3584                     break;
3585                 case 0xd1: /* fstoi */
3586                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3587                     break;
3588                 case 0xd2: /* fdtoi */
3589                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3590                     break;
3591                 case 0xd3: /* fqtoi */
3592                     CHECK_FPU_FEATURE(dc, FLOAT128);
3593                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3594                     break;
3595 #ifdef TARGET_SPARC64
3596                 case 0x2: /* V9 fmovd */
3597                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3598                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3599                     break;
3600                 case 0x3: /* V9 fmovq */
3601                     CHECK_FPU_FEATURE(dc, FLOAT128);
3602                     gen_move_Q(dc, rd, rs2);
3603                     break;
3604                 case 0x6: /* V9 fnegd */
3605                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3606                     break;
3607                 case 0x7: /* V9 fnegq */
3608                     CHECK_FPU_FEATURE(dc, FLOAT128);
3609                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3610                     break;
3611                 case 0xa: /* V9 fabsd */
3612                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3613                     break;
3614                 case 0xb: /* V9 fabsq */
3615                     CHECK_FPU_FEATURE(dc, FLOAT128);
3616                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3617                     break;
3618                 case 0x81: /* V9 fstox */
3619                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3620                     break;
3621                 case 0x82: /* V9 fdtox */
3622                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3623                     break;
3624                 case 0x83: /* V9 fqtox */
3625                     CHECK_FPU_FEATURE(dc, FLOAT128);
3626                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3627                     break;
3628                 case 0x84: /* V9 fxtos */
3629                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3630                     break;
3631                 case 0x88: /* V9 fxtod */
3632                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3633                     break;
3634                 case 0x8c: /* V9 fxtoq */
3635                     CHECK_FPU_FEATURE(dc, FLOAT128);
3636                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3637                     break;
3638 #endif
3639                 default:
3640                     goto illegal_insn;
3641                 }
3642             } else if (xop == 0x35) {   /* FPU Operations */
3643 #ifdef TARGET_SPARC64
3644                 int cond;
3645 #endif
3646                 if (gen_trap_ifnofpu(dc)) {
3647                     goto jmp_insn;
3648                 }
3649                 gen_op_clear_ieee_excp_and_FTT();
3650                 rs1 = GET_FIELD(insn, 13, 17);
3651                 rs2 = GET_FIELD(insn, 27, 31);
3652                 xop = GET_FIELD(insn, 18, 26);
3653 
3654 #ifdef TARGET_SPARC64
3655 #define FMOVR(sz)                                                  \
3656                 do {                                               \
3657                     DisasCompare cmp;                              \
3658                     cond = GET_FIELD_SP(insn, 10, 12);             \
3659                     cpu_src1 = get_src1(dc, insn);                 \
3660                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3661                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3662                 } while (0)
3663 
3664                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3665                     FMOVR(s);
3666                     break;
3667                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3668                     FMOVR(d);
3669                     break;
3670                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3671                     CHECK_FPU_FEATURE(dc, FLOAT128);
3672                     FMOVR(q);
3673                     break;
3674                 }
3675 #undef FMOVR
3676 #endif
3677                 switch (xop) {
3678 #ifdef TARGET_SPARC64
3679 #define FMOVCC(fcc, sz)                                                 \
3680                     do {                                                \
3681                         DisasCompare cmp;                               \
3682                         cond = GET_FIELD_SP(insn, 14, 17);              \
3683                         gen_fcompare(&cmp, fcc, cond);                  \
3684                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3685                     } while (0)
3686 
3687                     case 0x001: /* V9 fmovscc %fcc0 */
3688                         FMOVCC(0, s);
3689                         break;
3690                     case 0x002: /* V9 fmovdcc %fcc0 */
3691                         FMOVCC(0, d);
3692                         break;
3693                     case 0x003: /* V9 fmovqcc %fcc0 */
3694                         CHECK_FPU_FEATURE(dc, FLOAT128);
3695                         FMOVCC(0, q);
3696                         break;
3697                     case 0x041: /* V9 fmovscc %fcc1 */
3698                         FMOVCC(1, s);
3699                         break;
3700                     case 0x042: /* V9 fmovdcc %fcc1 */
3701                         FMOVCC(1, d);
3702                         break;
3703                     case 0x043: /* V9 fmovqcc %fcc1 */
3704                         CHECK_FPU_FEATURE(dc, FLOAT128);
3705                         FMOVCC(1, q);
3706                         break;
3707                     case 0x081: /* V9 fmovscc %fcc2 */
3708                         FMOVCC(2, s);
3709                         break;
3710                     case 0x082: /* V9 fmovdcc %fcc2 */
3711                         FMOVCC(2, d);
3712                         break;
3713                     case 0x083: /* V9 fmovqcc %fcc2 */
3714                         CHECK_FPU_FEATURE(dc, FLOAT128);
3715                         FMOVCC(2, q);
3716                         break;
3717                     case 0x0c1: /* V9 fmovscc %fcc3 */
3718                         FMOVCC(3, s);
3719                         break;
3720                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3721                         FMOVCC(3, d);
3722                         break;
3723                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3724                         CHECK_FPU_FEATURE(dc, FLOAT128);
3725                         FMOVCC(3, q);
3726                         break;
3727 #undef FMOVCC
3728 #define FMOVCC(xcc, sz)                                                 \
3729                     do {                                                \
3730                         DisasCompare cmp;                               \
3731                         cond = GET_FIELD_SP(insn, 14, 17);              \
3732                         gen_compare(&cmp, xcc, cond, dc);               \
3733                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3734                     } while (0)
3735 
3736                     case 0x101: /* V9 fmovscc %icc */
3737                         FMOVCC(0, s);
3738                         break;
3739                     case 0x102: /* V9 fmovdcc %icc */
3740                         FMOVCC(0, d);
3741                         break;
3742                     case 0x103: /* V9 fmovqcc %icc */
3743                         CHECK_FPU_FEATURE(dc, FLOAT128);
3744                         FMOVCC(0, q);
3745                         break;
3746                     case 0x181: /* V9 fmovscc %xcc */
3747                         FMOVCC(1, s);
3748                         break;
3749                     case 0x182: /* V9 fmovdcc %xcc */
3750                         FMOVCC(1, d);
3751                         break;
3752                     case 0x183: /* V9 fmovqcc %xcc */
3753                         CHECK_FPU_FEATURE(dc, FLOAT128);
3754                         FMOVCC(1, q);
3755                         break;
3756 #undef FMOVCC
3757 #endif
3758                     case 0x51: /* fcmps, V9 %fcc */
3759                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3760                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3761                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3762                         break;
3763                     case 0x52: /* fcmpd, V9 %fcc */
3764                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3765                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3766                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3767                         break;
3768                     case 0x53: /* fcmpq, V9 %fcc */
3769                         CHECK_FPU_FEATURE(dc, FLOAT128);
3770                         gen_op_load_fpr_QT0(QFPREG(rs1));
3771                         gen_op_load_fpr_QT1(QFPREG(rs2));
3772                         gen_op_fcmpq(rd & 3);
3773                         break;
3774                     case 0x55: /* fcmpes, V9 %fcc */
3775                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3776                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3777                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3778                         break;
3779                     case 0x56: /* fcmped, V9 %fcc */
3780                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3781                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3782                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3783                         break;
3784                     case 0x57: /* fcmpeq, V9 %fcc */
3785                         CHECK_FPU_FEATURE(dc, FLOAT128);
3786                         gen_op_load_fpr_QT0(QFPREG(rs1));
3787                         gen_op_load_fpr_QT1(QFPREG(rs2));
3788                         gen_op_fcmpeq(rd & 3);
3789                         break;
3790                     default:
3791                         goto illegal_insn;
3792                 }
3793             } else if (xop == 0x2) {
3794                 TCGv dst = gen_dest_gpr(dc, rd);
3795                 rs1 = GET_FIELD(insn, 13, 17);
3796                 if (rs1 == 0) {
3797                     /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3798                     if (IS_IMM) {       /* immediate */
3799                         simm = GET_FIELDs(insn, 19, 31);
3800                         tcg_gen_movi_tl(dst, simm);
3801                         gen_store_gpr(dc, rd, dst);
3802                     } else {            /* register */
3803                         rs2 = GET_FIELD(insn, 27, 31);
3804                         if (rs2 == 0) {
3805                             tcg_gen_movi_tl(dst, 0);
3806                             gen_store_gpr(dc, rd, dst);
3807                         } else {
3808                             cpu_src2 = gen_load_gpr(dc, rs2);
3809                             gen_store_gpr(dc, rd, cpu_src2);
3810                         }
3811                     }
3812                 } else {
3813                     cpu_src1 = get_src1(dc, insn);
3814                     if (IS_IMM) {       /* immediate */
3815                         simm = GET_FIELDs(insn, 19, 31);
3816                         tcg_gen_ori_tl(dst, cpu_src1, simm);
3817                         gen_store_gpr(dc, rd, dst);
3818                     } else {            /* register */
3819                         rs2 = GET_FIELD(insn, 27, 31);
3820                         if (rs2 == 0) {
3821                             /* mov shortcut:  or x, %g0, y -> mov x, y */
3822                             gen_store_gpr(dc, rd, cpu_src1);
3823                         } else {
3824                             cpu_src2 = gen_load_gpr(dc, rs2);
3825                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3826                             gen_store_gpr(dc, rd, dst);
3827                         }
3828                     }
3829                 }
3830 #ifdef TARGET_SPARC64
3831             } else if (xop == 0x25) { /* sll, V9 sllx */
3832                 cpu_src1 = get_src1(dc, insn);
3833                 if (IS_IMM) {   /* immediate */
3834                     simm = GET_FIELDs(insn, 20, 31);
3835                     if (insn & (1 << 12)) {
3836                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3837                     } else {
3838                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3839                     }
3840                 } else {                /* register */
3841                     rs2 = GET_FIELD(insn, 27, 31);
3842                     cpu_src2 = gen_load_gpr(dc, rs2);
3843                     cpu_tmp0 = tcg_temp_new();
3844                     if (insn & (1 << 12)) {
3845                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3846                     } else {
3847                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3848                     }
3849                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3850                 }
3851                 gen_store_gpr(dc, rd, cpu_dst);
3852             } else if (xop == 0x26) { /* srl, V9 srlx */
3853                 cpu_src1 = get_src1(dc, insn);
3854                 if (IS_IMM) {   /* immediate */
3855                     simm = GET_FIELDs(insn, 20, 31);
3856                     if (insn & (1 << 12)) {
3857                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3858                     } else {
3859                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3860                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3861                     }
3862                 } else {                /* register */
3863                     rs2 = GET_FIELD(insn, 27, 31);
3864                     cpu_src2 = gen_load_gpr(dc, rs2);
3865                     cpu_tmp0 = tcg_temp_new();
3866                     if (insn & (1 << 12)) {
3867                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3868                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3869                     } else {
3870                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3871                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3872                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3873                     }
3874                 }
3875                 gen_store_gpr(dc, rd, cpu_dst);
3876             } else if (xop == 0x27) { /* sra, V9 srax */
3877                 cpu_src1 = get_src1(dc, insn);
3878                 if (IS_IMM) {   /* immediate */
3879                     simm = GET_FIELDs(insn, 20, 31);
3880                     if (insn & (1 << 12)) {
3881                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3882                     } else {
3883                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3884                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3885                     }
3886                 } else {                /* register */
3887                     rs2 = GET_FIELD(insn, 27, 31);
3888                     cpu_src2 = gen_load_gpr(dc, rs2);
3889                     cpu_tmp0 = tcg_temp_new();
3890                     if (insn & (1 << 12)) {
3891                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3892                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3893                     } else {
3894                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3895                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3896                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3897                     }
3898                 }
3899                 gen_store_gpr(dc, rd, cpu_dst);
3900 #endif
3901             } else if (xop < 0x36) {
3902                 if (xop < 0x20) {
3903                     cpu_src1 = get_src1(dc, insn);
3904                     cpu_src2 = get_src2(dc, insn);
3905                     switch (xop & ~0x10) {
3906                     case 0x0: /* add */
3907                         if (xop & 0x10) {
3908                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3909                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3910                             dc->cc_op = CC_OP_ADD;
3911                         } else {
3912                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3913                         }
3914                         break;
3915                     case 0x1: /* and */
3916                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3917                         if (xop & 0x10) {
3918                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3919                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3920                             dc->cc_op = CC_OP_LOGIC;
3921                         }
3922                         break;
3923                     case 0x2: /* or */
3924                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3925                         if (xop & 0x10) {
3926                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3927                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3928                             dc->cc_op = CC_OP_LOGIC;
3929                         }
3930                         break;
3931                     case 0x3: /* xor */
3932                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3933                         if (xop & 0x10) {
3934                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3935                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3936                             dc->cc_op = CC_OP_LOGIC;
3937                         }
3938                         break;
3939                     case 0x4: /* sub */
3940                         if (xop & 0x10) {
3941                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3942                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3943                             dc->cc_op = CC_OP_SUB;
3944                         } else {
3945                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3946                         }
3947                         break;
3948                     case 0x5: /* andn */
3949                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3950                         if (xop & 0x10) {
3951                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3952                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3953                             dc->cc_op = CC_OP_LOGIC;
3954                         }
3955                         break;
3956                     case 0x6: /* orn */
3957                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3958                         if (xop & 0x10) {
3959                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3960                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3961                             dc->cc_op = CC_OP_LOGIC;
3962                         }
3963                         break;
3964                     case 0x7: /* xorn */
3965                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3966                         if (xop & 0x10) {
3967                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3968                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3969                             dc->cc_op = CC_OP_LOGIC;
3970                         }
3971                         break;
3972                     case 0x8: /* addx, V9 addc */
3973                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3974                                         (xop & 0x10));
3975                         break;
3976 #ifdef TARGET_SPARC64
3977                     case 0x9: /* V9 mulx */
3978                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3979                         break;
3980 #endif
3981                     case 0xa: /* umul */
3982                         CHECK_IU_FEATURE(dc, MUL);
3983                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3984                         if (xop & 0x10) {
3985                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3986                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3987                             dc->cc_op = CC_OP_LOGIC;
3988                         }
3989                         break;
3990                     case 0xb: /* smul */
3991                         CHECK_IU_FEATURE(dc, MUL);
3992                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3993                         if (xop & 0x10) {
3994                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3995                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3996                             dc->cc_op = CC_OP_LOGIC;
3997                         }
3998                         break;
3999                     case 0xc: /* subx, V9 subc */
4000                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4001                                         (xop & 0x10));
4002                         break;
4003 #ifdef TARGET_SPARC64
4004                     case 0xd: /* V9 udivx */
4005                         gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4006                         break;
4007 #endif
4008                     case 0xe: /* udiv */
4009                         CHECK_IU_FEATURE(dc, DIV);
4010                         if (xop & 0x10) {
4011                             gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
4012                                                cpu_src2);
4013                             dc->cc_op = CC_OP_DIV;
4014                         } else {
4015                             gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
4016                                             cpu_src2);
4017                         }
4018                         break;
4019                     case 0xf: /* sdiv */
4020                         CHECK_IU_FEATURE(dc, DIV);
4021                         if (xop & 0x10) {
4022                             gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
4023                                                cpu_src2);
4024                             dc->cc_op = CC_OP_DIV;
4025                         } else {
4026                             gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
4027                                             cpu_src2);
4028                         }
4029                         break;
4030                     default:
4031                         goto illegal_insn;
4032                     }
4033                     gen_store_gpr(dc, rd, cpu_dst);
4034                 } else {
4035                     cpu_src1 = get_src1(dc, insn);
4036                     cpu_src2 = get_src2(dc, insn);
4037                     switch (xop) {
4038                     case 0x20: /* taddcc */
4039                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4040                         gen_store_gpr(dc, rd, cpu_dst);
4041                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
4042                         dc->cc_op = CC_OP_TADD;
4043                         break;
4044                     case 0x21: /* tsubcc */
4045                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4046                         gen_store_gpr(dc, rd, cpu_dst);
4047                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
4048                         dc->cc_op = CC_OP_TSUB;
4049                         break;
4050                     case 0x22: /* taddcctv */
4051                         gen_helper_taddcctv(cpu_dst, cpu_env,
4052                                             cpu_src1, cpu_src2);
4053                         gen_store_gpr(dc, rd, cpu_dst);
4054                         dc->cc_op = CC_OP_TADDTV;
4055                         break;
4056                     case 0x23: /* tsubcctv */
4057                         gen_helper_tsubcctv(cpu_dst, cpu_env,
4058                                             cpu_src1, cpu_src2);
4059                         gen_store_gpr(dc, rd, cpu_dst);
4060                         dc->cc_op = CC_OP_TSUBTV;
4061                         break;
4062                     case 0x24: /* mulscc */
4063                         update_psr(dc);
4064                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
4065                         gen_store_gpr(dc, rd, cpu_dst);
4066                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4067                         dc->cc_op = CC_OP_ADD;
4068                         break;
4069 #ifndef TARGET_SPARC64
4070                     case 0x25:  /* sll */
4071                         if (IS_IMM) { /* immediate */
4072                             simm = GET_FIELDs(insn, 20, 31);
4073                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
4074                         } else { /* register */
4075                             cpu_tmp0 = tcg_temp_new();
4076                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4077                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
4078                         }
4079                         gen_store_gpr(dc, rd, cpu_dst);
4080                         break;
4081                     case 0x26:  /* srl */
4082                         if (IS_IMM) { /* immediate */
4083                             simm = GET_FIELDs(insn, 20, 31);
4084                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
4085                         } else { /* register */
4086                             cpu_tmp0 = tcg_temp_new();
4087                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4088                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
4089                         }
4090                         gen_store_gpr(dc, rd, cpu_dst);
4091                         break;
4092                     case 0x27:  /* sra */
4093                         if (IS_IMM) { /* immediate */
4094                             simm = GET_FIELDs(insn, 20, 31);
4095                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
4096                         } else { /* register */
4097                             cpu_tmp0 = tcg_temp_new();
4098                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4099                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
4100                         }
4101                         gen_store_gpr(dc, rd, cpu_dst);
4102                         break;
4103 #endif
4104                     case 0x30:
4105                         {
4106                             cpu_tmp0 = tcg_temp_new();
4107                             switch(rd) {
4108                             case 0: /* wry */
4109                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4110                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
4111                                 break;
4112 #ifndef TARGET_SPARC64
4113                             case 0x01 ... 0x0f: /* undefined in the
4114                                                    SPARCv8 manual, nop
4115                                                    on the microSPARC
4116                                                    II */
4117                             case 0x10 ... 0x1f: /* implementation-dependent
4118                                                    in the SPARCv8
4119                                                    manual, nop on the
4120                                                    microSPARC II */
4121                                 if ((rd == 0x13) && (dc->def->features &
4122                                                      CPU_FEATURE_POWERDOWN)) {
4123                                     /* LEON3 power-down */
4124                                     save_state(dc);
4125                                     gen_helper_power_down(cpu_env);
4126                                 }
4127                                 break;
4128 #else
4129                             case 0x2: /* V9 wrccr */
4130                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4131                                 gen_helper_wrccr(cpu_env, cpu_tmp0);
4132                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4133                                 dc->cc_op = CC_OP_FLAGS;
4134                                 break;
4135                             case 0x3: /* V9 wrasi */
4136                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4137                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
4138                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4139                                                 offsetof(CPUSPARCState, asi));
4140                                 /*
4141                                  * End TB to notice changed ASI.
4142                                  * TODO: Could notice src1 = %g0 and IS_IMM,
4143                                  * update DisasContext and not exit the TB.
4144                                  */
4145                                 save_state(dc);
4146                                 gen_op_next_insn();
4147                                 tcg_gen_lookup_and_goto_ptr();
4148                                 dc->base.is_jmp = DISAS_NORETURN;
4149                                 break;
4150                             case 0x6: /* V9 wrfprs */
4151                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4152                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
4153                                 dc->fprs_dirty = 0;
4154                                 save_state(dc);
4155                                 gen_op_next_insn();
4156                                 tcg_gen_exit_tb(NULL, 0);
4157                                 dc->base.is_jmp = DISAS_NORETURN;
4158                                 break;
4159                             case 0xf: /* V9 sir, nop if user */
4160 #if !defined(CONFIG_USER_ONLY)
4161                                 if (supervisor(dc)) {
4162                                     ; // XXX
4163                                 }
4164 #endif
4165                                 break;
4166                             case 0x13: /* Graphics Status */
4167                                 if (gen_trap_ifnofpu(dc)) {
4168                                     goto jmp_insn;
4169                                 }
4170                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
4171                                 break;
4172                             case 0x14: /* Softint set */
4173                                 if (!supervisor(dc))
4174                                     goto illegal_insn;
4175                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4176                                 gen_helper_set_softint(cpu_env, cpu_tmp0);
4177                                 break;
4178                             case 0x15: /* Softint clear */
4179                                 if (!supervisor(dc))
4180                                     goto illegal_insn;
4181                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4182                                 gen_helper_clear_softint(cpu_env, cpu_tmp0);
4183                                 break;
4184                             case 0x16: /* Softint write */
4185                                 if (!supervisor(dc))
4186                                     goto illegal_insn;
4187                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4188                                 gen_helper_write_softint(cpu_env, cpu_tmp0);
4189                                 break;
4190                             case 0x17: /* Tick compare */
4191 #if !defined(CONFIG_USER_ONLY)
4192                                 if (!supervisor(dc))
4193                                     goto illegal_insn;
4194 #endif
4195                                 {
4196                                     TCGv_ptr r_tickptr;
4197 
4198                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
4199                                                    cpu_src2);
4200                                     r_tickptr = tcg_temp_new_ptr();
4201                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4202                                                    offsetof(CPUSPARCState, tick));
4203                                     translator_io_start(&dc->base);
4204                                     gen_helper_tick_set_limit(r_tickptr,
4205                                                               cpu_tick_cmpr);
4206                                     /* End TB to handle timer interrupt */
4207                                     dc->base.is_jmp = DISAS_EXIT;
4208                                 }
4209                                 break;
4210                             case 0x18: /* System tick */
4211 #if !defined(CONFIG_USER_ONLY)
4212                                 if (!supervisor(dc))
4213                                     goto illegal_insn;
4214 #endif
4215                                 {
4216                                     TCGv_ptr r_tickptr;
4217 
4218                                     tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
4219                                                    cpu_src2);
4220                                     r_tickptr = tcg_temp_new_ptr();
4221                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4222                                                    offsetof(CPUSPARCState, stick));
4223                                     translator_io_start(&dc->base);
4224                                     gen_helper_tick_set_count(r_tickptr,
4225                                                               cpu_tmp0);
4226                                     /* End TB to handle timer interrupt */
4227                                     dc->base.is_jmp = DISAS_EXIT;
4228                                 }
4229                                 break;
4230                             case 0x19: /* System tick compare */
4231 #if !defined(CONFIG_USER_ONLY)
4232                                 if (!supervisor(dc))
4233                                     goto illegal_insn;
4234 #endif
4235                                 {
4236                                     TCGv_ptr r_tickptr;
4237 
4238                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
4239                                                    cpu_src2);
4240                                     r_tickptr = tcg_temp_new_ptr();
4241                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4242                                                    offsetof(CPUSPARCState, stick));
4243                                     translator_io_start(&dc->base);
4244                                     gen_helper_tick_set_limit(r_tickptr,
4245                                                               cpu_stick_cmpr);
4246                                     /* End TB to handle timer interrupt */
4247                                     dc->base.is_jmp = DISAS_EXIT;
4248                                 }
4249                                 break;
4250 
4251                             case 0x10: /* Performance Control */
4252                             case 0x11: /* Performance Instrumentation
4253                                           Counter */
4254                             case 0x12: /* Dispatch Control */
4255 #endif
4256                             default:
4257                                 goto illegal_insn;
4258                             }
4259                         }
4260                         break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            /* Privileged in both ISAs. */
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            /* V9 reuses this opcode for the window-management
                               instructions; rd selects which one. */
                            switch (rd) {
                            case 0:
                                gen_helper_saved(cpu_env);
                                break;
                            case 1:
                                gen_helper_restored(cpu_env);
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            /* Pre-V9 WRPSR: the written value is
                               rs1 XOR second-operand (wr semantics). */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(cpu_env, cpu_tmp0);
                            /* PSR now holds live condition codes. */
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            /* The helper may change CWP/PIL/ET, so end the
                               TB and return to the main loop. */
                            save_state(dc);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(NULL, 0);
                            dc->base.is_jmp = DISAS_NORETURN;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            /* Written value is rs1 XOR second-operand. */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            /* V9 wrpr: rd selects the privileged register.
                               tpc/tnpc/tstate/tt live in the trap_state
                               entry for the current trap level. */
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    /* 32-bit store: tt is a 32-bit field. */
                                    tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
                                                    offsetof(trap_state, tt));
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, tick));
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                /* wrpstate can change interrupt/trap state;
                                   make PC/NPC dynamic after the write. */
                                save_state(dc);
                                if (translator_io_start(&dc->base)) {
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                gen_helper_wrpstate(cpu_env, cpu_tmp0);
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 7: // tl
                                /* Changing TL switches the active trap-state
                                   entry; force dynamic NPC. */
                                save_state(dc);
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                               offsetof(CPUSPARCState, tl));
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 8: // pil
                                if (translator_io_start(&dc->base)) {
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                gen_helper_wrpil(cpu_env, cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(cpu_env, cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                gen_helper_wrgl(cpu_env, cpu_tmp0);
                                break;
                            case 26: // UA2005 strand status
                                /* Hyperprivileged, unlike the rest. */
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            /* Pre-V9 wrwim: mask off bits for windows the
                               CPU model does not implement. */
                            tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
                            if (dc->def->nwindows != 32) {
                                tcg_gen_andi_tl(cpu_wim, cpu_wim,
                                                (1 << dc->def->nwindows) - 1);
                            }
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            /* Pre-V9 wrtbr: supervisor-only, value is
                               rs1 XOR second-operand. */
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            /* UA2005 wrhpr: hyperprivileged registers,
                               selected by rd. */
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                tcg_gen_st_i64(cpu_tmp0, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        hpstate));
                                /* hpstate affects privilege/translation;
                                   end the TB and return to the main loop. */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(NULL, 0);
                                dc->base.is_jmp = DISAS_NORETURN;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, hstick));
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            DisasCompare cmp;
                            TCGv dst;

                            /* Bit 18 selects integer ccs vs floating ccs:
                               cc==0 -> icc, cc==2 -> xcc; otherwise one of
                               the four fcc fields. */
                            if (insn & (1 << 18)) {
                                if (cc == 0) {
                                    gen_compare(&cmp, 0, cond, dc);
                                } else if (cc == 2) {
                                    gen_compare(&cmp, 1, cond, dc);
                                } else {
                                    goto illegal_insn;
                                }
                            } else {
                                gen_fcompare(&cmp, cc, cond);
                            }

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 11-bit field we have
                               in movcc.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 10);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* Conditional move: keep old rd value when the
                               condition is false. */
                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        /* Helper so divide-by-zero can raise a trap. */
                        gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        /* Population count of rs2 only (rs1 unused). */
                        tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            /* Move on contents of integer register:
                               condition tests rs1 against zero. */
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            DisasCompare cmp;
                            TCGv dst;

                            gen_compare_reg(&cmp, cond, cpu_src1);

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 10-bit field we have
                               in movr.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 9);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
4551 #endif
4552                     default:
4553                         goto illegal_insn;
4554                     }
4555                 }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                /* All VIS ops trap if the FPU is disabled. */
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }

                /* The edge* family below varies only in the last three
                   gen_edge arguments: element width (8/16/32), whether
                   condition codes are set (cc vs VIS II 'n' forms), and
                   the little-endian 'l' variant. */
                switch (opf) {
                case 0x000: /* VIS I edge8cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x001: /* VIS II edge8n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x002: /* VIS I edge8lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x003: /* VIS II edge8ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x004: /* VIS I edge16cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x005: /* VIS II edge16n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x006: /* VIS I edge16lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x007: /* VIS II edge16ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x008: /* VIS I edge32cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x009: /* VIS II edge32n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00a: /* VIS I edge32lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00b: /* VIS II edge32ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    /* Same address computation as array8, scaled by the
                       element size (shift left 1). */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    /* array8 result scaled by 4-byte elements. */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x01a: /* VIS I alignaddrl */
                    /* Little-endian variant (last arg = 1). */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                    /* rd gets rs1+rs2; the same value is also deposited
                       into the upper 32 bits of GSR for later bshuffle. */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                /* The fcmp* family compares 64-bit FP-register operands
                   element-wise and writes a bitmask into integer rd. */
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                /* Naming of the gen_*_fop_* wrappers: D = 64-bit double
                   FP reg, F = 32-bit single; gsr_ variants also pass GSR;
                   ne_ variants do not.  The suffix letters give the
                   (dest, src...) operand widths. */
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
                    break;
                case 0x03a: /* VIS I fpack32 */
                    /* Pack ops consult the GSR scale/align fields. */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
                    break;
                case 0x03b: /* VIS I fpack16 */
                    /* 64-bit source packs down to a 32-bit result. */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03d: /* VIS I fpackfix */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03e: /* VIS I pdist */
                    /* DDDD: rd is both accumulator input and output. */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
                    break;
                case 0x048: /* VIS I faligndata */
                    /* Alignment amount comes from GSR (set by alignaddr). */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
                    break;
                case 0x04c: /* VIS II bshuffle */
                    /* Byte shuffle controlled by the GSR mask (bmask). */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
                    break;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
                    break;
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
                    break;
                case 0x053: /* VIS I fpadd32s */
                    /* Single 32-bit lane: plain i32 add suffices. */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
                    break;
                /* VIS logical ops: operands ignored where the result is
                   constant (fzero), mapped directly onto TCG i64/i32
                   logical ops otherwise. */
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
                    tcg_gen_movi_i64(cpu_dst_64, 0);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    tcg_gen_movi_i32(cpu_dst_32, 0);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
                    break;
                case 0x064: /* VIS I fandnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
                    break;
                case 0x065: /* VIS I fandnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
                    break;
                case 0x066: /* VIS I fnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
                    break;
                case 0x067: /* VIS I fnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
                    break;
                case 0x068: /* VIS I fandnot1 */
                    /* Same as fandnot2 with the sources swapped. */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
                    break;
                case 0x069: /* VIS I fandnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
                    break;
                case 0x06a: /* VIS I fnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
                    break;
                case 0x06b: /* VIS I fnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
                    break;
                case 0x06c: /* VIS I fxor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
                    break;
                case 0x06d: /* VIS I fxors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
                    break;
4909                 case 0x06e: /* VIS I fnand */
4910                     CHECK_FPU_FEATURE(dc, VIS1);
4911                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4912                     break;
4913                 case 0x06f: /* VIS I fnands */
4914                     CHECK_FPU_FEATURE(dc, VIS1);
4915                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4916                     break;
4917                 case 0x070: /* VIS I fand */
4918                     CHECK_FPU_FEATURE(dc, VIS1);
4919                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4920                     break;
4921                 case 0x071: /* VIS I fands */
4922                     CHECK_FPU_FEATURE(dc, VIS1);
4923                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4924                     break;
4925                 case 0x072: /* VIS I fxnor */
4926                     CHECK_FPU_FEATURE(dc, VIS1);
4927                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4928                     break;
4929                 case 0x073: /* VIS I fxnors */
4930                     CHECK_FPU_FEATURE(dc, VIS1);
4931                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4932                     break;
4933                 case 0x074: /* VIS I fsrc1 */
4934                     CHECK_FPU_FEATURE(dc, VIS1);
4935                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4936                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4937                     break;
4938                 case 0x075: /* VIS I fsrc1s */
4939                     CHECK_FPU_FEATURE(dc, VIS1);
4940                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4941                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4942                     break;
4943                 case 0x076: /* VIS I fornot2 */
4944                     CHECK_FPU_FEATURE(dc, VIS1);
4945                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4946                     break;
4947                 case 0x077: /* VIS I fornot2s */
4948                     CHECK_FPU_FEATURE(dc, VIS1);
4949                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4950                     break;
4951                 case 0x078: /* VIS I fsrc2 */
4952                     CHECK_FPU_FEATURE(dc, VIS1);
4953                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4954                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4955                     break;
4956                 case 0x079: /* VIS I fsrc2s */
4957                     CHECK_FPU_FEATURE(dc, VIS1);
4958                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4959                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4960                     break;
4961                 case 0x07a: /* VIS I fornot1 */
4962                     CHECK_FPU_FEATURE(dc, VIS1);
4963                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4964                     break;
4965                 case 0x07b: /* VIS I fornot1s */
4966                     CHECK_FPU_FEATURE(dc, VIS1);
4967                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4968                     break;
4969                 case 0x07c: /* VIS I for */
4970                     CHECK_FPU_FEATURE(dc, VIS1);
4971                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4972                     break;
4973                 case 0x07d: /* VIS I fors */
4974                     CHECK_FPU_FEATURE(dc, VIS1);
4975                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4976                     break;
4977                 case 0x07e: /* VIS I fone */
4978                     CHECK_FPU_FEATURE(dc, VIS1);
4979                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4980                     tcg_gen_movi_i64(cpu_dst_64, -1);
4981                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4982                     break;
4983                 case 0x07f: /* VIS I fones */
4984                     CHECK_FPU_FEATURE(dc, VIS1);
4985                     cpu_dst_32 = gen_dest_fpr_F(dc);
4986                     tcg_gen_movi_i32(cpu_dst_32, -1);
4987                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4988                     break;
4989                 case 0x080: /* VIS I shutdown */
4990                 case 0x081: /* VIS II siam */
4991                     // XXX
4992                     goto illegal_insn;
4993                 default:
4994                     goto illegal_insn;
4995                 }
4996 #else
4997                 goto ncp_insn;
4998 #endif
4999             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
5000 #ifdef TARGET_SPARC64
5001                 goto illegal_insn;
5002 #else
5003                 goto ncp_insn;
5004 #endif
5005 #ifdef TARGET_SPARC64
5006             } else if (xop == 0x39) { /* V9 return */
5007                 save_state(dc);
5008                 cpu_src1 = get_src1(dc, insn);
5009                 cpu_tmp0 = tcg_temp_new();
5010                 if (IS_IMM) {   /* immediate */
5011                     simm = GET_FIELDs(insn, 19, 31);
5012                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5013                 } else {                /* register */
5014                     rs2 = GET_FIELD(insn, 27, 31);
5015                     if (rs2) {
5016                         cpu_src2 = gen_load_gpr(dc, rs2);
5017                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5018                     } else {
5019                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5020                     }
5021                 }
5022                 gen_helper_restore(cpu_env);
5023                 gen_mov_pc_npc(dc);
5024                 gen_check_align(cpu_tmp0, 3);
5025                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5026                 dc->npc = DYNAMIC_PC_LOOKUP;
5027                 goto jmp_insn;
5028 #endif
5029             } else {
5030                 cpu_src1 = get_src1(dc, insn);
5031                 cpu_tmp0 = tcg_temp_new();
5032                 if (IS_IMM) {   /* immediate */
5033                     simm = GET_FIELDs(insn, 19, 31);
5034                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5035                 } else {                /* register */
5036                     rs2 = GET_FIELD(insn, 27, 31);
5037                     if (rs2) {
5038                         cpu_src2 = gen_load_gpr(dc, rs2);
5039                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5040                     } else {
5041                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5042                     }
5043                 }
5044                 switch (xop) {
5045                 case 0x38:      /* jmpl */
5046                     {
5047                         TCGv t = gen_dest_gpr(dc, rd);
5048                         tcg_gen_movi_tl(t, dc->pc);
5049                         gen_store_gpr(dc, rd, t);
5050 
5051                         gen_mov_pc_npc(dc);
5052                         gen_check_align(cpu_tmp0, 3);
5053                         gen_address_mask(dc, cpu_tmp0);
5054                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5055                         dc->npc = DYNAMIC_PC_LOOKUP;
5056                     }
5057                     goto jmp_insn;
5058 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5059                 case 0x39:      /* rett, V9 return */
5060                     {
5061                         if (!supervisor(dc))
5062                             goto priv_insn;
5063                         gen_mov_pc_npc(dc);
5064                         gen_check_align(cpu_tmp0, 3);
5065                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5066                         dc->npc = DYNAMIC_PC;
5067                         gen_helper_rett(cpu_env);
5068                     }
5069                     goto jmp_insn;
5070 #endif
5071                 case 0x3b: /* flush */
5072                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
5073                         goto unimp_flush;
5074                     /* nop */
5075                     break;
5076                 case 0x3c:      /* save */
5077                     gen_helper_save(cpu_env);
5078                     gen_store_gpr(dc, rd, cpu_tmp0);
5079                     break;
5080                 case 0x3d:      /* restore */
5081                     gen_helper_restore(cpu_env);
5082                     gen_store_gpr(dc, rd, cpu_tmp0);
5083                     break;
5084 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5085                 case 0x3e:      /* V9 done/retry */
5086                     {
5087                         switch (rd) {
5088                         case 0:
5089                             if (!supervisor(dc))
5090                                 goto priv_insn;
5091                             dc->npc = DYNAMIC_PC;
5092                             dc->pc = DYNAMIC_PC;
5093                             translator_io_start(&dc->base);
5094                             gen_helper_done(cpu_env);
5095                             goto jmp_insn;
5096                         case 1:
5097                             if (!supervisor(dc))
5098                                 goto priv_insn;
5099                             dc->npc = DYNAMIC_PC;
5100                             dc->pc = DYNAMIC_PC;
5101                             translator_io_start(&dc->base);
5102                             gen_helper_retry(cpu_env);
5103                             goto jmp_insn;
5104                         default:
5105                             goto illegal_insn;
5106                         }
5107                     }
5108                     break;
5109 #endif
5110                 default:
5111                     goto illegal_insn;
5112                 }
5113             }
5114             break;
5115         }
5116         break;
5117     case 3:                     /* load/store instructions */
5118         {
5119             unsigned int xop = GET_FIELD(insn, 7, 12);
5120             /* ??? gen_address_mask prevents us from using a source
5121                register directly.  Always generate a temporary.  */
5122             TCGv cpu_addr = tcg_temp_new();
5123 
5124             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5125             if (xop == 0x3c || xop == 0x3e) {
5126                 /* V9 casa/casxa : no offset */
5127             } else if (IS_IMM) {     /* immediate */
5128                 simm = GET_FIELDs(insn, 19, 31);
5129                 if (simm != 0) {
5130                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5131                 }
5132             } else {            /* register */
5133                 rs2 = GET_FIELD(insn, 27, 31);
5134                 if (rs2 != 0) {
5135                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5136                 }
5137             }
5138             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5139                 (xop > 0x17 && xop <= 0x1d ) ||
5140                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5141                 TCGv cpu_val = gen_dest_gpr(dc, rd);
5142 
5143                 switch (xop) {
5144                 case 0x0:       /* ld, V9 lduw, load unsigned word */
5145                     gen_address_mask(dc, cpu_addr);
5146                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5147                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5148                     break;
5149                 case 0x1:       /* ldub, load unsigned byte */
5150                     gen_address_mask(dc, cpu_addr);
5151                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5152                                        dc->mem_idx, MO_UB);
5153                     break;
5154                 case 0x2:       /* lduh, load unsigned halfword */
5155                     gen_address_mask(dc, cpu_addr);
5156                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5157                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5158                     break;
5159                 case 0x3:       /* ldd, load double word */
5160                     if (rd & 1)
5161                         goto illegal_insn;
5162                     else {
5163                         TCGv_i64 t64;
5164 
5165                         gen_address_mask(dc, cpu_addr);
5166                         t64 = tcg_temp_new_i64();
5167                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5168                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5169                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5170                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5171                         gen_store_gpr(dc, rd + 1, cpu_val);
5172                         tcg_gen_shri_i64(t64, t64, 32);
5173                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5174                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5175                     }
5176                     break;
5177                 case 0x9:       /* ldsb, load signed byte */
5178                     gen_address_mask(dc, cpu_addr);
5179                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr, dc->mem_idx, MO_SB);
5180                     break;
5181                 case 0xa:       /* ldsh, load signed halfword */
5182                     gen_address_mask(dc, cpu_addr);
5183                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5184                                        dc->mem_idx, MO_TESW | MO_ALIGN);
5185                     break;
5186                 case 0xd:       /* ldstub */
5187                     gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5188                     break;
5189                 case 0x0f:
5190                     /* swap, swap register with memory. Also atomically */
5191                     CHECK_IU_FEATURE(dc, SWAP);
5192                     cpu_src1 = gen_load_gpr(dc, rd);
5193                     gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5194                              dc->mem_idx, MO_TEUL);
5195                     break;
5196 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5197                 case 0x10:      /* lda, V9 lduwa, load word alternate */
5198                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5199                     break;
5200                 case 0x11:      /* lduba, load unsigned byte alternate */
5201                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5202                     break;
5203                 case 0x12:      /* lduha, load unsigned halfword alternate */
5204                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5205                     break;
5206                 case 0x13:      /* ldda, load double word alternate */
5207                     if (rd & 1) {
5208                         goto illegal_insn;
5209                     }
5210                     gen_ldda_asi(dc, cpu_addr, insn, rd);
5211                     goto skip_move;
5212                 case 0x19:      /* ldsba, load signed byte alternate */
5213                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5214                     break;
5215                 case 0x1a:      /* ldsha, load signed halfword alternate */
5216                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5217                     break;
5218                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
5219                     gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5220                     break;
5221                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
5222                                    atomically */
5223                     CHECK_IU_FEATURE(dc, SWAP);
5224                     cpu_src1 = gen_load_gpr(dc, rd);
5225                     gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5226                     break;
5227 
5228 #ifndef TARGET_SPARC64
5229                 case 0x30: /* ldc */
5230                 case 0x31: /* ldcsr */
5231                 case 0x33: /* lddc */
5232                     goto ncp_insn;
5233 #endif
5234 #endif
5235 #ifdef TARGET_SPARC64
5236                 case 0x08: /* V9 ldsw */
5237                     gen_address_mask(dc, cpu_addr);
5238                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5239                                        dc->mem_idx, MO_TESL | MO_ALIGN);
5240                     break;
5241                 case 0x0b: /* V9 ldx */
5242                     gen_address_mask(dc, cpu_addr);
5243                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5244                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5245                     break;
5246                 case 0x18: /* V9 ldswa */
5247                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5248                     break;
5249                 case 0x1b: /* V9 ldxa */
5250                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5251                     break;
5252                 case 0x2d: /* V9 prefetch, no effect */
5253                     goto skip_move;
5254                 case 0x30: /* V9 ldfa */
5255                     if (gen_trap_ifnofpu(dc)) {
5256                         goto jmp_insn;
5257                     }
5258                     gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5259                     gen_update_fprs_dirty(dc, rd);
5260                     goto skip_move;
5261                 case 0x33: /* V9 lddfa */
5262                     if (gen_trap_ifnofpu(dc)) {
5263                         goto jmp_insn;
5264                     }
5265                     gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5266                     gen_update_fprs_dirty(dc, DFPREG(rd));
5267                     goto skip_move;
5268                 case 0x3d: /* V9 prefetcha, no effect */
5269                     goto skip_move;
5270                 case 0x32: /* V9 ldqfa */
5271                     CHECK_FPU_FEATURE(dc, FLOAT128);
5272                     if (gen_trap_ifnofpu(dc)) {
5273                         goto jmp_insn;
5274                     }
5275                     gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5276                     gen_update_fprs_dirty(dc, QFPREG(rd));
5277                     goto skip_move;
5278 #endif
5279                 default:
5280                     goto illegal_insn;
5281                 }
5282                 gen_store_gpr(dc, rd, cpu_val);
5283 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5284             skip_move: ;
5285 #endif
5286             } else if (xop >= 0x20 && xop < 0x24) {
5287                 if (gen_trap_ifnofpu(dc)) {
5288                     goto jmp_insn;
5289                 }
5290                 switch (xop) {
5291                 case 0x20:      /* ldf, load fpreg */
5292                     gen_address_mask(dc, cpu_addr);
5293                     cpu_dst_32 = gen_dest_fpr_F(dc);
5294                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5295                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5296                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5297                     break;
5298                 case 0x21:      /* ldfsr, V9 ldxfsr */
5299 #ifdef TARGET_SPARC64
5300                     gen_address_mask(dc, cpu_addr);
5301                     if (rd == 1) {
5302                         TCGv_i64 t64 = tcg_temp_new_i64();
5303                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5304                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5305                         gen_helper_ldxfsr(cpu_fsr, cpu_env, cpu_fsr, t64);
5306                         break;
5307                     }
5308 #endif
5309                     cpu_dst_32 = tcg_temp_new_i32();
5310                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5311                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5312                     gen_helper_ldfsr(cpu_fsr, cpu_env, cpu_fsr, cpu_dst_32);
5313                     break;
5314                 case 0x22:      /* ldqf, load quad fpreg */
5315                     CHECK_FPU_FEATURE(dc, FLOAT128);
5316                     gen_address_mask(dc, cpu_addr);
5317                     cpu_src1_64 = tcg_temp_new_i64();
5318                     tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5319                                         MO_TEUQ | MO_ALIGN_4);
5320                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5321                     cpu_src2_64 = tcg_temp_new_i64();
5322                     tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5323                                         MO_TEUQ | MO_ALIGN_4);
5324                     gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5325                     break;
5326                 case 0x23:      /* lddf, load double fpreg */
5327                     gen_address_mask(dc, cpu_addr);
5328                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5329                     tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5330                                         MO_TEUQ | MO_ALIGN_4);
5331                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5332                     break;
5333                 default:
5334                     goto illegal_insn;
5335                 }
5336             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5337                        xop == 0xe || xop == 0x1e) {
5338                 TCGv cpu_val = gen_load_gpr(dc, rd);
5339 
5340                 switch (xop) {
5341                 case 0x4: /* st, store word */
5342                     gen_address_mask(dc, cpu_addr);
5343                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5344                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5345                     break;
5346                 case 0x5: /* stb, store byte */
5347                     gen_address_mask(dc, cpu_addr);
5348                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr, dc->mem_idx, MO_UB);
5349                     break;
5350                 case 0x6: /* sth, store halfword */
5351                     gen_address_mask(dc, cpu_addr);
5352                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5353                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5354                     break;
5355                 case 0x7: /* std, store double word */
5356                     if (rd & 1)
5357                         goto illegal_insn;
5358                     else {
5359                         TCGv_i64 t64;
5360                         TCGv lo;
5361 
5362                         gen_address_mask(dc, cpu_addr);
5363                         lo = gen_load_gpr(dc, rd + 1);
5364                         t64 = tcg_temp_new_i64();
5365                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5366                         tcg_gen_qemu_st_i64(t64, cpu_addr,
5367                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5368                     }
5369                     break;
5370 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5371                 case 0x14: /* sta, V9 stwa, store word alternate */
5372                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5373                     break;
5374                 case 0x15: /* stba, store byte alternate */
5375                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5376                     break;
5377                 case 0x16: /* stha, store halfword alternate */
5378                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5379                     break;
5380                 case 0x17: /* stda, store double word alternate */
5381                     if (rd & 1) {
5382                         goto illegal_insn;
5383                     }
5384                     gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5385                     break;
5386 #endif
5387 #ifdef TARGET_SPARC64
5388                 case 0x0e: /* V9 stx */
5389                     gen_address_mask(dc, cpu_addr);
5390                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5391                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5392                     break;
5393                 case 0x1e: /* V9 stxa */
5394                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5395                     break;
5396 #endif
5397                 default:
5398                     goto illegal_insn;
5399                 }
5400             } else if (xop > 0x23 && xop < 0x28) {
5401                 if (gen_trap_ifnofpu(dc)) {
5402                     goto jmp_insn;
5403                 }
5404                 switch (xop) {
5405                 case 0x24: /* stf, store fpreg */
5406                     gen_address_mask(dc, cpu_addr);
5407                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
5408                     tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5409                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5410                     break;
5411                 case 0x25: /* stfsr, V9 stxfsr */
5412                     {
5413 #ifdef TARGET_SPARC64
5414                         gen_address_mask(dc, cpu_addr);
5415                         if (rd == 1) {
5416                             tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5417                                                dc->mem_idx, MO_TEUQ | MO_ALIGN);
5418                             break;
5419                         }
5420 #endif
5421                         tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5422                                            dc->mem_idx, MO_TEUL | MO_ALIGN);
5423                     }
5424                     break;
5425                 case 0x26:
5426 #ifdef TARGET_SPARC64
5427                     /* V9 stqf, store quad fpreg */
5428                     CHECK_FPU_FEATURE(dc, FLOAT128);
5429                     gen_address_mask(dc, cpu_addr);
5430                     /* ??? While stqf only requires 4-byte alignment, it is
5431                        legal for the cpu to signal the unaligned exception.
5432                        The OS trap handler is then required to fix it up.
5433                        For qemu, this avoids having to probe the second page
5434                        before performing the first write.  */
5435                     cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5436                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5437                                         dc->mem_idx, MO_TEUQ | MO_ALIGN_16);
5438                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5439                     cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5440                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5441                                         dc->mem_idx, MO_TEUQ);
5442                     break;
5443 #else /* !TARGET_SPARC64 */
5444                     /* stdfq, store floating point queue */
5445 #if defined(CONFIG_USER_ONLY)
5446                     goto illegal_insn;
5447 #else
5448                     if (!supervisor(dc))
5449                         goto priv_insn;
5450                     if (gen_trap_ifnofpu(dc)) {
5451                         goto jmp_insn;
5452                     }
5453                     goto nfq_insn;
5454 #endif
5455 #endif
5456                 case 0x27: /* stdf, store double fpreg */
5457                     gen_address_mask(dc, cpu_addr);
5458                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5459                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5460                                         MO_TEUQ | MO_ALIGN_4);
5461                     break;
5462                 default:
5463                     goto illegal_insn;
5464                 }
5465             } else if (xop > 0x33 && xop < 0x3f) {
5466                 switch (xop) {
5467 #ifdef TARGET_SPARC64
5468                 case 0x34: /* V9 stfa */
5469                     if (gen_trap_ifnofpu(dc)) {
5470                         goto jmp_insn;
5471                     }
5472                     gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5473                     break;
5474                 case 0x36: /* V9 stqfa */
5475                     {
5476                         CHECK_FPU_FEATURE(dc, FLOAT128);
5477                         if (gen_trap_ifnofpu(dc)) {
5478                             goto jmp_insn;
5479                         }
5480                         gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5481                     }
5482                     break;
5483                 case 0x37: /* V9 stdfa */
5484                     if (gen_trap_ifnofpu(dc)) {
5485                         goto jmp_insn;
5486                     }
5487                     gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5488                     break;
5489                 case 0x3e: /* V9 casxa */
5490                     rs2 = GET_FIELD(insn, 27, 31);
5491                     cpu_src2 = gen_load_gpr(dc, rs2);
5492                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5493                     break;
5494 #else
5495                 case 0x34: /* stc */
5496                 case 0x35: /* stcsr */
5497                 case 0x36: /* stdcq */
5498                 case 0x37: /* stdc */
5499                     goto ncp_insn;
5500 #endif
5501 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5502                 case 0x3c: /* V9 or LEON3 casa */
5503 #ifndef TARGET_SPARC64
5504                     CHECK_IU_FEATURE(dc, CASA);
5505 #endif
5506                     rs2 = GET_FIELD(insn, 27, 31);
5507                     cpu_src2 = gen_load_gpr(dc, rs2);
5508                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5509                     break;
5510 #endif
5511                 default:
5512                     goto illegal_insn;
5513                 }
5514             } else {
5515                 goto illegal_insn;
5516             }
5517         }
5518         break;
5519     }
5520     /* default case for non jump instructions */
5521     if (dc->npc & 3) {
5522         switch (dc->npc) {
5523         case DYNAMIC_PC:
5524         case DYNAMIC_PC_LOOKUP:
5525             dc->pc = dc->npc;
5526             gen_op_next_insn();
5527             break;
5528         case JUMP_PC:
5529             /* we can do a static jump */
5530             gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5531             dc->base.is_jmp = DISAS_NORETURN;
5532             break;
5533         default:
5534             g_assert_not_reached();
5535         }
5536     } else {
5537         dc->pc = dc->npc;
5538         dc->npc = dc->npc + 4;
5539     }
5540  jmp_insn:
5541     return;
5542  illegal_insn:
5543     gen_exception(dc, TT_ILL_INSN);
5544     return;
5545  unimp_flush:
5546     gen_exception(dc, TT_UNIMP_FLUSH);
5547     return;
5548 #if !defined(CONFIG_USER_ONLY)
5549  priv_insn:
5550     gen_exception(dc, TT_PRIV_INSN);
5551     return;
5552 #endif
5553  nfpu_insn:
5554     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5555     return;
5556 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5557  nfq_insn:
5558     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5559     return;
5560 #endif
5561 #ifndef TARGET_SPARC64
5562  ncp_insn:
5563     gen_exception(dc, TT_NCP_INSN);
5564     return;
5565 #endif
5566 }
5567 
5568 static void sparc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
5569 {
5570     DisasContext *dc = container_of(dcbase, DisasContext, base);
5571     CPUSPARCState *env = cs->env_ptr;
5572     int bound;
5573 
5574     dc->pc = dc->base.pc_first;
5575     dc->npc = (target_ulong)dc->base.tb->cs_base;
5576     dc->cc_op = CC_OP_DYNAMIC;
5577     dc->mem_idx = dc->base.tb->flags & TB_FLAG_MMU_MASK;
5578     dc->def = &env->def;
5579     dc->fpu_enabled = tb_fpu_enabled(dc->base.tb->flags);
5580     dc->address_mask_32bit = tb_am_enabled(dc->base.tb->flags);
5581 #ifndef CONFIG_USER_ONLY
5582     dc->supervisor = (dc->base.tb->flags & TB_FLAG_SUPER) != 0;
5583 #endif
5584 #ifdef TARGET_SPARC64
5585     dc->fprs_dirty = 0;
5586     dc->asi = (dc->base.tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
5587 #ifndef CONFIG_USER_ONLY
5588     dc->hypervisor = (dc->base.tb->flags & TB_FLAG_HYPER) != 0;
5589 #endif
5590 #endif
5591     /*
5592      * if we reach a page boundary, we stop generation so that the
5593      * PC of a TT_TFAULT exception is always in the right page
5594      */
5595     bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
5596     dc->base.max_insns = MIN(dc->base.max_insns, bound);
5597 }
5598 
/* No per-TB setup is needed for SPARC; this hook is an intentional no-op. */
static void sparc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}
5602 
5603 static void sparc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
5604 {
5605     DisasContext *dc = container_of(dcbase, DisasContext, base);
5606     target_ulong npc = dc->npc;
5607 
5608     if (npc & 3) {
5609         switch (npc) {
5610         case JUMP_PC:
5611             assert(dc->jump_pc[1] == dc->pc + 4);
5612             npc = dc->jump_pc[0] | JUMP_PC;
5613             break;
5614         case DYNAMIC_PC:
5615         case DYNAMIC_PC_LOOKUP:
5616             npc = DYNAMIC_PC;
5617             break;
5618         default:
5619             g_assert_not_reached();
5620         }
5621     }
5622     tcg_gen_insn_start(dc->pc, npc);
5623 }
5624 
5625 static void sparc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
5626 {
5627     DisasContext *dc = container_of(dcbase, DisasContext, base);
5628     CPUSPARCState *env = cs->env_ptr;
5629     unsigned int insn;
5630 
5631     insn = translator_ldl(env, &dc->base, dc->pc);
5632     dc->base.pc_next += 4;
5633     disas_sparc_insn(dc, insn);
5634 
5635     if (dc->base.is_jmp == DISAS_NORETURN) {
5636         return;
5637     }
5638     if (dc->pc != dc->base.pc_next) {
5639         dc->base.is_jmp = DISAS_TOO_MANY;
5640     }
5641 }
5642 
5643 static void sparc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
5644 {
5645     DisasContext *dc = container_of(dcbase, DisasContext, base);
5646     bool may_lookup;
5647 
5648     switch (dc->base.is_jmp) {
5649     case DISAS_NEXT:
5650     case DISAS_TOO_MANY:
5651         if (((dc->pc | dc->npc) & 3) == 0) {
5652             /* static PC and NPC: we can use direct chaining */
5653             gen_goto_tb(dc, 0, dc->pc, dc->npc);
5654             break;
5655         }
5656 
5657         if (dc->pc & 3) {
5658             switch (dc->pc) {
5659             case DYNAMIC_PC_LOOKUP:
5660                 may_lookup = true;
5661                 break;
5662             case DYNAMIC_PC:
5663                 may_lookup = false;
5664                 break;
5665             default:
5666                 g_assert_not_reached();
5667             }
5668         } else {
5669             tcg_gen_movi_tl(cpu_pc, dc->pc);
5670             may_lookup = true;
5671         }
5672 
5673         save_npc(dc);
5674         if (may_lookup) {
5675             tcg_gen_lookup_and_goto_ptr();
5676         } else {
5677             tcg_gen_exit_tb(NULL, 0);
5678         }
5679         break;
5680 
5681     case DISAS_NORETURN:
5682        break;
5683 
5684     case DISAS_EXIT:
5685         /* Exit TB */
5686         save_state(dc);
5687         tcg_gen_exit_tb(NULL, 0);
5688         break;
5689 
5690     default:
5691         g_assert_not_reached();
5692     }
5693 }
5694 
5695 static void sparc_tr_disas_log(const DisasContextBase *dcbase,
5696                                CPUState *cpu, FILE *logfile)
5697 {
5698     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
5699     target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
5700 }
5701 
/* Hooks handed to the generic translator loop for the SPARC front end. */
static const TranslatorOps sparc_tr_ops = {
    .init_disas_context = sparc_tr_init_disas_context,
    .tb_start           = sparc_tr_tb_start,
    .insn_start         = sparc_tr_insn_start,
    .translate_insn     = sparc_tr_translate_insn,
    .tb_stop            = sparc_tr_tb_stop,
    .disas_log          = sparc_tr_disas_log,
};
5710 
5711 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
5712                            target_ulong pc, void *host_pc)
5713 {
5714     DisasContext dc = {};
5715 
5716     translator_loop(cs, tb, max_insns, pc, host_pc, &sparc_tr_ops, &dc.base);
5717 }
5718 
5719 void sparc_tcg_init(void)
5720 {
5721     static const char gregnames[32][4] = {
5722         "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
5723         "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
5724         "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
5725         "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
5726     };
5727     static const char fregnames[32][4] = {
5728         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5729         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5730         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5731         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5732     };
5733 
5734     static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
5735 #ifdef TARGET_SPARC64
5736         { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
5737         { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
5738 #else
5739         { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
5740 #endif
5741         { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
5742         { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
5743     };
5744 
5745     static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
5746 #ifdef TARGET_SPARC64
5747         { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
5748         { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
5749         { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
5750         { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
5751           "hstick_cmpr" },
5752         { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
5753         { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
5754         { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
5755         { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
5756         { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
5757 #endif
5758         { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
5759         { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
5760         { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
5761         { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
5762         { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
5763         { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
5764         { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
5765         { &cpu_y, offsetof(CPUSPARCState, y), "y" },
5766 #ifndef CONFIG_USER_ONLY
5767         { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
5768 #endif
5769     };
5770 
5771     unsigned int i;
5772 
5773     cpu_regwptr = tcg_global_mem_new_ptr(cpu_env,
5774                                          offsetof(CPUSPARCState, regwptr),
5775                                          "regwptr");
5776 
5777     for (i = 0; i < ARRAY_SIZE(r32); ++i) {
5778         *r32[i].ptr = tcg_global_mem_new_i32(cpu_env, r32[i].off, r32[i].name);
5779     }
5780 
5781     for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
5782         *rtl[i].ptr = tcg_global_mem_new(cpu_env, rtl[i].off, rtl[i].name);
5783     }
5784 
5785     cpu_regs[0] = NULL;
5786     for (i = 1; i < 8; ++i) {
5787         cpu_regs[i] = tcg_global_mem_new(cpu_env,
5788                                          offsetof(CPUSPARCState, gregs[i]),
5789                                          gregnames[i]);
5790     }
5791 
5792     for (i = 8; i < 32; ++i) {
5793         cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
5794                                          (i - 8) * sizeof(target_ulong),
5795                                          gregnames[i]);
5796     }
5797 
5798     for (i = 0; i < TARGET_DPREGS; i++) {
5799         cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
5800                                             offsetof(CPUSPARCState, fpr[i]),
5801                                             fregnames[i]);
5802     }
5803 }
5804 
5805 void sparc_restore_state_to_opc(CPUState *cs,
5806                                 const TranslationBlock *tb,
5807                                 const uint64_t *data)
5808 {
5809     SPARCCPU *cpu = SPARC_CPU(cs);
5810     CPUSPARCState *env = &cpu->env;
5811     target_ulong pc = data[0];
5812     target_ulong npc = data[1];
5813 
5814     env->pc = pc;
5815     if (npc == DYNAMIC_PC) {
5816         /* dynamic NPC: already stored */
5817     } else if (npc & JUMP_PC) {
5818         /* jump PC: use 'cond' and the jump targets of the translation */
5819         if (env->cond) {
5820             env->npc = npc & ~3;
5821         } else {
5822             env->npc = pc + 4;
5823         }
5824     } else {
5825         env->npc = npc;
5826     }
5827 }
5828