xref: /openbmc/qemu/target/sparc/translate.c (revision 186e78905a4fe69b62598af73903c6cf4df1f24f)
1 /*
2    SPARC translation
3 
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6 
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2.1 of the License, or (at your option) any later version.
11 
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16 
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg/tcg-op.h"
28 
29 #include "exec/helper-gen.h"
30 
31 #include "exec/translator.h"
32 #include "exec/log.h"
33 #include "asi.h"
34 
35 #define HELPER_H "helper.h"
36 #include "exec/helper-info.c.inc"
37 #undef  HELPER_H
38 
39 /* Dynamic PC, must exit to main loop. */
40 #define DYNAMIC_PC         1
41 /* Dynamic PC, one of two values according to jump_pc[T2]. */
42 #define JUMP_PC            2
43 /* Dynamic PC, may lookup next TB. */
44 #define DYNAMIC_PC_LOOKUP  3
45 
46 #define DISAS_EXIT  DISAS_TARGET_0
47 
48 /* global register indexes */
49 static TCGv_ptr cpu_regwptr;
50 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
51 static TCGv_i32 cpu_cc_op;
52 static TCGv_i32 cpu_psr;
53 static TCGv cpu_fsr, cpu_pc, cpu_npc;
54 static TCGv cpu_regs[32];
55 static TCGv cpu_y;
56 #ifndef CONFIG_USER_ONLY
57 static TCGv cpu_tbr;
58 #endif
59 static TCGv cpu_cond;
60 #ifdef TARGET_SPARC64
61 static TCGv_i32 cpu_xcc, cpu_fprs;
62 static TCGv cpu_gsr;
63 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
64 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
65 #else
66 static TCGv cpu_wim;
67 #endif
68 /* Floating point registers */
69 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
70 
71 typedef struct DisasDelayException {
72     struct DisasDelayException *next;
73     TCGLabel *lab;
74     TCGv_i32 excp;
75     /* Saved state at parent insn. */
76     target_ulong pc;
77     target_ulong npc;
78 } DisasDelayException;
79 
80 typedef struct DisasContext {
81     DisasContextBase base;
82     target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
83     target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
84     target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
85     int mem_idx;
86     bool fpu_enabled;
87     bool address_mask_32bit;
88 #ifndef CONFIG_USER_ONLY
89     bool supervisor;
90 #ifdef TARGET_SPARC64
91     bool hypervisor;
92 #endif
93 #endif
94 
95     uint32_t cc_op;  /* current CC operation */
96     sparc_def_t *def;
97 #ifdef TARGET_SPARC64
98     int fprs_dirty;
99     int asi;
100 #endif
101     DisasDelayException *delay_excp_list;
102 } DisasContext;
103 
104 typedef struct {
105     TCGCond cond;
106     bool is_bool;
107     TCGv c1, c2;
108 } DisasCompare;
109 
110 // This function uses non-native bit order
111 #define GET_FIELD(X, FROM, TO)                                  \
112     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
113 
114 // This function uses the order in the manuals, i.e. bit 0 is 2^0
115 #define GET_FIELD_SP(X, FROM, TO)               \
116     GET_FIELD(X, 31 - (TO), 31 - (FROM))
117 
118 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
119 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
120 
121 #ifdef TARGET_SPARC64
122 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
123 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
124 #else
125 #define DFPREG(r) (r & 0x1e)
126 #define QFPREG(r) (r & 0x1c)
127 #endif
128 
129 #define UA2005_HTRAP_MASK 0xff
130 #define V8_TRAP_MASK 0x7f
131 
/*
 * Sign-extend the low LEN bits of X to a full 32-bit signed value,
 * e.g. sign_extend(0xff, 8) == -1.  The field is moved to the top of
 * the word in unsigned arithmetic (left-shifting a negative or
 * overflowing signed value is undefined behavior) and then shifted
 * back arithmetically.
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (int)((unsigned)x << len) >> len;
}
137 
138 #define IS_IMM (insn & (1<<13))
139 
/*
 * Mark the half of the FP register file containing register RD as
 * dirty in FPRS (sparc64 only; compiled out elsewhere).
 */
static void gen_update_fprs_dirty(DisasContext *dc, int rd)
{
#if defined(TARGET_SPARC64)
    /* Bit 1 covers %f0..%f31, bit 2 covers %f32 and up.  */
    int bit = (rd < 32) ? 1 : 2;
    /* If we know we've already set this bit within the TB,
       we can avoid setting it again.  */
    if (!(dc->fprs_dirty & bit)) {
        dc->fprs_dirty |= bit;
        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
    }
#endif
}
152 
/* Floating point register moves.
   Each TCGv_i64 in cpu_fpr[] packs a pair of 32-bit float registers:
   the even-numbered register in the high 32 bits, the odd-numbered
   register in the low 32 bits.  */

/* Return a new i32 temp holding single-precision register SRC. */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
    TCGv_i32 ret = tcg_temp_new_i32();
    if (src & 1) {
        /* Odd register: low half of the i64 pair.  */
        tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
    } else {
        /* Even register: high half of the i64 pair.  */
        tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
    }
    return ret;
}

/* Store V to single-precision register DST, preserving its pair half. */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
    TCGv_i64 t = tcg_temp_new_i64();

    tcg_gen_extu_i32_i64(t, v);
    /* Overwrite only the 32-bit half that belongs to DST.  */
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
    gen_update_fprs_dirty(dc, dst);
}

/* Return a fresh i32 temp to serve as a single-precision destination. */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return tcg_temp_new_i32();
}

/* Return the i64 global backing double-precision register SRC. */
static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}

/* Store V to double-precision register DST and mark FPRS dirty. */
static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dc, dst);
}

/* Return the i64 global to write for double-precision destination DST.
   Callers are expected to mark FPRS dirty themselves afterwards.  */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
197 
/* Copy the quad-precision register pair starting at SRC into the
   env->qt0 staging area used by quad-FP helper calls. */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

/* As above, but into the env->qt1 staging area (second operand). */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], tcg_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], tcg_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}

/* Copy the helper result from env->qt0 back into register pair DST. */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

/* Store the two i64 halves V1/V2 to quad register DST and mark dirty. */
static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
                            TCGv_i64 v1, TCGv_i64 v2)
{
    dst = QFPREG(dst);

    tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
    tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
    gen_update_fprs_dirty(dc, dst);
}
231 
#ifdef TARGET_SPARC64
/* Return the i64 global holding the upper half of quad register SRC. */
static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2];
}

/* Return the i64 global holding the lower half of quad register SRC. */
static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2 + 1];
}

/* Copy quad register RS to quad register RD (both i64 halves). */
static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(dc, rd);
}
#endif
255 
256 /* moves */
257 #ifdef CONFIG_USER_ONLY
258 #define supervisor(dc) 0
259 #ifdef TARGET_SPARC64
260 #define hypervisor(dc) 0
261 #endif
262 #else
263 #ifdef TARGET_SPARC64
264 #define hypervisor(dc) (dc->hypervisor)
265 #define supervisor(dc) (dc->supervisor | dc->hypervisor)
266 #else
267 #define supervisor(dc) (dc->supervisor)
268 #endif
269 #endif
270 
271 #ifdef TARGET_SPARC64
272 #ifndef TARGET_ABI32
273 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
274 #else
275 #define AM_CHECK(dc) (1)
276 #endif
277 #endif
278 
279 static void gen_address_mask(DisasContext *dc, TCGv addr)
280 {
281 #ifdef TARGET_SPARC64
282     if (AM_CHECK(dc))
283         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
284 #endif
285 }
286 
/* Return a TCGv holding general register REG for reading.
   %g0 (reg == 0) always reads as zero, so hand back a zeroed temp
   rather than a global. */
static TCGv gen_load_gpr(DisasContext *dc, int reg)
{
    if (reg > 0) {
        assert(reg < 32);
        return cpu_regs[reg];
    } else {
        TCGv t = tcg_temp_new();
        tcg_gen_movi_tl(t, 0);
        return t;
    }
}
298 
/* Store V into general register REG; writes to %g0 are discarded. */
static void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
{
    if (reg > 0) {
        assert(reg < 32);
        tcg_gen_mov_tl(cpu_regs[reg], v);
    }
}
306 
/* Return a TCGv to be written as destination REG.  For %g0 a scratch
   temp is returned so the result is computed but then dropped. */
static TCGv gen_dest_gpr(DisasContext *dc, int reg)
{
    if (reg > 0) {
        assert(reg < 32);
        return cpu_regs[reg];
    } else {
        return tcg_temp_new();
    }
}
316 
/* True if both PC and NPC may be reached with a direct TB link. */
static bool use_goto_tb(DisasContext *s, target_ulong pc, target_ulong npc)
{
    return translator_use_goto_tb(&s->base, pc) &&
           translator_use_goto_tb(&s->base, npc);
}
322 
/* End the TB, transferring control to (PC, NPC).  Uses a direct,
   patchable TB link when allowed, otherwise a TB-pointer lookup. */
static void gen_goto_tb(DisasContext *s, int tb_num,
                        target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(s->base.tb, tb_num);
    } else {
        /* jump to another page: we can use an indirect jump */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_lookup_and_goto_ptr();
    }
}
339 
// XXX suboptimal
/* Extract single PSR condition-code bits from the i32 SRC into the
   target-long REG as 0/1 values.  */

/* REG = negative (N) bit of SRC.  */
static void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}

/* REG = zero (Z) bit of SRC.  */
static void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}

/* REG = overflow (V) bit of SRC.  */
static void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}

/* REG = carry (C) bit of SRC.  */
static void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
364 
/* dst = src1 + src2, recording operands and result in the cc_* globals
   so the flags can be computed lazily later. */
static void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
372 
/* Return a new i32 temp holding the 32-bit carry out of the most recent
   add recorded in cpu_cc_dst/cpu_cc_src. */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    /* Compare only the low 32 bits of the 64-bit globals.  */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
393 
/* Return a new i32 temp holding the 32-bit borrow out of the most
   recent subtract recorded in cpu_cc_src/cpu_cc_src2. */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    /* Compare only the low 32 bits of the 64-bit globals.  */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
414 
/*
 * Generate ADDX/ADDXcc: dst = src1 + src2 + icc.C.  The cheapest way
 * to recover the carry depends on how the flags were last produced
 * (dc->cc_op).  If UPDATE_CC, also record operands/result in the cc_*
 * globals and switch the lazy-flags state to CC_OP_ADDX.
 */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        /* Carry is the borrow from a preceding subtract.  */
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, tcg_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    /* Widen the 32-bit carry to the 64-bit target-long width.  */
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
479 
/* dst = src1 - src2, recording operands and result in the cc_* globals
   so the flags can be computed lazily later. */
static void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
487 
/*
 * Generate SUBX/SUBXcc: dst = src1 - src2 - icc.C.  Mirrors
 * gen_op_addx_int: the carry recovery strategy is chosen from the
 * current lazy-flags state (dc->cc_op).  If UPDATE_CC, record
 * operands/result and switch to CC_OP_SUBX.
 */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        /* Carry is the carry-out of a preceding add.  */
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, tcg_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    /* Widen the 32-bit carry to the 64-bit target-long width.  */
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
552 
/*
 * Generate MULScc (multiply step): one iteration of the shift-and-add
 * multiply, using %y as the low half of the running product and the
 * N^V condition codes as the incoming sign bit.  Operands and result
 * are left in the cc_* globals for lazy flag computation.
 */
static void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_constant_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    /* Zero the addend when the low bit of %y is clear.  */
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
591 
/*
 * 32x32 -> 64-bit multiply (UMUL/SMUL): the low 32 bits of the
 * product go to DST (full 64 bits on sparc64) and the high 32 bits
 * to %y.  SIGN_EXT selects signed vs unsigned operand extension.
 */
static void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    /* A double-width multiply produces both halves at once.  */
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
616 
/* UMUL: unsigned 32x32 multiply, high half to %y. */
static void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

/* SMUL: signed 32x32 multiply, high half to %y. */
static void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
628 
/* Integer condition-code evaluators: each sets DST to 0 or 1 according
   to one SPARC branch condition, given the PSR-format flags in SRC.  */

// 1: 'ba', always taken
static void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z: 'be', equal
static void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V): 'ble', less or equal (signed)
static void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
}

// N ^ V: 'bl', less (signed)
static void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
}

// C | Z: 'bleu', less or equal (unsigned)
static void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
}

// C: 'bcs', carry set (less, unsigned)
static void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V: 'bvs', overflow set
static void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0: 'bn', never taken
static void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N: 'bneg', negative
static void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z: 'bne', not equal
static void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V)): 'bg', greater (signed)
static void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V): 'bge', greater or equal (signed)
static void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z): 'bgu', greater (unsigned)
static void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C: 'bcc', carry clear (greater or equal, unsigned)
static void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N: 'bpos', positive or zero
static void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V: 'bvc', overflow clear
static void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
742 
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered

  The FCC evaluators below set DST to 0/1 for each FP branch condition.
  FCC_OFFSET selects which of the four FCC fields in the FSR to test.
*/
/* DST = bit FCC0 of the selected FCC field in SRC (the FSR). */
static void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* DST = bit FCC1 of the selected FCC field in SRC (the FSR). */
static void gen_mov_reg_FCC1(TCGv reg, TCGv src, unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

// !0: FCC0 | FCC1
static void gen_op_eval_fbne(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
}

// 1 or 2: FCC0 ^ FCC1
static void gen_op_eval_fblg(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
}

// 1 or 3: FCC0
static void gen_op_eval_fbul(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static void gen_op_eval_fbl(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
}

// 2 or 3: FCC1
static void gen_op_eval_fbug(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static void gen_op_eval_fbg(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
}

// 3: FCC0 & FCC1
static void gen_op_eval_fbu(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
}

// 0: !(FCC0 | FCC1)
static void gen_op_eval_fbe(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static void gen_op_eval_fbue(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static void gen_op_eval_fbge(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static void gen_op_eval_fbuge(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static void gen_op_eval_fble(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static void gen_op_eval_fbule(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static void gen_op_eval_fbo(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
883 
/* End the TB with a two-way branch on R_COND: taken -> PC1, not
   taken -> PC2 (each with its sequential npc). */
static void gen_branch2(DisasContext *dc, target_ulong pc1,
                        target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
896 
/* Conditional branch with annul: when taken, execute the delay slot at
   npc then jump to PC1; when not taken, skip the delay slot entirely
   (continue at npc + 4).  Ends the TB. */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    /* Annulled: the delay slot instruction is not executed.  */
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->base.is_jmp = DISAS_NORETURN;
}
911 
/* Conditional branch without annul: the delay slot always executes.
   With a static npc, just record the two candidate targets (JUMP_PC);
   with a dynamic npc, select the new npc with a movcond now. */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (npc & 3) {
        /* npc is one of the symbolic DYNAMIC_* markers.  */
        switch (npc) {
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
            /* npc = taken ? pc1 : npc + 4 */
            tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc,
                               cpu_cond, tcg_constant_tl(0),
                               tcg_constant_tl(pc1), cpu_npc);
            dc->pc = npc;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        /* Defer the choice; gen_generic_branch resolves it later.  */
        dc->pc = npc;
        dc->jump_pc[0] = pc1;
        dc->jump_pc[1] = npc + 4;
        dc->npc = JUMP_PC;
    }
}
937 
/* Materialize a pending JUMP_PC: set cpu_npc to jump_pc[0] when the
   saved condition is true, else jump_pc[1]. */
static void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_constant_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_constant_tl(dc->jump_pc[1]);
    TCGv zero = tcg_constant_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
}
946 
/* call this function before using the condition register as it may
   have been set for a jump */
static void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        /* Resolve the pending two-way npc so cpu_cond is free.  */
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC_LOOKUP;
    }
}
956 
/* Make cpu_npc hold the architectural next PC.  A static npc is simply
   stored; JUMP_PC is first resolved; DYNAMIC_* already live in cpu_npc. */
static void save_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            gen_generic_branch(dc);
            dc->npc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            /* cpu_npc is already up to date.  */
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
975 
/* Force the lazily-evaluated condition codes into env->psr so the
   PSR value can be read directly. */
static void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(tcg_env);
    }
}
983 
/* Synchronize cpu_pc/cpu_npc with the translator's view, e.g. before
   a helper call that may raise an exception. */
static void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
989 
/* Raise exception WHICH at the current insn and terminate the TB. */
static void gen_exception(DisasContext *dc, int which)
{
    save_state(dc);
    gen_helper_raise_exception(tcg_env, tcg_constant_i32(which));
    dc->base.is_jmp = DISAS_NORETURN;
}
996 
997 static TCGLabel *delay_exceptionv(DisasContext *dc, TCGv_i32 excp)
998 {
999     DisasDelayException *e = g_new0(DisasDelayException, 1);
1000 
1001     e->next = dc->delay_excp_list;
1002     dc->delay_excp_list = e;
1003 
1004     e->lab = gen_new_label();
1005     e->excp = excp;
1006     e->pc = dc->pc;
1007     /* Caller must have used flush_cond before branch. */
1008     assert(e->npc != JUMP_PC);
1009     e->npc = dc->npc;
1010 
1011     return e->lab;
1012 }
1013 
/* Convenience wrapper: queue a delayed exception with a constant code. */
static TCGLabel *delay_exception(DisasContext *dc, int excp)
{
    return delay_exceptionv(dc, tcg_constant_i32(excp));
}
1018 
/* Emit a runtime alignment check: branch to a delayed TT_UNALIGNED
   exception if any of the MASK bits of ADDR are set. */
static void gen_check_align(DisasContext *dc, TCGv addr, int mask)
{
    TCGv t = tcg_temp_new();
    TCGLabel *lab;

    tcg_gen_andi_tl(t, addr, mask);

    /* flush_cond is required before delay_exception's branch.  */
    flush_cond(dc);
    lab = delay_exception(dc, TT_UNALIGNED);
    tcg_gen_brcondi_tl(TCG_COND_NE, t, 0, lab);
}
1030 
/* Advance pc to npc (for delay-slot execution), handling the symbolic
   JUMP_PC / DYNAMIC_* npc states as well as a plain static npc. */
static void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            /* Resolve the two-way npc, then copy it into pc.  */
            gen_generic_branch(dc);
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = dc->npc;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        /* Static npc: no code needs to be emitted.  */
        dc->pc = dc->npc;
    }
}
1052 
/* Emit the default sequential advance: pc = npc; npc += 4. */
static void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1058 
1059 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1060                         DisasContext *dc)
1061 {
1062     static int subcc_cond[16] = {
1063         TCG_COND_NEVER,
1064         TCG_COND_EQ,
1065         TCG_COND_LE,
1066         TCG_COND_LT,
1067         TCG_COND_LEU,
1068         TCG_COND_LTU,
1069         -1, /* neg */
1070         -1, /* overflow */
1071         TCG_COND_ALWAYS,
1072         TCG_COND_NE,
1073         TCG_COND_GT,
1074         TCG_COND_GE,
1075         TCG_COND_GTU,
1076         TCG_COND_GEU,
1077         -1, /* pos */
1078         -1, /* no overflow */
1079     };
1080 
1081     static int logic_cond[16] = {
1082         TCG_COND_NEVER,
1083         TCG_COND_EQ,     /* eq:  Z */
1084         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1085         TCG_COND_LT,     /* lt:  N ^ V -> N */
1086         TCG_COND_EQ,     /* leu: C | Z -> Z */
1087         TCG_COND_NEVER,  /* ltu: C -> 0 */
1088         TCG_COND_LT,     /* neg: N */
1089         TCG_COND_NEVER,  /* vs:  V -> 0 */
1090         TCG_COND_ALWAYS,
1091         TCG_COND_NE,     /* ne:  !Z */
1092         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1093         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1094         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1095         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1096         TCG_COND_GE,     /* pos: !N */
1097         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1098     };
1099 
1100     TCGv_i32 r_src;
1101     TCGv r_dst;
1102 
1103 #ifdef TARGET_SPARC64
1104     if (xcc) {
1105         r_src = cpu_xcc;
1106     } else {
1107         r_src = cpu_psr;
1108     }
1109 #else
1110     r_src = cpu_psr;
1111 #endif
1112 
1113     switch (dc->cc_op) {
1114     case CC_OP_LOGIC:
1115         cmp->cond = logic_cond[cond];
1116     do_compare_dst_0:
1117         cmp->is_bool = false;
1118         cmp->c2 = tcg_constant_tl(0);
1119 #ifdef TARGET_SPARC64
1120         if (!xcc) {
1121             cmp->c1 = tcg_temp_new();
1122             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1123             break;
1124         }
1125 #endif
1126         cmp->c1 = cpu_cc_dst;
1127         break;
1128 
1129     case CC_OP_SUB:
1130         switch (cond) {
1131         case 6:  /* neg */
1132         case 14: /* pos */
1133             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1134             goto do_compare_dst_0;
1135 
1136         case 7: /* overflow */
1137         case 15: /* !overflow */
1138             goto do_dynamic;
1139 
1140         default:
1141             cmp->cond = subcc_cond[cond];
1142             cmp->is_bool = false;
1143 #ifdef TARGET_SPARC64
1144             if (!xcc) {
1145                 /* Note that sign-extension works for unsigned compares as
1146                    long as both operands are sign-extended.  */
1147                 cmp->c1 = tcg_temp_new();
1148                 cmp->c2 = tcg_temp_new();
1149                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1150                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1151                 break;
1152             }
1153 #endif
1154             cmp->c1 = cpu_cc_src;
1155             cmp->c2 = cpu_cc_src2;
1156             break;
1157         }
1158         break;
1159 
1160     default:
1161     do_dynamic:
1162         gen_helper_compute_psr(tcg_env);
1163         dc->cc_op = CC_OP_FLAGS;
1164         /* FALLTHRU */
1165 
1166     case CC_OP_FLAGS:
1167         /* We're going to generate a boolean result.  */
1168         cmp->cond = TCG_COND_NE;
1169         cmp->is_bool = true;
1170         cmp->c1 = r_dst = tcg_temp_new();
1171         cmp->c2 = tcg_constant_tl(0);
1172 
1173         switch (cond) {
1174         case 0x0:
1175             gen_op_eval_bn(r_dst);
1176             break;
1177         case 0x1:
1178             gen_op_eval_be(r_dst, r_src);
1179             break;
1180         case 0x2:
1181             gen_op_eval_ble(r_dst, r_src);
1182             break;
1183         case 0x3:
1184             gen_op_eval_bl(r_dst, r_src);
1185             break;
1186         case 0x4:
1187             gen_op_eval_bleu(r_dst, r_src);
1188             break;
1189         case 0x5:
1190             gen_op_eval_bcs(r_dst, r_src);
1191             break;
1192         case 0x6:
1193             gen_op_eval_bneg(r_dst, r_src);
1194             break;
1195         case 0x7:
1196             gen_op_eval_bvs(r_dst, r_src);
1197             break;
1198         case 0x8:
1199             gen_op_eval_ba(r_dst);
1200             break;
1201         case 0x9:
1202             gen_op_eval_bne(r_dst, r_src);
1203             break;
1204         case 0xa:
1205             gen_op_eval_bg(r_dst, r_src);
1206             break;
1207         case 0xb:
1208             gen_op_eval_bge(r_dst, r_src);
1209             break;
1210         case 0xc:
1211             gen_op_eval_bgu(r_dst, r_src);
1212             break;
1213         case 0xd:
1214             gen_op_eval_bcc(r_dst, r_src);
1215             break;
1216         case 0xe:
1217             gen_op_eval_bpos(r_dst, r_src);
1218             break;
1219         case 0xf:
1220             gen_op_eval_bvc(r_dst, r_src);
1221             break;
1222         }
1223         break;
1224     }
1225 }
1226 
/*
 * Fill in *cmp with a comparison equivalent to the 4-bit FP condition
 * 'cond' evaluated on FSR field %fccN, where N = cc.
 */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_constant_tl(0);

    /* Bit offset of the selected fcc field relative to fcc0 (bit 10),
       as consumed by the gen_op_eval_fb* helpers below.  fcc1..3 live
       at bits 32, 34 and 36 of the v9 FSR.  */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1305 
1306 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1307                      DisasContext *dc)
1308 {
1309     DisasCompare cmp;
1310     gen_compare(&cmp, cc, cond, dc);
1311 
1312     /* The interface is to return a boolean in r_dst.  */
1313     if (cmp.is_bool) {
1314         tcg_gen_mov_tl(r_dst, cmp.c1);
1315     } else {
1316         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1317     }
1318 }
1319 
1320 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1321 {
1322     DisasCompare cmp;
1323     gen_fcompare(&cmp, cc, cond);
1324 
1325     /* The interface is to return a boolean in r_dst.  */
1326     if (cmp.is_bool) {
1327         tcg_gen_mov_tl(r_dst, cmp.c1);
1328     } else {
1329         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1330     }
1331 }
1332 
1333 #ifdef TARGET_SPARC64
/*
 * TCG conditions for the v9 register-contents conditions (rcond),
 * comparing a register against zero.  Note the table is stored
 * INVERTED; gen_compare_reg() applies tcg_invert_cond() to undo this.
 * Entries 0 and 4 are reserved encodings.
 */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1345 
1346 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1347 {
1348     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1349     cmp->is_bool = false;
1350     cmp->c1 = r_src;
1351     cmp->c2 = tcg_constant_tl(0);
1352 }
1353 
1354 static void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1355 {
1356     DisasCompare cmp;
1357     gen_compare_reg(&cmp, cond, r_src);
1358 
1359     /* The interface is to return a boolean in r_dst.  */
1360     tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1361 }
1362 #endif
1363 
/*
 * Translate an integer conditional branch (Bicc/BPcc).
 * offset is the byte displacement from the branch insn (target is
 * dc->pc + offset); cc selects icc vs xcc; bit 29 of insn is the
 * annul bit, which skips the delay slot for untaken (and for
 * unconditional-taken) branches.
 */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* With the address mask (AM) active, targets wrap to 32 bits. */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: the delay slot is skipped too. */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled: skip the delay slot, go straight to the target. */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* True conditional: evaluate into cpu_cond, then branch. */
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1403 
/*
 * Translate a floating-point conditional branch (FBfcc/FBPfcc).
 * Same structure as do_branch() above, but the condition is taken
 * from FSR field %fccN (N = cc) via gen_fcond().
 */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* With the address mask (AM) active, targets wrap to 32 bits. */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: the delay slot is skipped too. */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled: skip the delay slot, go straight to the target. */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* True conditional: evaluate into cpu_cond, then branch. */
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1443 
1444 #ifdef TARGET_SPARC64
/*
 * Translate a v9 branch on register contents (BPr).  These are always
 * conditional (compare r_reg against zero), so there are no
 * unconditional shortcuts as in do_branch().
 */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    /* With the address mask (AM) active, targets wrap to 32 bits. */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target);
    } else {
        gen_branch_n(dc, target);
    }
}
1462 
1463 static void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1464 {
1465     switch (fccno) {
1466     case 0:
1467         gen_helper_fcmps(cpu_fsr, tcg_env, r_rs1, r_rs2);
1468         break;
1469     case 1:
1470         gen_helper_fcmps_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1471         break;
1472     case 2:
1473         gen_helper_fcmps_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1474         break;
1475     case 3:
1476         gen_helper_fcmps_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1477         break;
1478     }
1479 }
1480 
1481 static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1482 {
1483     switch (fccno) {
1484     case 0:
1485         gen_helper_fcmpd(cpu_fsr, tcg_env, r_rs1, r_rs2);
1486         break;
1487     case 1:
1488         gen_helper_fcmpd_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1489         break;
1490     case 2:
1491         gen_helper_fcmpd_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1492         break;
1493     case 3:
1494         gen_helper_fcmpd_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1495         break;
1496     }
1497 }
1498 
1499 static void gen_op_fcmpq(int fccno)
1500 {
1501     switch (fccno) {
1502     case 0:
1503         gen_helper_fcmpq(cpu_fsr, tcg_env);
1504         break;
1505     case 1:
1506         gen_helper_fcmpq_fcc1(cpu_fsr, tcg_env);
1507         break;
1508     case 2:
1509         gen_helper_fcmpq_fcc2(cpu_fsr, tcg_env);
1510         break;
1511     case 3:
1512         gen_helper_fcmpq_fcc3(cpu_fsr, tcg_env);
1513         break;
1514     }
1515 }
1516 
1517 static void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1518 {
1519     switch (fccno) {
1520     case 0:
1521         gen_helper_fcmpes(cpu_fsr, tcg_env, r_rs1, r_rs2);
1522         break;
1523     case 1:
1524         gen_helper_fcmpes_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1525         break;
1526     case 2:
1527         gen_helper_fcmpes_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1528         break;
1529     case 3:
1530         gen_helper_fcmpes_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1531         break;
1532     }
1533 }
1534 
1535 static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1536 {
1537     switch (fccno) {
1538     case 0:
1539         gen_helper_fcmped(cpu_fsr, tcg_env, r_rs1, r_rs2);
1540         break;
1541     case 1:
1542         gen_helper_fcmped_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1543         break;
1544     case 2:
1545         gen_helper_fcmped_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1546         break;
1547     case 3:
1548         gen_helper_fcmped_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1549         break;
1550     }
1551 }
1552 
1553 static void gen_op_fcmpeq(int fccno)
1554 {
1555     switch (fccno) {
1556     case 0:
1557         gen_helper_fcmpeq(cpu_fsr, tcg_env);
1558         break;
1559     case 1:
1560         gen_helper_fcmpeq_fcc1(cpu_fsr, tcg_env);
1561         break;
1562     case 2:
1563         gen_helper_fcmpeq_fcc2(cpu_fsr, tcg_env);
1564         break;
1565     case 3:
1566         gen_helper_fcmpeq_fcc3(cpu_fsr, tcg_env);
1567         break;
1568     }
1569 }
1570 
1571 #else
1572 
/* Pre-v9: only one fcc field exists, so fccno is ignored. */
static void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1577 
/* Pre-v9: only one fcc field exists, so fccno is ignored. */
static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1582 
/* Pre-v9 quad compare; operands travel through QT0/QT1, fccno ignored. */
static void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, tcg_env);
}
1587 
/* Pre-v9: only one fcc field exists, so fccno is ignored. */
static void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1592 
/* Pre-v9: only one fcc field exists, so fccno is ignored. */
static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1597 
/* Pre-v9 quad compare; operands travel through QT0/QT1, fccno ignored. */
static void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, tcg_env);
}
1602 #endif
1603 
/*
 * Raise a TT_FP_EXCP trap, first replacing the FSR.ftt field
 * (cleared via FSR_FTT_NMASK) with fsr_flags, the trap-type reason.
 */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1610 
1611 static int gen_trap_ifnofpu(DisasContext *dc)
1612 {
1613 #if !defined(CONFIG_USER_ONLY)
1614     if (!dc->fpu_enabled) {
1615         gen_exception(dc, TT_NFPU_INSN);
1616         return 1;
1617     }
1618 #endif
1619     return 0;
1620 }
1621 
/* Clear the FSR.cexc and FSR.ftt fields before a new FP operation. */
static void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1626 
1627 static void gen_fop_FF(DisasContext *dc, int rd, int rs,
1628                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1629 {
1630     TCGv_i32 dst, src;
1631 
1632     src = gen_load_fpr_F(dc, rs);
1633     dst = gen_dest_fpr_F(dc);
1634 
1635     gen(dst, tcg_env, src);
1636     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1637 
1638     gen_store_fpr_F(dc, rd, dst);
1639 }
1640 
1641 static void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1642                           void (*gen)(TCGv_i32, TCGv_i32))
1643 {
1644     TCGv_i32 dst, src;
1645 
1646     src = gen_load_fpr_F(dc, rs);
1647     dst = gen_dest_fpr_F(dc);
1648 
1649     gen(dst, src);
1650 
1651     gen_store_fpr_F(dc, rd, dst);
1652 }
1653 
1654 static void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1655                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1656 {
1657     TCGv_i32 dst, src1, src2;
1658 
1659     src1 = gen_load_fpr_F(dc, rs1);
1660     src2 = gen_load_fpr_F(dc, rs2);
1661     dst = gen_dest_fpr_F(dc);
1662 
1663     gen(dst, tcg_env, src1, src2);
1664     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1665 
1666     gen_store_fpr_F(dc, rd, dst);
1667 }
1668 
1669 #ifdef TARGET_SPARC64
1670 static void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1671                            void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1672 {
1673     TCGv_i32 dst, src1, src2;
1674 
1675     src1 = gen_load_fpr_F(dc, rs1);
1676     src2 = gen_load_fpr_F(dc, rs2);
1677     dst = gen_dest_fpr_F(dc);
1678 
1679     gen(dst, src1, src2);
1680 
1681     gen_store_fpr_F(dc, rd, dst);
1682 }
1683 #endif
1684 
1685 static void gen_fop_DD(DisasContext *dc, int rd, int rs,
1686                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1687 {
1688     TCGv_i64 dst, src;
1689 
1690     src = gen_load_fpr_D(dc, rs);
1691     dst = gen_dest_fpr_D(dc, rd);
1692 
1693     gen(dst, tcg_env, src);
1694     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1695 
1696     gen_store_fpr_D(dc, rd, dst);
1697 }
1698 
1699 #ifdef TARGET_SPARC64
1700 static void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1701                           void (*gen)(TCGv_i64, TCGv_i64))
1702 {
1703     TCGv_i64 dst, src;
1704 
1705     src = gen_load_fpr_D(dc, rs);
1706     dst = gen_dest_fpr_D(dc, rd);
1707 
1708     gen(dst, src);
1709 
1710     gen_store_fpr_D(dc, rd, dst);
1711 }
1712 #endif
1713 
1714 static void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1715                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1716 {
1717     TCGv_i64 dst, src1, src2;
1718 
1719     src1 = gen_load_fpr_D(dc, rs1);
1720     src2 = gen_load_fpr_D(dc, rs2);
1721     dst = gen_dest_fpr_D(dc, rd);
1722 
1723     gen(dst, tcg_env, src1, src2);
1724     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1725 
1726     gen_store_fpr_D(dc, rd, dst);
1727 }
1728 
1729 #ifdef TARGET_SPARC64
1730 static void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1731                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1732 {
1733     TCGv_i64 dst, src1, src2;
1734 
1735     src1 = gen_load_fpr_D(dc, rs1);
1736     src2 = gen_load_fpr_D(dc, rs2);
1737     dst = gen_dest_fpr_D(dc, rd);
1738 
1739     gen(dst, src1, src2);
1740 
1741     gen_store_fpr_D(dc, rd, dst);
1742 }
1743 
1744 static void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1745                             void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1746 {
1747     TCGv_i64 dst, src1, src2;
1748 
1749     src1 = gen_load_fpr_D(dc, rs1);
1750     src2 = gen_load_fpr_D(dc, rs2);
1751     dst = gen_dest_fpr_D(dc, rd);
1752 
1753     gen(dst, cpu_gsr, src1, src2);
1754 
1755     gen_store_fpr_D(dc, rd, dst);
1756 }
1757 
1758 static void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1759                             void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1760 {
1761     TCGv_i64 dst, src0, src1, src2;
1762 
1763     src1 = gen_load_fpr_D(dc, rs1);
1764     src2 = gen_load_fpr_D(dc, rs2);
1765     src0 = gen_load_fpr_D(dc, rd);
1766     dst = gen_dest_fpr_D(dc, rd);
1767 
1768     gen(dst, src0, src1, src2);
1769 
1770     gen_store_fpr_D(dc, rd, dst);
1771 }
1772 #endif
1773 
/*
 * Quad-precision unary op.  Quad values travel through the fixed
 * QT0/QT1 temporaries: QT1 <- rs, QT0 <- gen(QT1), rd <- QT0, with
 * an IEEE exception check in between.
 */
static void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1785 
1786 #ifdef TARGET_SPARC64
/* Quad-precision unary op via QT0/QT1, without the IEEE exception
   check ("ne" = non-excepting).  */
static void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1797 #endif
1798 
/*
 * Quad-precision binary op via the fixed temporaries:
 * QT0 <- rs1, QT1 <- rs2, QT0 <- gen(QT0, QT1), rd <- QT0,
 * with an IEEE exception check.
 */
static void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1811 
1812 static void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1813                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1814 {
1815     TCGv_i64 dst;
1816     TCGv_i32 src1, src2;
1817 
1818     src1 = gen_load_fpr_F(dc, rs1);
1819     src2 = gen_load_fpr_F(dc, rs2);
1820     dst = gen_dest_fpr_D(dc, rd);
1821 
1822     gen(dst, tcg_env, src1, src2);
1823     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1824 
1825     gen_store_fpr_D(dc, rd, dst);
1826 }
1827 
1828 static void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1829                         void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1830 {
1831     TCGv_i64 src1, src2;
1832 
1833     src1 = gen_load_fpr_D(dc, rs1);
1834     src2 = gen_load_fpr_D(dc, rs2);
1835 
1836     gen(tcg_env, src1, src2);
1837     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1838 
1839     gen_op_store_QT0_fpr(QFPREG(rd));
1840     gen_update_fprs_dirty(dc, QFPREG(rd));
1841 }
1842 
1843 #ifdef TARGET_SPARC64
1844 static void gen_fop_DF(DisasContext *dc, int rd, int rs,
1845                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1846 {
1847     TCGv_i64 dst;
1848     TCGv_i32 src;
1849 
1850     src = gen_load_fpr_F(dc, rs);
1851     dst = gen_dest_fpr_D(dc, rd);
1852 
1853     gen(dst, tcg_env, src);
1854     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1855 
1856     gen_store_fpr_D(dc, rd, dst);
1857 }
1858 #endif
1859 
1860 static void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1861                           void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1862 {
1863     TCGv_i64 dst;
1864     TCGv_i32 src;
1865 
1866     src = gen_load_fpr_F(dc, rs);
1867     dst = gen_dest_fpr_D(dc, rd);
1868 
1869     gen(dst, tcg_env, src);
1870 
1871     gen_store_fpr_D(dc, rd, dst);
1872 }
1873 
1874 static void gen_fop_FD(DisasContext *dc, int rd, int rs,
1875                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1876 {
1877     TCGv_i32 dst;
1878     TCGv_i64 src;
1879 
1880     src = gen_load_fpr_D(dc, rs);
1881     dst = gen_dest_fpr_F(dc);
1882 
1883     gen(dst, tcg_env, src);
1884     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1885 
1886     gen_store_fpr_F(dc, rd, dst);
1887 }
1888 
/* Quad source (via QT1), single-precision result:
   rd(F) = gen(env), with IEEE exception check.  */
static void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_F(dc, rd, dst);
}
1902 
/* Quad source (via QT1), double-precision result:
   rd(D) = gen(env), with IEEE exception check.  */
static void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_D(dc, rd, dst);
}
1916 
1917 static void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1918                           void (*gen)(TCGv_ptr, TCGv_i32))
1919 {
1920     TCGv_i32 src;
1921 
1922     src = gen_load_fpr_F(dc, rs);
1923 
1924     gen(tcg_env, src);
1925 
1926     gen_op_store_QT0_fpr(QFPREG(rd));
1927     gen_update_fprs_dirty(dc, QFPREG(rd));
1928 }
1929 
1930 static void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1931                           void (*gen)(TCGv_ptr, TCGv_i64))
1932 {
1933     TCGv_i64 src;
1934 
1935     src = gen_load_fpr_D(dc, rs);
1936 
1937     gen(tcg_env, src);
1938 
1939     gen_op_store_QT0_fpr(QFPREG(rd));
1940     gen_update_fprs_dirty(dc, QFPREG(rd));
1941 }
1942 
/*
 * SWAP: atomically exchange the aligned word at *addr with src,
 * returning the old memory value in dst.
 */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, MemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop | MO_ALIGN);
}
1949 
1950 static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
1951 {
1952     TCGv m1 = tcg_constant_tl(0xff);
1953     gen_address_mask(dc, addr);
1954     tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
1955 }
1956 
1957 /* asi moves */
1958 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* How an ASI memory access is to be implemented by the translator. */
typedef enum {
    GET_ASI_HELPER,  /* fall back to the out-of-line asi helpers */
    GET_ASI_EXCP,    /* an exception was raised; emit no access */
    GET_ASI_DIRECT,  /* plain direct memory access */
    GET_ASI_DTWINX,  /* twin/quad doubleword load-store */
    GET_ASI_BLOCK,   /* block transfer access */
    GET_ASI_SHORT,   /* 8/16-bit FP load/store (FL8/FL16 asis) */
    GET_ASI_BCOPY,   /* sparc32 block copy (sta) */
    GET_ASI_BFILL,   /* sparc32 block fill (stda) */
} ASIType;
1969 
/* Decoded ASI access descriptor, produced by get_asi(). */
typedef struct {
    ASIType type;   /* how to implement the access */
    int asi;        /* resolved ASI number */
    int mem_idx;    /* MMU index to use for the access */
    MemOp memop;    /* size/sign/endianness of the access */
} DisasASI;
1976 
/*
 * Decode the ASI of a load/store insn into a DisasASI descriptor:
 * the implementation strategy, the resolved ASI number, the MMU index
 * to access with, and the (possibly byte-swapped) memory operation.
 * If an exception is raised during decode, type is GET_ASI_EXCP and
 * the caller must emit no access.
 */
static DisasASI get_asi(DisasContext *dc, int insn, MemOp memop)
{
    int asi = GET_FIELD(insn, 19, 26);
    ASIType type = GET_ASI_HELPER;
    int mem_idx = dc->mem_idx;

#ifndef TARGET_SPARC64
    /* Before v9, all asis are immediate and privileged.  */
    if (IS_IMM) {
        gen_exception(dc, TT_ILL_INSN);
        type = GET_ASI_EXCP;
    } else if (supervisor(dc)
               /* Note that LEON accepts ASI_USERDATA in user mode, for
                  use with CASA.  Also note that previous versions of
                  QEMU allowed (and old versions of gcc emitted) ASI_P
                  for LEON, which is incorrect.  */
               || (asi == ASI_USERDATA
                   && (dc->def->features & CPU_FEATURE_CASA))) {
        switch (asi) {
        case ASI_USERDATA:   /* User data access */
            mem_idx = MMU_USER_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_KERNELDATA: /* Supervisor data access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BYPASS:    /* MMU passthrough */
        case ASI_LEON_BYPASS: /* LEON MMU passthrough */
            mem_idx = MMU_PHYS_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BCOPY: /* Block copy, sta access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BCOPY;
            break;
        case ASI_M_BFILL: /* Block fill, stda access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BFILL;
            break;
        }

        /* MMU_PHYS_IDX is used when the MMU is disabled to passthrough the
         * permissions check in get_physical_address(..).
         */
        mem_idx = (dc->mem_idx == MMU_PHYS_IDX) ? MMU_PHYS_IDX : mem_idx;
    } else {
        gen_exception(dc, TT_PRIV_INSN);
        type = GET_ASI_EXCP;
    }
#else
    if (IS_IMM) {
        /* Register-form ASI: use the value saved in %asi at decode time. */
        asi = dc->asi;
    }
    /* With v9, all asis below 0x80 are privileged.  */
    /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
       down that bit into DisasContext.  For the moment that's ok,
       since the direct implementations below doesn't have any ASIs
       in the restricted [0x30, 0x7f] range, and the check will be
       done properly in the helper.  */
    if (!supervisor(dc) && asi < 0x80) {
        gen_exception(dc, TT_PRIV_ACT);
        type = GET_ASI_EXCP;
    } else {
        /* First switch: choose the MMU index from the ASI.  */
        switch (asi) {
        case ASI_REAL:      /* Bypass */
        case ASI_REAL_IO:   /* Bypass, non-cacheable */
        case ASI_REAL_L:    /* Bypass LE */
        case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
        case ASI_TWINX_REAL:   /* Real address, twinx */
        case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
            mem_idx = MMU_PHYS_IDX;
            break;
        case ASI_N:  /* Nucleus */
        case ASI_NL: /* Nucleus LE */
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            if (hypervisor(dc)) {
                mem_idx = MMU_PHYS_IDX;
            } else {
                mem_idx = MMU_NUCLEUS_IDX;
            }
            break;
        case ASI_AIUP:  /* As if user primary */
        case ASI_AIUPL: /* As if user primary LE */
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
            mem_idx = MMU_USER_IDX;
            break;
        case ASI_AIUS:  /* As if user secondary */
        case ASI_AIUSL: /* As if user secondary LE */
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
            mem_idx = MMU_USER_SECONDARY_IDX;
            break;
        case ASI_S:  /* Secondary */
        case ASI_SL: /* Secondary LE */
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL16_S:
        case ASI_FL16_SL:
            if (mem_idx == MMU_USER_IDX) {
                mem_idx = MMU_USER_SECONDARY_IDX;
            } else if (mem_idx == MMU_KERNEL_IDX) {
                mem_idx = MMU_KERNEL_SECONDARY_IDX;
            }
            break;
        case ASI_P:  /* Primary */
        case ASI_PL: /* Primary LE */
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_P:
        case ASI_BLK_PL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            break;
        }
        /* Second switch: choose the implementation strategy.  */
        switch (asi) {
        case ASI_REAL:
        case ASI_REAL_IO:
        case ASI_REAL_L:
        case ASI_REAL_IO_L:
        case ASI_N:
        case ASI_NL:
        case ASI_AIUP:
        case ASI_AIUPL:
        case ASI_AIUS:
        case ASI_AIUSL:
        case ASI_S:
        case ASI_SL:
        case ASI_P:
        case ASI_PL:
            type = GET_ASI_DIRECT;
            break;
        case ASI_TWINX_REAL:
        case ASI_TWINX_REAL_L:
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            type = GET_ASI_DTWINX;
            break;
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_BLK_P:
        case ASI_BLK_PL:
            type = GET_ASI_BLOCK;
            break;
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
            memop = MO_UB;
            type = GET_ASI_SHORT;
            break;
        case ASI_FL16_S:
        case ASI_FL16_SL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            memop = MO_TEUW;
            type = GET_ASI_SHORT;
            break;
        }
        /* The little-endian asis all have bit 3 set.  */
        if (asi & 8) {
            memop ^= MO_BSWAP;
        }
    }
#endif

    return (DisasASI){ type, asi, mem_idx, memop };
}
2189 
/*
 * Emit code for a load-alternate instruction: load a value of MEMOP
 * size from ADDR into DST, using the ASI encoded in INSN.
 */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* ASI decode failed; nothing further to emit here.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        /* The ASI maps onto an ordinary MMU index; use an inline load.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        /* All other ASIs go through the out-of-line helper.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, tcg_env, addr, r_asi, r_mop);
#else
            {
                /* The helper returns 64 bits; narrow to the 32-bit target.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
            }
#endif
        }
        break;
    }
}
2224 
/*
 * Emit code for a store-alternate instruction: store SRC (of MEMOP size)
 * to ADDR, using the ASI encoded in INSN.
 */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* ASI decode failed; nothing further to emit here.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
#endif
        /* fall through */
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_constant_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }
        }
        break;
#endif
    default:
        /* All other ASIs go through the out-of-line helper.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(tcg_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes a 64-bit value; widen the 32-bit source.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
            }
#endif

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2300 
2301 static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2302                          TCGv addr, int insn)
2303 {
2304     DisasASI da = get_asi(dc, insn, MO_TEUL);
2305 
2306     switch (da.type) {
2307     case GET_ASI_EXCP:
2308         break;
2309     case GET_ASI_DIRECT:
2310         gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2311         break;
2312     default:
2313         /* ??? Should be DAE_invalid_asi.  */
2314         gen_exception(dc, TT_DATA_ACCESS);
2315         break;
2316     }
2317 }
2318 
2319 static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2320                         int insn, int rd)
2321 {
2322     DisasASI da = get_asi(dc, insn, MO_TEUL);
2323     TCGv oldv;
2324 
2325     switch (da.type) {
2326     case GET_ASI_EXCP:
2327         return;
2328     case GET_ASI_DIRECT:
2329         oldv = tcg_temp_new();
2330         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2331                                   da.mem_idx, da.memop | MO_ALIGN);
2332         gen_store_gpr(dc, rd, oldv);
2333         break;
2334     default:
2335         /* ??? Should be DAE_invalid_asi.  */
2336         gen_exception(dc, TT_DATA_ACCESS);
2337         break;
2338     }
2339 }
2340 
/*
 * Emit code for LDSTUBA: atomically load the byte at ADDR into DST and
 * store 0xff back, for the ASI encoded in INSN.
 */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* ASI decode failed; nothing further to emit here.  */
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->base.tb) & CF_PARALLEL) {
            /* Helper-based load+store is not atomic; punt to the
               exclusive slow path when running multi-threaded.  */
            gen_helper_exit_atomic(tcg_env);
        } else {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UB);
            TCGv_i64 s64, t64;

            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);

            s64 = tcg_constant_i64(0xff);
            gen_helper_st_asi(tcg_env, addr, s64, r_asi, r_mop);

            tcg_gen_trunc_i64_tl(dst, t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2376 #endif
2377 
2378 #ifdef TARGET_SPARC64
/*
 * Emit code for a floating-point load-alternate (ldfa/lddfa/ldqfa):
 * load SIZE bytes (4, 8 or 16) from ADDR into FP register RD, using
 * the ASI encoded in INSN.
 */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* ASI decode failed; nothing further to emit here.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Load into a temp first so the register pair is only
               modified after both loads have succeeded.  */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop | MO_ALIGN);

            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, tcg_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], tcg_env, addr, r_asi, r_mop);
                break;
            case 16:
                /* As in the direct case, defer the first writeback
                   until both halves have been loaded.  */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, tcg_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], tcg_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                break;
            default:
                g_assert_not_reached();
            }
        }
        break;
    }
}
2487 
/*
 * Emit code for a floating-point store-alternate (stfa/stdfa/stqfa):
 * store SIZE bytes (4, 8 or 16) from FP register RD to ADDR, using
 * the ASI encoded in INSN.
 */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* ASI decode failed; nothing further to emit here.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2570 
/*
 * Emit code for LDDA (64-bit target): load a doubleword from ADDR into
 * the register pair RD (high word) / RD+1 (low word), or a 128-bit
 * twin-extended load for the TWINX ASIs.
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* ASI decode failed; nothing further to emit here.  */
        return;

    case GET_ASI_DTWINX:
        /* Two full 64-bit loads; only the first checks 16-byte alignment.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop | MO_ALIGN);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            save_state(dc);
            gen_helper_ld_asi(tmp, tcg_env, addr, r_asi, r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2632 
/*
 * Emit code for STDA (64-bit target): store the register pair
 * RD (in HI) / RD+1 as a doubleword to ADDR, or a 128-bit
 * twin-extended store for the TWINX ASIs.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* ASI decode failed; nothing further to emit here.  */
        break;

    case GET_ASI_DTWINX:
        /* Two full 64-bit stores; only the first checks 16-byte alignment.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            save_state(dc);
            gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2688 
2689 static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2690                          int insn, int rd)
2691 {
2692     DisasASI da = get_asi(dc, insn, MO_TEUQ);
2693     TCGv oldv;
2694 
2695     switch (da.type) {
2696     case GET_ASI_EXCP:
2697         return;
2698     case GET_ASI_DIRECT:
2699         oldv = tcg_temp_new();
2700         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2701                                   da.mem_idx, da.memop | MO_ALIGN);
2702         gen_store_gpr(dc, rd, oldv);
2703         break;
2704     default:
2705         /* ??? Should be DAE_invalid_asi.  */
2706         gen_exception(dc, TT_DATA_ACCESS);
2707         break;
2708     }
2709 }
2710 
2711 #elif !defined(CONFIG_USER_ONLY)
/*
 * Emit code for LDDA (32-bit target): load a 64-bit value from ADDR
 * into the even/odd register pair RD (high word) / RD+1 (low word).
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEUQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* ASI decode failed; nothing further to emit here.  */
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        /* Unusual ASIs go through the out-of-line helper.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            save_state(dc);
            gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);
        }
        break;
    }

    /* Split the 64-bit result across the register pair.  */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2745 
/*
 * Emit code for STDA (32-bit target): store the register pair
 * RD (in HI) / RD+1 as a 64-bit value to ADDR.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    /* Combine the register pair into one 64-bit value.  */
    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* ASI decode failed; nothing further to emit here.  */
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_constant_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }
        }
        break;
    default:
        /* Unusual ASIs go through the out-of-line helper.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            save_state(dc);
            gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2791 #endif
2792 
2793 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2794 {
2795     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2796     return gen_load_gpr(dc, rs1);
2797 }
2798 
2799 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2800 {
2801     if (IS_IMM) { /* immediate */
2802         target_long simm = GET_FIELDs(insn, 19, 31);
2803         TCGv t = tcg_temp_new();
2804         tcg_gen_movi_tl(t, simm);
2805         return t;
2806     } else {      /* register */
2807         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2808         return gen_load_gpr(dc, rs2);
2809     }
2810 }
2811 
2812 #ifdef TARGET_SPARC64
2813 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2814 {
2815     TCGv_i32 c32, zero, dst, s1, s2;
2816 
2817     /* We have two choices here: extend the 32 bit data and use movcond_i64,
2818        or fold the comparison down to 32 bits and use movcond_i32.  Choose
2819        the later.  */
2820     c32 = tcg_temp_new_i32();
2821     if (cmp->is_bool) {
2822         tcg_gen_extrl_i64_i32(c32, cmp->c1);
2823     } else {
2824         TCGv_i64 c64 = tcg_temp_new_i64();
2825         tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2826         tcg_gen_extrl_i64_i32(c32, c64);
2827     }
2828 
2829     s1 = gen_load_fpr_F(dc, rs);
2830     s2 = gen_load_fpr_F(dc, rd);
2831     dst = gen_dest_fpr_F(dc);
2832     zero = tcg_constant_i32(0);
2833 
2834     tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2835 
2836     gen_store_fpr_F(dc, rd, dst);
2837 }
2838 
2839 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2840 {
2841     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2842     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2843                         gen_load_fpr_D(dc, rs),
2844                         gen_load_fpr_D(dc, rd));
2845     gen_store_fpr_D(dc, rd, dst);
2846 }
2847 
2848 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2849 {
2850     int qd = QFPREG(rd);
2851     int qs = QFPREG(rs);
2852 
2853     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2854                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2855     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2856                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2857 
2858     gen_update_fprs_dirty(dc, qd);
2859 }
2860 
2861 #ifndef CONFIG_USER_ONLY
/* Compute a pointer to the trap state for the current trap level:
   R_TSPTR = &env->ts[env->tl & MAXTL_MASK].  */
static void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env tcg_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, tcg_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, tcg_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        /* Widen the 32-bit offset to pointer width before the add.  */
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
    }
}
2883 #endif
2884 
/*
 * Emit code for the VIS EDGE8/16/32 instructions (LEFT selects the
 * *L variants): compute the partial-store edge mask for WIDTH-bit
 * elements into DST.  If CC is set, also set the condition codes as
 * for SUBcc of S1 - S2.  Note that S1 and S2 are clobbered (they are
 * masked by the address mask below).
 */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    /* lo1 = left-edge mask for s1, lo2 = right-edge mask for s2,
       via the table lookup described above.  */
    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    tcg_gen_shr_tl(lo1, tcg_constant_tl(tabl), lo1);
    tcg_gen_shr_tl(lo2, tcg_constant_tl(tabr), lo2);
    tcg_gen_andi_tl(lo1, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Mask both addresses down to their aligned doubleword (and, in
       32-bit address mode, to 32 bits) before comparing them.  */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* Compute dst = (s1 == s2 ? lo1 : lo1 & lo2). */
    tcg_gen_and_tl(lo2, lo2, lo1);
    tcg_gen_movcond_tl(TCG_COND_EQ, dst, s1, s2, lo1, lo2);
}
2971 
2972 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2973 {
2974     TCGv tmp = tcg_temp_new();
2975 
2976     tcg_gen_add_tl(tmp, s1, s2);
2977     tcg_gen_andi_tl(dst, tmp, -8);
2978     if (left) {
2979         tcg_gen_neg_tl(tmp, tmp);
2980     }
2981     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2982 }
2983 
/* Implement FALIGNDATA: DST = the 8-byte window starting GSR.align
   bytes into the 16-byte concatenation of S1 and S2.  */
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
{
    TCGv t1, t2, shift;

    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    shift = tcg_temp_new();

    /* shift = GSR.align * 8 (bit count).  */
    tcg_gen_andi_tl(shift, gsr, 7);
    tcg_gen_shli_tl(shift, shift, 3);
    tcg_gen_shl_tl(t1, s1, shift);

    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
       shift of (up to 63) followed by a constant shift of 1.  */
    tcg_gen_xori_tl(shift, shift, 63);
    tcg_gen_shr_tl(t2, s2, shift);
    tcg_gen_shri_tl(t2, t2, 1);

    tcg_gen_or_tl(dst, t1, t2);
}
3004 #endif
3005 
/* Branch to illegal_insn (label in disas_sparc_insn) unless the CPU
   model advertises the given integer-unit FEATURE.  */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
/* Branch to nfpu_insn (label in disas_sparc_insn) unless the CPU
   model advertises the given FPU FEATURE.  */
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
3012 
3013 /* before an instruction, dc->pc must be static */
3014 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
3015 {
3016     unsigned int opc, rs1, rs2, rd;
3017     TCGv cpu_src1, cpu_src2;
3018     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
3019     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
3020     target_long simm;
3021 
3022     opc = GET_FIELD(insn, 0, 1);
3023     rd = GET_FIELD(insn, 2, 6);
3024 
3025     switch (opc) {
3026     case 0:                     /* branches/sethi */
3027         {
3028             unsigned int xop = GET_FIELD(insn, 7, 9);
3029             int32_t target;
3030             switch (xop) {
3031 #ifdef TARGET_SPARC64
3032             case 0x1:           /* V9 BPcc */
3033                 {
3034                     int cc;
3035 
3036                     target = GET_FIELD_SP(insn, 0, 18);
3037                     target = sign_extend(target, 19);
3038                     target <<= 2;
3039                     cc = GET_FIELD_SP(insn, 20, 21);
3040                     if (cc == 0)
3041                         do_branch(dc, target, insn, 0);
3042                     else if (cc == 2)
3043                         do_branch(dc, target, insn, 1);
3044                     else
3045                         goto illegal_insn;
3046                     goto jmp_insn;
3047                 }
3048             case 0x3:           /* V9 BPr */
3049                 {
3050                     target = GET_FIELD_SP(insn, 0, 13) |
3051                         (GET_FIELD_SP(insn, 20, 21) << 14);
3052                     target = sign_extend(target, 16);
3053                     target <<= 2;
3054                     cpu_src1 = get_src1(dc, insn);
3055                     do_branch_reg(dc, target, insn, cpu_src1);
3056                     goto jmp_insn;
3057                 }
3058             case 0x5:           /* V9 FBPcc */
3059                 {
3060                     int cc = GET_FIELD_SP(insn, 20, 21);
3061                     if (gen_trap_ifnofpu(dc)) {
3062                         goto jmp_insn;
3063                     }
3064                     target = GET_FIELD_SP(insn, 0, 18);
3065                     target = sign_extend(target, 19);
3066                     target <<= 2;
3067                     do_fbranch(dc, target, insn, cc);
3068                     goto jmp_insn;
3069                 }
3070 #else
3071             case 0x7:           /* CBN+x */
3072                 {
3073                     goto ncp_insn;
3074                 }
3075 #endif
3076             case 0x2:           /* BN+x */
3077                 {
3078                     target = GET_FIELD(insn, 10, 31);
3079                     target = sign_extend(target, 22);
3080                     target <<= 2;
3081                     do_branch(dc, target, insn, 0);
3082                     goto jmp_insn;
3083                 }
3084             case 0x6:           /* FBN+x */
3085                 {
3086                     if (gen_trap_ifnofpu(dc)) {
3087                         goto jmp_insn;
3088                     }
3089                     target = GET_FIELD(insn, 10, 31);
3090                     target = sign_extend(target, 22);
3091                     target <<= 2;
3092                     do_fbranch(dc, target, insn, 0);
3093                     goto jmp_insn;
3094                 }
3095             case 0x4:           /* SETHI */
3096                 /* Special-case %g0 because that's the canonical nop.  */
3097                 if (rd) {
3098                     uint32_t value = GET_FIELD(insn, 10, 31);
3099                     TCGv t = gen_dest_gpr(dc, rd);
3100                     tcg_gen_movi_tl(t, value << 10);
3101                     gen_store_gpr(dc, rd, t);
3102                 }
3103                 break;
3104             case 0x0:           /* UNIMPL */
3105             default:
3106                 goto illegal_insn;
3107             }
3108             break;
3109         }
3110         break;
3111     case 1:                     /*CALL*/
3112         {
3113             target_long target = GET_FIELDs(insn, 2, 31) << 2;
3114             TCGv o7 = gen_dest_gpr(dc, 15);
3115 
3116             tcg_gen_movi_tl(o7, dc->pc);
3117             gen_store_gpr(dc, 15, o7);
3118             target += dc->pc;
3119             gen_mov_pc_npc(dc);
3120 #ifdef TARGET_SPARC64
3121             if (unlikely(AM_CHECK(dc))) {
3122                 target &= 0xffffffffULL;
3123             }
3124 #endif
3125             dc->npc = target;
3126         }
3127         goto jmp_insn;
3128     case 2:                     /* FPU & Logical Operations */
3129         {
3130             unsigned int xop = GET_FIELD(insn, 7, 12);
3131             TCGv cpu_dst = tcg_temp_new();
3132             TCGv cpu_tmp0;
3133 
3134             if (xop == 0x3a) {  /* generate trap */
3135                 int cond = GET_FIELD(insn, 3, 6);
3136                 TCGv_i32 trap;
3137                 TCGLabel *l1 = NULL;
3138                 int mask;
3139 
3140                 if (cond == 0) {
3141                     /* Trap never.  */
3142                     break;
3143                 }
3144 
3145                 save_state(dc);
3146 
3147                 if (cond != 8) {
3148                     /* Conditional trap.  */
3149                     DisasCompare cmp;
3150 #ifdef TARGET_SPARC64
3151                     /* V9 icc/xcc */
3152                     int cc = GET_FIELD_SP(insn, 11, 12);
3153                     if (cc == 0) {
3154                         gen_compare(&cmp, 0, cond, dc);
3155                     } else if (cc == 2) {
3156                         gen_compare(&cmp, 1, cond, dc);
3157                     } else {
3158                         goto illegal_insn;
3159                     }
3160 #else
3161                     gen_compare(&cmp, 0, cond, dc);
3162 #endif
3163                     l1 = gen_new_label();
3164                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3165                                       cmp.c1, cmp.c2, l1);
3166                 }
3167 
3168                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3169                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3170 
3171                 /* Don't use the normal temporaries, as they may well have
3172                    gone out of scope with the branch above.  While we're
3173                    doing that we might as well pre-truncate to 32-bit.  */
3174                 trap = tcg_temp_new_i32();
3175 
3176                 rs1 = GET_FIELD_SP(insn, 14, 18);
3177                 if (IS_IMM) {
3178                     rs2 = GET_FIELD_SP(insn, 0, 7);
3179                     if (rs1 == 0) {
3180                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3181                         /* Signal that the trap value is fully constant.  */
3182                         mask = 0;
3183                     } else {
3184                         TCGv t1 = gen_load_gpr(dc, rs1);
3185                         tcg_gen_trunc_tl_i32(trap, t1);
3186                         tcg_gen_addi_i32(trap, trap, rs2);
3187                     }
3188                 } else {
3189                     TCGv t1, t2;
3190                     rs2 = GET_FIELD_SP(insn, 0, 4);
3191                     t1 = gen_load_gpr(dc, rs1);
3192                     t2 = gen_load_gpr(dc, rs2);
3193                     tcg_gen_add_tl(t1, t1, t2);
3194                     tcg_gen_trunc_tl_i32(trap, t1);
3195                 }
3196                 if (mask != 0) {
3197                     tcg_gen_andi_i32(trap, trap, mask);
3198                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
3199                 }
3200 
3201                 gen_helper_raise_exception(tcg_env, trap);
3202 
3203                 if (cond == 8) {
3204                     /* An unconditional trap ends the TB.  */
3205                     dc->base.is_jmp = DISAS_NORETURN;
3206                     goto jmp_insn;
3207                 } else {
3208                     /* A conditional trap falls through to the next insn.  */
3209                     gen_set_label(l1);
3210                     break;
3211                 }
3212             } else if (xop == 0x28) {
3213                 rs1 = GET_FIELD(insn, 13, 17);
3214                 switch(rs1) {
3215                 case 0: /* rdy */
3216 #ifndef TARGET_SPARC64
3217                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
3218                                        manual, rdy on the microSPARC
3219                                        II */
3220                 case 0x0f:          /* stbar in the SPARCv8 manual,
3221                                        rdy on the microSPARC II */
3222                 case 0x10 ... 0x1f: /* implementation-dependent in the
3223                                        SPARCv8 manual, rdy on the
3224                                        microSPARC II */
3225                     /* Read Asr17 */
3226                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3227                         TCGv t = gen_dest_gpr(dc, rd);
3228                         /* Read Asr17 for a Leon3 monoprocessor */
3229                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3230                         gen_store_gpr(dc, rd, t);
3231                         break;
3232                     }
3233 #endif
3234                     gen_store_gpr(dc, rd, cpu_y);
3235                     break;
3236 #ifdef TARGET_SPARC64
3237                 case 0x2: /* V9 rdccr */
3238                     update_psr(dc);
3239                     gen_helper_rdccr(cpu_dst, tcg_env);
3240                     gen_store_gpr(dc, rd, cpu_dst);
3241                     break;
3242                 case 0x3: /* V9 rdasi */
3243                     tcg_gen_movi_tl(cpu_dst, dc->asi);
3244                     gen_store_gpr(dc, rd, cpu_dst);
3245                     break;
3246                 case 0x4: /* V9 rdtick */
3247                     {
3248                         TCGv_ptr r_tickptr;
3249                         TCGv_i32 r_const;
3250 
3251                         r_tickptr = tcg_temp_new_ptr();
3252                         r_const = tcg_constant_i32(dc->mem_idx);
3253                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3254                                        offsetof(CPUSPARCState, tick));
3255                         if (translator_io_start(&dc->base)) {
3256                             dc->base.is_jmp = DISAS_EXIT;
3257                         }
3258                         gen_helper_tick_get_count(cpu_dst, tcg_env, r_tickptr,
3259                                                   r_const);
3260                         gen_store_gpr(dc, rd, cpu_dst);
3261                     }
3262                     break;
3263                 case 0x5: /* V9 rdpc */
3264                     {
3265                         TCGv t = gen_dest_gpr(dc, rd);
3266                         if (unlikely(AM_CHECK(dc))) {
3267                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3268                         } else {
3269                             tcg_gen_movi_tl(t, dc->pc);
3270                         }
3271                         gen_store_gpr(dc, rd, t);
3272                     }
3273                     break;
3274                 case 0x6: /* V9 rdfprs */
3275                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3276                     gen_store_gpr(dc, rd, cpu_dst);
3277                     break;
3278                 case 0xf: /* V9 membar */
3279                     break; /* no effect */
3280                 case 0x13: /* Graphics Status */
3281                     if (gen_trap_ifnofpu(dc)) {
3282                         goto jmp_insn;
3283                     }
3284                     gen_store_gpr(dc, rd, cpu_gsr);
3285                     break;
3286                 case 0x16: /* Softint */
3287                     tcg_gen_ld32s_tl(cpu_dst, tcg_env,
3288                                      offsetof(CPUSPARCState, softint));
3289                     gen_store_gpr(dc, rd, cpu_dst);
3290                     break;
3291                 case 0x17: /* Tick compare */
3292                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
3293                     break;
3294                 case 0x18: /* System tick */
3295                     {
3296                         TCGv_ptr r_tickptr;
3297                         TCGv_i32 r_const;
3298 
3299                         r_tickptr = tcg_temp_new_ptr();
3300                         r_const = tcg_constant_i32(dc->mem_idx);
3301                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3302                                        offsetof(CPUSPARCState, stick));
3303                         if (translator_io_start(&dc->base)) {
3304                             dc->base.is_jmp = DISAS_EXIT;
3305                         }
3306                         gen_helper_tick_get_count(cpu_dst, tcg_env, r_tickptr,
3307                                                   r_const);
3308                         gen_store_gpr(dc, rd, cpu_dst);
3309                     }
3310                     break;
3311                 case 0x19: /* System tick compare */
3312                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
3313                     break;
3314                 case 0x1a: /* UltraSPARC-T1 Strand status */
3315                     /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3316                      * this ASR as impl. dep
3317                      */
3318                     CHECK_IU_FEATURE(dc, HYPV);
3319                     {
3320                         TCGv t = gen_dest_gpr(dc, rd);
3321                         tcg_gen_movi_tl(t, 1UL);
3322                         gen_store_gpr(dc, rd, t);
3323                     }
3324                     break;
3325                 case 0x10: /* Performance Control */
3326                 case 0x11: /* Performance Instrumentation Counter */
3327                 case 0x12: /* Dispatch Control */
3328                 case 0x14: /* Softint set, WO */
3329                 case 0x15: /* Softint clear, WO */
3330 #endif
3331                 default:
3332                     goto illegal_insn;
3333                 }
3334 #if !defined(CONFIG_USER_ONLY)
3335             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3336 #ifndef TARGET_SPARC64
3337                 if (!supervisor(dc)) {
3338                     goto priv_insn;
3339                 }
3340                 update_psr(dc);
3341                 gen_helper_rdpsr(cpu_dst, tcg_env);
3342 #else
3343                 CHECK_IU_FEATURE(dc, HYPV);
3344                 if (!hypervisor(dc))
3345                     goto priv_insn;
3346                 rs1 = GET_FIELD(insn, 13, 17);
3347                 switch (rs1) {
3348                 case 0: // hpstate
3349                     tcg_gen_ld_i64(cpu_dst, tcg_env,
3350                                    offsetof(CPUSPARCState, hpstate));
3351                     break;
3352                 case 1: // htstate
3353                     // gen_op_rdhtstate();
3354                     break;
3355                 case 3: // hintp
3356                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3357                     break;
3358                 case 5: // htba
3359                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
3360                     break;
3361                 case 6: // hver
3362                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
3363                     break;
3364                 case 31: // hstick_cmpr
3365                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3366                     break;
3367                 default:
3368                     goto illegal_insn;
3369                 }
3370 #endif
3371                 gen_store_gpr(dc, rd, cpu_dst);
3372                 break;
3373             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3374                 if (!supervisor(dc)) {
3375                     goto priv_insn;
3376                 }
3377                 cpu_tmp0 = tcg_temp_new();
3378 #ifdef TARGET_SPARC64
3379                 rs1 = GET_FIELD(insn, 13, 17);
3380                 switch (rs1) {
3381                 case 0: // tpc
3382                     {
3383                         TCGv_ptr r_tsptr;
3384 
3385                         r_tsptr = tcg_temp_new_ptr();
3386                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3387                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3388                                       offsetof(trap_state, tpc));
3389                     }
3390                     break;
3391                 case 1: // tnpc
3392                     {
3393                         TCGv_ptr r_tsptr;
3394 
3395                         r_tsptr = tcg_temp_new_ptr();
3396                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3397                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3398                                       offsetof(trap_state, tnpc));
3399                     }
3400                     break;
3401                 case 2: // tstate
3402                     {
3403                         TCGv_ptr r_tsptr;
3404 
3405                         r_tsptr = tcg_temp_new_ptr();
3406                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3407                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3408                                       offsetof(trap_state, tstate));
3409                     }
3410                     break;
3411                 case 3: // tt
3412                     {
3413                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3414 
3415                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3416                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3417                                          offsetof(trap_state, tt));
3418                     }
3419                     break;
3420                 case 4: // tick
3421                     {
3422                         TCGv_ptr r_tickptr;
3423                         TCGv_i32 r_const;
3424 
3425                         r_tickptr = tcg_temp_new_ptr();
3426                         r_const = tcg_constant_i32(dc->mem_idx);
3427                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3428                                        offsetof(CPUSPARCState, tick));
3429                         if (translator_io_start(&dc->base)) {
3430                             dc->base.is_jmp = DISAS_EXIT;
3431                         }
3432                         gen_helper_tick_get_count(cpu_tmp0, tcg_env,
3433                                                   r_tickptr, r_const);
3434                     }
3435                     break;
3436                 case 5: // tba
3437                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3438                     break;
3439                 case 6: // pstate
3440                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3441                                      offsetof(CPUSPARCState, pstate));
3442                     break;
3443                 case 7: // tl
3444                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3445                                      offsetof(CPUSPARCState, tl));
3446                     break;
3447                 case 8: // pil
3448                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3449                                      offsetof(CPUSPARCState, psrpil));
3450                     break;
3451                 case 9: // cwp
3452                     gen_helper_rdcwp(cpu_tmp0, tcg_env);
3453                     break;
3454                 case 10: // cansave
3455                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3456                                      offsetof(CPUSPARCState, cansave));
3457                     break;
3458                 case 11: // canrestore
3459                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3460                                      offsetof(CPUSPARCState, canrestore));
3461                     break;
3462                 case 12: // cleanwin
3463                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3464                                      offsetof(CPUSPARCState, cleanwin));
3465                     break;
3466                 case 13: // otherwin
3467                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3468                                      offsetof(CPUSPARCState, otherwin));
3469                     break;
3470                 case 14: // wstate
3471                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3472                                      offsetof(CPUSPARCState, wstate));
3473                     break;
3474                 case 16: // UA2005 gl
3475                     CHECK_IU_FEATURE(dc, GL);
3476                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3477                                      offsetof(CPUSPARCState, gl));
3478                     break;
3479                 case 26: // UA2005 strand status
3480                     CHECK_IU_FEATURE(dc, HYPV);
3481                     if (!hypervisor(dc))
3482                         goto priv_insn;
3483                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3484                     break;
3485                 case 31: // ver
3486                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3487                     break;
3488                 case 15: // fq
3489                 default:
3490                     goto illegal_insn;
3491                 }
3492 #else
3493                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3494 #endif
3495                 gen_store_gpr(dc, rd, cpu_tmp0);
3496                 break;
3497 #endif
3498 #if defined(TARGET_SPARC64) || !defined(CONFIG_USER_ONLY)
3499             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3500 #ifdef TARGET_SPARC64
3501                 gen_helper_flushw(tcg_env);
3502 #else
3503                 if (!supervisor(dc))
3504                     goto priv_insn;
3505                 gen_store_gpr(dc, rd, cpu_tbr);
3506 #endif
3507                 break;
3508 #endif
3509             } else if (xop == 0x34) {   /* FPU Operations */
3510                 if (gen_trap_ifnofpu(dc)) {
3511                     goto jmp_insn;
3512                 }
3513                 gen_op_clear_ieee_excp_and_FTT();
3514                 rs1 = GET_FIELD(insn, 13, 17);
3515                 rs2 = GET_FIELD(insn, 27, 31);
3516                 xop = GET_FIELD(insn, 18, 26);
3517 
3518                 switch (xop) {
3519                 case 0x1: /* fmovs */
3520                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3521                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3522                     break;
3523                 case 0x5: /* fnegs */
3524                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3525                     break;
3526                 case 0x9: /* fabss */
3527                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3528                     break;
3529                 case 0x29: /* fsqrts */
3530                     CHECK_FPU_FEATURE(dc, FSQRT);
3531                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3532                     break;
3533                 case 0x2a: /* fsqrtd */
3534                     CHECK_FPU_FEATURE(dc, FSQRT);
3535                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3536                     break;
3537                 case 0x2b: /* fsqrtq */
3538                     CHECK_FPU_FEATURE(dc, FLOAT128);
3539                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3540                     break;
3541                 case 0x41: /* fadds */
3542                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3543                     break;
3544                 case 0x42: /* faddd */
3545                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3546                     break;
3547                 case 0x43: /* faddq */
3548                     CHECK_FPU_FEATURE(dc, FLOAT128);
3549                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3550                     break;
3551                 case 0x45: /* fsubs */
3552                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3553                     break;
3554                 case 0x46: /* fsubd */
3555                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3556                     break;
3557                 case 0x47: /* fsubq */
3558                     CHECK_FPU_FEATURE(dc, FLOAT128);
3559                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3560                     break;
3561                 case 0x49: /* fmuls */
3562                     CHECK_FPU_FEATURE(dc, FMUL);
3563                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3564                     break;
3565                 case 0x4a: /* fmuld */
3566                     CHECK_FPU_FEATURE(dc, FMUL);
3567                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3568                     break;
3569                 case 0x4b: /* fmulq */
3570                     CHECK_FPU_FEATURE(dc, FLOAT128);
3571                     CHECK_FPU_FEATURE(dc, FMUL);
3572                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3573                     break;
3574                 case 0x4d: /* fdivs */
3575                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3576                     break;
3577                 case 0x4e: /* fdivd */
3578                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3579                     break;
3580                 case 0x4f: /* fdivq */
3581                     CHECK_FPU_FEATURE(dc, FLOAT128);
3582                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3583                     break;
3584                 case 0x69: /* fsmuld */
3585                     CHECK_FPU_FEATURE(dc, FSMULD);
3586                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3587                     break;
3588                 case 0x6e: /* fdmulq */
3589                     CHECK_FPU_FEATURE(dc, FLOAT128);
3590                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3591                     break;
3592                 case 0xc4: /* fitos */
3593                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3594                     break;
3595                 case 0xc6: /* fdtos */
3596                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3597                     break;
3598                 case 0xc7: /* fqtos */
3599                     CHECK_FPU_FEATURE(dc, FLOAT128);
3600                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3601                     break;
3602                 case 0xc8: /* fitod */
3603                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3604                     break;
3605                 case 0xc9: /* fstod */
3606                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3607                     break;
3608                 case 0xcb: /* fqtod */
3609                     CHECK_FPU_FEATURE(dc, FLOAT128);
3610                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3611                     break;
3612                 case 0xcc: /* fitoq */
3613                     CHECK_FPU_FEATURE(dc, FLOAT128);
3614                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3615                     break;
3616                 case 0xcd: /* fstoq */
3617                     CHECK_FPU_FEATURE(dc, FLOAT128);
3618                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3619                     break;
3620                 case 0xce: /* fdtoq */
3621                     CHECK_FPU_FEATURE(dc, FLOAT128);
3622                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3623                     break;
3624                 case 0xd1: /* fstoi */
3625                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3626                     break;
3627                 case 0xd2: /* fdtoi */
3628                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3629                     break;
3630                 case 0xd3: /* fqtoi */
3631                     CHECK_FPU_FEATURE(dc, FLOAT128);
3632                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3633                     break;
3634 #ifdef TARGET_SPARC64
3635                 case 0x2: /* V9 fmovd */
3636                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3637                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3638                     break;
3639                 case 0x3: /* V9 fmovq */
3640                     CHECK_FPU_FEATURE(dc, FLOAT128);
3641                     gen_move_Q(dc, rd, rs2);
3642                     break;
3643                 case 0x6: /* V9 fnegd */
3644                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3645                     break;
3646                 case 0x7: /* V9 fnegq */
3647                     CHECK_FPU_FEATURE(dc, FLOAT128);
3648                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3649                     break;
3650                 case 0xa: /* V9 fabsd */
3651                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3652                     break;
3653                 case 0xb: /* V9 fabsq */
3654                     CHECK_FPU_FEATURE(dc, FLOAT128);
3655                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3656                     break;
3657                 case 0x81: /* V9 fstox */
3658                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3659                     break;
3660                 case 0x82: /* V9 fdtox */
3661                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3662                     break;
3663                 case 0x83: /* V9 fqtox */
3664                     CHECK_FPU_FEATURE(dc, FLOAT128);
3665                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3666                     break;
3667                 case 0x84: /* V9 fxtos */
3668                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3669                     break;
3670                 case 0x88: /* V9 fxtod */
3671                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3672                     break;
3673                 case 0x8c: /* V9 fxtoq */
3674                     CHECK_FPU_FEATURE(dc, FLOAT128);
3675                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3676                     break;
3677 #endif
3678                 default:
3679                     goto illegal_insn;
3680                 }
3681             } else if (xop == 0x35) {   /* FPU Operations */
3682 #ifdef TARGET_SPARC64
3683                 int cond;
3684 #endif
3685                 if (gen_trap_ifnofpu(dc)) {
3686                     goto jmp_insn;
3687                 }
3688                 gen_op_clear_ieee_excp_and_FTT();
3689                 rs1 = GET_FIELD(insn, 13, 17);
3690                 rs2 = GET_FIELD(insn, 27, 31);
3691                 xop = GET_FIELD(insn, 18, 26);
3692 
3693 #ifdef TARGET_SPARC64
3694 #define FMOVR(sz)                                                  \
3695                 do {                                               \
3696                     DisasCompare cmp;                              \
3697                     cond = GET_FIELD_SP(insn, 10, 12);             \
3698                     cpu_src1 = get_src1(dc, insn);                 \
3699                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3700                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3701                 } while (0)
3702 
3703                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3704                     FMOVR(s);
3705                     break;
3706                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3707                     FMOVR(d);
3708                     break;
3709                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3710                     CHECK_FPU_FEATURE(dc, FLOAT128);
3711                     FMOVR(q);
3712                     break;
3713                 }
3714 #undef FMOVR
3715 #endif
3716                 switch (xop) {
3717 #ifdef TARGET_SPARC64
3718 #define FMOVCC(fcc, sz)                                                 \
3719                     do {                                                \
3720                         DisasCompare cmp;                               \
3721                         cond = GET_FIELD_SP(insn, 14, 17);              \
3722                         gen_fcompare(&cmp, fcc, cond);                  \
3723                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3724                     } while (0)
3725 
3726                     case 0x001: /* V9 fmovscc %fcc0 */
3727                         FMOVCC(0, s);
3728                         break;
3729                     case 0x002: /* V9 fmovdcc %fcc0 */
3730                         FMOVCC(0, d);
3731                         break;
3732                     case 0x003: /* V9 fmovqcc %fcc0 */
3733                         CHECK_FPU_FEATURE(dc, FLOAT128);
3734                         FMOVCC(0, q);
3735                         break;
3736                     case 0x041: /* V9 fmovscc %fcc1 */
3737                         FMOVCC(1, s);
3738                         break;
3739                     case 0x042: /* V9 fmovdcc %fcc1 */
3740                         FMOVCC(1, d);
3741                         break;
3742                     case 0x043: /* V9 fmovqcc %fcc1 */
3743                         CHECK_FPU_FEATURE(dc, FLOAT128);
3744                         FMOVCC(1, q);
3745                         break;
3746                     case 0x081: /* V9 fmovscc %fcc2 */
3747                         FMOVCC(2, s);
3748                         break;
3749                     case 0x082: /* V9 fmovdcc %fcc2 */
3750                         FMOVCC(2, d);
3751                         break;
3752                     case 0x083: /* V9 fmovqcc %fcc2 */
3753                         CHECK_FPU_FEATURE(dc, FLOAT128);
3754                         FMOVCC(2, q);
3755                         break;
3756                     case 0x0c1: /* V9 fmovscc %fcc3 */
3757                         FMOVCC(3, s);
3758                         break;
3759                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3760                         FMOVCC(3, d);
3761                         break;
3762                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3763                         CHECK_FPU_FEATURE(dc, FLOAT128);
3764                         FMOVCC(3, q);
3765                         break;
3766 #undef FMOVCC
3767 #define FMOVCC(xcc, sz)                                                 \
3768                     do {                                                \
3769                         DisasCompare cmp;                               \
3770                         cond = GET_FIELD_SP(insn, 14, 17);              \
3771                         gen_compare(&cmp, xcc, cond, dc);               \
3772                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3773                     } while (0)
3774 
3775                     case 0x101: /* V9 fmovscc %icc */
3776                         FMOVCC(0, s);
3777                         break;
3778                     case 0x102: /* V9 fmovdcc %icc */
3779                         FMOVCC(0, d);
3780                         break;
3781                     case 0x103: /* V9 fmovqcc %icc */
3782                         CHECK_FPU_FEATURE(dc, FLOAT128);
3783                         FMOVCC(0, q);
3784                         break;
3785                     case 0x181: /* V9 fmovscc %xcc */
3786                         FMOVCC(1, s);
3787                         break;
3788                     case 0x182: /* V9 fmovdcc %xcc */
3789                         FMOVCC(1, d);
3790                         break;
3791                     case 0x183: /* V9 fmovqcc %xcc */
3792                         CHECK_FPU_FEATURE(dc, FLOAT128);
3793                         FMOVCC(1, q);
3794                         break;
3795 #undef FMOVCC
3796 #endif
3797                     case 0x51: /* fcmps, V9 %fcc */
3798                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3799                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3800                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3801                         break;
3802                     case 0x52: /* fcmpd, V9 %fcc */
3803                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3804                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3805                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3806                         break;
3807                     case 0x53: /* fcmpq, V9 %fcc */
3808                         CHECK_FPU_FEATURE(dc, FLOAT128);
3809                         gen_op_load_fpr_QT0(QFPREG(rs1));
3810                         gen_op_load_fpr_QT1(QFPREG(rs2));
3811                         gen_op_fcmpq(rd & 3);
3812                         break;
3813                     case 0x55: /* fcmpes, V9 %fcc */
3814                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3815                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3816                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3817                         break;
3818                     case 0x56: /* fcmped, V9 %fcc */
3819                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3820                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3821                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3822                         break;
3823                     case 0x57: /* fcmpeq, V9 %fcc */
3824                         CHECK_FPU_FEATURE(dc, FLOAT128);
3825                         gen_op_load_fpr_QT0(QFPREG(rs1));
3826                         gen_op_load_fpr_QT1(QFPREG(rs2));
3827                         gen_op_fcmpeq(rd & 3);
3828                         break;
3829                     default:
3830                         goto illegal_insn;
3831                 }
3832             } else if (xop == 0x2) {
3833                 TCGv dst = gen_dest_gpr(dc, rd);
3834                 rs1 = GET_FIELD(insn, 13, 17);
3835                 if (rs1 == 0) {
3836                     /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3837                     if (IS_IMM) {       /* immediate */
3838                         simm = GET_FIELDs(insn, 19, 31);
3839                         tcg_gen_movi_tl(dst, simm);
3840                         gen_store_gpr(dc, rd, dst);
3841                     } else {            /* register */
3842                         rs2 = GET_FIELD(insn, 27, 31);
3843                         if (rs2 == 0) {
3844                             tcg_gen_movi_tl(dst, 0);
3845                             gen_store_gpr(dc, rd, dst);
3846                         } else {
3847                             cpu_src2 = gen_load_gpr(dc, rs2);
3848                             gen_store_gpr(dc, rd, cpu_src2);
3849                         }
3850                     }
3851                 } else {
3852                     cpu_src1 = get_src1(dc, insn);
3853                     if (IS_IMM) {       /* immediate */
3854                         simm = GET_FIELDs(insn, 19, 31);
3855                         tcg_gen_ori_tl(dst, cpu_src1, simm);
3856                         gen_store_gpr(dc, rd, dst);
3857                     } else {            /* register */
3858                         rs2 = GET_FIELD(insn, 27, 31);
3859                         if (rs2 == 0) {
3860                             /* mov shortcut:  or x, %g0, y -> mov x, y */
3861                             gen_store_gpr(dc, rd, cpu_src1);
3862                         } else {
3863                             cpu_src2 = gen_load_gpr(dc, rs2);
3864                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3865                             gen_store_gpr(dc, rd, dst);
3866                         }
3867                     }
3868                 }
3869 #ifdef TARGET_SPARC64
3870             } else if (xop == 0x25) { /* sll, V9 sllx */
3871                 cpu_src1 = get_src1(dc, insn);
3872                 if (IS_IMM) {   /* immediate */
3873                     simm = GET_FIELDs(insn, 20, 31);
3874                     if (insn & (1 << 12)) {
3875                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3876                     } else {
3877                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3878                     }
3879                 } else {                /* register */
3880                     rs2 = GET_FIELD(insn, 27, 31);
3881                     cpu_src2 = gen_load_gpr(dc, rs2);
3882                     cpu_tmp0 = tcg_temp_new();
3883                     if (insn & (1 << 12)) {
3884                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3885                     } else {
3886                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3887                     }
3888                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3889                 }
3890                 gen_store_gpr(dc, rd, cpu_dst);
3891             } else if (xop == 0x26) { /* srl, V9 srlx */
3892                 cpu_src1 = get_src1(dc, insn);
3893                 if (IS_IMM) {   /* immediate */
3894                     simm = GET_FIELDs(insn, 20, 31);
3895                     if (insn & (1 << 12)) {
3896                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3897                     } else {
3898                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3899                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3900                     }
3901                 } else {                /* register */
3902                     rs2 = GET_FIELD(insn, 27, 31);
3903                     cpu_src2 = gen_load_gpr(dc, rs2);
3904                     cpu_tmp0 = tcg_temp_new();
3905                     if (insn & (1 << 12)) {
3906                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3907                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3908                     } else {
3909                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3910                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3911                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3912                     }
3913                 }
3914                 gen_store_gpr(dc, rd, cpu_dst);
3915             } else if (xop == 0x27) { /* sra, V9 srax */
3916                 cpu_src1 = get_src1(dc, insn);
3917                 if (IS_IMM) {   /* immediate */
3918                     simm = GET_FIELDs(insn, 20, 31);
3919                     if (insn & (1 << 12)) {
3920                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3921                     } else {
3922                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3923                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3924                     }
3925                 } else {                /* register */
3926                     rs2 = GET_FIELD(insn, 27, 31);
3927                     cpu_src2 = gen_load_gpr(dc, rs2);
3928                     cpu_tmp0 = tcg_temp_new();
3929                     if (insn & (1 << 12)) {
3930                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3931                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3932                     } else {
3933                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3934                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3935                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3936                     }
3937                 }
3938                 gen_store_gpr(dc, rd, cpu_dst);
3939 #endif
3940             } else if (xop < 0x36) {
3941                 if (xop < 0x20) {
3942                     cpu_src1 = get_src1(dc, insn);
3943                     cpu_src2 = get_src2(dc, insn);
3944                     switch (xop & ~0x10) {
3945                     case 0x0: /* add */
3946                         if (xop & 0x10) {
3947                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3948                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3949                             dc->cc_op = CC_OP_ADD;
3950                         } else {
3951                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3952                         }
3953                         break;
3954                     case 0x1: /* and */
3955                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3956                         if (xop & 0x10) {
3957                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3958                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3959                             dc->cc_op = CC_OP_LOGIC;
3960                         }
3961                         break;
3962                     case 0x2: /* or */
3963                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3964                         if (xop & 0x10) {
3965                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3966                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3967                             dc->cc_op = CC_OP_LOGIC;
3968                         }
3969                         break;
3970                     case 0x3: /* xor */
3971                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3972                         if (xop & 0x10) {
3973                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3974                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3975                             dc->cc_op = CC_OP_LOGIC;
3976                         }
3977                         break;
3978                     case 0x4: /* sub */
3979                         if (xop & 0x10) {
3980                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3981                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3982                             dc->cc_op = CC_OP_SUB;
3983                         } else {
3984                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3985                         }
3986                         break;
3987                     case 0x5: /* andn */
3988                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3989                         if (xop & 0x10) {
3990                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3991                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3992                             dc->cc_op = CC_OP_LOGIC;
3993                         }
3994                         break;
3995                     case 0x6: /* orn */
3996                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3997                         if (xop & 0x10) {
3998                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3999                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4000                             dc->cc_op = CC_OP_LOGIC;
4001                         }
4002                         break;
4003                     case 0x7: /* xorn */
4004                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
4005                         if (xop & 0x10) {
4006                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4007                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4008                             dc->cc_op = CC_OP_LOGIC;
4009                         }
4010                         break;
4011                     case 0x8: /* addx, V9 addc */
4012                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4013                                         (xop & 0x10));
4014                         break;
4015 #ifdef TARGET_SPARC64
4016                     case 0x9: /* V9 mulx */
4017                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
4018                         break;
4019 #endif
4020                     case 0xa: /* umul */
4021                         CHECK_IU_FEATURE(dc, MUL);
4022                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
4023                         if (xop & 0x10) {
4024                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4025                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4026                             dc->cc_op = CC_OP_LOGIC;
4027                         }
4028                         break;
4029                     case 0xb: /* smul */
4030                         CHECK_IU_FEATURE(dc, MUL);
4031                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
4032                         if (xop & 0x10) {
4033                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4034                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4035                             dc->cc_op = CC_OP_LOGIC;
4036                         }
4037                         break;
4038                     case 0xc: /* subx, V9 subc */
4039                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4040                                         (xop & 0x10));
4041                         break;
4042 #ifdef TARGET_SPARC64
4043                     case 0xd: /* V9 udivx */
4044                         gen_helper_udivx(cpu_dst, tcg_env, cpu_src1, cpu_src2);
4045                         break;
4046 #endif
4047                     case 0xe: /* udiv */
4048                         CHECK_IU_FEATURE(dc, DIV);
4049                         if (xop & 0x10) {
4050                             gen_helper_udiv_cc(cpu_dst, tcg_env, cpu_src1,
4051                                                cpu_src2);
4052                             dc->cc_op = CC_OP_DIV;
4053                         } else {
4054                             gen_helper_udiv(cpu_dst, tcg_env, cpu_src1,
4055                                             cpu_src2);
4056                         }
4057                         break;
4058                     case 0xf: /* sdiv */
4059                         CHECK_IU_FEATURE(dc, DIV);
4060                         if (xop & 0x10) {
4061                             gen_helper_sdiv_cc(cpu_dst, tcg_env, cpu_src1,
4062                                                cpu_src2);
4063                             dc->cc_op = CC_OP_DIV;
4064                         } else {
4065                             gen_helper_sdiv(cpu_dst, tcg_env, cpu_src1,
4066                                             cpu_src2);
4067                         }
4068                         break;
4069                     default:
4070                         goto illegal_insn;
4071                     }
4072                     gen_store_gpr(dc, rd, cpu_dst);
4073                 } else {
4074                     cpu_src1 = get_src1(dc, insn);
4075                     cpu_src2 = get_src2(dc, insn);
4076                     switch (xop) {
4077                     case 0x20: /* taddcc */
4078                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4079                         gen_store_gpr(dc, rd, cpu_dst);
4080                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
4081                         dc->cc_op = CC_OP_TADD;
4082                         break;
4083                     case 0x21: /* tsubcc */
4084                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4085                         gen_store_gpr(dc, rd, cpu_dst);
4086                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
4087                         dc->cc_op = CC_OP_TSUB;
4088                         break;
4089                     case 0x22: /* taddcctv */
4090                         gen_helper_taddcctv(cpu_dst, tcg_env,
4091                                             cpu_src1, cpu_src2);
4092                         gen_store_gpr(dc, rd, cpu_dst);
4093                         dc->cc_op = CC_OP_TADDTV;
4094                         break;
4095                     case 0x23: /* tsubcctv */
4096                         gen_helper_tsubcctv(cpu_dst, tcg_env,
4097                                             cpu_src1, cpu_src2);
4098                         gen_store_gpr(dc, rd, cpu_dst);
4099                         dc->cc_op = CC_OP_TSUBTV;
4100                         break;
4101                     case 0x24: /* mulscc */
4102                         update_psr(dc);
4103                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
4104                         gen_store_gpr(dc, rd, cpu_dst);
4105                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4106                         dc->cc_op = CC_OP_ADD;
4107                         break;
4108 #ifndef TARGET_SPARC64
4109                     case 0x25:  /* sll */
4110                         if (IS_IMM) { /* immediate */
4111                             simm = GET_FIELDs(insn, 20, 31);
4112                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
4113                         } else { /* register */
4114                             cpu_tmp0 = tcg_temp_new();
4115                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4116                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
4117                         }
4118                         gen_store_gpr(dc, rd, cpu_dst);
4119                         break;
4120                     case 0x26:  /* srl */
4121                         if (IS_IMM) { /* immediate */
4122                             simm = GET_FIELDs(insn, 20, 31);
4123                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
4124                         } else { /* register */
4125                             cpu_tmp0 = tcg_temp_new();
4126                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4127                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
4128                         }
4129                         gen_store_gpr(dc, rd, cpu_dst);
4130                         break;
4131                     case 0x27:  /* sra */
4132                         if (IS_IMM) { /* immediate */
4133                             simm = GET_FIELDs(insn, 20, 31);
4134                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
4135                         } else { /* register */
4136                             cpu_tmp0 = tcg_temp_new();
4137                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4138                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
4139                         }
4140                         gen_store_gpr(dc, rd, cpu_dst);
4141                         break;
4142 #endif
4143                     case 0x30:
4144                         {
4145                             cpu_tmp0 = tcg_temp_new();
4146                             switch(rd) {
4147                             case 0: /* wry */
4148                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4149                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
4150                                 break;
4151 #ifndef TARGET_SPARC64
4152                             case 0x01 ... 0x0f: /* undefined in the
4153                                                    SPARCv8 manual, nop
4154                                                    on the microSPARC
4155                                                    II */
4156                             case 0x10 ... 0x1f: /* implementation-dependent
4157                                                    in the SPARCv8
4158                                                    manual, nop on the
4159                                                    microSPARC II */
4160                                 if ((rd == 0x13) && (dc->def->features &
4161                                                      CPU_FEATURE_POWERDOWN)) {
4162                                     /* LEON3 power-down */
4163                                     save_state(dc);
4164                                     gen_helper_power_down(tcg_env);
4165                                 }
4166                                 break;
4167 #else
4168                             case 0x2: /* V9 wrccr */
4169                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4170                                 gen_helper_wrccr(tcg_env, cpu_tmp0);
4171                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4172                                 dc->cc_op = CC_OP_FLAGS;
4173                                 break;
4174                             case 0x3: /* V9 wrasi */
4175                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4176                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
4177                                 tcg_gen_st32_tl(cpu_tmp0, tcg_env,
4178                                                 offsetof(CPUSPARCState, asi));
4179                                 /*
4180                                  * End TB to notice changed ASI.
4181                                  * TODO: Could notice src1 = %g0 and IS_IMM,
4182                                  * update DisasContext and not exit the TB.
4183                                  */
4184                                 save_state(dc);
4185                                 gen_op_next_insn();
4186                                 tcg_gen_lookup_and_goto_ptr();
4187                                 dc->base.is_jmp = DISAS_NORETURN;
4188                                 break;
4189                             case 0x6: /* V9 wrfprs */
4190                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4191                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
4192                                 dc->fprs_dirty = 0;
4193                                 save_state(dc);
4194                                 gen_op_next_insn();
4195                                 tcg_gen_exit_tb(NULL, 0);
4196                                 dc->base.is_jmp = DISAS_NORETURN;
4197                                 break;
4198                             case 0xf: /* V9 sir, nop if user */
4199 #if !defined(CONFIG_USER_ONLY)
4200                                 if (supervisor(dc)) {
4201                                     ; // XXX
4202                                 }
4203 #endif
4204                                 break;
4205                             case 0x13: /* Graphics Status */
4206                                 if (gen_trap_ifnofpu(dc)) {
4207                                     goto jmp_insn;
4208                                 }
4209                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
4210                                 break;
4211                             case 0x14: /* Softint set */
4212                                 if (!supervisor(dc))
4213                                     goto illegal_insn;
4214                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4215                                 gen_helper_set_softint(tcg_env, cpu_tmp0);
4216                                 break;
4217                             case 0x15: /* Softint clear */
4218                                 if (!supervisor(dc))
4219                                     goto illegal_insn;
4220                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4221                                 gen_helper_clear_softint(tcg_env, cpu_tmp0);
4222                                 break;
4223                             case 0x16: /* Softint write */
4224                                 if (!supervisor(dc))
4225                                     goto illegal_insn;
4226                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4227                                 gen_helper_write_softint(tcg_env, cpu_tmp0);
4228                                 break;
4229                             case 0x17: /* Tick compare */
4230 #if !defined(CONFIG_USER_ONLY)
4231                                 if (!supervisor(dc))
4232                                     goto illegal_insn;
4233 #endif
4234                                 {
4235                                     TCGv_ptr r_tickptr;
4236 
4237                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
4238                                                    cpu_src2);
4239                                     r_tickptr = tcg_temp_new_ptr();
4240                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4241                                                    offsetof(CPUSPARCState, tick));
4242                                     translator_io_start(&dc->base);
4243                                     gen_helper_tick_set_limit(r_tickptr,
4244                                                               cpu_tick_cmpr);
4245                                     /* End TB to handle timer interrupt */
4246                                     dc->base.is_jmp = DISAS_EXIT;
4247                                 }
4248                                 break;
4249                             case 0x18: /* System tick */
4250 #if !defined(CONFIG_USER_ONLY)
4251                                 if (!supervisor(dc))
4252                                     goto illegal_insn;
4253 #endif
4254                                 {
4255                                     TCGv_ptr r_tickptr;
4256 
4257                                     tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
4258                                                    cpu_src2);
4259                                     r_tickptr = tcg_temp_new_ptr();
4260                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4261                                                    offsetof(CPUSPARCState, stick));
4262                                     translator_io_start(&dc->base);
4263                                     gen_helper_tick_set_count(r_tickptr,
4264                                                               cpu_tmp0);
4265                                     /* End TB to handle timer interrupt */
4266                                     dc->base.is_jmp = DISAS_EXIT;
4267                                 }
4268                                 break;
4269                             case 0x19: /* System tick compare */
4270 #if !defined(CONFIG_USER_ONLY)
4271                                 if (!supervisor(dc))
4272                                     goto illegal_insn;
4273 #endif
4274                                 {
4275                                     TCGv_ptr r_tickptr;
4276 
4277                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
4278                                                    cpu_src2);
4279                                     r_tickptr = tcg_temp_new_ptr();
4280                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4281                                                    offsetof(CPUSPARCState, stick));
4282                                     translator_io_start(&dc->base);
4283                                     gen_helper_tick_set_limit(r_tickptr,
4284                                                               cpu_stick_cmpr);
4285                                     /* End TB to handle timer interrupt */
4286                                     dc->base.is_jmp = DISAS_EXIT;
4287                                 }
4288                                 break;
4289 
4290                             case 0x10: /* Performance Control */
4291                             case 0x11: /* Performance Instrumentation
4292                                           Counter */
4293                             case 0x12: /* Dispatch Control */
4294 #endif
4295                             default:
4296                                 goto illegal_insn;
4297                             }
4298                         }
4299                         break;
4300 #if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            /* V9: the rd field selects a window-management
                               operation instead of a PSR write. */
                            switch (rd) {
                            case 0:
                                gen_helper_saved(tcg_env);
                                break;
                            case 1:
                                gen_helper_restored(tcg_env);
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            /* V8 wr semantics: value written is rs1 XOR rs2/imm. */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(tcg_env, cpu_tmp0);
                            /* PSR now holds live flags; track that in cc_op. */
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            /* wrpsr can change CWP/ET/PIL, so end the TB here. */
                            save_state(dc);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(NULL, 0);
                            dc->base.is_jmp = DISAS_NORETURN;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            /* SPARC wr semantics: value written is rs1 XOR rs2/imm. */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            /* V9 wrpr: rd selects the privileged register. */
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    /* Store into the current trap level's state. */
                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    /* tt is a 32-bit field in trap_state. */
                                    tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
                                                    offsetof(trap_state, tt));
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, tick));
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                save_state(dc);
                                if (translator_io_start(&dc->base)) {
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                gen_helper_wrpstate(tcg_env, cpu_tmp0);
                                /* Execution context changed; npc is now dynamic. */
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 7: // tl
                                save_state(dc);
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                               offsetof(CPUSPARCState, tl));
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 8: // pil
                                if (translator_io_start(&dc->base)) {
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                gen_helper_wrpil(tcg_env, cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(tcg_env, cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                gen_helper_wrgl(tcg_env, cpu_tmp0);
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            /* V8 wrwim: mask to the implemented window count. */
                            tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
                            if (dc->def->nwindows != 32) {
                                tcg_gen_andi_tl(cpu_wim, cpu_wim,
                                                (1 << dc->def->nwindows) - 1);
                            }
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            /* V8: write the trap base register (rs1 XOR rs2/imm). */
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            /* UA2005 wrhpr: hypervisor-privileged registers,
                               selected by rd. */
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                tcg_gen_st_i64(cpu_tmp0, tcg_env,
                                               offsetof(CPUSPARCState,
                                                        hpstate));
                                /* hpstate affects execution mode: end the TB. */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(NULL, 0);
                                dc->base.is_jmp = DISAS_NORETURN;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, hstick));
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
4523 #endif
4524 #ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            DisasCompare cmp;
                            TCGv dst;

                            /* Bit 18 set: integer ccs (cc=0 icc, cc=2 xcc);
                               clear: one of the floating-point cc sets. */
                            if (insn & (1 << 18)) {
                                if (cc == 0) {
                                    gen_compare(&cmp, 0, cond, dc);
                                } else if (cc == 2) {
                                    gen_compare(&cmp, 1, cond, dc);
                                } else {
                                    goto illegal_insn;
                                }
                            } else {
                                gen_fcompare(&cmp, cc, cond);
                            }

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 11-bit field we have
                               in movcc.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 10);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* rd = condition ? src2 : rd (conditional move). */
                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        /* Signed 64-bit divide; env is passed so the helper
                           can raise the division trap. */
                        gen_helper_sdivx(cpu_dst, tcg_env, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        /* Population count; only rs2 is used as the operand. */
                        tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            DisasCompare cmp;
                            TCGv dst;

                            /* Condition is evaluated on the rs1 register value
                               itself, not on condition codes. */
                            gen_compare_reg(&cmp, cond, cpu_src1);

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 10-bit field we have
                               in movr.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 9);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* rd = condition ? src2 : rd (conditional move). */
                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
4590 #endif
4591                     default:
4592                         goto illegal_insn;
4593                     }
4594                 }
4595             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4596 #ifdef TARGET_SPARC64
4597                 int opf = GET_FIELD_SP(insn, 5, 13);
4598                 rs1 = GET_FIELD(insn, 13, 17);
4599                 rs2 = GET_FIELD(insn, 27, 31);
4600                 if (gen_trap_ifnofpu(dc)) {
4601                     goto jmp_insn;
4602                 }
4603 
4604                 switch (opf) {
                /* VIS edge instructions: gen_edge's trailing arguments are
                   (element width, cc-variant, little-endian-variant),
                   matching the mnemonic suffixes (cc/n, l). */
                case 0x000: /* VIS I edge8cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x001: /* VIS II edge8n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x002: /* VIS I edge8lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x003: /* VIS II edge8ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x004: /* VIS I edge16cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x005: /* VIS II edge16n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x006: /* VIS I edge16lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x007: /* VIS II edge16ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x008: /* VIS I edge32cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x009: /* VIS II edge32n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00a: /* VIS I edge32lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00b: /* VIS II edge32ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    /* array16/32 reuse array8 and scale the resulting
                       address by the element size (<<1 / <<2). */
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    /* Last argument selects the little-endian variant. */
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x01a: /* VIS I alignaddrl */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    /* rd = rs1 + rs2; the sum is also deposited into the
                       upper 32 bits of GSR (the bmask field). */
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                /* VIS partitioned compares: operate on 64-bit FP register
                   pairs and write an integer result mask to GPR rd. */
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                /* VIS partitioned multiplies: D = helper(D, D), dispatched
                   through the common gen_ne_fop_DDD wrapper. */
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
                    break;
                case 0x03a: /* VIS I fpack32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* gen_gsr_fop_* variants pass GSR to the operation. */
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
                    break;
                case 0x03b: /* VIS I fpack16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* Single-register source (rs2); GSR supplies the
                       scale factor; result is a 32-bit FP register. */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03d: /* VIS I fpackfix */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03e: /* VIS I pdist */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* DDDD: pdist accumulates into the destination register. */
                    gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
                    break;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
                    break;
                case 0x04c: /* VIS II bshuffle */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
                    break;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
                    break;
                /* VIS partitioned add/sub: DDD forms work on 64-bit FP regs,
                   FFF forms ("...s" mnemonics) on 32-bit FP regs.  The 32-bit
                   add/sub map directly onto plain TCG i32 arithmetic. */
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
                    break;
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
                    tcg_gen_movi_i64(cpu_dst_64, 0);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    tcg_gen_movi_i32(cpu_dst_32, 0);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
                    break;
                case 0x064: /* VIS I fandnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
                    break;
                case 0x065: /* VIS I fandnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
                    break;
                case 0x066: /* VIS I fnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
                    break;
                case 0x067: /* VIS I fnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
                    break;
                case 0x068: /* VIS I fandnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* fandnot1 = ~rs1 & rs2: reuse andc with operands swapped. */
                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
                    break;
4928                 case 0x069: /* VIS I fandnot1s */
4929                     CHECK_FPU_FEATURE(dc, VIS1);
4930                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4931                     break;
4932                 case 0x06a: /* VIS I fnot1 */
4933                     CHECK_FPU_FEATURE(dc, VIS1);
4934                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4935                     break;
4936                 case 0x06b: /* VIS I fnot1s */
4937                     CHECK_FPU_FEATURE(dc, VIS1);
4938                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4939                     break;
4940                 case 0x06c: /* VIS I fxor */
4941                     CHECK_FPU_FEATURE(dc, VIS1);
4942                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4943                     break;
4944                 case 0x06d: /* VIS I fxors */
4945                     CHECK_FPU_FEATURE(dc, VIS1);
4946                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4947                     break;
4948                 case 0x06e: /* VIS I fnand */
4949                     CHECK_FPU_FEATURE(dc, VIS1);
4950                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4951                     break;
4952                 case 0x06f: /* VIS I fnands */
4953                     CHECK_FPU_FEATURE(dc, VIS1);
4954                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4955                     break;
4956                 case 0x070: /* VIS I fand */
4957                     CHECK_FPU_FEATURE(dc, VIS1);
4958                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4959                     break;
4960                 case 0x071: /* VIS I fands */
4961                     CHECK_FPU_FEATURE(dc, VIS1);
4962                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4963                     break;
4964                 case 0x072: /* VIS I fxnor */
4965                     CHECK_FPU_FEATURE(dc, VIS1);
4966                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4967                     break;
4968                 case 0x073: /* VIS I fxnors */
4969                     CHECK_FPU_FEATURE(dc, VIS1);
4970                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4971                     break;
4972                 case 0x074: /* VIS I fsrc1 */
4973                     CHECK_FPU_FEATURE(dc, VIS1);
4974                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4975                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4976                     break;
4977                 case 0x075: /* VIS I fsrc1s */
4978                     CHECK_FPU_FEATURE(dc, VIS1);
4979                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4980                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4981                     break;
4982                 case 0x076: /* VIS I fornot2 */
4983                     CHECK_FPU_FEATURE(dc, VIS1);
4984                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4985                     break;
4986                 case 0x077: /* VIS I fornot2s */
4987                     CHECK_FPU_FEATURE(dc, VIS1);
4988                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4989                     break;
4990                 case 0x078: /* VIS I fsrc2 */
4991                     CHECK_FPU_FEATURE(dc, VIS1);
4992                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4993                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4994                     break;
4995                 case 0x079: /* VIS I fsrc2s */
4996                     CHECK_FPU_FEATURE(dc, VIS1);
4997                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4998                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4999                     break;
5000                 case 0x07a: /* VIS I fornot1 */
5001                     CHECK_FPU_FEATURE(dc, VIS1);
5002                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
5003                     break;
5004                 case 0x07b: /* VIS I fornot1s */
5005                     CHECK_FPU_FEATURE(dc, VIS1);
5006                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
5007                     break;
5008                 case 0x07c: /* VIS I for */
5009                     CHECK_FPU_FEATURE(dc, VIS1);
5010                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
5011                     break;
5012                 case 0x07d: /* VIS I fors */
5013                     CHECK_FPU_FEATURE(dc, VIS1);
5014                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
5015                     break;
5016                 case 0x07e: /* VIS I fone */
5017                     CHECK_FPU_FEATURE(dc, VIS1);
5018                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5019                     tcg_gen_movi_i64(cpu_dst_64, -1);
5020                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5021                     break;
5022                 case 0x07f: /* VIS I fones */
5023                     CHECK_FPU_FEATURE(dc, VIS1);
5024                     cpu_dst_32 = gen_dest_fpr_F(dc);
5025                     tcg_gen_movi_i32(cpu_dst_32, -1);
5026                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5027                     break;
5028                 case 0x080: /* VIS I shutdown */
5029                 case 0x081: /* VIS II siam */
5030                     // XXX
5031                     goto illegal_insn;
5032                 default:
5033                     goto illegal_insn;
5034                 }
5035 #else
5036                 goto ncp_insn;
5037 #endif
5038             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
5039 #ifdef TARGET_SPARC64
5040                 goto illegal_insn;
5041 #else
5042                 goto ncp_insn;
5043 #endif
#ifdef TARGET_SPARC64
            } else if (xop == 0x39) { /* V9 return */
                /* RETURN: restore the caller's register window, then
                   transfer control to rs1 + (simm13 or rs2).  */
                save_state(dc);
                cpu_src1 = get_src1(dc, insn);
                cpu_tmp0 = tcg_temp_new();
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        cpu_src2 = gen_load_gpr(dc, rs2);
                        tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
                    } else {
                        /* rs2 == %g0: target address is rs1 unmodified */
                        tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
                    }
                }
                /* Raise mem_address_not_aligned unless 4-byte aligned.  */
                gen_check_align(dc, cpu_tmp0, 3);
                gen_helper_restore(tcg_env);
                gen_mov_pc_npc(dc);
                tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
                /* npc is only known at run time; end TB with a lookup.  */
                dc->npc = DYNAMIC_PC_LOOKUP;
                goto jmp_insn;
#endif
5068             } else {
5069                 cpu_src1 = get_src1(dc, insn);
5070                 cpu_tmp0 = tcg_temp_new();
5071                 if (IS_IMM) {   /* immediate */
5072                     simm = GET_FIELDs(insn, 19, 31);
5073                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5074                 } else {                /* register */
5075                     rs2 = GET_FIELD(insn, 27, 31);
5076                     if (rs2) {
5077                         cpu_src2 = gen_load_gpr(dc, rs2);
5078                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5079                     } else {
5080                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5081                     }
5082                 }
5083                 switch (xop) {
5084                 case 0x38:      /* jmpl */
5085                     {
5086                         gen_check_align(dc, cpu_tmp0, 3);
5087                         gen_store_gpr(dc, rd, tcg_constant_tl(dc->pc));
5088                         gen_mov_pc_npc(dc);
5089                         gen_address_mask(dc, cpu_tmp0);
5090                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5091                         dc->npc = DYNAMIC_PC_LOOKUP;
5092                     }
5093                     goto jmp_insn;
5094 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5095                 case 0x39:      /* rett, V9 return */
5096                     {
5097                         if (!supervisor(dc))
5098                             goto priv_insn;
5099                         gen_check_align(dc, cpu_tmp0, 3);
5100                         gen_mov_pc_npc(dc);
5101                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5102                         dc->npc = DYNAMIC_PC;
5103                         gen_helper_rett(tcg_env);
5104                     }
5105                     goto jmp_insn;
5106 #endif
5107                 case 0x3b: /* flush */
5108                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
5109                         goto unimp_flush;
5110                     /* nop */
5111                     break;
5112                 case 0x3c:      /* save */
5113                     gen_helper_save(tcg_env);
5114                     gen_store_gpr(dc, rd, cpu_tmp0);
5115                     break;
5116                 case 0x3d:      /* restore */
5117                     gen_helper_restore(tcg_env);
5118                     gen_store_gpr(dc, rd, cpu_tmp0);
5119                     break;
5120 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5121                 case 0x3e:      /* V9 done/retry */
5122                     {
5123                         switch (rd) {
5124                         case 0:
5125                             if (!supervisor(dc))
5126                                 goto priv_insn;
5127                             dc->npc = DYNAMIC_PC;
5128                             dc->pc = DYNAMIC_PC;
5129                             translator_io_start(&dc->base);
5130                             gen_helper_done(tcg_env);
5131                             goto jmp_insn;
5132                         case 1:
5133                             if (!supervisor(dc))
5134                                 goto priv_insn;
5135                             dc->npc = DYNAMIC_PC;
5136                             dc->pc = DYNAMIC_PC;
5137                             translator_io_start(&dc->base);
5138                             gen_helper_retry(tcg_env);
5139                             goto jmp_insn;
5140                         default:
5141                             goto illegal_insn;
5142                         }
5143                     }
5144                     break;
5145 #endif
5146                 default:
5147                     goto illegal_insn;
5148                 }
5149             }
5150             break;
5151         }
5152         break;
5153     case 3:                     /* load/store instructions */
5154         {
5155             unsigned int xop = GET_FIELD(insn, 7, 12);
5156             /* ??? gen_address_mask prevents us from using a source
5157                register directly.  Always generate a temporary.  */
5158             TCGv cpu_addr = tcg_temp_new();
5159 
5160             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5161             if (xop == 0x3c || xop == 0x3e) {
5162                 /* V9 casa/casxa : no offset */
5163             } else if (IS_IMM) {     /* immediate */
5164                 simm = GET_FIELDs(insn, 19, 31);
5165                 if (simm != 0) {
5166                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5167                 }
5168             } else {            /* register */
5169                 rs2 = GET_FIELD(insn, 27, 31);
5170                 if (rs2 != 0) {
5171                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5172                 }
5173             }
5174             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5175                 (xop > 0x17 && xop <= 0x1d ) ||
5176                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5177                 TCGv cpu_val = gen_dest_gpr(dc, rd);
5178 
5179                 switch (xop) {
5180                 case 0x0:       /* ld, V9 lduw, load unsigned word */
5181                     gen_address_mask(dc, cpu_addr);
5182                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5183                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5184                     break;
5185                 case 0x1:       /* ldub, load unsigned byte */
5186                     gen_address_mask(dc, cpu_addr);
5187                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5188                                        dc->mem_idx, MO_UB);
5189                     break;
5190                 case 0x2:       /* lduh, load unsigned halfword */
5191                     gen_address_mask(dc, cpu_addr);
5192                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5193                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5194                     break;
                case 0x3:       /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;  /* rd must be even for a pair */
                    else {
                        TCGv_i64 t64;

                        gen_address_mask(dc, cpu_addr);
                        /* Single 64-bit big-endian load: the high 32 bits
                           are the word at addr (goes to the even register
                           rd), the low 32 bits the word at addr+4 (goes
                           to rd+1).  */
                        t64 = tcg_temp_new_i64();
                        tcg_gen_qemu_ld_i64(t64, cpu_addr,
                                            dc->mem_idx, MO_TEUQ | MO_ALIGN);
                        tcg_gen_trunc_i64_tl(cpu_val, t64);
                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
                        gen_store_gpr(dc, rd + 1, cpu_val);
                        tcg_gen_shri_i64(t64, t64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, t64);
                        /* cpu_val now holds rd's word; it is written back
                           by the common gen_store_gpr after the switch.  */
                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
                    }
                    break;
5213                 case 0x9:       /* ldsb, load signed byte */
5214                     gen_address_mask(dc, cpu_addr);
5215                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr, dc->mem_idx, MO_SB);
5216                     break;
5217                 case 0xa:       /* ldsh, load signed halfword */
5218                     gen_address_mask(dc, cpu_addr);
5219                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5220                                        dc->mem_idx, MO_TESW | MO_ALIGN);
5221                     break;
5222                 case 0xd:       /* ldstub */
5223                     gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5224                     break;
5225                 case 0x0f:
5226                     /* swap, swap register with memory. Also atomically */
5227                     CHECK_IU_FEATURE(dc, SWAP);
5228                     cpu_src1 = gen_load_gpr(dc, rd);
5229                     gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5230                              dc->mem_idx, MO_TEUL);
5231                     break;
5232 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5233                 case 0x10:      /* lda, V9 lduwa, load word alternate */
5234                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5235                     break;
5236                 case 0x11:      /* lduba, load unsigned byte alternate */
5237                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5238                     break;
5239                 case 0x12:      /* lduha, load unsigned halfword alternate */
5240                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5241                     break;
5242                 case 0x13:      /* ldda, load double word alternate */
5243                     if (rd & 1) {
5244                         goto illegal_insn;
5245                     }
5246                     gen_ldda_asi(dc, cpu_addr, insn, rd);
5247                     goto skip_move;
5248                 case 0x19:      /* ldsba, load signed byte alternate */
5249                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5250                     break;
5251                 case 0x1a:      /* ldsha, load signed halfword alternate */
5252                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5253                     break;
5254                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
5255                     gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5256                     break;
5257                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
5258                                    atomically */
5259                     CHECK_IU_FEATURE(dc, SWAP);
5260                     cpu_src1 = gen_load_gpr(dc, rd);
5261                     gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5262                     break;
5263 
5264 #ifndef TARGET_SPARC64
5265                 case 0x30: /* ldc */
5266                 case 0x31: /* ldcsr */
5267                 case 0x33: /* lddc */
5268                     goto ncp_insn;
5269 #endif
5270 #endif
5271 #ifdef TARGET_SPARC64
5272                 case 0x08: /* V9 ldsw */
5273                     gen_address_mask(dc, cpu_addr);
5274                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5275                                        dc->mem_idx, MO_TESL | MO_ALIGN);
5276                     break;
5277                 case 0x0b: /* V9 ldx */
5278                     gen_address_mask(dc, cpu_addr);
5279                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5280                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5281                     break;
5282                 case 0x18: /* V9 ldswa */
5283                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5284                     break;
5285                 case 0x1b: /* V9 ldxa */
5286                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5287                     break;
5288                 case 0x2d: /* V9 prefetch, no effect */
5289                     goto skip_move;
5290                 case 0x30: /* V9 ldfa */
5291                     if (gen_trap_ifnofpu(dc)) {
5292                         goto jmp_insn;
5293                     }
5294                     gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5295                     gen_update_fprs_dirty(dc, rd);
5296                     goto skip_move;
5297                 case 0x33: /* V9 lddfa */
5298                     if (gen_trap_ifnofpu(dc)) {
5299                         goto jmp_insn;
5300                     }
5301                     gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5302                     gen_update_fprs_dirty(dc, DFPREG(rd));
5303                     goto skip_move;
5304                 case 0x3d: /* V9 prefetcha, no effect */
5305                     goto skip_move;
5306                 case 0x32: /* V9 ldqfa */
5307                     CHECK_FPU_FEATURE(dc, FLOAT128);
5308                     if (gen_trap_ifnofpu(dc)) {
5309                         goto jmp_insn;
5310                     }
5311                     gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5312                     gen_update_fprs_dirty(dc, QFPREG(rd));
5313                     goto skip_move;
5314 #endif
5315                 default:
5316                     goto illegal_insn;
5317                 }
5318                 gen_store_gpr(dc, rd, cpu_val);
5319 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5320             skip_move: ;
5321 #endif
5322             } else if (xop >= 0x20 && xop < 0x24) {
5323                 if (gen_trap_ifnofpu(dc)) {
5324                     goto jmp_insn;
5325                 }
5326                 switch (xop) {
5327                 case 0x20:      /* ldf, load fpreg */
5328                     gen_address_mask(dc, cpu_addr);
5329                     cpu_dst_32 = gen_dest_fpr_F(dc);
5330                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5331                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5332                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5333                     break;
5334                 case 0x21:      /* ldfsr, V9 ldxfsr */
5335 #ifdef TARGET_SPARC64
5336                     gen_address_mask(dc, cpu_addr);
5337                     if (rd == 1) {
5338                         TCGv_i64 t64 = tcg_temp_new_i64();
5339                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5340                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5341                         gen_helper_ldxfsr(cpu_fsr, tcg_env, cpu_fsr, t64);
5342                         break;
5343                     }
5344 #endif
5345                     cpu_dst_32 = tcg_temp_new_i32();
5346                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5347                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5348                     gen_helper_ldfsr(cpu_fsr, tcg_env, cpu_fsr, cpu_dst_32);
5349                     break;
5350                 case 0x22:      /* ldqf, load quad fpreg */
5351                     CHECK_FPU_FEATURE(dc, FLOAT128);
5352                     gen_address_mask(dc, cpu_addr);
5353                     cpu_src1_64 = tcg_temp_new_i64();
5354                     tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5355                                         MO_TEUQ | MO_ALIGN_4);
5356                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5357                     cpu_src2_64 = tcg_temp_new_i64();
5358                     tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5359                                         MO_TEUQ | MO_ALIGN_4);
5360                     gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5361                     break;
5362                 case 0x23:      /* lddf, load double fpreg */
5363                     gen_address_mask(dc, cpu_addr);
5364                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5365                     tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5366                                         MO_TEUQ | MO_ALIGN_4);
5367                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5368                     break;
5369                 default:
5370                     goto illegal_insn;
5371                 }
5372             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5373                        xop == 0xe || xop == 0x1e) {
5374                 TCGv cpu_val = gen_load_gpr(dc, rd);
5375 
5376                 switch (xop) {
5377                 case 0x4: /* st, store word */
5378                     gen_address_mask(dc, cpu_addr);
5379                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5380                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5381                     break;
5382                 case 0x5: /* stb, store byte */
5383                     gen_address_mask(dc, cpu_addr);
5384                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr, dc->mem_idx, MO_UB);
5385                     break;
5386                 case 0x6: /* sth, store halfword */
5387                     gen_address_mask(dc, cpu_addr);
5388                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5389                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5390                     break;
                case 0x7: /* std, store double word */
                    if (rd & 1)
                        goto illegal_insn;  /* rd must be even for a pair */
                    else {
                        TCGv_i64 t64;
                        TCGv lo;

                        gen_address_mask(dc, cpu_addr);
                        /* Build one 64-bit value with rd (cpu_val) in the
                           high half and rd+1 in the low half, matching the
                           big-endian memory layout of a SPARC std, and
                           store it with a single 8-byte access.  */
                        lo = gen_load_gpr(dc, rd + 1);
                        t64 = tcg_temp_new_i64();
                        tcg_gen_concat_tl_i64(t64, lo, cpu_val);
                        tcg_gen_qemu_st_i64(t64, cpu_addr,
                                            dc->mem_idx, MO_TEUQ | MO_ALIGN);
                    }
                    break;
5406 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5407                 case 0x14: /* sta, V9 stwa, store word alternate */
5408                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5409                     break;
5410                 case 0x15: /* stba, store byte alternate */
5411                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5412                     break;
5413                 case 0x16: /* stha, store halfword alternate */
5414                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5415                     break;
5416                 case 0x17: /* stda, store double word alternate */
5417                     if (rd & 1) {
5418                         goto illegal_insn;
5419                     }
5420                     gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5421                     break;
5422 #endif
5423 #ifdef TARGET_SPARC64
5424                 case 0x0e: /* V9 stx */
5425                     gen_address_mask(dc, cpu_addr);
5426                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5427                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5428                     break;
5429                 case 0x1e: /* V9 stxa */
5430                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5431                     break;
5432 #endif
5433                 default:
5434                     goto illegal_insn;
5435                 }
5436             } else if (xop > 0x23 && xop < 0x28) {
5437                 if (gen_trap_ifnofpu(dc)) {
5438                     goto jmp_insn;
5439                 }
5440                 switch (xop) {
5441                 case 0x24: /* stf, store fpreg */
5442                     gen_address_mask(dc, cpu_addr);
5443                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
5444                     tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5445                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5446                     break;
5447                 case 0x25: /* stfsr, V9 stxfsr */
5448                     {
5449 #ifdef TARGET_SPARC64
5450                         gen_address_mask(dc, cpu_addr);
5451                         if (rd == 1) {
5452                             tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5453                                                dc->mem_idx, MO_TEUQ | MO_ALIGN);
5454                             break;
5455                         }
5456 #endif
5457                         tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5458                                            dc->mem_idx, MO_TEUL | MO_ALIGN);
5459                     }
5460                     break;
5461                 case 0x26:
5462 #ifdef TARGET_SPARC64
5463                     /* V9 stqf, store quad fpreg */
5464                     CHECK_FPU_FEATURE(dc, FLOAT128);
5465                     gen_address_mask(dc, cpu_addr);
5466                     /* ??? While stqf only requires 4-byte alignment, it is
5467                        legal for the cpu to signal the unaligned exception.
5468                        The OS trap handler is then required to fix it up.
5469                        For qemu, this avoids having to probe the second page
5470                        before performing the first write.  */
5471                     cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5472                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5473                                         dc->mem_idx, MO_TEUQ | MO_ALIGN_16);
5474                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5475                     cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5476                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5477                                         dc->mem_idx, MO_TEUQ);
5478                     break;
5479 #else /* !TARGET_SPARC64 */
5480                     /* stdfq, store floating point queue */
5481 #if defined(CONFIG_USER_ONLY)
5482                     goto illegal_insn;
5483 #else
5484                     if (!supervisor(dc))
5485                         goto priv_insn;
5486                     if (gen_trap_ifnofpu(dc)) {
5487                         goto jmp_insn;
5488                     }
5489                     goto nfq_insn;
5490 #endif
5491 #endif
5492                 case 0x27: /* stdf, store double fpreg */
5493                     gen_address_mask(dc, cpu_addr);
5494                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5495                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5496                                         MO_TEUQ | MO_ALIGN_4);
5497                     break;
5498                 default:
5499                     goto illegal_insn;
5500                 }
5501             } else if (xop > 0x33 && xop < 0x3f) {
5502                 switch (xop) {
5503 #ifdef TARGET_SPARC64
5504                 case 0x34: /* V9 stfa */
5505                     if (gen_trap_ifnofpu(dc)) {
5506                         goto jmp_insn;
5507                     }
5508                     gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5509                     break;
5510                 case 0x36: /* V9 stqfa */
5511                     {
5512                         CHECK_FPU_FEATURE(dc, FLOAT128);
5513                         if (gen_trap_ifnofpu(dc)) {
5514                             goto jmp_insn;
5515                         }
5516                         gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5517                     }
5518                     break;
5519                 case 0x37: /* V9 stdfa */
5520                     if (gen_trap_ifnofpu(dc)) {
5521                         goto jmp_insn;
5522                     }
5523                     gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5524                     break;
5525                 case 0x3e: /* V9 casxa */
5526                     rs2 = GET_FIELD(insn, 27, 31);
5527                     cpu_src2 = gen_load_gpr(dc, rs2);
5528                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5529                     break;
5530 #else
5531                 case 0x34: /* stc */
5532                 case 0x35: /* stcsr */
5533                 case 0x36: /* stdcq */
5534                 case 0x37: /* stdc */
5535                     goto ncp_insn;
5536 #endif
5537 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5538                 case 0x3c: /* V9 or LEON3 casa */
5539 #ifndef TARGET_SPARC64
5540                     CHECK_IU_FEATURE(dc, CASA);
5541 #endif
5542                     rs2 = GET_FIELD(insn, 27, 31);
5543                     cpu_src2 = gen_load_gpr(dc, rs2);
5544                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5545                     break;
5546 #endif
5547                 default:
5548                     goto illegal_insn;
5549                 }
5550             } else {
5551                 goto illegal_insn;
5552             }
5553         }
5554         break;
5555     }
5556     /* default case for non jump instructions */
5557     if (dc->npc & 3) {
5558         switch (dc->npc) {
5559         case DYNAMIC_PC:
5560         case DYNAMIC_PC_LOOKUP:
5561             dc->pc = dc->npc;
5562             gen_op_next_insn();
5563             break;
5564         case JUMP_PC:
5565             /* we can do a static jump */
5566             gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5567             dc->base.is_jmp = DISAS_NORETURN;
5568             break;
5569         default:
5570             g_assert_not_reached();
5571         }
5572     } else {
5573         dc->pc = dc->npc;
5574         dc->npc = dc->npc + 4;
5575     }
5576  jmp_insn:
5577     return;
5578  illegal_insn:
5579     gen_exception(dc, TT_ILL_INSN);
5580     return;
5581  unimp_flush:
5582     gen_exception(dc, TT_UNIMP_FLUSH);
5583     return;
5584 #if !defined(CONFIG_USER_ONLY)
5585  priv_insn:
5586     gen_exception(dc, TT_PRIV_INSN);
5587     return;
5588 #endif
5589  nfpu_insn:
5590     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5591     return;
5592 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5593  nfq_insn:
5594     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5595     return;
5596 #endif
5597 #ifndef TARGET_SPARC64
5598  ncp_insn:
5599     gen_exception(dc, TT_NCP_INSN);
5600     return;
5601 #endif
5602 }
5603 
5604 static void sparc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
5605 {
5606     DisasContext *dc = container_of(dcbase, DisasContext, base);
5607     CPUSPARCState *env = cpu_env(cs);
5608     int bound;
5609 
5610     dc->pc = dc->base.pc_first;
5611     dc->npc = (target_ulong)dc->base.tb->cs_base;
5612     dc->cc_op = CC_OP_DYNAMIC;
5613     dc->mem_idx = dc->base.tb->flags & TB_FLAG_MMU_MASK;
5614     dc->def = &env->def;
5615     dc->fpu_enabled = tb_fpu_enabled(dc->base.tb->flags);
5616     dc->address_mask_32bit = tb_am_enabled(dc->base.tb->flags);
5617 #ifndef CONFIG_USER_ONLY
5618     dc->supervisor = (dc->base.tb->flags & TB_FLAG_SUPER) != 0;
5619 #endif
5620 #ifdef TARGET_SPARC64
5621     dc->fprs_dirty = 0;
5622     dc->asi = (dc->base.tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
5623 #ifndef CONFIG_USER_ONLY
5624     dc->hypervisor = (dc->base.tb->flags & TB_FLAG_HYPER) != 0;
5625 #endif
5626 #endif
5627     /*
5628      * if we reach a page boundary, we stop generation so that the
5629      * PC of a TT_TFAULT exception is always in the right page
5630      */
5631     bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
5632     dc->base.max_insns = MIN(dc->base.max_insns, bound);
5633 }
5634 
/* No per-TB setup is required before translating the first insn. */
static void sparc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}
5638 
5639 static void sparc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
5640 {
5641     DisasContext *dc = container_of(dcbase, DisasContext, base);
5642     target_ulong npc = dc->npc;
5643 
5644     if (npc & 3) {
5645         switch (npc) {
5646         case JUMP_PC:
5647             assert(dc->jump_pc[1] == dc->pc + 4);
5648             npc = dc->jump_pc[0] | JUMP_PC;
5649             break;
5650         case DYNAMIC_PC:
5651         case DYNAMIC_PC_LOOKUP:
5652             npc = DYNAMIC_PC;
5653             break;
5654         default:
5655             g_assert_not_reached();
5656         }
5657     }
5658     tcg_gen_insn_start(dc->pc, npc);
5659 }
5660 
5661 static void sparc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
5662 {
5663     DisasContext *dc = container_of(dcbase, DisasContext, base);
5664     CPUSPARCState *env = cpu_env(cs);
5665     unsigned int insn;
5666 
5667     insn = translator_ldl(env, &dc->base, dc->pc);
5668     dc->base.pc_next += 4;
5669     disas_sparc_insn(dc, insn);
5670 
5671     if (dc->base.is_jmp == DISAS_NORETURN) {
5672         return;
5673     }
5674     if (dc->pc != dc->base.pc_next) {
5675         dc->base.is_jmp = DISAS_TOO_MANY;
5676     }
5677 }
5678 
5679 static void sparc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
5680 {
5681     DisasContext *dc = container_of(dcbase, DisasContext, base);
5682     DisasDelayException *e, *e_next;
5683     bool may_lookup;
5684 
5685     switch (dc->base.is_jmp) {
5686     case DISAS_NEXT:
5687     case DISAS_TOO_MANY:
5688         if (((dc->pc | dc->npc) & 3) == 0) {
5689             /* static PC and NPC: we can use direct chaining */
5690             gen_goto_tb(dc, 0, dc->pc, dc->npc);
5691             break;
5692         }
5693 
5694         may_lookup = true;
5695         if (dc->pc & 3) {
5696             switch (dc->pc) {
5697             case DYNAMIC_PC_LOOKUP:
5698                 break;
5699             case DYNAMIC_PC:
5700                 may_lookup = false;
5701                 break;
5702             default:
5703                 g_assert_not_reached();
5704             }
5705         } else {
5706             tcg_gen_movi_tl(cpu_pc, dc->pc);
5707         }
5708 
5709         if (dc->npc & 3) {
5710             switch (dc->npc) {
5711             case JUMP_PC:
5712                 gen_generic_branch(dc);
5713                 break;
5714             case DYNAMIC_PC:
5715                 may_lookup = false;
5716                 break;
5717             case DYNAMIC_PC_LOOKUP:
5718                 break;
5719             default:
5720                 g_assert_not_reached();
5721             }
5722         } else {
5723             tcg_gen_movi_tl(cpu_npc, dc->npc);
5724         }
5725         if (may_lookup) {
5726             tcg_gen_lookup_and_goto_ptr();
5727         } else {
5728             tcg_gen_exit_tb(NULL, 0);
5729         }
5730         break;
5731 
5732     case DISAS_NORETURN:
5733        break;
5734 
5735     case DISAS_EXIT:
5736         /* Exit TB */
5737         save_state(dc);
5738         tcg_gen_exit_tb(NULL, 0);
5739         break;
5740 
5741     default:
5742         g_assert_not_reached();
5743     }
5744 
5745     for (e = dc->delay_excp_list; e ; e = e_next) {
5746         gen_set_label(e->lab);
5747 
5748         tcg_gen_movi_tl(cpu_pc, e->pc);
5749         if (e->npc % 4 == 0) {
5750             tcg_gen_movi_tl(cpu_npc, e->npc);
5751         }
5752         gen_helper_raise_exception(tcg_env, e->excp);
5753 
5754         e_next = e->next;
5755         g_free(e);
5756     }
5757 }
5758 
5759 static void sparc_tr_disas_log(const DisasContextBase *dcbase,
5760                                CPUState *cpu, FILE *logfile)
5761 {
5762     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
5763     target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
5764 }
5765 
/* Hooks invoked by the generic translator loop (see translator_loop). */
static const TranslatorOps sparc_tr_ops = {
    .init_disas_context = sparc_tr_init_disas_context,
    .tb_start           = sparc_tr_tb_start,
    .insn_start         = sparc_tr_insn_start,
    .translate_insn     = sparc_tr_translate_insn,
    .tb_stop            = sparc_tr_tb_stop,
    .disas_log          = sparc_tr_disas_log,
};
5774 
5775 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
5776                            target_ulong pc, void *host_pc)
5777 {
5778     DisasContext dc = {};
5779 
5780     translator_loop(cs, tb, max_insns, pc, host_pc, &sparc_tr_ops, &dc.base);
5781 }
5782 
5783 void sparc_tcg_init(void)
5784 {
5785     static const char gregnames[32][4] = {
5786         "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
5787         "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
5788         "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
5789         "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
5790     };
5791     static const char fregnames[32][4] = {
5792         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5793         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5794         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5795         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5796     };
5797 
5798     static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
5799 #ifdef TARGET_SPARC64
5800         { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
5801         { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
5802 #else
5803         { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
5804 #endif
5805         { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
5806         { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
5807     };
5808 
5809     static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
5810 #ifdef TARGET_SPARC64
5811         { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
5812         { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
5813         { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
5814         { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
5815           "hstick_cmpr" },
5816         { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
5817         { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
5818         { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
5819         { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
5820         { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
5821 #endif
5822         { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
5823         { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
5824         { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
5825         { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
5826         { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
5827         { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
5828         { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
5829         { &cpu_y, offsetof(CPUSPARCState, y), "y" },
5830 #ifndef CONFIG_USER_ONLY
5831         { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
5832 #endif
5833     };
5834 
5835     unsigned int i;
5836 
5837     cpu_regwptr = tcg_global_mem_new_ptr(tcg_env,
5838                                          offsetof(CPUSPARCState, regwptr),
5839                                          "regwptr");
5840 
5841     for (i = 0; i < ARRAY_SIZE(r32); ++i) {
5842         *r32[i].ptr = tcg_global_mem_new_i32(tcg_env, r32[i].off, r32[i].name);
5843     }
5844 
5845     for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
5846         *rtl[i].ptr = tcg_global_mem_new(tcg_env, rtl[i].off, rtl[i].name);
5847     }
5848 
5849     cpu_regs[0] = NULL;
5850     for (i = 1; i < 8; ++i) {
5851         cpu_regs[i] = tcg_global_mem_new(tcg_env,
5852                                          offsetof(CPUSPARCState, gregs[i]),
5853                                          gregnames[i]);
5854     }
5855 
5856     for (i = 8; i < 32; ++i) {
5857         cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
5858                                          (i - 8) * sizeof(target_ulong),
5859                                          gregnames[i]);
5860     }
5861 
5862     for (i = 0; i < TARGET_DPREGS; i++) {
5863         cpu_fpr[i] = tcg_global_mem_new_i64(tcg_env,
5864                                             offsetof(CPUSPARCState, fpr[i]),
5865                                             fregnames[i]);
5866     }
5867 }
5868 
5869 void sparc_restore_state_to_opc(CPUState *cs,
5870                                 const TranslationBlock *tb,
5871                                 const uint64_t *data)
5872 {
5873     SPARCCPU *cpu = SPARC_CPU(cs);
5874     CPUSPARCState *env = &cpu->env;
5875     target_ulong pc = data[0];
5876     target_ulong npc = data[1];
5877 
5878     env->pc = pc;
5879     if (npc == DYNAMIC_PC) {
5880         /* dynamic NPC: already stored */
5881     } else if (npc & JUMP_PC) {
5882         /* jump PC: use 'cond' and the jump targets of the translation */
5883         if (env->cond) {
5884             env->npc = npc & ~3;
5885         } else {
5886             env->npc = pc + 4;
5887         }
5888     } else {
5889         env->npc = npc;
5890     }
5891 }
5892