xref: /openbmc/qemu/target/sparc/translate.c (revision 878cc6773a69f9018357ff673f258acef58422b3)
1 /*
2    SPARC translation
3 
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6 
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2.1 of the License, or (at your option) any later version.
11 
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16 
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg/tcg-op.h"
28 
29 #include "exec/helper-gen.h"
30 
31 #include "exec/translator.h"
32 #include "exec/log.h"
33 #include "asi.h"
34 
35 #define HELPER_H "helper.h"
36 #include "exec/helper-info.c.inc"
37 #undef  HELPER_H
38 
39 /* Dynamic PC, must exit to main loop. */
40 #define DYNAMIC_PC         1
41 /* Dynamic PC, one of two values according to jump_pc[T2]. */
42 #define JUMP_PC            2
43 /* Dynamic PC, may lookup next TB. */
44 #define DYNAMIC_PC_LOOKUP  3
45 
46 #define DISAS_EXIT  DISAS_TARGET_0
47 
48 /* global register indexes */
49 static TCGv_ptr cpu_regwptr;
50 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
51 static TCGv_i32 cpu_cc_op;
52 static TCGv_i32 cpu_psr;
53 static TCGv cpu_fsr, cpu_pc, cpu_npc;
54 static TCGv cpu_regs[32];
55 static TCGv cpu_y;
56 #ifndef CONFIG_USER_ONLY
57 static TCGv cpu_tbr;
58 #endif
59 static TCGv cpu_cond;
60 #ifdef TARGET_SPARC64
61 static TCGv_i32 cpu_xcc, cpu_fprs;
62 static TCGv cpu_gsr;
63 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
64 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
65 #else
66 static TCGv cpu_wim;
67 #endif
68 /* Floating point registers */
69 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
70 
71 typedef struct DisasDelayException {
72     struct DisasDelayException *next;
73     TCGLabel *lab;
74     TCGv_i32 excp;
75     /* Saved state at parent insn. */
76     target_ulong pc;
77     target_ulong npc;
78 } DisasDelayException;
79 
80 typedef struct DisasContext {
81     DisasContextBase base;
82     target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
83     target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
84     target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
85     int mem_idx;
86     bool fpu_enabled;
87     bool address_mask_32bit;
88 #ifndef CONFIG_USER_ONLY
89     bool supervisor;
90 #ifdef TARGET_SPARC64
91     bool hypervisor;
92 #endif
93 #endif
94 
95     uint32_t cc_op;  /* current CC operation */
96     sparc_def_t *def;
97 #ifdef TARGET_SPARC64
98     int fprs_dirty;
99     int asi;
100 #endif
101     DisasDelayException *delay_excp_list;
102 } DisasContext;
103 
104 typedef struct {
105     TCGCond cond;
106     bool is_bool;
107     TCGv c1, c2;
108 } DisasCompare;
109 
// This macro uses non-native bit order: bit 0 is the MSB, as in the
// SPARC instruction-format diagrams.
111 #define GET_FIELD(X, FROM, TO)                                  \
112     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
113 
// This macro uses the bit order of the manuals, i.e. bit 0 is 2^0 (LSB).
115 #define GET_FIELD_SP(X, FROM, TO)               \
116     GET_FIELD(X, 31 - (TO), 31 - (FROM))
117 
118 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
119 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
120 
121 #ifdef TARGET_SPARC64
122 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
123 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
124 #else
125 #define DFPREG(r) (r & 0x1e)
126 #define QFPREG(r) (r & 0x1c)
127 #endif
128 
129 #define UA2005_HTRAP_MASK 0xff
130 #define V8_TRAP_MASK 0x7f
131 
/*
 * Sign-extend the low LEN bits of X to a full int.
 * The left shift is done on an unsigned value to avoid the undefined
 * behavior of shifting a negative (or overflowing) signed int; the
 * arithmetic right shift of a negative int is the two's-complement
 * behavior QEMU assumes throughout.
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (int)((unsigned int)x << len) >> len;
}
137 
138 #define IS_IMM (insn & (1<<13))
139 
140 static void gen_update_fprs_dirty(DisasContext *dc, int rd)
141 {
142 #if defined(TARGET_SPARC64)
143     int bit = (rd < 32) ? 1 : 2;
144     /* If we know we've already set this bit within the TB,
145        we can avoid setting it again.  */
146     if (!(dc->fprs_dirty & bit)) {
147         dc->fprs_dirty |= bit;
148         tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
149     }
150 #endif
151 }
152 
153 /* floating point registers moves */
154 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
155 {
156     TCGv_i32 ret = tcg_temp_new_i32();
157     if (src & 1) {
158         tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
159     } else {
160         tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
161     }
162     return ret;
163 }
164 
165 static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
166 {
167     TCGv_i64 t = tcg_temp_new_i64();
168 
169     tcg_gen_extu_i32_i64(t, v);
170     tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
171                         (dst & 1 ? 0 : 32), 32);
172     gen_update_fprs_dirty(dc, dst);
173 }
174 
/* Return a fresh i32 temp for a single-precision result; the caller
   commits it with gen_store_fpr_F. */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return tcg_temp_new_i32();
}
179 
180 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
181 {
182     src = DFPREG(src);
183     return cpu_fpr[src / 2];
184 }
185 
186 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
187 {
188     dst = DFPREG(dst);
189     tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
190     gen_update_fprs_dirty(dc, dst);
191 }
192 
/* Return the backing i64 for double-precision destination DST; writing
   it updates the register file directly, so the caller must still call
   gen_update_fprs_dirty (via gen_store_fpr_D or explicitly). */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
197 
198 static void gen_op_load_fpr_QT0(unsigned int src)
199 {
200     tcg_gen_st_i64(cpu_fpr[src / 2], tcg_env, offsetof(CPUSPARCState, qt0) +
201                    offsetof(CPU_QuadU, ll.upper));
202     tcg_gen_st_i64(cpu_fpr[src/2 + 1], tcg_env, offsetof(CPUSPARCState, qt0) +
203                    offsetof(CPU_QuadU, ll.lower));
204 }
205 
206 static void gen_op_load_fpr_QT1(unsigned int src)
207 {
208     tcg_gen_st_i64(cpu_fpr[src / 2], tcg_env, offsetof(CPUSPARCState, qt1) +
209                    offsetof(CPU_QuadU, ll.upper));
210     tcg_gen_st_i64(cpu_fpr[src/2 + 1], tcg_env, offsetof(CPUSPARCState, qt1) +
211                    offsetof(CPU_QuadU, ll.lower));
212 }
213 
214 static void gen_op_store_QT0_fpr(unsigned int dst)
215 {
216     tcg_gen_ld_i64(cpu_fpr[dst / 2], tcg_env, offsetof(CPUSPARCState, qt0) +
217                    offsetof(CPU_QuadU, ll.upper));
218     tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], tcg_env, offsetof(CPUSPARCState, qt0) +
219                    offsetof(CPU_QuadU, ll.lower));
220 }
221 
222 static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
223                             TCGv_i64 v1, TCGv_i64 v2)
224 {
225     dst = QFPREG(dst);
226 
227     tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
228     tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
229     gen_update_fprs_dirty(dc, dst);
230 }
231 
232 #ifdef TARGET_SPARC64
233 static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
234 {
235     src = QFPREG(src);
236     return cpu_fpr[src / 2];
237 }
238 
239 static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
240 {
241     src = QFPREG(src);
242     return cpu_fpr[src / 2 + 1];
243 }
244 
245 static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
246 {
247     rd = QFPREG(rd);
248     rs = QFPREG(rs);
249 
250     tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
251     tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
252     gen_update_fprs_dirty(dc, rd);
253 }
254 #endif
255 
256 /* moves */
257 #ifdef CONFIG_USER_ONLY
258 #define supervisor(dc) 0
259 #ifdef TARGET_SPARC64
260 #define hypervisor(dc) 0
261 #endif
262 #else
263 #ifdef TARGET_SPARC64
264 #define hypervisor(dc) (dc->hypervisor)
265 #define supervisor(dc) (dc->supervisor | dc->hypervisor)
266 #else
267 #define supervisor(dc) (dc->supervisor)
268 #endif
269 #endif
270 
271 #ifdef TARGET_SPARC64
272 #ifndef TARGET_ABI32
273 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
274 #else
275 #define AM_CHECK(dc) (1)
276 #endif
277 #endif
278 
279 static void gen_address_mask(DisasContext *dc, TCGv addr)
280 {
281 #ifdef TARGET_SPARC64
282     if (AM_CHECK(dc))
283         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
284 #endif
285 }
286 
287 static TCGv gen_load_gpr(DisasContext *dc, int reg)
288 {
289     if (reg > 0) {
290         assert(reg < 32);
291         return cpu_regs[reg];
292     } else {
293         TCGv t = tcg_temp_new();
294         tcg_gen_movi_tl(t, 0);
295         return t;
296     }
297 }
298 
299 static void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
300 {
301     if (reg > 0) {
302         assert(reg < 32);
303         tcg_gen_mov_tl(cpu_regs[reg], v);
304     }
305 }
306 
307 static TCGv gen_dest_gpr(DisasContext *dc, int reg)
308 {
309     if (reg > 0) {
310         assert(reg < 32);
311         return cpu_regs[reg];
312     } else {
313         return tcg_temp_new();
314     }
315 }
316 
/* A direct (chained) jump is usable only if both the target pc and its
   delay-slot npc are reachable from this TB. */
static bool use_goto_tb(DisasContext *s, target_ulong pc, target_ulong npc)
{
    return translator_use_goto_tb(&s->base, pc) &&
           translator_use_goto_tb(&s->base, npc);
}
322 
323 static void gen_goto_tb(DisasContext *s, int tb_num,
324                         target_ulong pc, target_ulong npc)
325 {
326     if (use_goto_tb(s, pc, npc))  {
327         /* jump to same page: we can use a direct jump */
328         tcg_gen_goto_tb(tb_num);
329         tcg_gen_movi_tl(cpu_pc, pc);
330         tcg_gen_movi_tl(cpu_npc, npc);
331         tcg_gen_exit_tb(s->base.tb, tb_num);
332     } else {
333         /* jump to another page: we can use an indirect jump */
334         tcg_gen_movi_tl(cpu_pc, pc);
335         tcg_gen_movi_tl(cpu_npc, npc);
336         tcg_gen_lookup_and_goto_ptr();
337     }
338 }
339 
340 // XXX suboptimal
/* reg = PSR.N (negative flag) as a 0/1 value. */
static void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}
346 
/* reg = PSR.Z (zero flag) as a 0/1 value. */
static void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}
352 
/* reg = PSR.V (overflow flag) as a 0/1 value. */
static void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}
358 
/* reg = PSR.C (carry flag) as a 0/1 value. */
static void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
364 
/* dst = src1 + src2, recording the operands and result in the lazy
   condition-code inputs (cc_src/cc_src2/cc_dst); the caller is expected
   to set cc_op to CC_OP_ADD. */
static void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
372 
/* Recover the 32-bit carry-out of a previously recorded addition:
   carry = (uint32_t)cc_dst < (uint32_t)cc_src.  On a 64-bit target the
   cc values must first be truncated to 32 bits. */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    /* 32-bit target: the cc globals are already i32-sized. */
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
393 
/* Recover the 32-bit borrow-out of a previously recorded subtraction:
   carry = (uint32_t)cc_src < (uint32_t)cc_src2. */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    /* 32-bit target: the cc globals are already i32-sized. */
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
414 
/* Emit ADDX/ADDC: dst = src1 + src2 + icc.C.  How the carry-in is
   obtained depends on which operation last set the condition codes
   (dc->cc_op); the slow path calls the compute_C_icc helper.  If
   UPDATE_CC, record ADDX-style cc inputs and switch cc_op. */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, tcg_env);
        break;
    }

    /* Widen the 32-bit carry to target width before adding it in. */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
479 
/* dst = src1 - src2, recording operands and result in the lazy
   condition-code inputs; the caller is expected to set cc_op to
   CC_OP_SUB. */
static void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
487 
/* Emit SUBX/SUBC: dst = src1 - src2 - icc.C.  Mirror image of
   gen_op_addx_int; the carry-in source is chosen from dc->cc_op, with
   the compute_C_icc helper as the generic fallback. */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, tcg_env);
        break;
    }

    /* Widen the 32-bit carry to target width before subtracting it. */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
552 
/* Emit MULScc (multiply-step): one Booth-style step of a 32x32
   multiply.  Shifts the Y register, conditionally zeroes the second
   operand on Y's low bit, rotates (N^V) into src1, and adds — leaving
   the cc inputs set for CC_OP_ADD (caller updates cc_op). */
static void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_constant_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    /* cc_src2 = (y & 1) ? src2 : 0 */
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
591 
/* 32x32 -> 64 multiply: low 32 bits (sign- or zero-extended into dst
   per the target register width) with the high 32 bits stored in %y.
   SIGN_EXT selects SMUL vs UMUL semantics. */
static void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    /* 32-bit target: use the double-width multiply; high half goes
       straight to %y. */
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    /* 64-bit target: extend the truncated 32-bit operands, multiply in
       64 bits, and split the product manually. */
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
616 
/* UMUL: unsigned 32x32 multiply (high half into %y). */
static void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}
622 
/* SMUL: signed 32x32 multiply (high half into %y). */
static void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
628 
629 // 1
/* Branch-always: condition evaluates to constant 1. */
static void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}
634 
635 // Z
/* be: taken when Z is set. */
static void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}
640 
641 // Z | (N ^ V)
642 static void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
643 {
644     TCGv t0 = tcg_temp_new();
645     gen_mov_reg_N(t0, src);
646     gen_mov_reg_V(dst, src);
647     tcg_gen_xor_tl(dst, dst, t0);
648     gen_mov_reg_Z(t0, src);
649     tcg_gen_or_tl(dst, dst, t0);
650 }
651 
652 // N ^ V
653 static void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
654 {
655     TCGv t0 = tcg_temp_new();
656     gen_mov_reg_V(t0, src);
657     gen_mov_reg_N(dst, src);
658     tcg_gen_xor_tl(dst, dst, t0);
659 }
660 
661 // C | Z
662 static void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
663 {
664     TCGv t0 = tcg_temp_new();
665     gen_mov_reg_Z(t0, src);
666     gen_mov_reg_C(dst, src);
667     tcg_gen_or_tl(dst, dst, t0);
668 }
669 
670 // C
/* bcs: taken when C is set. */
static void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}
675 
676 // V
/* bvs: taken when V is set. */
static void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}
681 
682 // 0
/* Branch-never: condition evaluates to constant 0. */
static void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}
687 
688 // N
/* bneg: taken when N is set. */
static void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}
693 
694 // !Z
/* bne: complement of Z (xor with 1 flips the 0/1 flag value). */
static void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
700 
701 // !(Z | (N ^ V))
/* bg: complement of ble. */
static void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
707 
708 // !(N ^ V)
/* bge: complement of bl. */
static void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
714 
715 // !(C | Z)
/* bgu: complement of bleu. */
static void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
721 
722 // !C
/* bcc: complement of C. */
static void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
728 
729 // !N
/* bpos: complement of N. */
static void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
735 
736 // !V
/* bvc: complement of V. */
static void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
742 
743 /*
744   FPSR bit field FCC1 | FCC0:
745    0 =
746    1 <
747    2 >
748    3 unordered
749 */
750 static void gen_mov_reg_FCC0(TCGv reg, TCGv src,
751                                     unsigned int fcc_offset)
752 {
753     tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
754     tcg_gen_andi_tl(reg, reg, 0x1);
755 }
756 
757 static void gen_mov_reg_FCC1(TCGv reg, TCGv src, unsigned int fcc_offset)
758 {
759     tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
760     tcg_gen_andi_tl(reg, reg, 0x1);
761 }
762 
763 // !0: FCC0 | FCC1
764 static void gen_op_eval_fbne(TCGv dst, TCGv src, unsigned int fcc_offset)
765 {
766     TCGv t0 = tcg_temp_new();
767     gen_mov_reg_FCC0(dst, src, fcc_offset);
768     gen_mov_reg_FCC1(t0, src, fcc_offset);
769     tcg_gen_or_tl(dst, dst, t0);
770 }
771 
772 // 1 or 2: FCC0 ^ FCC1
773 static void gen_op_eval_fblg(TCGv dst, TCGv src, unsigned int fcc_offset)
774 {
775     TCGv t0 = tcg_temp_new();
776     gen_mov_reg_FCC0(dst, src, fcc_offset);
777     gen_mov_reg_FCC1(t0, src, fcc_offset);
778     tcg_gen_xor_tl(dst, dst, t0);
779 }
780 
781 // 1 or 3: FCC0
/* fbul: FCC encodes 1 (less) or 3 (unordered), i.e. FCC0 set. */
static void gen_op_eval_fbul(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}
786 
787 // 1: FCC0 & !FCC1
788 static void gen_op_eval_fbl(TCGv dst, TCGv src, unsigned int fcc_offset)
789 {
790     TCGv t0 = tcg_temp_new();
791     gen_mov_reg_FCC0(dst, src, fcc_offset);
792     gen_mov_reg_FCC1(t0, src, fcc_offset);
793     tcg_gen_andc_tl(dst, dst, t0);
794 }
795 
796 // 2 or 3: FCC1
/* fbug: FCC encodes 2 (greater) or 3 (unordered), i.e. FCC1 set. */
static void gen_op_eval_fbug(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}
801 
802 // 2: !FCC0 & FCC1
803 static void gen_op_eval_fbg(TCGv dst, TCGv src, unsigned int fcc_offset)
804 {
805     TCGv t0 = tcg_temp_new();
806     gen_mov_reg_FCC0(dst, src, fcc_offset);
807     gen_mov_reg_FCC1(t0, src, fcc_offset);
808     tcg_gen_andc_tl(dst, t0, dst);
809 }
810 
811 // 3: FCC0 & FCC1
812 static void gen_op_eval_fbu(TCGv dst, TCGv src, unsigned int fcc_offset)
813 {
814     TCGv t0 = tcg_temp_new();
815     gen_mov_reg_FCC0(dst, src, fcc_offset);
816     gen_mov_reg_FCC1(t0, src, fcc_offset);
817     tcg_gen_and_tl(dst, dst, t0);
818 }
819 
820 // 0: !(FCC0 | FCC1)
821 static void gen_op_eval_fbe(TCGv dst, TCGv src, unsigned int fcc_offset)
822 {
823     TCGv t0 = tcg_temp_new();
824     gen_mov_reg_FCC0(dst, src, fcc_offset);
825     gen_mov_reg_FCC1(t0, src, fcc_offset);
826     tcg_gen_or_tl(dst, dst, t0);
827     tcg_gen_xori_tl(dst, dst, 0x1);
828 }
829 
830 // 0 or 3: !(FCC0 ^ FCC1)
831 static void gen_op_eval_fbue(TCGv dst, TCGv src, unsigned int fcc_offset)
832 {
833     TCGv t0 = tcg_temp_new();
834     gen_mov_reg_FCC0(dst, src, fcc_offset);
835     gen_mov_reg_FCC1(t0, src, fcc_offset);
836     tcg_gen_xor_tl(dst, dst, t0);
837     tcg_gen_xori_tl(dst, dst, 0x1);
838 }
839 
840 // 0 or 2: !FCC0
/* fbge: FCC encodes 0 (equal) or 2 (greater), i.e. FCC0 clear. */
static void gen_op_eval_fbge(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
846 
847 // !1: !(FCC0 & !FCC1)
848 static void gen_op_eval_fbuge(TCGv dst, TCGv src, unsigned int fcc_offset)
849 {
850     TCGv t0 = tcg_temp_new();
851     gen_mov_reg_FCC0(dst, src, fcc_offset);
852     gen_mov_reg_FCC1(t0, src, fcc_offset);
853     tcg_gen_andc_tl(dst, dst, t0);
854     tcg_gen_xori_tl(dst, dst, 0x1);
855 }
856 
857 // 0 or 1: !FCC1
/* fble: FCC encodes 0 (equal) or 1 (less), i.e. FCC1 clear. */
static void gen_op_eval_fble(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
863 
864 // !2: !(!FCC0 & FCC1)
865 static void gen_op_eval_fbule(TCGv dst, TCGv src, unsigned int fcc_offset)
866 {
867     TCGv t0 = tcg_temp_new();
868     gen_mov_reg_FCC0(dst, src, fcc_offset);
869     gen_mov_reg_FCC1(t0, src, fcc_offset);
870     tcg_gen_andc_tl(dst, t0, dst);
871     tcg_gen_xori_tl(dst, dst, 0x1);
872 }
873 
874 // !3: !(FCC0 & FCC1)
875 static void gen_op_eval_fbo(TCGv dst, TCGv src, unsigned int fcc_offset)
876 {
877     TCGv t0 = tcg_temp_new();
878     gen_mov_reg_FCC0(dst, src, fcc_offset);
879     gen_mov_reg_FCC1(t0, src, fcc_offset);
880     tcg_gen_and_tl(dst, dst, t0);
881     tcg_gen_xori_tl(dst, dst, 0x1);
882 }
883 
/* Emit a two-way TB exit: go to PC1 when R_COND is nonzero, else PC2.
   Each arm uses a chained goto_tb slot (0 and 1). */
static void gen_branch2(DisasContext *dc, target_ulong pc1,
                        target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    /* Condition true: branch target. */
    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    /* Condition false: fall-through target. */
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
896 
/* Conditional branch with the annul bit set: when taken, execute the
   delay slot (npc) then jump to PC1; when not taken, skip the delay
   slot entirely (npc+4).  Ends the TB. */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    /* Taken: delay slot at npc, then pc1. */
    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    /* Not taken + annul: skip the delay slot. */
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->base.is_jmp = DISAS_NORETURN;
}
911 
/* Conditional branch without the annul bit: the delay slot always
   executes; only npc is conditional.  When the current npc is static we
   defer the choice via JUMP_PC state; when it is already dynamic we
   must emit the conditional npc selection now. */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (npc & 3) {
        switch (npc) {
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            /* pc = npc; npc = cond ? pc1 : npc + 4 */
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
            tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc,
                               cpu_cond, tcg_constant_tl(0),
                               tcg_constant_tl(pc1), cpu_npc);
            dc->pc = npc;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        /* Static npc: record both candidates and resolve later
           (gen_generic_branch) when the npc is actually needed. */
        dc->pc = npc;
        dc->jump_pc[0] = pc1;
        dc->jump_pc[1] = npc + 4;
        dc->npc = JUMP_PC;
    }
}
937 
938 static void gen_generic_branch(DisasContext *dc)
939 {
940     TCGv npc0 = tcg_constant_tl(dc->jump_pc[0]);
941     TCGv npc1 = tcg_constant_tl(dc->jump_pc[1]);
942     TCGv zero = tcg_constant_tl(0);
943 
944     tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
945 }
946 
947 /* call this function before using the condition register as it may
948    have been set for a jump */
/* call this function before using the condition register as it may
   have been set for a jump */
static void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        /* Resolve the pending conditional npc so cpu_cond is free. */
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC_LOOKUP;
    }
}
956 
/* Make cpu_npc hold the architectural next-PC.  A pending JUMP_PC is
   resolved; an already-dynamic npc needs nothing; a static npc is
   stored as an immediate. */
static void save_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            gen_generic_branch(dc);
            dc->npc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            /* cpu_npc already up to date. */
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
975 
/* Force the lazily-evaluated condition codes into the PSR by calling
   the compute_psr helper, and mark cc_op as fully computed. */
static void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(tcg_env);
    }
}
983 
/* Synchronize cpu_pc/cpu_npc with the translator's view, e.g. before
   raising an exception or calling a helper that may long-jump. */
static void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
989 
/* Raise trap WHICH at the current insn; ends the TB. */
static void gen_exception(DisasContext *dc, int which)
{
    save_state(dc);
    gen_helper_raise_exception(tcg_env, tcg_constant_i32(which));
    dc->base.is_jmp = DISAS_NORETURN;
}
996 
997 static TCGLabel *delay_exceptionv(DisasContext *dc, TCGv_i32 excp)
998 {
999     DisasDelayException *e = g_new0(DisasDelayException, 1);
1000 
1001     e->next = dc->delay_excp_list;
1002     dc->delay_excp_list = e;
1003 
1004     e->lab = gen_new_label();
1005     e->excp = excp;
1006     e->pc = dc->pc;
1007     /* Caller must have used flush_cond before branch. */
1008     assert(e->npc != JUMP_PC);
1009     e->npc = dc->npc;
1010 
1011     return e->lab;
1012 }
1013 
/* Convenience wrapper: delayed exception with a constant trap number. */
static TCGLabel *delay_exception(DisasContext *dc, int excp)
{
    return delay_exceptionv(dc, tcg_constant_i32(excp));
}
1018 
1019 static void gen_check_align(DisasContext *dc, TCGv addr, int mask)
1020 {
1021     TCGv t = tcg_temp_new();
1022     TCGLabel *lab;
1023 
1024     tcg_gen_andi_tl(t, addr, mask);
1025 
1026     flush_cond(dc);
1027     lab = delay_exception(dc, TT_UNALIGNED);
1028     tcg_gen_brcondi_tl(TCG_COND_NE, t, 0, lab);
1029 }
1030 
/* Advance pc to npc (used when the current insn transfers control),
   handling all three npc states: pending JUMP_PC, dynamic, static. */
static void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            /* Resolve the conditional npc first, then copy it to pc. */
            gen_generic_branch(dc);
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = dc->npc;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        /* Static npc: just update the translator's view. */
        dc->pc = dc->npc;
    }
}
1052 
/* Emit the sequential advance: pc = npc; npc += 4. */
static void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1058 
1059 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1060                         DisasContext *dc)
1061 {
1062     static int subcc_cond[16] = {
1063         TCG_COND_NEVER,
1064         TCG_COND_EQ,
1065         TCG_COND_LE,
1066         TCG_COND_LT,
1067         TCG_COND_LEU,
1068         TCG_COND_LTU,
1069         -1, /* neg */
1070         -1, /* overflow */
1071         TCG_COND_ALWAYS,
1072         TCG_COND_NE,
1073         TCG_COND_GT,
1074         TCG_COND_GE,
1075         TCG_COND_GTU,
1076         TCG_COND_GEU,
1077         -1, /* pos */
1078         -1, /* no overflow */
1079     };
1080 
1081     static int logic_cond[16] = {
1082         TCG_COND_NEVER,
1083         TCG_COND_EQ,     /* eq:  Z */
1084         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1085         TCG_COND_LT,     /* lt:  N ^ V -> N */
1086         TCG_COND_EQ,     /* leu: C | Z -> Z */
1087         TCG_COND_NEVER,  /* ltu: C -> 0 */
1088         TCG_COND_LT,     /* neg: N */
1089         TCG_COND_NEVER,  /* vs:  V -> 0 */
1090         TCG_COND_ALWAYS,
1091         TCG_COND_NE,     /* ne:  !Z */
1092         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1093         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1094         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1095         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1096         TCG_COND_GE,     /* pos: !N */
1097         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1098     };
1099 
1100     TCGv_i32 r_src;
1101     TCGv r_dst;
1102 
1103 #ifdef TARGET_SPARC64
1104     if (xcc) {
1105         r_src = cpu_xcc;
1106     } else {
1107         r_src = cpu_psr;
1108     }
1109 #else
1110     r_src = cpu_psr;
1111 #endif
1112 
1113     switch (dc->cc_op) {
1114     case CC_OP_LOGIC:
1115         cmp->cond = logic_cond[cond];
1116     do_compare_dst_0:
1117         cmp->is_bool = false;
1118         cmp->c2 = tcg_constant_tl(0);
1119 #ifdef TARGET_SPARC64
1120         if (!xcc) {
1121             cmp->c1 = tcg_temp_new();
1122             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1123             break;
1124         }
1125 #endif
1126         cmp->c1 = cpu_cc_dst;
1127         break;
1128 
1129     case CC_OP_SUB:
1130         switch (cond) {
1131         case 6:  /* neg */
1132         case 14: /* pos */
1133             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1134             goto do_compare_dst_0;
1135 
1136         case 7: /* overflow */
1137         case 15: /* !overflow */
1138             goto do_dynamic;
1139 
1140         default:
1141             cmp->cond = subcc_cond[cond];
1142             cmp->is_bool = false;
1143 #ifdef TARGET_SPARC64
1144             if (!xcc) {
1145                 /* Note that sign-extension works for unsigned compares as
1146                    long as both operands are sign-extended.  */
1147                 cmp->c1 = tcg_temp_new();
1148                 cmp->c2 = tcg_temp_new();
1149                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1150                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1151                 break;
1152             }
1153 #endif
1154             cmp->c1 = cpu_cc_src;
1155             cmp->c2 = cpu_cc_src2;
1156             break;
1157         }
1158         break;
1159 
1160     default:
1161     do_dynamic:
1162         gen_helper_compute_psr(tcg_env);
1163         dc->cc_op = CC_OP_FLAGS;
1164         /* FALLTHRU */
1165 
1166     case CC_OP_FLAGS:
1167         /* We're going to generate a boolean result.  */
1168         cmp->cond = TCG_COND_NE;
1169         cmp->is_bool = true;
1170         cmp->c1 = r_dst = tcg_temp_new();
1171         cmp->c2 = tcg_constant_tl(0);
1172 
1173         switch (cond) {
1174         case 0x0:
1175             gen_op_eval_bn(r_dst);
1176             break;
1177         case 0x1:
1178             gen_op_eval_be(r_dst, r_src);
1179             break;
1180         case 0x2:
1181             gen_op_eval_ble(r_dst, r_src);
1182             break;
1183         case 0x3:
1184             gen_op_eval_bl(r_dst, r_src);
1185             break;
1186         case 0x4:
1187             gen_op_eval_bleu(r_dst, r_src);
1188             break;
1189         case 0x5:
1190             gen_op_eval_bcs(r_dst, r_src);
1191             break;
1192         case 0x6:
1193             gen_op_eval_bneg(r_dst, r_src);
1194             break;
1195         case 0x7:
1196             gen_op_eval_bvs(r_dst, r_src);
1197             break;
1198         case 0x8:
1199             gen_op_eval_ba(r_dst);
1200             break;
1201         case 0x9:
1202             gen_op_eval_bne(r_dst, r_src);
1203             break;
1204         case 0xa:
1205             gen_op_eval_bg(r_dst, r_src);
1206             break;
1207         case 0xb:
1208             gen_op_eval_bge(r_dst, r_src);
1209             break;
1210         case 0xc:
1211             gen_op_eval_bgu(r_dst, r_src);
1212             break;
1213         case 0xd:
1214             gen_op_eval_bcc(r_dst, r_src);
1215             break;
1216         case 0xe:
1217             gen_op_eval_bpos(r_dst, r_src);
1218             break;
1219         case 0xf:
1220             gen_op_eval_bvc(r_dst, r_src);
1221             break;
1222         }
1223         break;
1224     }
1225 }
1226 
1227 static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
1228 {
1229     unsigned int offset;
1230     TCGv r_dst;
1231 
1232     /* For now we still generate a straight boolean result.  */
1233     cmp->cond = TCG_COND_NE;
1234     cmp->is_bool = true;
1235     cmp->c1 = r_dst = tcg_temp_new();
1236     cmp->c2 = tcg_constant_tl(0);
1237 
1238     switch (cc) {
1239     default:
1240     case 0x0:
1241         offset = 0;
1242         break;
1243     case 0x1:
1244         offset = 32 - 10;
1245         break;
1246     case 0x2:
1247         offset = 34 - 10;
1248         break;
1249     case 0x3:
1250         offset = 36 - 10;
1251         break;
1252     }
1253 
1254     switch (cond) {
1255     case 0x0:
1256         gen_op_eval_bn(r_dst);
1257         break;
1258     case 0x1:
1259         gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1260         break;
1261     case 0x2:
1262         gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1263         break;
1264     case 0x3:
1265         gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1266         break;
1267     case 0x4:
1268         gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1269         break;
1270     case 0x5:
1271         gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1272         break;
1273     case 0x6:
1274         gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1275         break;
1276     case 0x7:
1277         gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1278         break;
1279     case 0x8:
1280         gen_op_eval_ba(r_dst);
1281         break;
1282     case 0x9:
1283         gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1284         break;
1285     case 0xa:
1286         gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1287         break;
1288     case 0xb:
1289         gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1290         break;
1291     case 0xc:
1292         gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1293         break;
1294     case 0xd:
1295         gen_op_eval_fble(r_dst, cpu_fsr, offset);
1296         break;
1297     case 0xe:
1298         gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1299         break;
1300     case 0xf:
1301         gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1302         break;
1303     }
1304 }
1305 
1306 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1307                      DisasContext *dc)
1308 {
1309     DisasCompare cmp;
1310     gen_compare(&cmp, cc, cond, dc);
1311 
1312     /* The interface is to return a boolean in r_dst.  */
1313     if (cmp.is_bool) {
1314         tcg_gen_mov_tl(r_dst, cmp.c1);
1315     } else {
1316         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1317     }
1318 }
1319 
1320 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1321 {
1322     DisasCompare cmp;
1323     gen_fcompare(&cmp, cc, cond);
1324 
1325     /* The interface is to return a boolean in r_dst.  */
1326     if (cmp.is_bool) {
1327         tcg_gen_mov_tl(r_dst, cmp.c1);
1328     } else {
1329         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1330     }
1331 }
1332 
1333 #ifdef TARGET_SPARC64
/* Map the 3-bit register-condition field to the *inverse* of the TCG
   condition to test against zero; -1 entries are reserved encodings.
   Users undo the inversion with tcg_invert_cond.  */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1345 
1346 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1347 {
1348     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1349     cmp->is_bool = false;
1350     cmp->c1 = r_src;
1351     cmp->c2 = tcg_constant_tl(0);
1352 }
1353 
1354 static void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1355 {
1356     DisasCompare cmp;
1357     gen_compare_reg(&cmp, cond, r_src);
1358 
1359     /* The interface is to return a boolean in r_dst.  */
1360     tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1361 }
1362 #endif
1363 
1364 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1365 {
1366     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1367     target_ulong target = dc->pc + offset;
1368 
1369 #ifdef TARGET_SPARC64
1370     if (unlikely(AM_CHECK(dc))) {
1371         target &= 0xffffffffULL;
1372     }
1373 #endif
1374     if (cond == 0x0) {
1375         /* unconditional not taken */
1376         if (a) {
1377             dc->pc = dc->npc + 4;
1378             dc->npc = dc->pc + 4;
1379         } else {
1380             dc->pc = dc->npc;
1381             dc->npc = dc->pc + 4;
1382         }
1383     } else if (cond == 0x8) {
1384         /* unconditional taken */
1385         if (a) {
1386             dc->pc = target;
1387             dc->npc = dc->pc + 4;
1388         } else {
1389             dc->pc = dc->npc;
1390             dc->npc = target;
1391             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1392         }
1393     } else {
1394         flush_cond(dc);
1395         gen_cond(cpu_cond, cc, cond, dc);
1396         if (a) {
1397             gen_branch_a(dc, target);
1398         } else {
1399             gen_branch_n(dc, target);
1400         }
1401     }
1402 }
1403 
1404 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1405 {
1406     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1407     target_ulong target = dc->pc + offset;
1408 
1409 #ifdef TARGET_SPARC64
1410     if (unlikely(AM_CHECK(dc))) {
1411         target &= 0xffffffffULL;
1412     }
1413 #endif
1414     if (cond == 0x0) {
1415         /* unconditional not taken */
1416         if (a) {
1417             dc->pc = dc->npc + 4;
1418             dc->npc = dc->pc + 4;
1419         } else {
1420             dc->pc = dc->npc;
1421             dc->npc = dc->pc + 4;
1422         }
1423     } else if (cond == 0x8) {
1424         /* unconditional taken */
1425         if (a) {
1426             dc->pc = target;
1427             dc->npc = dc->pc + 4;
1428         } else {
1429             dc->pc = dc->npc;
1430             dc->npc = target;
1431             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1432         }
1433     } else {
1434         flush_cond(dc);
1435         gen_fcond(cpu_cond, cc, cond);
1436         if (a) {
1437             gen_branch_a(dc, target);
1438         } else {
1439             gen_branch_n(dc, target);
1440         }
1441     }
1442 }
1443 
1444 #ifdef TARGET_SPARC64
1445 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1446                           TCGv r_reg)
1447 {
1448     unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1449     target_ulong target = dc->pc + offset;
1450 
1451     if (unlikely(AM_CHECK(dc))) {
1452         target &= 0xffffffffULL;
1453     }
1454     flush_cond(dc);
1455     gen_cond_reg(cpu_cond, cond, r_reg);
1456     if (a) {
1457         gen_branch_a(dc, target);
1458     } else {
1459         gen_branch_n(dc, target);
1460     }
1461 }
1462 
1463 static void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1464 {
1465     switch (fccno) {
1466     case 0:
1467         gen_helper_fcmps(cpu_fsr, tcg_env, r_rs1, r_rs2);
1468         break;
1469     case 1:
1470         gen_helper_fcmps_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1471         break;
1472     case 2:
1473         gen_helper_fcmps_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1474         break;
1475     case 3:
1476         gen_helper_fcmps_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1477         break;
1478     }
1479 }
1480 
1481 static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1482 {
1483     switch (fccno) {
1484     case 0:
1485         gen_helper_fcmpd(cpu_fsr, tcg_env, r_rs1, r_rs2);
1486         break;
1487     case 1:
1488         gen_helper_fcmpd_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1489         break;
1490     case 2:
1491         gen_helper_fcmpd_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1492         break;
1493     case 3:
1494         gen_helper_fcmpd_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1495         break;
1496     }
1497 }
1498 
1499 static void gen_op_fcmpq(int fccno)
1500 {
1501     switch (fccno) {
1502     case 0:
1503         gen_helper_fcmpq(cpu_fsr, tcg_env);
1504         break;
1505     case 1:
1506         gen_helper_fcmpq_fcc1(cpu_fsr, tcg_env);
1507         break;
1508     case 2:
1509         gen_helper_fcmpq_fcc2(cpu_fsr, tcg_env);
1510         break;
1511     case 3:
1512         gen_helper_fcmpq_fcc3(cpu_fsr, tcg_env);
1513         break;
1514     }
1515 }
1516 
1517 static void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1518 {
1519     switch (fccno) {
1520     case 0:
1521         gen_helper_fcmpes(cpu_fsr, tcg_env, r_rs1, r_rs2);
1522         break;
1523     case 1:
1524         gen_helper_fcmpes_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1525         break;
1526     case 2:
1527         gen_helper_fcmpes_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1528         break;
1529     case 3:
1530         gen_helper_fcmpes_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1531         break;
1532     }
1533 }
1534 
1535 static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1536 {
1537     switch (fccno) {
1538     case 0:
1539         gen_helper_fcmped(cpu_fsr, tcg_env, r_rs1, r_rs2);
1540         break;
1541     case 1:
1542         gen_helper_fcmped_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1543         break;
1544     case 2:
1545         gen_helper_fcmped_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1546         break;
1547     case 3:
1548         gen_helper_fcmped_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1549         break;
1550     }
1551 }
1552 
1553 static void gen_op_fcmpeq(int fccno)
1554 {
1555     switch (fccno) {
1556     case 0:
1557         gen_helper_fcmpeq(cpu_fsr, tcg_env);
1558         break;
1559     case 1:
1560         gen_helper_fcmpeq_fcc1(cpu_fsr, tcg_env);
1561         break;
1562     case 2:
1563         gen_helper_fcmpeq_fcc2(cpu_fsr, tcg_env);
1564         break;
1565     case 3:
1566         gen_helper_fcmpeq_fcc3(cpu_fsr, tcg_env);
1567         break;
1568     }
1569 }
1570 
1571 #else
1572 
/* Pre-v9: only one fcc field exists, so fccno is ignored.  */
static void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1577 
/* Pre-v9: only one fcc field exists, so fccno is ignored.  */
static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1582 
/* Pre-v9 quad compare: operands come from QT0/QT1; fccno is ignored.  */
static void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, tcg_env);
}
1587 
/* Pre-v9 compare-with-exception; fccno is ignored.  */
static void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1592 
/* Pre-v9 compare-with-exception; fccno is ignored.  */
static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1597 
/* Pre-v9 quad compare-with-exception; fccno is ignored.  */
static void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, tcg_env);
}
1602 #endif
1603 
/* Raise a TT_FP_EXCP trap with FSR.ftt replaced by fsr_flags.  */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    /* Clear the old ftt field before oring in the new value.  */
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1610 
/* If the FPU is disabled, raise TT_NFPU_INSN and return nonzero so the
   caller can abandon translation of the insn.  In user-only builds the
   FPU is always enabled.  */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1621 
/* Clear the FSR cexc and ftt fields before an FP operation.  */
static void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1626 
1627 static void gen_fop_FF(DisasContext *dc, int rd, int rs,
1628                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1629 {
1630     TCGv_i32 dst, src;
1631 
1632     src = gen_load_fpr_F(dc, rs);
1633     dst = gen_dest_fpr_F(dc);
1634 
1635     gen(dst, tcg_env, src);
1636     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1637 
1638     gen_store_fpr_F(dc, rd, dst);
1639 }
1640 
1641 static void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1642                           void (*gen)(TCGv_i32, TCGv_i32))
1643 {
1644     TCGv_i32 dst, src;
1645 
1646     src = gen_load_fpr_F(dc, rs);
1647     dst = gen_dest_fpr_F(dc);
1648 
1649     gen(dst, src);
1650 
1651     gen_store_fpr_F(dc, rd, dst);
1652 }
1653 
1654 static void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1655                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1656 {
1657     TCGv_i32 dst, src1, src2;
1658 
1659     src1 = gen_load_fpr_F(dc, rs1);
1660     src2 = gen_load_fpr_F(dc, rs2);
1661     dst = gen_dest_fpr_F(dc);
1662 
1663     gen(dst, tcg_env, src1, src2);
1664     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1665 
1666     gen_store_fpr_F(dc, rd, dst);
1667 }
1668 
1669 #ifdef TARGET_SPARC64
1670 static void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1671                            void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1672 {
1673     TCGv_i32 dst, src1, src2;
1674 
1675     src1 = gen_load_fpr_F(dc, rs1);
1676     src2 = gen_load_fpr_F(dc, rs2);
1677     dst = gen_dest_fpr_F(dc);
1678 
1679     gen(dst, src1, src2);
1680 
1681     gen_store_fpr_F(dc, rd, dst);
1682 }
1683 #endif
1684 
1685 static void gen_fop_DD(DisasContext *dc, int rd, int rs,
1686                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1687 {
1688     TCGv_i64 dst, src;
1689 
1690     src = gen_load_fpr_D(dc, rs);
1691     dst = gen_dest_fpr_D(dc, rd);
1692 
1693     gen(dst, tcg_env, src);
1694     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1695 
1696     gen_store_fpr_D(dc, rd, dst);
1697 }
1698 
1699 #ifdef TARGET_SPARC64
1700 static void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1701                           void (*gen)(TCGv_i64, TCGv_i64))
1702 {
1703     TCGv_i64 dst, src;
1704 
1705     src = gen_load_fpr_D(dc, rs);
1706     dst = gen_dest_fpr_D(dc, rd);
1707 
1708     gen(dst, src);
1709 
1710     gen_store_fpr_D(dc, rd, dst);
1711 }
1712 #endif
1713 
1714 static void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1715                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1716 {
1717     TCGv_i64 dst, src1, src2;
1718 
1719     src1 = gen_load_fpr_D(dc, rs1);
1720     src2 = gen_load_fpr_D(dc, rs2);
1721     dst = gen_dest_fpr_D(dc, rd);
1722 
1723     gen(dst, tcg_env, src1, src2);
1724     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1725 
1726     gen_store_fpr_D(dc, rd, dst);
1727 }
1728 
1729 #ifdef TARGET_SPARC64
1730 static void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1731                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1732 {
1733     TCGv_i64 dst, src1, src2;
1734 
1735     src1 = gen_load_fpr_D(dc, rs1);
1736     src2 = gen_load_fpr_D(dc, rs2);
1737     dst = gen_dest_fpr_D(dc, rd);
1738 
1739     gen(dst, src1, src2);
1740 
1741     gen_store_fpr_D(dc, rd, dst);
1742 }
1743 
1744 static void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1745                             void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1746 {
1747     TCGv_i64 dst, src1, src2;
1748 
1749     src1 = gen_load_fpr_D(dc, rs1);
1750     src2 = gen_load_fpr_D(dc, rs2);
1751     dst = gen_dest_fpr_D(dc, rd);
1752 
1753     gen(dst, cpu_gsr, src1, src2);
1754 
1755     gen_store_fpr_D(dc, rd, dst);
1756 }
1757 
1758 static void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1759                             void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1760 {
1761     TCGv_i64 dst, src0, src1, src2;
1762 
1763     src1 = gen_load_fpr_D(dc, rs1);
1764     src2 = gen_load_fpr_D(dc, rs2);
1765     src0 = gen_load_fpr_D(dc, rd);
1766     dst = gen_dest_fpr_D(dc, rd);
1767 
1768     gen(dst, src0, src1, src2);
1769 
1770     gen_store_fpr_D(dc, rd, dst);
1771 }
1772 #endif
1773 
/* Quad-precision unary op: operand passed via the global QT1 slot,
   result via QT0, with IEEE exception check.  */
static void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1785 
1786 #ifdef TARGET_SPARC64
/* Quad-precision unary op via QT1/QT0, without the exception check.  */
static void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1797 #endif
1798 
/* Quad-precision binary op: operands via QT0/QT1, result via QT0,
   with IEEE exception check.  */
static void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1811 
1812 static void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1813                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1814 {
1815     TCGv_i64 dst;
1816     TCGv_i32 src1, src2;
1817 
1818     src1 = gen_load_fpr_F(dc, rs1);
1819     src2 = gen_load_fpr_F(dc, rs2);
1820     dst = gen_dest_fpr_D(dc, rd);
1821 
1822     gen(dst, tcg_env, src1, src2);
1823     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1824 
1825     gen_store_fpr_D(dc, rd, dst);
1826 }
1827 
/* rd(quad) = gen(rs1, rs2) with double-precision sources; the quad
   result is returned via QT0.  With IEEE exception check.  */
static void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(tcg_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1842 
1843 #ifdef TARGET_SPARC64
1844 static void gen_fop_DF(DisasContext *dc, int rd, int rs,
1845                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1846 {
1847     TCGv_i64 dst;
1848     TCGv_i32 src;
1849 
1850     src = gen_load_fpr_F(dc, rs);
1851     dst = gen_dest_fpr_D(dc, rd);
1852 
1853     gen(dst, tcg_env, src);
1854     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1855 
1856     gen_store_fpr_D(dc, rd, dst);
1857 }
1858 #endif
1859 
1860 static void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1861                           void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1862 {
1863     TCGv_i64 dst;
1864     TCGv_i32 src;
1865 
1866     src = gen_load_fpr_F(dc, rs);
1867     dst = gen_dest_fpr_D(dc, rd);
1868 
1869     gen(dst, tcg_env, src);
1870 
1871     gen_store_fpr_D(dc, rd, dst);
1872 }
1873 
1874 static void gen_fop_FD(DisasContext *dc, int rd, int rs,
1875                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1876 {
1877     TCGv_i32 dst;
1878     TCGv_i64 src;
1879 
1880     src = gen_load_fpr_D(dc, rs);
1881     dst = gen_dest_fpr_F(dc);
1882 
1883     gen(dst, tcg_env, src);
1884     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1885 
1886     gen_store_fpr_F(dc, rd, dst);
1887 }
1888 
/* rd(single) = gen(rs(quad)): quad source passed via QT1.
   With IEEE exception check.  */
static void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_F(dc, rd, dst);
}
1902 
/* rd(double) = gen(rs(quad)): quad source passed via QT1.
   With IEEE exception check.  */
static void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_D(dc, rd, dst);
}
1916 
/* rd(quad) = gen(rs(single)): result returned via QT0.
   No exception check.  */
static void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(tcg_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1929 
/* rd(quad) = gen(rs(double)): result returned via QT0.
   No exception check.  */
static void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(tcg_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1942 
/* SWAP: atomically exchange src with the aligned word at addr;
   the old memory value lands in dst.  */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, MemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop | MO_ALIGN);
}
1949 
/* LDSTUB: atomically load the byte at addr into dst and store 0xff.  */
static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
{
    TCGv m1 = tcg_constant_tl(0xff);
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
}
1956 
1957 /* asi moves */
1958 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* Classification of an ASI access for code generation.  */
typedef enum {
    GET_ASI_HELPER,   /* no inline implementation; call out to a helper */
    GET_ASI_EXCP,     /* an exception was raised while decoding the ASI */
    GET_ASI_DIRECT,   /* plain load/store with the resolved mem_idx */
    GET_ASI_DTWINX,   /* twin (128-bit) access -- reserved for ldda */
    GET_ASI_BLOCK,    /* block load/store ASIs */
    GET_ASI_SHORT,    /* 8/16-bit FP "short" load/store ASIs */
    GET_ASI_BCOPY,    /* sparc32 block copy (sta) */
    GET_ASI_BFILL,    /* sparc32 block fill (stda) */
} ASIType;

typedef struct {
    ASIType type;
    int asi;      /* resolved ASI number */
    int mem_idx;  /* MMU index to use for the access */
    MemOp memop;  /* size/endianness, possibly overridden by the ASI */
} DisasASI;
1976 
/*
 * Decode the ASI of a memory-access insn into a DisasASI: classify it,
 * resolve the MMU index, and apply any access-size/endianness override.
 * Pre-v9, immediate-ASI forms are illegal and most ASIs are privileged;
 * on v9 the immediate form takes the ASI from dc->asi (presumably the
 * current %asi value tracked at translation time).
 */
static DisasASI get_asi(DisasContext *dc, int insn, MemOp memop)
{
    int asi = GET_FIELD(insn, 19, 26);
    ASIType type = GET_ASI_HELPER;
    int mem_idx = dc->mem_idx;

#ifndef TARGET_SPARC64
    /* Before v9, all asis are immediate and privileged.  */
    if (IS_IMM) {
        gen_exception(dc, TT_ILL_INSN);
        type = GET_ASI_EXCP;
    } else if (supervisor(dc)
               /* Note that LEON accepts ASI_USERDATA in user mode, for
                  use with CASA.  Also note that previous versions of
                  QEMU allowed (and old versions of gcc emitted) ASI_P
                  for LEON, which is incorrect.  */
               || (asi == ASI_USERDATA
                   && (dc->def->features & CPU_FEATURE_CASA))) {
        switch (asi) {
        case ASI_USERDATA:   /* User data access */
            mem_idx = MMU_USER_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_KERNELDATA: /* Supervisor data access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BYPASS:    /* MMU passthrough */
        case ASI_LEON_BYPASS: /* LEON MMU passthrough */
            mem_idx = MMU_PHYS_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BCOPY: /* Block copy, sta access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BCOPY;
            break;
        case ASI_M_BFILL: /* Block fill, stda access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BFILL;
            break;
        }

        /* MMU_PHYS_IDX is used when the MMU is disabled to passthrough the
         * permissions check in get_physical_address(..).
         */
        mem_idx = (dc->mem_idx == MMU_PHYS_IDX) ? MMU_PHYS_IDX : mem_idx;
    } else {
        gen_exception(dc, TT_PRIV_INSN);
        type = GET_ASI_EXCP;
    }
#else
    if (IS_IMM) {
        asi = dc->asi;
    }
    /* With v9, all asis below 0x80 are privileged.  */
    /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
       down that bit into DisasContext.  For the moment that's ok,
       since the direct implementations below doesn't have any ASIs
       in the restricted [0x30, 0x7f] range, and the check will be
       done properly in the helper.  */
    if (!supervisor(dc) && asi < 0x80) {
        gen_exception(dc, TT_PRIV_ACT);
        type = GET_ASI_EXCP;
    } else {
        /* First pass: resolve the MMU index implied by the ASI.  */
        switch (asi) {
        case ASI_REAL:      /* Bypass */
        case ASI_REAL_IO:   /* Bypass, non-cacheable */
        case ASI_REAL_L:    /* Bypass LE */
        case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
        case ASI_TWINX_REAL:   /* Real address, twinx */
        case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
            mem_idx = MMU_PHYS_IDX;
            break;
        case ASI_N:  /* Nucleus */
        case ASI_NL: /* Nucleus LE */
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            if (hypervisor(dc)) {
                mem_idx = MMU_PHYS_IDX;
            } else {
                mem_idx = MMU_NUCLEUS_IDX;
            }
            break;
        case ASI_AIUP:  /* As if user primary */
        case ASI_AIUPL: /* As if user primary LE */
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
            mem_idx = MMU_USER_IDX;
            break;
        case ASI_AIUS:  /* As if user secondary */
        case ASI_AIUSL: /* As if user secondary LE */
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
            mem_idx = MMU_USER_SECONDARY_IDX;
            break;
        case ASI_S:  /* Secondary */
        case ASI_SL: /* Secondary LE */
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL16_S:
        case ASI_FL16_SL:
            if (mem_idx == MMU_USER_IDX) {
                mem_idx = MMU_USER_SECONDARY_IDX;
            } else if (mem_idx == MMU_KERNEL_IDX) {
                mem_idx = MMU_KERNEL_SECONDARY_IDX;
            }
            break;
        case ASI_P:  /* Primary */
        case ASI_PL: /* Primary LE */
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_P:
        case ASI_BLK_PL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            break;
        }
        /* Second pass: classify the access type and memop override.  */
        switch (asi) {
        case ASI_REAL:
        case ASI_REAL_IO:
        case ASI_REAL_L:
        case ASI_REAL_IO_L:
        case ASI_N:
        case ASI_NL:
        case ASI_AIUP:
        case ASI_AIUPL:
        case ASI_AIUS:
        case ASI_AIUSL:
        case ASI_S:
        case ASI_SL:
        case ASI_P:
        case ASI_PL:
            type = GET_ASI_DIRECT;
            break;
        case ASI_TWINX_REAL:
        case ASI_TWINX_REAL_L:
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            type = GET_ASI_DTWINX;
            break;
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_BLK_P:
        case ASI_BLK_PL:
            type = GET_ASI_BLOCK;
            break;
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
            memop = MO_UB;
            type = GET_ASI_SHORT;
            break;
        case ASI_FL16_S:
        case ASI_FL16_SL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            memop = MO_TEUW;
            type = GET_ASI_SHORT;
            break;
        }
        /* The little-endian asis all have bit 3 set.  */
        if (asi & 8) {
            memop ^= MO_BSWAP;
        }
    }
#endif

    return (DisasASI){ type, asi, mem_idx, memop };
}
2189 
/* Generate code for an ASI load of width MEMOP from ADDR into DST.
   The ASI is resolved from INSN (immediate or %asi) by get_asi.  */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised the exception; emit nothing.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        /* The common case: an MMU-translated load at da.mem_idx.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            /* The helper may raise an exception; sync pc/npc first.  */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, tcg_env, addr, r_asi, r_mop);
#else
            {
                /* The helper always returns 64 bits; truncate for sparc32. */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
            }
#endif
        }
        break;
    }
}
2224 
/* Generate code for an ASI store of SRC (width MEMOP) to ADDR.
   The ASI is resolved from INSN by get_asi.  */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Exception already raised by get_asi.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
#endif
        /* fall through */
    case GET_ASI_DIRECT:
        /* The common case: an MMU-translated store at da.mem_idx.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_constant_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }
        }
        break;
#endif
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            /* The helper may raise an exception; sync pc/npc first.  */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(tcg_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes a 64-bit value; zero-extend on sparc32. */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
            }
#endif

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2300 
2301 static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2302                          TCGv addr, int insn)
2303 {
2304     DisasASI da = get_asi(dc, insn, MO_TEUL);
2305 
2306     switch (da.type) {
2307     case GET_ASI_EXCP:
2308         break;
2309     case GET_ASI_DIRECT:
2310         gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2311         break;
2312     default:
2313         /* ??? Should be DAE_invalid_asi.  */
2314         gen_exception(dc, TT_DATA_ACCESS);
2315         break;
2316     }
2317 }
2318 
2319 static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2320                         int insn, int rd)
2321 {
2322     DisasASI da = get_asi(dc, insn, MO_TEUL);
2323     TCGv oldv;
2324 
2325     switch (da.type) {
2326     case GET_ASI_EXCP:
2327         return;
2328     case GET_ASI_DIRECT:
2329         oldv = tcg_temp_new();
2330         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2331                                   da.mem_idx, da.memop | MO_ALIGN);
2332         gen_store_gpr(dc, rd, oldv);
2333         break;
2334     default:
2335         /* ??? Should be DAE_invalid_asi.  */
2336         gen_exception(dc, TT_DATA_ACCESS);
2337         break;
2338     }
2339 }
2340 
/* Generate code for LDSTUBA: atomically load the byte at ADDR into DST
   and set that memory byte to 0xff.  */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Exception already raised by get_asi.  */
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->base.tb) & CF_PARALLEL) {
            /* The ld/st helper pair below is not atomic; fall back to
               exclusive execution when translating for parallel TBs.  */
            gen_helper_exit_atomic(tcg_env);
        } else {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UB);
            TCGv_i64 s64, t64;

            /* Helpers may raise exceptions; sync pc/npc first.  */
            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);

            s64 = tcg_constant_i64(0xff);
            gen_helper_st_asi(tcg_env, addr, s64, r_asi, r_mop);

            tcg_gen_trunc_i64_tl(dst, t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2376 #endif
2377 
2378 #ifdef TARGET_SPARC64
/* Generate code for an ASI load into the FP registers
   (ldfa/lddfa/ldqfa).  SIZE is 4, 8 or 16 bytes; RD is the FP
   destination register number.  */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Exception already raised by get_asi.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Load via a temp so that a fault on the second access
               leaves the first destination register unmodified.  */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            for (i = 0; ; ++i) {
                /* Eight consecutive 8-byte loads into f[rd]..f[rd+14].  */
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop | MO_ALIGN);

            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, tcg_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], tcg_env, addr, r_asi, r_mop);
                break;
            case 16:
                /* As in the DIRECT case, delay the first writeback so a
                   fault on the second access leaves f[rd] untouched.  */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, tcg_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], tcg_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                break;
            default:
                g_assert_not_reached();
            }
        }
        break;
    }
}
2487 
/* Generate code for an ASI store from the FP registers
   (stfa/stdfa/stqfa).  SIZE is 4, 8 or 16 bytes; RD is the FP
   source register number.  */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Exception already raised by get_asi.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            for (i = 0; ; ++i) {
                /* Eight consecutive 8-byte stores from f[rd]..f[rd+14].  */
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2570 
/* Generate code for sparc64 LDDA with an ASI: load a 64-bit value (or a
   twin doubleword for the TWINX ASIs) into r[RD] (high) and r[RD+1] (low).  */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Exception already raised by get_asi; skip the writebacks.  */
        return;

    case GET_ASI_DTWINX:
        /* Twin load: two adjacent doublewords, 16-byte aligned.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop | MO_ALIGN);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            save_state(dc);
            gen_helper_ld_asi(tmp, tcg_env, addr, r_asi, r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2632 
/* Generate code for sparc64 STDA with an ASI: store HI (r[RD]) and
   r[RD+1] as a 64-bit value, or as a twin doubleword for TWINX ASIs.  */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Exception already raised by get_asi.  */
        break;

    case GET_ASI_DTWINX:
        /* Twin store: two adjacent doublewords, 16-byte aligned.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            /* The helper may raise an exception; sync pc/npc first.  */
            save_state(dc);
            gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2688 
2689 static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2690                          int insn, int rd)
2691 {
2692     DisasASI da = get_asi(dc, insn, MO_TEUQ);
2693     TCGv oldv;
2694 
2695     switch (da.type) {
2696     case GET_ASI_EXCP:
2697         return;
2698     case GET_ASI_DIRECT:
2699         oldv = tcg_temp_new();
2700         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2701                                   da.mem_idx, da.memop | MO_ALIGN);
2702         gen_store_gpr(dc, rd, oldv);
2703         break;
2704     default:
2705         /* ??? Should be DAE_invalid_asi.  */
2706         gen_exception(dc, TT_DATA_ACCESS);
2707         break;
2708     }
2709 }
2710 
2711 #elif !defined(CONFIG_USER_ONLY)
/* Generate code for sparc32 LDDA with an ASI: load a 64-bit value into
   the even/odd register pair r[RD] (high half) and r[RD+1] (low half).  */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEUQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Exception already raised by get_asi; skip the writebacks.  */
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            /* The helper may raise an exception; sync pc/npc first.  */
            save_state(dc);
            gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);
        }
        break;
    }

    /* Split the 64-bit value into the two 32-bit destination registers.  */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2745 
/* Generate code for sparc32 STDA with an ASI: store the register pair
   HI (r[RD]) and r[RD+1] as a single 64-bit value.  */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Exception already raised by get_asi.  */
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_constant_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }
        }
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            /* The helper may raise an exception; sync pc/npc first.  */
            save_state(dc);
            gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2791 #endif
2792 
2793 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2794 {
2795     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2796     return gen_load_gpr(dc, rs1);
2797 }
2798 
2799 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2800 {
2801     if (IS_IMM) { /* immediate */
2802         target_long simm = GET_FIELDs(insn, 19, 31);
2803         TCGv t = tcg_temp_new();
2804         tcg_gen_movi_tl(t, simm);
2805         return t;
2806     } else {      /* register */
2807         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2808         return gen_load_gpr(dc, rs2);
2809     }
2810 }
2811 
2812 #ifdef TARGET_SPARC64
2813 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2814 {
2815     TCGv_i32 c32, zero, dst, s1, s2;
2816 
2817     /* We have two choices here: extend the 32 bit data and use movcond_i64,
2818        or fold the comparison down to 32 bits and use movcond_i32.  Choose
2819        the later.  */
2820     c32 = tcg_temp_new_i32();
2821     if (cmp->is_bool) {
2822         tcg_gen_extrl_i64_i32(c32, cmp->c1);
2823     } else {
2824         TCGv_i64 c64 = tcg_temp_new_i64();
2825         tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2826         tcg_gen_extrl_i64_i32(c32, c64);
2827     }
2828 
2829     s1 = gen_load_fpr_F(dc, rs);
2830     s2 = gen_load_fpr_F(dc, rd);
2831     dst = gen_dest_fpr_F(dc);
2832     zero = tcg_constant_i32(0);
2833 
2834     tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2835 
2836     gen_store_fpr_F(dc, rd, dst);
2837 }
2838 
2839 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2840 {
2841     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2842     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2843                         gen_load_fpr_D(dc, rs),
2844                         gen_load_fpr_D(dc, rd));
2845     gen_store_fpr_D(dc, rd, dst);
2846 }
2847 
2848 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2849 {
2850     int qd = QFPREG(rd);
2851     int qs = QFPREG(rs);
2852 
2853     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2854                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2855     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2856                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2857 
2858     gen_update_fprs_dirty(dc, qd);
2859 }
2860 
2861 #ifndef CONFIG_USER_ONLY
2862 static void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env tcg_env)
2863 {
2864     TCGv_i32 r_tl = tcg_temp_new_i32();
2865 
2866     /* load env->tl into r_tl */
2867     tcg_gen_ld_i32(r_tl, tcg_env, offsetof(CPUSPARCState, tl));
2868 
2869     /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
2870     tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2871 
2872     /* calculate offset to current trap state from env->ts, reuse r_tl */
2873     tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
2874     tcg_gen_addi_ptr(r_tsptr, tcg_env, offsetof(CPUSPARCState, ts));
2875 
2876     /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2877     {
2878         TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
2879         tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
2880         tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
2881     }
2882 }
2883 #endif
2884 
2885 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2886                      int width, bool cc, bool left)
2887 {
2888     TCGv lo1, lo2;
2889     uint64_t amask, tabl, tabr;
2890     int shift, imask, omask;
2891 
2892     if (cc) {
2893         tcg_gen_mov_tl(cpu_cc_src, s1);
2894         tcg_gen_mov_tl(cpu_cc_src2, s2);
2895         tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2896         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2897         dc->cc_op = CC_OP_SUB;
2898     }
2899 
2900     /* Theory of operation: there are two tables, left and right (not to
2901        be confused with the left and right versions of the opcode).  These
2902        are indexed by the low 3 bits of the inputs.  To make things "easy",
2903        these tables are loaded into two constants, TABL and TABR below.
2904        The operation index = (input & imask) << shift calculates the index
2905        into the constant, while val = (table >> index) & omask calculates
2906        the value we're looking for.  */
2907     switch (width) {
2908     case 8:
2909         imask = 0x7;
2910         shift = 3;
2911         omask = 0xff;
2912         if (left) {
2913             tabl = 0x80c0e0f0f8fcfeffULL;
2914             tabr = 0xff7f3f1f0f070301ULL;
2915         } else {
2916             tabl = 0x0103070f1f3f7fffULL;
2917             tabr = 0xfffefcf8f0e0c080ULL;
2918         }
2919         break;
2920     case 16:
2921         imask = 0x6;
2922         shift = 1;
2923         omask = 0xf;
2924         if (left) {
2925             tabl = 0x8cef;
2926             tabr = 0xf731;
2927         } else {
2928             tabl = 0x137f;
2929             tabr = 0xfec8;
2930         }
2931         break;
2932     case 32:
2933         imask = 0x4;
2934         shift = 0;
2935         omask = 0x3;
2936         if (left) {
2937             tabl = (2 << 2) | 3;
2938             tabr = (3 << 2) | 1;
2939         } else {
2940             tabl = (1 << 2) | 3;
2941             tabr = (3 << 2) | 2;
2942         }
2943         break;
2944     default:
2945         abort();
2946     }
2947 
2948     lo1 = tcg_temp_new();
2949     lo2 = tcg_temp_new();
2950     tcg_gen_andi_tl(lo1, s1, imask);
2951     tcg_gen_andi_tl(lo2, s2, imask);
2952     tcg_gen_shli_tl(lo1, lo1, shift);
2953     tcg_gen_shli_tl(lo2, lo2, shift);
2954 
2955     tcg_gen_shr_tl(lo1, tcg_constant_tl(tabl), lo1);
2956     tcg_gen_shr_tl(lo2, tcg_constant_tl(tabr), lo2);
2957     tcg_gen_andi_tl(lo1, lo1, omask);
2958     tcg_gen_andi_tl(lo2, lo2, omask);
2959 
2960     amask = -8;
2961     if (AM_CHECK(dc)) {
2962         amask &= 0xffffffffULL;
2963     }
2964     tcg_gen_andi_tl(s1, s1, amask);
2965     tcg_gen_andi_tl(s2, s2, amask);
2966 
2967     /* Compute dst = (s1 == s2 ? lo1 : lo1 & lo2). */
2968     tcg_gen_and_tl(lo2, lo2, lo1);
2969     tcg_gen_movcond_tl(TCG_COND_EQ, dst, s1, s2, lo1, lo2);
2970 }
2971 
2972 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2973 {
2974     TCGv tmp = tcg_temp_new();
2975 
2976     tcg_gen_add_tl(tmp, s1, s2);
2977     tcg_gen_andi_tl(dst, tmp, -8);
2978     if (left) {
2979         tcg_gen_neg_tl(tmp, tmp);
2980     }
2981     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2982 }
2983 
2984 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2985 {
2986     TCGv t1, t2, shift;
2987 
2988     t1 = tcg_temp_new();
2989     t2 = tcg_temp_new();
2990     shift = tcg_temp_new();
2991 
2992     tcg_gen_andi_tl(shift, gsr, 7);
2993     tcg_gen_shli_tl(shift, shift, 3);
2994     tcg_gen_shl_tl(t1, s1, shift);
2995 
2996     /* A shift of 64 does not produce 0 in TCG.  Divide this into a
2997        shift of (up to 63) followed by a constant shift of 1.  */
2998     tcg_gen_xori_tl(shift, shift, 63);
2999     tcg_gen_shr_tl(t2, s2, shift);
3000     tcg_gen_shri_tl(t2, t2, 1);
3001 
3002     tcg_gen_or_tl(dst, t1, t2);
3003 }
3004 #endif
3005 
3006 /* Include the auto-generated decoder.  */
3007 #include "decode-insns.c.inc"
3008 
/* Glue an availability predicate onto a decodetree translator:
   trans_NAME accepts the insn only when avail_AVAIL holds for this
   configuration AND FUNC translates it successfully.  */
#define TRANS(NAME, AVAIL, FUNC, ...) \
    static bool trans_##NAME(DisasContext *dc, arg_##NAME *a) \
    { return avail_##AVAIL(dc) && FUNC(dc, __VA_ARGS__); }

/* Availability predicates used by TRANS above.  */
#define avail_ALL(C)      true
#ifdef TARGET_SPARC64
# define avail_32(C)      false
# define avail_64(C)      true
#else
# define avail_32(C)      true
# define avail_64(C)      false
#endif
3021 
3022 /* Default case for non jump instructions. */
3023 static bool advance_pc(DisasContext *dc)
3024 {
3025     if (dc->npc & 3) {
3026         switch (dc->npc) {
3027         case DYNAMIC_PC:
3028         case DYNAMIC_PC_LOOKUP:
3029             dc->pc = dc->npc;
3030             gen_op_next_insn();
3031             break;
3032         case JUMP_PC:
3033             /* we can do a static jump */
3034             gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
3035             dc->base.is_jmp = DISAS_NORETURN;
3036             break;
3037         default:
3038             g_assert_not_reached();
3039         }
3040     } else {
3041         dc->pc = dc->npc;
3042         dc->npc = dc->npc + 4;
3043     }
3044     return true;
3045 }
3046 
/*
 * Bail out of the legacy decoder when the CPU model lacks FEATURE.
 * NOTE(review): these expand to an unbraced if/goto, so they must not
 * be used as the sole body of an outer if/else.
 */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
/* Same check, but jumps to the nfpu_insn handler instead. */
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
3053 
3054 /* before an instruction, dc->pc must be static */
3055 static void disas_sparc_legacy(DisasContext *dc, unsigned int insn)
3056 {
3057     unsigned int opc, rs1, rs2, rd;
3058     TCGv cpu_src1, cpu_src2;
3059     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
3060     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
3061     target_long simm;
3062 
3063     opc = GET_FIELD(insn, 0, 1);
3064     rd = GET_FIELD(insn, 2, 6);
3065 
3066     switch (opc) {
3067     case 0:                     /* branches/sethi */
3068         {
3069             unsigned int xop = GET_FIELD(insn, 7, 9);
3070             int32_t target;
3071             switch (xop) {
3072 #ifdef TARGET_SPARC64
3073             case 0x1:           /* V9 BPcc */
3074                 {
3075                     int cc;
3076 
3077                     target = GET_FIELD_SP(insn, 0, 18);
3078                     target = sign_extend(target, 19);
3079                     target <<= 2;
3080                     cc = GET_FIELD_SP(insn, 20, 21);
3081                     if (cc == 0)
3082                         do_branch(dc, target, insn, 0);
3083                     else if (cc == 2)
3084                         do_branch(dc, target, insn, 1);
3085                     else
3086                         goto illegal_insn;
3087                     goto jmp_insn;
3088                 }
3089             case 0x3:           /* V9 BPr */
3090                 {
3091                     target = GET_FIELD_SP(insn, 0, 13) |
3092                         (GET_FIELD_SP(insn, 20, 21) << 14);
3093                     target = sign_extend(target, 16);
3094                     target <<= 2;
3095                     cpu_src1 = get_src1(dc, insn);
3096                     do_branch_reg(dc, target, insn, cpu_src1);
3097                     goto jmp_insn;
3098                 }
3099             case 0x5:           /* V9 FBPcc */
3100                 {
3101                     int cc = GET_FIELD_SP(insn, 20, 21);
3102                     if (gen_trap_ifnofpu(dc)) {
3103                         goto jmp_insn;
3104                     }
3105                     target = GET_FIELD_SP(insn, 0, 18);
3106                     target = sign_extend(target, 19);
3107                     target <<= 2;
3108                     do_fbranch(dc, target, insn, cc);
3109                     goto jmp_insn;
3110                 }
3111 #else
3112             case 0x7:           /* CBN+x */
3113                 {
3114                     goto ncp_insn;
3115                 }
3116 #endif
3117             case 0x2:           /* BN+x */
3118                 {
3119                     target = GET_FIELD(insn, 10, 31);
3120                     target = sign_extend(target, 22);
3121                     target <<= 2;
3122                     do_branch(dc, target, insn, 0);
3123                     goto jmp_insn;
3124                 }
3125             case 0x6:           /* FBN+x */
3126                 {
3127                     if (gen_trap_ifnofpu(dc)) {
3128                         goto jmp_insn;
3129                     }
3130                     target = GET_FIELD(insn, 10, 31);
3131                     target = sign_extend(target, 22);
3132                     target <<= 2;
3133                     do_fbranch(dc, target, insn, 0);
3134                     goto jmp_insn;
3135                 }
3136             case 0x4:           /* SETHI */
3137                 /* Special-case %g0 because that's the canonical nop.  */
3138                 if (rd) {
3139                     uint32_t value = GET_FIELD(insn, 10, 31);
3140                     TCGv t = gen_dest_gpr(dc, rd);
3141                     tcg_gen_movi_tl(t, value << 10);
3142                     gen_store_gpr(dc, rd, t);
3143                 }
3144                 break;
3145             case 0x0:           /* UNIMPL */
3146             default:
3147                 goto illegal_insn;
3148             }
3149             break;
3150         }
3151         break;
3152     case 1:                     /*CALL*/
3153         {
3154             target_long target = GET_FIELDs(insn, 2, 31) << 2;
3155             TCGv o7 = gen_dest_gpr(dc, 15);
3156 
3157             tcg_gen_movi_tl(o7, dc->pc);
3158             gen_store_gpr(dc, 15, o7);
3159             target += dc->pc;
3160             gen_mov_pc_npc(dc);
3161 #ifdef TARGET_SPARC64
3162             if (unlikely(AM_CHECK(dc))) {
3163                 target &= 0xffffffffULL;
3164             }
3165 #endif
3166             dc->npc = target;
3167         }
3168         goto jmp_insn;
3169     case 2:                     /* FPU & Logical Operations */
3170         {
3171             unsigned int xop = GET_FIELD(insn, 7, 12);
3172             TCGv cpu_dst = tcg_temp_new();
3173             TCGv cpu_tmp0;
3174 
3175             if (xop == 0x3a) {  /* generate trap */
3176                 int cond = GET_FIELD(insn, 3, 6);
3177                 TCGv_i32 trap;
3178                 TCGLabel *l1 = NULL;
3179                 int mask;
3180 
3181                 if (cond == 0) {
3182                     /* Trap never.  */
3183                     break;
3184                 }
3185 
3186                 save_state(dc);
3187 
3188                 if (cond != 8) {
3189                     /* Conditional trap.  */
3190                     DisasCompare cmp;
3191 #ifdef TARGET_SPARC64
3192                     /* V9 icc/xcc */
3193                     int cc = GET_FIELD_SP(insn, 11, 12);
3194                     if (cc == 0) {
3195                         gen_compare(&cmp, 0, cond, dc);
3196                     } else if (cc == 2) {
3197                         gen_compare(&cmp, 1, cond, dc);
3198                     } else {
3199                         goto illegal_insn;
3200                     }
3201 #else
3202                     gen_compare(&cmp, 0, cond, dc);
3203 #endif
3204                     l1 = gen_new_label();
3205                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3206                                       cmp.c1, cmp.c2, l1);
3207                 }
3208 
3209                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3210                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3211 
3212                 /* Don't use the normal temporaries, as they may well have
3213                    gone out of scope with the branch above.  While we're
3214                    doing that we might as well pre-truncate to 32-bit.  */
3215                 trap = tcg_temp_new_i32();
3216 
3217                 rs1 = GET_FIELD_SP(insn, 14, 18);
3218                 if (IS_IMM) {
3219                     rs2 = GET_FIELD_SP(insn, 0, 7);
3220                     if (rs1 == 0) {
3221                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3222                         /* Signal that the trap value is fully constant.  */
3223                         mask = 0;
3224                     } else {
3225                         TCGv t1 = gen_load_gpr(dc, rs1);
3226                         tcg_gen_trunc_tl_i32(trap, t1);
3227                         tcg_gen_addi_i32(trap, trap, rs2);
3228                     }
3229                 } else {
3230                     TCGv t1, t2;
3231                     rs2 = GET_FIELD_SP(insn, 0, 4);
3232                     t1 = gen_load_gpr(dc, rs1);
3233                     t2 = gen_load_gpr(dc, rs2);
3234                     tcg_gen_add_tl(t1, t1, t2);
3235                     tcg_gen_trunc_tl_i32(trap, t1);
3236                 }
3237                 if (mask != 0) {
3238                     tcg_gen_andi_i32(trap, trap, mask);
3239                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
3240                 }
3241 
3242                 gen_helper_raise_exception(tcg_env, trap);
3243 
3244                 if (cond == 8) {
3245                     /* An unconditional trap ends the TB.  */
3246                     dc->base.is_jmp = DISAS_NORETURN;
3247                     goto jmp_insn;
3248                 } else {
3249                     /* A conditional trap falls through to the next insn.  */
3250                     gen_set_label(l1);
3251                     break;
3252                 }
3253             } else if (xop == 0x28) {
3254                 rs1 = GET_FIELD(insn, 13, 17);
3255                 switch(rs1) {
3256                 case 0: /* rdy */
3257 #ifndef TARGET_SPARC64
3258                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
3259                                        manual, rdy on the microSPARC
3260                                        II */
3261                 case 0x0f:          /* stbar in the SPARCv8 manual,
3262                                        rdy on the microSPARC II */
3263                 case 0x10 ... 0x1f: /* implementation-dependent in the
3264                                        SPARCv8 manual, rdy on the
3265                                        microSPARC II */
3266                     /* Read Asr17 */
3267                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3268                         TCGv t = gen_dest_gpr(dc, rd);
3269                         /* Read Asr17 for a Leon3 monoprocessor */
3270                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3271                         gen_store_gpr(dc, rd, t);
3272                         break;
3273                     }
3274 #endif
3275                     gen_store_gpr(dc, rd, cpu_y);
3276                     break;
3277 #ifdef TARGET_SPARC64
3278                 case 0x2: /* V9 rdccr */
3279                     update_psr(dc);
3280                     gen_helper_rdccr(cpu_dst, tcg_env);
3281                     gen_store_gpr(dc, rd, cpu_dst);
3282                     break;
3283                 case 0x3: /* V9 rdasi */
3284                     tcg_gen_movi_tl(cpu_dst, dc->asi);
3285                     gen_store_gpr(dc, rd, cpu_dst);
3286                     break;
3287                 case 0x4: /* V9 rdtick */
3288                     {
3289                         TCGv_ptr r_tickptr;
3290                         TCGv_i32 r_const;
3291 
3292                         r_tickptr = tcg_temp_new_ptr();
3293                         r_const = tcg_constant_i32(dc->mem_idx);
3294                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3295                                        offsetof(CPUSPARCState, tick));
3296                         if (translator_io_start(&dc->base)) {
3297                             dc->base.is_jmp = DISAS_EXIT;
3298                         }
3299                         gen_helper_tick_get_count(cpu_dst, tcg_env, r_tickptr,
3300                                                   r_const);
3301                         gen_store_gpr(dc, rd, cpu_dst);
3302                     }
3303                     break;
3304                 case 0x5: /* V9 rdpc */
3305                     {
3306                         TCGv t = gen_dest_gpr(dc, rd);
3307                         if (unlikely(AM_CHECK(dc))) {
3308                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3309                         } else {
3310                             tcg_gen_movi_tl(t, dc->pc);
3311                         }
3312                         gen_store_gpr(dc, rd, t);
3313                     }
3314                     break;
3315                 case 0x6: /* V9 rdfprs */
3316                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3317                     gen_store_gpr(dc, rd, cpu_dst);
3318                     break;
3319                 case 0xf: /* V9 membar */
3320                     break; /* no effect */
3321                 case 0x13: /* Graphics Status */
3322                     if (gen_trap_ifnofpu(dc)) {
3323                         goto jmp_insn;
3324                     }
3325                     gen_store_gpr(dc, rd, cpu_gsr);
3326                     break;
3327                 case 0x16: /* Softint */
3328                     tcg_gen_ld32s_tl(cpu_dst, tcg_env,
3329                                      offsetof(CPUSPARCState, softint));
3330                     gen_store_gpr(dc, rd, cpu_dst);
3331                     break;
3332                 case 0x17: /* Tick compare */
3333                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
3334                     break;
3335                 case 0x18: /* System tick */
3336                     {
3337                         TCGv_ptr r_tickptr;
3338                         TCGv_i32 r_const;
3339 
3340                         r_tickptr = tcg_temp_new_ptr();
3341                         r_const = tcg_constant_i32(dc->mem_idx);
3342                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3343                                        offsetof(CPUSPARCState, stick));
3344                         if (translator_io_start(&dc->base)) {
3345                             dc->base.is_jmp = DISAS_EXIT;
3346                         }
3347                         gen_helper_tick_get_count(cpu_dst, tcg_env, r_tickptr,
3348                                                   r_const);
3349                         gen_store_gpr(dc, rd, cpu_dst);
3350                     }
3351                     break;
3352                 case 0x19: /* System tick compare */
3353                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
3354                     break;
3355                 case 0x1a: /* UltraSPARC-T1 Strand status */
3356                     /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3357                      * this ASR as impl. dep
3358                      */
3359                     CHECK_IU_FEATURE(dc, HYPV);
3360                     {
3361                         TCGv t = gen_dest_gpr(dc, rd);
3362                         tcg_gen_movi_tl(t, 1UL);
3363                         gen_store_gpr(dc, rd, t);
3364                     }
3365                     break;
3366                 case 0x10: /* Performance Control */
3367                 case 0x11: /* Performance Instrumentation Counter */
3368                 case 0x12: /* Dispatch Control */
3369                 case 0x14: /* Softint set, WO */
3370                 case 0x15: /* Softint clear, WO */
3371 #endif
3372                 default:
3373                     goto illegal_insn;
3374                 }
3375 #if !defined(CONFIG_USER_ONLY)
3376             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3377 #ifndef TARGET_SPARC64
3378                 if (!supervisor(dc)) {
3379                     goto priv_insn;
3380                 }
3381                 update_psr(dc);
3382                 gen_helper_rdpsr(cpu_dst, tcg_env);
3383 #else
3384                 CHECK_IU_FEATURE(dc, HYPV);
3385                 if (!hypervisor(dc))
3386                     goto priv_insn;
3387                 rs1 = GET_FIELD(insn, 13, 17);
3388                 switch (rs1) {
3389                 case 0: // hpstate
3390                     tcg_gen_ld_i64(cpu_dst, tcg_env,
3391                                    offsetof(CPUSPARCState, hpstate));
3392                     break;
3393                 case 1: // htstate
3394                     // gen_op_rdhtstate();
3395                     break;
3396                 case 3: // hintp
3397                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3398                     break;
3399                 case 5: // htba
3400                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
3401                     break;
3402                 case 6: // hver
3403                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
3404                     break;
3405                 case 31: // hstick_cmpr
3406                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3407                     break;
3408                 default:
3409                     goto illegal_insn;
3410                 }
3411 #endif
3412                 gen_store_gpr(dc, rd, cpu_dst);
3413                 break;
3414             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3415                 if (!supervisor(dc)) {
3416                     goto priv_insn;
3417                 }
3418                 cpu_tmp0 = tcg_temp_new();
3419 #ifdef TARGET_SPARC64
3420                 rs1 = GET_FIELD(insn, 13, 17);
3421                 switch (rs1) {
3422                 case 0: // tpc
3423                     {
3424                         TCGv_ptr r_tsptr;
3425 
3426                         r_tsptr = tcg_temp_new_ptr();
3427                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3428                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3429                                       offsetof(trap_state, tpc));
3430                     }
3431                     break;
3432                 case 1: // tnpc
3433                     {
3434                         TCGv_ptr r_tsptr;
3435 
3436                         r_tsptr = tcg_temp_new_ptr();
3437                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3438                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3439                                       offsetof(trap_state, tnpc));
3440                     }
3441                     break;
3442                 case 2: // tstate
3443                     {
3444                         TCGv_ptr r_tsptr;
3445 
3446                         r_tsptr = tcg_temp_new_ptr();
3447                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3448                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3449                                       offsetof(trap_state, tstate));
3450                     }
3451                     break;
3452                 case 3: // tt
3453                     {
3454                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3455 
3456                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3457                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3458                                          offsetof(trap_state, tt));
3459                     }
3460                     break;
3461                 case 4: // tick
3462                     {
3463                         TCGv_ptr r_tickptr;
3464                         TCGv_i32 r_const;
3465 
3466                         r_tickptr = tcg_temp_new_ptr();
3467                         r_const = tcg_constant_i32(dc->mem_idx);
3468                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3469                                        offsetof(CPUSPARCState, tick));
3470                         if (translator_io_start(&dc->base)) {
3471                             dc->base.is_jmp = DISAS_EXIT;
3472                         }
3473                         gen_helper_tick_get_count(cpu_tmp0, tcg_env,
3474                                                   r_tickptr, r_const);
3475                     }
3476                     break;
3477                 case 5: // tba
3478                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3479                     break;
3480                 case 6: // pstate
3481                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3482                                      offsetof(CPUSPARCState, pstate));
3483                     break;
3484                 case 7: // tl
3485                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3486                                      offsetof(CPUSPARCState, tl));
3487                     break;
3488                 case 8: // pil
3489                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3490                                      offsetof(CPUSPARCState, psrpil));
3491                     break;
3492                 case 9: // cwp
3493                     gen_helper_rdcwp(cpu_tmp0, tcg_env);
3494                     break;
3495                 case 10: // cansave
3496                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3497                                      offsetof(CPUSPARCState, cansave));
3498                     break;
3499                 case 11: // canrestore
3500                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3501                                      offsetof(CPUSPARCState, canrestore));
3502                     break;
3503                 case 12: // cleanwin
3504                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3505                                      offsetof(CPUSPARCState, cleanwin));
3506                     break;
3507                 case 13: // otherwin
3508                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3509                                      offsetof(CPUSPARCState, otherwin));
3510                     break;
3511                 case 14: // wstate
3512                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3513                                      offsetof(CPUSPARCState, wstate));
3514                     break;
3515                 case 16: // UA2005 gl
3516                     CHECK_IU_FEATURE(dc, GL);
3517                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3518                                      offsetof(CPUSPARCState, gl));
3519                     break;
3520                 case 26: // UA2005 strand status
3521                     CHECK_IU_FEATURE(dc, HYPV);
3522                     if (!hypervisor(dc))
3523                         goto priv_insn;
3524                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3525                     break;
3526                 case 31: // ver
3527                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3528                     break;
3529                 case 15: // fq
3530                 default:
3531                     goto illegal_insn;
3532                 }
3533 #else
3534                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3535 #endif
3536                 gen_store_gpr(dc, rd, cpu_tmp0);
3537                 break;
3538 #endif
3539 #if defined(TARGET_SPARC64) || !defined(CONFIG_USER_ONLY)
3540             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3541 #ifdef TARGET_SPARC64
3542                 gen_helper_flushw(tcg_env);
3543 #else
3544                 if (!supervisor(dc))
3545                     goto priv_insn;
3546                 gen_store_gpr(dc, rd, cpu_tbr);
3547 #endif
3548                 break;
3549 #endif
3550             } else if (xop == 0x34) {   /* FPU Operations */
3551                 if (gen_trap_ifnofpu(dc)) {
3552                     goto jmp_insn;
3553                 }
3554                 gen_op_clear_ieee_excp_and_FTT();
3555                 rs1 = GET_FIELD(insn, 13, 17);
3556                 rs2 = GET_FIELD(insn, 27, 31);
3557                 xop = GET_FIELD(insn, 18, 26);
3558 
3559                 switch (xop) {
3560                 case 0x1: /* fmovs */
3561                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3562                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3563                     break;
3564                 case 0x5: /* fnegs */
3565                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3566                     break;
3567                 case 0x9: /* fabss */
3568                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3569                     break;
3570                 case 0x29: /* fsqrts */
3571                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3572                     break;
3573                 case 0x2a: /* fsqrtd */
3574                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3575                     break;
3576                 case 0x2b: /* fsqrtq */
3577                     CHECK_FPU_FEATURE(dc, FLOAT128);
3578                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3579                     break;
3580                 case 0x41: /* fadds */
3581                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3582                     break;
3583                 case 0x42: /* faddd */
3584                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3585                     break;
3586                 case 0x43: /* faddq */
3587                     CHECK_FPU_FEATURE(dc, FLOAT128);
3588                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3589                     break;
3590                 case 0x45: /* fsubs */
3591                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3592                     break;
3593                 case 0x46: /* fsubd */
3594                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3595                     break;
3596                 case 0x47: /* fsubq */
3597                     CHECK_FPU_FEATURE(dc, FLOAT128);
3598                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3599                     break;
3600                 case 0x49: /* fmuls */
3601                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3602                     break;
3603                 case 0x4a: /* fmuld */
3604                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3605                     break;
3606                 case 0x4b: /* fmulq */
3607                     CHECK_FPU_FEATURE(dc, FLOAT128);
3608                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3609                     break;
3610                 case 0x4d: /* fdivs */
3611                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3612                     break;
3613                 case 0x4e: /* fdivd */
3614                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3615                     break;
3616                 case 0x4f: /* fdivq */
3617                     CHECK_FPU_FEATURE(dc, FLOAT128);
3618                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3619                     break;
3620                 case 0x69: /* fsmuld */
3621                     CHECK_FPU_FEATURE(dc, FSMULD);
3622                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3623                     break;
3624                 case 0x6e: /* fdmulq */
3625                     CHECK_FPU_FEATURE(dc, FLOAT128);
3626                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3627                     break;
3628                 case 0xc4: /* fitos */
3629                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3630                     break;
3631                 case 0xc6: /* fdtos */
3632                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3633                     break;
3634                 case 0xc7: /* fqtos */
3635                     CHECK_FPU_FEATURE(dc, FLOAT128);
3636                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3637                     break;
3638                 case 0xc8: /* fitod */
3639                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3640                     break;
3641                 case 0xc9: /* fstod */
3642                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3643                     break;
3644                 case 0xcb: /* fqtod */
3645                     CHECK_FPU_FEATURE(dc, FLOAT128);
3646                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3647                     break;
3648                 case 0xcc: /* fitoq */
3649                     CHECK_FPU_FEATURE(dc, FLOAT128);
3650                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3651                     break;
3652                 case 0xcd: /* fstoq */
3653                     CHECK_FPU_FEATURE(dc, FLOAT128);
3654                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3655                     break;
3656                 case 0xce: /* fdtoq */
3657                     CHECK_FPU_FEATURE(dc, FLOAT128);
3658                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3659                     break;
3660                 case 0xd1: /* fstoi */
3661                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3662                     break;
3663                 case 0xd2: /* fdtoi */
3664                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3665                     break;
3666                 case 0xd3: /* fqtoi */
3667                     CHECK_FPU_FEATURE(dc, FLOAT128);
3668                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3669                     break;
3670 #ifdef TARGET_SPARC64
3671                 case 0x2: /* V9 fmovd */
3672                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3673                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3674                     break;
3675                 case 0x3: /* V9 fmovq */
3676                     CHECK_FPU_FEATURE(dc, FLOAT128);
3677                     gen_move_Q(dc, rd, rs2);
3678                     break;
3679                 case 0x6: /* V9 fnegd */
3680                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3681                     break;
3682                 case 0x7: /* V9 fnegq */
3683                     CHECK_FPU_FEATURE(dc, FLOAT128);
3684                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3685                     break;
3686                 case 0xa: /* V9 fabsd */
3687                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3688                     break;
3689                 case 0xb: /* V9 fabsq */
3690                     CHECK_FPU_FEATURE(dc, FLOAT128);
3691                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3692                     break;
3693                 case 0x81: /* V9 fstox */
3694                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3695                     break;
3696                 case 0x82: /* V9 fdtox */
3697                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3698                     break;
3699                 case 0x83: /* V9 fqtox */
3700                     CHECK_FPU_FEATURE(dc, FLOAT128);
3701                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3702                     break;
3703                 case 0x84: /* V9 fxtos */
3704                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3705                     break;
3706                 case 0x88: /* V9 fxtod */
3707                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3708                     break;
3709                 case 0x8c: /* V9 fxtoq */
3710                     CHECK_FPU_FEATURE(dc, FLOAT128);
3711                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3712                     break;
3713 #endif
3714                 default:
3715                     goto illegal_insn;
3716                 }
            } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
                int cond;
#endif
                /*
                 * FPop2: conditional FP moves (FMOVr/FMOVcc) and the FP
                 * compare instructions.  All of them trap if the FPU is
                 * disabled.
                 */
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }
                /* FPop insns start with current exception/FTT state cleared. */
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);

#ifdef TARGET_SPARC64
                /*
                 * FMOVr: move FP register rs2 to rd if integer register rs1
                 * satisfies the 3-bit register condition (rcond field).
                 */
#define FMOVR(sz)                                                  \
                do {                                               \
                    DisasCompare cmp;                              \
                    cond = GET_FIELD_SP(insn, 10, 12);             \
                    cpu_src1 = get_src1(dc, insn);                 \
                    gen_compare_reg(&cmp, cond, cpu_src1);         \
                    gen_fmov##sz(dc, &cmp, rd, rs2);               \
                } while (0)

                /*
                 * The 0x11f mask keeps the size bits plus the bit that
                 * distinguishes FMOVr encodings from the FMOVcc ones
                 * handled in the switch below.
                 */
                if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
                    FMOVR(s);
                    break;
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                    FMOVR(d);
                    break;
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVR(q);
                    break;
                }
#undef FMOVR
#endif
                switch (xop) {
#ifdef TARGET_SPARC64
                /* FMOVcc keyed on a floating-point condition code field. */
#define FMOVCC(fcc, sz)                                                 \
                    do {                                                \
                        DisasCompare cmp;                               \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcompare(&cmp, fcc, cond);                  \
                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
                    } while (0)

                    case 0x001: /* V9 fmovscc %fcc0 */
                        FMOVCC(0, s);
                        break;
                    case 0x002: /* V9 fmovdcc %fcc0 */
                        FMOVCC(0, d);
                        break;
                    case 0x003: /* V9 fmovqcc %fcc0 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(0, q);
                        break;
                    case 0x041: /* V9 fmovscc %fcc1 */
                        FMOVCC(1, s);
                        break;
                    case 0x042: /* V9 fmovdcc %fcc1 */
                        FMOVCC(1, d);
                        break;
                    case 0x043: /* V9 fmovqcc %fcc1 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(1, q);
                        break;
                    case 0x081: /* V9 fmovscc %fcc2 */
                        FMOVCC(2, s);
                        break;
                    case 0x082: /* V9 fmovdcc %fcc2 */
                        FMOVCC(2, d);
                        break;
                    case 0x083: /* V9 fmovqcc %fcc2 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(2, q);
                        break;
                    case 0x0c1: /* V9 fmovscc %fcc3 */
                        FMOVCC(3, s);
                        break;
                    case 0x0c2: /* V9 fmovdcc %fcc3 */
                        FMOVCC(3, d);
                        break;
                    case 0x0c3: /* V9 fmovqcc %fcc3 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(3, q);
                        break;
#undef FMOVCC
                /* Same shape, but keyed on the integer cc (%icc or %xcc). */
#define FMOVCC(xcc, sz)                                                 \
                    do {                                                \
                        DisasCompare cmp;                               \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_compare(&cmp, xcc, cond, dc);               \
                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
                    } while (0)

                    case 0x101: /* V9 fmovscc %icc */
                        FMOVCC(0, s);
                        break;
                    case 0x102: /* V9 fmovdcc %icc */
                        FMOVCC(0, d);
                        break;
                    case 0x103: /* V9 fmovqcc %icc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(0, q);
                        break;
                    case 0x181: /* V9 fmovscc %xcc */
                        FMOVCC(1, s);
                        break;
                    case 0x182: /* V9 fmovdcc %xcc */
                        FMOVCC(1, d);
                        break;
                    case 0x183: /* V9 fmovqcc %xcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(1, q);
                        break;
#undef FMOVCC
#endif
                    /*
                     * FP compares.  "rd & 3" selects the %fccN field to
                     * receive the result (always %fcc0 on pre-V9).  The
                     * fcmpe* variants additionally signal on quiet NaNs.
                     */
                    case 0x51: /* fcmps, V9 %fcc */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x52: /* fcmpd, V9 %fcc */
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                        gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
                        break;
                    case 0x53: /* fcmpq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        /* 128-bit operands go through the QT0/QT1 staging
                           slots rather than TCG temporaries.  */
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpq(rd & 3);
                        break;
                    case 0x55: /* fcmpes, V9 %fcc */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x56: /* fcmped, V9 %fcc */
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                        gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
                        break;
                    case 0x57: /* fcmpeq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpeq(rd & 3);
                        break;
                    default:
                        goto illegal_insn;
                }
            } else if (xop == 0x2) {
                /*
                 * or - also the basis of the synthetic clr/mov insns, so
                 * %g0 operands are special-cased to avoid dead TCG ops.
                 */
                TCGv dst = gen_dest_gpr(dc, rd);
                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    /* clr/mov shortcut : or %g0, x, y -> mov x, y */
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_movi_tl(dst, simm);
                        gen_store_gpr(dc, rd, dst);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 == 0) {
                            /* or %g0, %g0, y -> clr y */
                            tcg_gen_movi_tl(dst, 0);
                            gen_store_gpr(dc, rd, dst);
                        } else {
                            /* Plain register copy; no OR needed. */
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            gen_store_gpr(dc, rd, cpu_src2);
                        }
                    }
                } else {
                    cpu_src1 = get_src1(dc, insn);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(dst, cpu_src1, simm);
                        gen_store_gpr(dc, rd, dst);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 == 0) {
                            /* mov shortcut:  or x, %g0, y -> mov x, y */
                            gen_store_gpr(dc, rd, cpu_src1);
                        } else {
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
                            gen_store_gpr(dc, rd, dst);
                        }
                    }
                }
#ifdef TARGET_SPARC64
            /*
             * 64-bit shift group.  Bit 12 of the insn (the V9 "X" bit)
             * selects the 64-bit form (6-bit shift count) versus the
             * 32-bit form (5-bit count).  The 32-bit srl/sra forms first
             * zero- resp. sign-extend the low 32 bits of the source.
             */
            } else if (xop == 0x25) { /* sll, V9 sllx */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = tcg_temp_new();
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_store_gpr(dc, rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        /* 32-bit srl: discard the high half first. */
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = tcg_temp_new();
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_store_gpr(dc, rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        /* 32-bit sra: sign-extend from bit 31 first. */
                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = tcg_temp_new();
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_store_gpr(dc, rd, cpu_dst);
#endif
3976             } else if (xop < 0x36) {
3977                 if (xop < 0x20) {
3978                     cpu_src1 = get_src1(dc, insn);
3979                     cpu_src2 = get_src2(dc, insn);
3980                     switch (xop & ~0x10) {
3981                     case 0x0: /* add */
3982                         if (xop & 0x10) {
3983                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3984                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3985                             dc->cc_op = CC_OP_ADD;
3986                         } else {
3987                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3988                         }
3989                         break;
3990                     case 0x1: /* and */
3991                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3992                         if (xop & 0x10) {
3993                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3994                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3995                             dc->cc_op = CC_OP_LOGIC;
3996                         }
3997                         break;
3998                     case 0x2: /* or */
3999                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
4000                         if (xop & 0x10) {
4001                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4002                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4003                             dc->cc_op = CC_OP_LOGIC;
4004                         }
4005                         break;
4006                     case 0x3: /* xor */
4007                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
4008                         if (xop & 0x10) {
4009                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4010                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4011                             dc->cc_op = CC_OP_LOGIC;
4012                         }
4013                         break;
4014                     case 0x4: /* sub */
4015                         if (xop & 0x10) {
4016                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4017                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
4018                             dc->cc_op = CC_OP_SUB;
4019                         } else {
4020                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
4021                         }
4022                         break;
4023                     case 0x5: /* andn */
4024                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
4025                         if (xop & 0x10) {
4026                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4027                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4028                             dc->cc_op = CC_OP_LOGIC;
4029                         }
4030                         break;
4031                     case 0x6: /* orn */
4032                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
4033                         if (xop & 0x10) {
4034                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4035                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4036                             dc->cc_op = CC_OP_LOGIC;
4037                         }
4038                         break;
4039                     case 0x7: /* xorn */
4040                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
4041                         if (xop & 0x10) {
4042                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4043                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4044                             dc->cc_op = CC_OP_LOGIC;
4045                         }
4046                         break;
4047                     case 0x8: /* addx, V9 addc */
4048                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4049                                         (xop & 0x10));
4050                         break;
4051 #ifdef TARGET_SPARC64
4052                     case 0x9: /* V9 mulx */
4053                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
4054                         break;
4055 #endif
4056                     case 0xa: /* umul */
4057                         CHECK_IU_FEATURE(dc, MUL);
4058                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
4059                         if (xop & 0x10) {
4060                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4061                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4062                             dc->cc_op = CC_OP_LOGIC;
4063                         }
4064                         break;
4065                     case 0xb: /* smul */
4066                         CHECK_IU_FEATURE(dc, MUL);
4067                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
4068                         if (xop & 0x10) {
4069                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4070                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4071                             dc->cc_op = CC_OP_LOGIC;
4072                         }
4073                         break;
4074                     case 0xc: /* subx, V9 subc */
4075                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4076                                         (xop & 0x10));
4077                         break;
4078 #ifdef TARGET_SPARC64
4079                     case 0xd: /* V9 udivx */
4080                         gen_helper_udivx(cpu_dst, tcg_env, cpu_src1, cpu_src2);
4081                         break;
4082 #endif
4083                     case 0xe: /* udiv */
4084                         CHECK_IU_FEATURE(dc, DIV);
4085                         if (xop & 0x10) {
4086                             gen_helper_udiv_cc(cpu_dst, tcg_env, cpu_src1,
4087                                                cpu_src2);
4088                             dc->cc_op = CC_OP_DIV;
4089                         } else {
4090                             gen_helper_udiv(cpu_dst, tcg_env, cpu_src1,
4091                                             cpu_src2);
4092                         }
4093                         break;
4094                     case 0xf: /* sdiv */
4095                         CHECK_IU_FEATURE(dc, DIV);
4096                         if (xop & 0x10) {
4097                             gen_helper_sdiv_cc(cpu_dst, tcg_env, cpu_src1,
4098                                                cpu_src2);
4099                             dc->cc_op = CC_OP_DIV;
4100                         } else {
4101                             gen_helper_sdiv(cpu_dst, tcg_env, cpu_src1,
4102                                             cpu_src2);
4103                         }
4104                         break;
4105                     default:
4106                         goto illegal_insn;
4107                     }
4108                     gen_store_gpr(dc, rd, cpu_dst);
                } else {
                    /*
                     * xop 0x20..0x35: tagged arithmetic, mulscc, and (on
                     * 32-bit targets only) the shift instructions; the
                     * 64-bit shift forms are handled earlier.
                     */
                    cpu_src1 = get_src1(dc, insn);
                    cpu_src2 = get_src2(dc, insn);
                    switch (xop) {
                    case 0x20: /* taddcc */
                        gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                        dc->cc_op = CC_OP_TADD;
                        break;
                    case 0x21: /* tsubcc */
                        gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                        dc->cc_op = CC_OP_TSUB;
                        break;
                    case 0x22: /* taddcctv */
                        /* Trap-on-overflow variant; helper may raise a
                           tag_overflow exception, so flags are set there. */
                        gen_helper_taddcctv(cpu_dst, tcg_env,
                                            cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        dc->cc_op = CC_OP_TADDTV;
                        break;
                    case 0x23: /* tsubcctv */
                        gen_helper_tsubcctv(cpu_dst, tcg_env,
                                            cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        dc->cc_op = CC_OP_TSUBTV;
                        break;
                    case 0x24: /* mulscc */
                        /* mulscc reads the live PSR (N xor V), so the
                           lazy flags must be materialized first. */
                        update_psr(dc);
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                        dc->cc_op = CC_OP_ADD;
                        break;
#ifndef TARGET_SPARC64
                    case 0x25:  /* sll */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x26:  /* srl */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x27:  /* sra */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
#endif
                    case 0x30:
                        {
                            /*
                             * wr: rd selects the destination state register.
                             * The written value is always rs1 ^ rs2/simm,
                             * per the architected WRASR definition.
                             */
                            cpu_tmp0 = tcg_temp_new();
                            switch(rd) {
                            case 0: /* wry */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                /* %y is architecturally 32 bits. */
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                                break;
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC
                                                   II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8
                                                   manual, nop on the
                                                   microSPARC II */
                                if ((rd == 0x13) && (dc->def->features &
                                                     CPU_FEATURE_POWERDOWN)) {
                                    /* LEON3 power-down */
                                    save_state(dc);
                                    gen_helper_power_down(tcg_env);
                                }
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_wrccr(tcg_env, cpu_tmp0);
                                /* Flags now live in env; drop lazy cc state. */
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                                dc->cc_op = CC_OP_FLAGS;
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState, asi));
                                /*
                                 * End TB to notice changed ASI.
                                 * TODO: Could notice src1 = %g0 and IS_IMM,
                                 * update DisasContext and not exit the TB.
                                 */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_lookup_and_goto_ptr();
                                dc->base.is_jmp = DISAS_NORETURN;
                                break;
                            case 0x6: /* V9 wrfprs */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
                                /* FPRS affects translation; end the TB. */
                                dc->fprs_dirty = 0;
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(NULL, 0);
                                dc->base.is_jmp = DISAS_NORETURN;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc)) {
                                    ; // XXX
                                }
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc)) {
                                    goto jmp_insn;
                                }
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                                break;
                            case 0x14: /* Softint set */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_set_softint(tcg_env, cpu_tmp0);
                                break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(tcg_env, cpu_tmp0);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_write_softint(tcg_env, cpu_tmp0);
                                break;
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, tick));
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, stick));
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, stick));
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;

                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            /* V9: this opcode is SAVED/RESTORED; rd selects
                               the window-state operation.  */
                            switch (rd) {
                            case 0:
                                gen_helper_saved(tcg_env);
                                break;
                            case 1:
                                gen_helper_restored(tcg_env);
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            /* V8 wrpsr: %psr = src1 ^ src2.  Condition
                               codes now live in the PSR, so switch to
                               CC_OP_FLAGS; the new PSR may change execution
                               state (NOTE(review): presumably CWP/PIL), so
                               end the TB unconditionally.  */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(tcg_env, cpu_tmp0);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            save_state(dc);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(NULL, 0);
                            dc->base.is_jmp = DISAS_NORETURN;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            /* Value written is src1 ^ src2 (WR
                               semantics).  */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            /* V9 wrpr: rd selects the privileged
                               register.  */
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
                                                    offsetof(trap_state, tt));
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, tick));
                                    /* Writing the counter may interact with
                                       a running timer: mark the I/O.  */
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                /* NOTE(review): npc forced dynamic,
                                   presumably because PSTATE affects how
                                   subsequent code must be translated.  */
                                save_state(dc);
                                if (translator_io_start(&dc->base)) {
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                gen_helper_wrpstate(tcg_env, cpu_tmp0);
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 7: // tl
                                save_state(dc);
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                               offsetof(CPUSPARCState, tl));
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 8: // pil
                                /* PIL gates pending interrupts, hence the
                                   I/O marking and possible TB exit.  */
                                if (translator_io_start(&dc->base)) {
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                gen_helper_wrpil(tcg_env, cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(tcg_env, cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                gen_helper_wrgl(tcg_env, cpu_tmp0);
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            /* V8 wrwim: keep only the bits for the
                               implemented register windows.  */
                            tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
                            if (dc->def->nwindows != 32) {
                                tcg_gen_andi_tl(cpu_wim, cpu_wim,
                                                (1 << dc->def->nwindows) - 1);
                            }
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            /* V8 wrtbr: %tbr = src1 ^ src2, supervisor
                               only.  */
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            /* UA2005 wrhpr: hypervisor privilege required;
                               rd selects the hyperprivileged register.  */
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                tcg_gen_st_i64(cpu_tmp0, tcg_env,
                                               offsetof(CPUSPARCState,
                                                        hpstate));
                                /* End the TB: the new hpstate may change
                                   how following code must execute.  */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(NULL, 0);
                                dc->base.is_jmp = DISAS_NORETURN;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, hstick));
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#endif
#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            DisasCompare cmp;
                            TCGv dst;

                            /* Bit 18 set: integer condition codes, where
                               cc field 0 = %icc and 2 = %xcc (1 and 3 are
                               illegal).  Bit 18 clear: floating-point
                               %fccN selected by cc.  */
                            if (insn & (1 << 18)) {
                                if (cc == 0) {
                                    gen_compare(&cmp, 0, cond, dc);
                                } else if (cc == 2) {
                                    gen_compare(&cmp, 1, cond, dc);
                                } else {
                                    goto illegal_insn;
                                }
                            } else {
                                gen_fcompare(&cmp, cc, cond);
                            }

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 11-bit field we have
                               in movcc.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 10);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* rd = condition ? src2 : rd (conditional
                               move).  */
                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        /* 64-bit signed divide; error cases are handled in
                           the helper (hence the tcg_env argument).  */
                        gen_helper_sdivx(cpu_dst, tcg_env, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        /* Population count of src2; rs1 is not used.  */
                        tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            DisasCompare cmp;
                            TCGv dst;

                            /* movr tests rs1 against zero.  */
                            gen_compare_reg(&cmp, cond, cpu_src1);

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 10-bit field we have
                               in movr.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 9);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* rd = condition ? src2 : rd.  */
                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
#endif
                    default:
                        goto illegal_insn;
                    }
                }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                /* VIS: sub-opcode lives in insn bits 5-13; rs1/rs2 are
                   decoded here (rd was decoded earlier).  */
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                /* All VIS ops trap if the FPU is disabled.  */
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }
                switch (opf) {
                /* edge<w>[l][cc|n]: the gen_edge flags after the width are
                   (cc, left) -- 1 for the cc-setting forms, and 1 for the
                   little-endian 'l' variants (inferred from the case
                   pairing below).  */
                case 0x000: /* VIS I edge8cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x001: /* VIS II edge8n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x002: /* VIS I edge8lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x003: /* VIS II edge8ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x004: /* VIS I edge16cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x005: /* VIS II edge16n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x006: /* VIS I edge16lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x007: /* VIS II edge16ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x008: /* VIS I edge32cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x009: /* VIS II edge32n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00a: /* VIS I edge32lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00b: /* VIS II edge32ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
4724                     break;
4725                 case 0x010: /* VIS I array8 */
4726                     CHECK_FPU_FEATURE(dc, VIS1);
4727                     cpu_src1 = gen_load_gpr(dc, rs1);
4728                     cpu_src2 = gen_load_gpr(dc, rs2);
4729                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4730                     gen_store_gpr(dc, rd, cpu_dst);
4731                     break;
4732                 case 0x012: /* VIS I array16 */
4733                     CHECK_FPU_FEATURE(dc, VIS1);
4734                     cpu_src1 = gen_load_gpr(dc, rs1);
4735                     cpu_src2 = gen_load_gpr(dc, rs2);
4736                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4737                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4738                     gen_store_gpr(dc, rd, cpu_dst);
4739                     break;
4740                 case 0x014: /* VIS I array32 */
4741                     CHECK_FPU_FEATURE(dc, VIS1);
4742                     cpu_src1 = gen_load_gpr(dc, rs1);
4743                     cpu_src2 = gen_load_gpr(dc, rs2);
4744                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4745                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4746                     gen_store_gpr(dc, rd, cpu_dst);
4747                     break;
4748                 case 0x018: /* VIS I alignaddr */
4749                     CHECK_FPU_FEATURE(dc, VIS1);
4750                     cpu_src1 = gen_load_gpr(dc, rs1);
4751                     cpu_src2 = gen_load_gpr(dc, rs2);
4752                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4753                     gen_store_gpr(dc, rd, cpu_dst);
4754                     break;
4755                 case 0x01a: /* VIS I alignaddrl */
4756                     CHECK_FPU_FEATURE(dc, VIS1);
4757                     cpu_src1 = gen_load_gpr(dc, rs1);
4758                     cpu_src2 = gen_load_gpr(dc, rs2);
4759                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4760                     gen_store_gpr(dc, rd, cpu_dst);
4761                     break;
4762                 case 0x019: /* VIS II bmask */
4763                     CHECK_FPU_FEATURE(dc, VIS2);
4764                     cpu_src1 = gen_load_gpr(dc, rs1);
4765                     cpu_src2 = gen_load_gpr(dc, rs2);
4766                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4767                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4768                     gen_store_gpr(dc, rd, cpu_dst);
4769                     break;
                /* Partitioned FP compares: both operands are double FP
                   registers; the per-element result mask is written to an
                   integer register.  */
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                /* Partitioned multiplies: gen_ne_fop_DDD wires up a
                   double-register op with no GSR dependency.  */
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
                    break;
                case 0x03a: /* VIS I fpack32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* gen_gsr_fop_DDD: like the above but the helper also
                       reads the GSR.  */
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
                    break;
                case 0x03b: /* VIS I fpack16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* Single-operand forms: only rs2 is read; cpu_gsr is
                       passed explicitly (NOTE(review): presumably for the
                       GSR scale field -- see the helper).  */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03d: /* VIS I fpackfix */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03e: /* VIS I pdist */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* DDDD form: pdist accumulates into rd.  */
                    gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
                    break;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* GSR-dependent: alignment comes from the GSR.  */
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
                    break;
                case 0x04c: /* VIS II bshuffle */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
                    break;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
                    break;
                /* Partitioned add/sub; the 's' forms operate on single FP
                   registers and map to plain 32-bit TCG add/sub.  */
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
                    break;
                /* Logical ops on FP registers; fzero/fzeros are the
                   constant generators.  */
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
                    tcg_gen_movi_i64(cpu_dst_64, 0);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    tcg_gen_movi_i32(cpu_dst_32, 0);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
                    break;
4944                 case 0x064: /* VIS I fandnot2 */
4945                     CHECK_FPU_FEATURE(dc, VIS1);
4946                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4947                     break;
4948                 case 0x065: /* VIS I fandnot2s */
4949                     CHECK_FPU_FEATURE(dc, VIS1);
4950                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4951                     break;
4952                 case 0x066: /* VIS I fnot2 */
4953                     CHECK_FPU_FEATURE(dc, VIS1);
4954                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4955                     break;
4956                 case 0x067: /* VIS I fnot2s */
4957                     CHECK_FPU_FEATURE(dc, VIS1);
4958                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4959                     break;
4960                 case 0x068: /* VIS I fandnot1 */
4961                     CHECK_FPU_FEATURE(dc, VIS1);
4962                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4963                     break;
4964                 case 0x069: /* VIS I fandnot1s */
4965                     CHECK_FPU_FEATURE(dc, VIS1);
4966                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4967                     break;
4968                 case 0x06a: /* VIS I fnot1 */
4969                     CHECK_FPU_FEATURE(dc, VIS1);
4970                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4971                     break;
4972                 case 0x06b: /* VIS I fnot1s */
4973                     CHECK_FPU_FEATURE(dc, VIS1);
4974                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4975                     break;
4976                 case 0x06c: /* VIS I fxor */
4977                     CHECK_FPU_FEATURE(dc, VIS1);
4978                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4979                     break;
4980                 case 0x06d: /* VIS I fxors */
4981                     CHECK_FPU_FEATURE(dc, VIS1);
4982                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4983                     break;
4984                 case 0x06e: /* VIS I fnand */
4985                     CHECK_FPU_FEATURE(dc, VIS1);
4986                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4987                     break;
4988                 case 0x06f: /* VIS I fnands */
4989                     CHECK_FPU_FEATURE(dc, VIS1);
4990                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4991                     break;
4992                 case 0x070: /* VIS I fand */
4993                     CHECK_FPU_FEATURE(dc, VIS1);
4994                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4995                     break;
4996                 case 0x071: /* VIS I fands */
4997                     CHECK_FPU_FEATURE(dc, VIS1);
4998                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4999                     break;
5000                 case 0x072: /* VIS I fxnor */
5001                     CHECK_FPU_FEATURE(dc, VIS1);
5002                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
5003                     break;
5004                 case 0x073: /* VIS I fxnors */
5005                     CHECK_FPU_FEATURE(dc, VIS1);
5006                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
5007                     break;
5008                 case 0x074: /* VIS I fsrc1 */
5009                     CHECK_FPU_FEATURE(dc, VIS1);
5010                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
5011                     gen_store_fpr_D(dc, rd, cpu_src1_64);
5012                     break;
5013                 case 0x075: /* VIS I fsrc1s */
5014                     CHECK_FPU_FEATURE(dc, VIS1);
5015                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
5016                     gen_store_fpr_F(dc, rd, cpu_src1_32);
5017                     break;
5018                 case 0x076: /* VIS I fornot2 */
5019                     CHECK_FPU_FEATURE(dc, VIS1);
5020                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
5021                     break;
5022                 case 0x077: /* VIS I fornot2s */
5023                     CHECK_FPU_FEATURE(dc, VIS1);
5024                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
5025                     break;
5026                 case 0x078: /* VIS I fsrc2 */
5027                     CHECK_FPU_FEATURE(dc, VIS1);
5028                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
5029                     gen_store_fpr_D(dc, rd, cpu_src1_64);
5030                     break;
5031                 case 0x079: /* VIS I fsrc2s */
5032                     CHECK_FPU_FEATURE(dc, VIS1);
5033                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
5034                     gen_store_fpr_F(dc, rd, cpu_src1_32);
5035                     break;
5036                 case 0x07a: /* VIS I fornot1 */
5037                     CHECK_FPU_FEATURE(dc, VIS1);
5038                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
5039                     break;
5040                 case 0x07b: /* VIS I fornot1s */
5041                     CHECK_FPU_FEATURE(dc, VIS1);
5042                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
5043                     break;
5044                 case 0x07c: /* VIS I for */
5045                     CHECK_FPU_FEATURE(dc, VIS1);
5046                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
5047                     break;
5048                 case 0x07d: /* VIS I fors */
5049                     CHECK_FPU_FEATURE(dc, VIS1);
5050                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
5051                     break;
5052                 case 0x07e: /* VIS I fone */
5053                     CHECK_FPU_FEATURE(dc, VIS1);
5054                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5055                     tcg_gen_movi_i64(cpu_dst_64, -1);
5056                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5057                     break;
5058                 case 0x07f: /* VIS I fones */
5059                     CHECK_FPU_FEATURE(dc, VIS1);
5060                     cpu_dst_32 = gen_dest_fpr_F(dc);
5061                     tcg_gen_movi_i32(cpu_dst_32, -1);
5062                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5063                     break;
5064                 case 0x080: /* VIS I shutdown */
5065                 case 0x081: /* VIS II siam */
5066                     // XXX
5067                     goto illegal_insn;
5068                 default:
5069                     goto illegal_insn;
5070                 }
5071 #else
5072                 goto ncp_insn;
5073 #endif
5074             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
5075 #ifdef TARGET_SPARC64
5076                 goto illegal_insn;
5077 #else
5078                 goto ncp_insn;
5079 #endif
5080 #ifdef TARGET_SPARC64
5081             } else if (xop == 0x39) { /* V9 return */
5082                 save_state(dc);
5083                 cpu_src1 = get_src1(dc, insn);
5084                 cpu_tmp0 = tcg_temp_new();
5085                 if (IS_IMM) {   /* immediate */
5086                     simm = GET_FIELDs(insn, 19, 31);
5087                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5088                 } else {                /* register */
5089                     rs2 = GET_FIELD(insn, 27, 31);
5090                     if (rs2) {
5091                         cpu_src2 = gen_load_gpr(dc, rs2);
5092                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5093                     } else {
5094                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5095                     }
5096                 }
5097                 gen_check_align(dc, cpu_tmp0, 3);
5098                 gen_helper_restore(tcg_env);
5099                 gen_mov_pc_npc(dc);
5100                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5101                 dc->npc = DYNAMIC_PC_LOOKUP;
5102                 goto jmp_insn;
5103 #endif
5104             } else {
5105                 cpu_src1 = get_src1(dc, insn);
5106                 cpu_tmp0 = tcg_temp_new();
5107                 if (IS_IMM) {   /* immediate */
5108                     simm = GET_FIELDs(insn, 19, 31);
5109                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5110                 } else {                /* register */
5111                     rs2 = GET_FIELD(insn, 27, 31);
5112                     if (rs2) {
5113                         cpu_src2 = gen_load_gpr(dc, rs2);
5114                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5115                     } else {
5116                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5117                     }
5118                 }
5119                 switch (xop) {
5120                 case 0x38:      /* jmpl */
5121                     {
5122                         gen_check_align(dc, cpu_tmp0, 3);
5123                         gen_store_gpr(dc, rd, tcg_constant_tl(dc->pc));
5124                         gen_mov_pc_npc(dc);
5125                         gen_address_mask(dc, cpu_tmp0);
5126                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5127                         dc->npc = DYNAMIC_PC_LOOKUP;
5128                     }
5129                     goto jmp_insn;
5130 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5131                 case 0x39:      /* rett, V9 return */
5132                     {
5133                         if (!supervisor(dc))
5134                             goto priv_insn;
5135                         gen_check_align(dc, cpu_tmp0, 3);
5136                         gen_mov_pc_npc(dc);
5137                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5138                         dc->npc = DYNAMIC_PC;
5139                         gen_helper_rett(tcg_env);
5140                     }
5141                     goto jmp_insn;
5142 #endif
5143                 case 0x3b: /* flush */
5144                     /* nop */
5145                     break;
5146                 case 0x3c:      /* save */
5147                     gen_helper_save(tcg_env);
5148                     gen_store_gpr(dc, rd, cpu_tmp0);
5149                     break;
5150                 case 0x3d:      /* restore */
5151                     gen_helper_restore(tcg_env);
5152                     gen_store_gpr(dc, rd, cpu_tmp0);
5153                     break;
5154 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5155                 case 0x3e:      /* V9 done/retry */
5156                     {
5157                         switch (rd) {
5158                         case 0:
5159                             if (!supervisor(dc))
5160                                 goto priv_insn;
5161                             dc->npc = DYNAMIC_PC;
5162                             dc->pc = DYNAMIC_PC;
5163                             translator_io_start(&dc->base);
5164                             gen_helper_done(tcg_env);
5165                             goto jmp_insn;
5166                         case 1:
5167                             if (!supervisor(dc))
5168                                 goto priv_insn;
5169                             dc->npc = DYNAMIC_PC;
5170                             dc->pc = DYNAMIC_PC;
5171                             translator_io_start(&dc->base);
5172                             gen_helper_retry(tcg_env);
5173                             goto jmp_insn;
5174                         default:
5175                             goto illegal_insn;
5176                         }
5177                     }
5178                     break;
5179 #endif
5180                 default:
5181                     goto illegal_insn;
5182                 }
5183             }
5184             break;
5185         }
5186         break;
5187     case 3:                     /* load/store instructions */
5188         {
5189             unsigned int xop = GET_FIELD(insn, 7, 12);
5190             /* ??? gen_address_mask prevents us from using a source
5191                register directly.  Always generate a temporary.  */
5192             TCGv cpu_addr = tcg_temp_new();
5193 
5194             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5195             if (xop == 0x3c || xop == 0x3e) {
5196                 /* V9 casa/casxa : no offset */
5197             } else if (IS_IMM) {     /* immediate */
5198                 simm = GET_FIELDs(insn, 19, 31);
5199                 if (simm != 0) {
5200                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5201                 }
5202             } else {            /* register */
5203                 rs2 = GET_FIELD(insn, 27, 31);
5204                 if (rs2 != 0) {
5205                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5206                 }
5207             }
5208             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5209                 (xop > 0x17 && xop <= 0x1d ) ||
5210                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5211                 TCGv cpu_val = gen_dest_gpr(dc, rd);
5212 
5213                 switch (xop) {
5214                 case 0x0:       /* ld, V9 lduw, load unsigned word */
5215                     gen_address_mask(dc, cpu_addr);
5216                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5217                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5218                     break;
5219                 case 0x1:       /* ldub, load unsigned byte */
5220                     gen_address_mask(dc, cpu_addr);
5221                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5222                                        dc->mem_idx, MO_UB);
5223                     break;
5224                 case 0x2:       /* lduh, load unsigned halfword */
5225                     gen_address_mask(dc, cpu_addr);
5226                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5227                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5228                     break;
5229                 case 0x3:       /* ldd, load double word */
5230                     if (rd & 1)
5231                         goto illegal_insn;
5232                     else {
5233                         TCGv_i64 t64;
5234 
5235                         gen_address_mask(dc, cpu_addr);
5236                         t64 = tcg_temp_new_i64();
5237                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5238                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5239                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5240                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5241                         gen_store_gpr(dc, rd + 1, cpu_val);
5242                         tcg_gen_shri_i64(t64, t64, 32);
5243                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5244                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5245                     }
5246                     break;
5247                 case 0x9:       /* ldsb, load signed byte */
5248                     gen_address_mask(dc, cpu_addr);
5249                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr, dc->mem_idx, MO_SB);
5250                     break;
5251                 case 0xa:       /* ldsh, load signed halfword */
5252                     gen_address_mask(dc, cpu_addr);
5253                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5254                                        dc->mem_idx, MO_TESW | MO_ALIGN);
5255                     break;
5256                 case 0xd:       /* ldstub */
5257                     gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5258                     break;
5259                 case 0x0f:
5260                     /* swap, swap register with memory. Also atomically */
5261                     cpu_src1 = gen_load_gpr(dc, rd);
5262                     gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5263                              dc->mem_idx, MO_TEUL);
5264                     break;
5265 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5266                 case 0x10:      /* lda, V9 lduwa, load word alternate */
5267                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5268                     break;
5269                 case 0x11:      /* lduba, load unsigned byte alternate */
5270                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5271                     break;
5272                 case 0x12:      /* lduha, load unsigned halfword alternate */
5273                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5274                     break;
5275                 case 0x13:      /* ldda, load double word alternate */
5276                     if (rd & 1) {
5277                         goto illegal_insn;
5278                     }
5279                     gen_ldda_asi(dc, cpu_addr, insn, rd);
5280                     goto skip_move;
5281                 case 0x19:      /* ldsba, load signed byte alternate */
5282                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5283                     break;
5284                 case 0x1a:      /* ldsha, load signed halfword alternate */
5285                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5286                     break;
5287                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
5288                     gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5289                     break;
5290                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
5291                                    atomically */
5292                     cpu_src1 = gen_load_gpr(dc, rd);
5293                     gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5294                     break;
5295 
5296 #ifndef TARGET_SPARC64
5297                 case 0x30: /* ldc */
5298                 case 0x31: /* ldcsr */
5299                 case 0x33: /* lddc */
5300                     goto ncp_insn;
5301 #endif
5302 #endif
5303 #ifdef TARGET_SPARC64
5304                 case 0x08: /* V9 ldsw */
5305                     gen_address_mask(dc, cpu_addr);
5306                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5307                                        dc->mem_idx, MO_TESL | MO_ALIGN);
5308                     break;
5309                 case 0x0b: /* V9 ldx */
5310                     gen_address_mask(dc, cpu_addr);
5311                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5312                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5313                     break;
5314                 case 0x18: /* V9 ldswa */
5315                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5316                     break;
5317                 case 0x1b: /* V9 ldxa */
5318                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5319                     break;
5320                 case 0x2d: /* V9 prefetch, no effect */
5321                     goto skip_move;
5322                 case 0x30: /* V9 ldfa */
5323                     if (gen_trap_ifnofpu(dc)) {
5324                         goto jmp_insn;
5325                     }
5326                     gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5327                     gen_update_fprs_dirty(dc, rd);
5328                     goto skip_move;
5329                 case 0x33: /* V9 lddfa */
5330                     if (gen_trap_ifnofpu(dc)) {
5331                         goto jmp_insn;
5332                     }
5333                     gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5334                     gen_update_fprs_dirty(dc, DFPREG(rd));
5335                     goto skip_move;
5336                 case 0x3d: /* V9 prefetcha, no effect */
5337                     goto skip_move;
5338                 case 0x32: /* V9 ldqfa */
5339                     CHECK_FPU_FEATURE(dc, FLOAT128);
5340                     if (gen_trap_ifnofpu(dc)) {
5341                         goto jmp_insn;
5342                     }
5343                     gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5344                     gen_update_fprs_dirty(dc, QFPREG(rd));
5345                     goto skip_move;
5346 #endif
5347                 default:
5348                     goto illegal_insn;
5349                 }
5350                 gen_store_gpr(dc, rd, cpu_val);
5351 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5352             skip_move: ;
5353 #endif
5354             } else if (xop >= 0x20 && xop < 0x24) {
5355                 if (gen_trap_ifnofpu(dc)) {
5356                     goto jmp_insn;
5357                 }
5358                 switch (xop) {
5359                 case 0x20:      /* ldf, load fpreg */
5360                     gen_address_mask(dc, cpu_addr);
5361                     cpu_dst_32 = gen_dest_fpr_F(dc);
5362                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5363                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5364                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5365                     break;
5366                 case 0x21:      /* ldfsr, V9 ldxfsr */
5367 #ifdef TARGET_SPARC64
5368                     gen_address_mask(dc, cpu_addr);
5369                     if (rd == 1) {
5370                         TCGv_i64 t64 = tcg_temp_new_i64();
5371                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5372                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5373                         gen_helper_ldxfsr(cpu_fsr, tcg_env, cpu_fsr, t64);
5374                         break;
5375                     }
5376 #endif
5377                     cpu_dst_32 = tcg_temp_new_i32();
5378                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5379                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5380                     gen_helper_ldfsr(cpu_fsr, tcg_env, cpu_fsr, cpu_dst_32);
5381                     break;
5382                 case 0x22:      /* ldqf, load quad fpreg */
5383                     CHECK_FPU_FEATURE(dc, FLOAT128);
5384                     gen_address_mask(dc, cpu_addr);
5385                     cpu_src1_64 = tcg_temp_new_i64();
5386                     tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5387                                         MO_TEUQ | MO_ALIGN_4);
5388                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5389                     cpu_src2_64 = tcg_temp_new_i64();
5390                     tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5391                                         MO_TEUQ | MO_ALIGN_4);
5392                     gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5393                     break;
5394                 case 0x23:      /* lddf, load double fpreg */
5395                     gen_address_mask(dc, cpu_addr);
5396                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5397                     tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5398                                         MO_TEUQ | MO_ALIGN_4);
5399                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5400                     break;
5401                 default:
5402                     goto illegal_insn;
5403                 }
5404             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5405                        xop == 0xe || xop == 0x1e) {
5406                 TCGv cpu_val = gen_load_gpr(dc, rd);
5407 
5408                 switch (xop) {
5409                 case 0x4: /* st, store word */
5410                     gen_address_mask(dc, cpu_addr);
5411                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5412                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5413                     break;
5414                 case 0x5: /* stb, store byte */
5415                     gen_address_mask(dc, cpu_addr);
5416                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr, dc->mem_idx, MO_UB);
5417                     break;
5418                 case 0x6: /* sth, store halfword */
5419                     gen_address_mask(dc, cpu_addr);
5420                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5421                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5422                     break;
5423                 case 0x7: /* std, store double word */
5424                     if (rd & 1)
5425                         goto illegal_insn;
5426                     else {
5427                         TCGv_i64 t64;
5428                         TCGv lo;
5429 
5430                         gen_address_mask(dc, cpu_addr);
5431                         lo = gen_load_gpr(dc, rd + 1);
5432                         t64 = tcg_temp_new_i64();
5433                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5434                         tcg_gen_qemu_st_i64(t64, cpu_addr,
5435                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5436                     }
5437                     break;
5438 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5439                 case 0x14: /* sta, V9 stwa, store word alternate */
5440                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5441                     break;
5442                 case 0x15: /* stba, store byte alternate */
5443                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5444                     break;
5445                 case 0x16: /* stha, store halfword alternate */
5446                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5447                     break;
5448                 case 0x17: /* stda, store double word alternate */
5449                     if (rd & 1) {
5450                         goto illegal_insn;
5451                     }
5452                     gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5453                     break;
5454 #endif
5455 #ifdef TARGET_SPARC64
5456                 case 0x0e: /* V9 stx */
5457                     gen_address_mask(dc, cpu_addr);
5458                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5459                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5460                     break;
5461                 case 0x1e: /* V9 stxa */
5462                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5463                     break;
5464 #endif
5465                 default:
5466                     goto illegal_insn;
5467                 }
5468             } else if (xop > 0x23 && xop < 0x28) {
5469                 if (gen_trap_ifnofpu(dc)) {
5470                     goto jmp_insn;
5471                 }
5472                 switch (xop) {
5473                 case 0x24: /* stf, store fpreg */
5474                     gen_address_mask(dc, cpu_addr);
5475                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
5476                     tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5477                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5478                     break;
5479                 case 0x25: /* stfsr, V9 stxfsr */
5480                     {
5481 #ifdef TARGET_SPARC64
5482                         gen_address_mask(dc, cpu_addr);
5483                         if (rd == 1) {
5484                             tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5485                                                dc->mem_idx, MO_TEUQ | MO_ALIGN);
5486                             break;
5487                         }
5488 #endif
5489                         tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5490                                            dc->mem_idx, MO_TEUL | MO_ALIGN);
5491                     }
5492                     break;
5493                 case 0x26:
5494 #ifdef TARGET_SPARC64
5495                     /* V9 stqf, store quad fpreg */
5496                     CHECK_FPU_FEATURE(dc, FLOAT128);
5497                     gen_address_mask(dc, cpu_addr);
5498                     /* ??? While stqf only requires 4-byte alignment, it is
5499                        legal for the cpu to signal the unaligned exception.
5500                        The OS trap handler is then required to fix it up.
5501                        For qemu, this avoids having to probe the second page
5502                        before performing the first write.  */
5503                     cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5504                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5505                                         dc->mem_idx, MO_TEUQ | MO_ALIGN_16);
5506                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5507                     cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5508                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5509                                         dc->mem_idx, MO_TEUQ);
5510                     break;
5511 #else /* !TARGET_SPARC64 */
5512                     /* stdfq, store floating point queue */
5513 #if defined(CONFIG_USER_ONLY)
5514                     goto illegal_insn;
5515 #else
5516                     if (!supervisor(dc))
5517                         goto priv_insn;
5518                     if (gen_trap_ifnofpu(dc)) {
5519                         goto jmp_insn;
5520                     }
5521                     goto nfq_insn;
5522 #endif
5523 #endif
5524                 case 0x27: /* stdf, store double fpreg */
5525                     gen_address_mask(dc, cpu_addr);
5526                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5527                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5528                                         MO_TEUQ | MO_ALIGN_4);
5529                     break;
5530                 default:
5531                     goto illegal_insn;
5532                 }
5533             } else if (xop > 0x33 && xop < 0x3f) {
5534                 switch (xop) {
5535 #ifdef TARGET_SPARC64
5536                 case 0x34: /* V9 stfa */
5537                     if (gen_trap_ifnofpu(dc)) {
5538                         goto jmp_insn;
5539                     }
5540                     gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5541                     break;
5542                 case 0x36: /* V9 stqfa */
5543                     {
5544                         CHECK_FPU_FEATURE(dc, FLOAT128);
5545                         if (gen_trap_ifnofpu(dc)) {
5546                             goto jmp_insn;
5547                         }
5548                         gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5549                     }
5550                     break;
5551                 case 0x37: /* V9 stdfa */
5552                     if (gen_trap_ifnofpu(dc)) {
5553                         goto jmp_insn;
5554                     }
5555                     gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5556                     break;
5557                 case 0x3e: /* V9 casxa */
5558                     rs2 = GET_FIELD(insn, 27, 31);
5559                     cpu_src2 = gen_load_gpr(dc, rs2);
5560                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5561                     break;
5562 #else
5563                 case 0x34: /* stc */
5564                 case 0x35: /* stcsr */
5565                 case 0x36: /* stdcq */
5566                 case 0x37: /* stdc */
5567                     goto ncp_insn;
5568 #endif
5569 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5570                 case 0x3c: /* V9 or LEON3 casa */
5571 #ifndef TARGET_SPARC64
5572                     CHECK_IU_FEATURE(dc, CASA);
5573 #endif
5574                     rs2 = GET_FIELD(insn, 27, 31);
5575                     cpu_src2 = gen_load_gpr(dc, rs2);
5576                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5577                     break;
5578 #endif
5579                 default:
5580                     goto illegal_insn;
5581                 }
5582             } else {
5583                 goto illegal_insn;
5584             }
5585         }
5586         break;
5587     }
5588     advance_pc(dc);
5589  jmp_insn:
5590     return;
5591  illegal_insn:
5592     gen_exception(dc, TT_ILL_INSN);
5593     return;
5594 #if !defined(CONFIG_USER_ONLY)
5595  priv_insn:
5596     gen_exception(dc, TT_PRIV_INSN);
5597     return;
5598 #endif
5599  nfpu_insn:
5600     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5601     return;
5602 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5603  nfq_insn:
5604     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5605     return;
5606 #endif
5607 #ifndef TARGET_SPARC64
5608  ncp_insn:
5609     gen_exception(dc, TT_NCP_INSN);
5610     return;
5611 #endif
5612 }
5613 
/*
 * Set up the DisasContext for a new TB: unpack the per-TB flag bits
 * into translation-time settings, and cap max_insns so that translation
 * never crosses a guest page boundary.
 */
static void sparc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    CPUSPARCState *env = cpu_env(cs);
    int bound;

    dc->pc = dc->base.pc_first;
    /* cs_base carries the next-PC of the first instruction. */
    dc->npc = (target_ulong)dc->base.tb->cs_base;
    /* Condition codes start unknown; computed lazily during translation. */
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = dc->base.tb->flags & TB_FLAG_MMU_MASK;
    dc->def = &env->def;
    dc->fpu_enabled = tb_fpu_enabled(dc->base.tb->flags);
    dc->address_mask_32bit = tb_am_enabled(dc->base.tb->flags);
#ifndef CONFIG_USER_ONLY
    dc->supervisor = (dc->base.tb->flags & TB_FLAG_SUPER) != 0;
#endif
#ifdef TARGET_SPARC64
    dc->fprs_dirty = 0;
    /* Default ASI for alternate-space accesses, packed into the flags. */
    dc->asi = (dc->base.tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
#ifndef CONFIG_USER_ONLY
    dc->hypervisor = (dc->base.tb->flags & TB_FLAG_HYPER) != 0;
#endif
#endif
    /*
     * if we reach a page boundary, we stop generation so that the
     * PC of a TT_TFAULT exception is always in the right page
     */
    /* Bytes remaining in the page, divided by 4 bytes per insn. */
    bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
    dc->base.max_insns = MIN(dc->base.max_insns, bound);
}
5644 
/* No per-TB work beyond what init_disas_context already did. */
static void sparc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}
5648 
5649 static void sparc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
5650 {
5651     DisasContext *dc = container_of(dcbase, DisasContext, base);
5652     target_ulong npc = dc->npc;
5653 
5654     if (npc & 3) {
5655         switch (npc) {
5656         case JUMP_PC:
5657             assert(dc->jump_pc[1] == dc->pc + 4);
5658             npc = dc->jump_pc[0] | JUMP_PC;
5659             break;
5660         case DYNAMIC_PC:
5661         case DYNAMIC_PC_LOOKUP:
5662             npc = DYNAMIC_PC;
5663             break;
5664         default:
5665             g_assert_not_reached();
5666         }
5667     }
5668     tcg_gen_insn_start(dc->pc, npc);
5669 }
5670 
5671 static void sparc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
5672 {
5673     DisasContext *dc = container_of(dcbase, DisasContext, base);
5674     CPUSPARCState *env = cpu_env(cs);
5675     unsigned int insn;
5676 
5677     insn = translator_ldl(env, &dc->base, dc->pc);
5678     dc->base.pc_next += 4;
5679 
5680     if (!decode(dc, insn)) {
5681         disas_sparc_legacy(dc, insn);
5682     }
5683 
5684     if (dc->base.is_jmp == DISAS_NORETURN) {
5685         return;
5686     }
5687     if (dc->pc != dc->base.pc_next) {
5688         dc->base.is_jmp = DISAS_TOO_MANY;
5689     }
5690 }
5691 
/*
 * Emit the TB epilogue: choose an exit strategy based on how translation
 * ended, then generate the out-of-line code for exceptions that were
 * deferred onto dc->delay_excp_list during translation.
 */
static void sparc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    DisasDelayException *e, *e_next;
    bool may_lookup;

    switch (dc->base.is_jmp) {
    case DISAS_NEXT:
    case DISAS_TOO_MANY:
        if (((dc->pc | dc->npc) & 3) == 0) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
            break;
        }

        /*
         * At least one of pc/npc is dynamic (low bits hold a DYNAMIC_PC /
         * JUMP_PC marker).  Store the statically-known values and decide
         * whether a goto_ptr TB lookup is allowed.
         */
        may_lookup = true;
        if (dc->pc & 3) {
            switch (dc->pc) {
            case DYNAMIC_PC_LOOKUP:
                break;
            case DYNAMIC_PC:
                /* Must return to the main loop. */
                may_lookup = false;
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
        }

        if (dc->npc & 3) {
            switch (dc->npc) {
            case JUMP_PC:
                /* Resolve the two candidate npc values via the condition. */
                gen_generic_branch(dc);
                break;
            case DYNAMIC_PC:
                may_lookup = false;
                break;
            case DYNAMIC_PC_LOOKUP:
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            tcg_gen_movi_tl(cpu_npc, dc->npc);
        }
        if (may_lookup) {
            tcg_gen_lookup_and_goto_ptr();
        } else {
            tcg_gen_exit_tb(NULL, 0);
        }
        break;

    case DISAS_NORETURN:
       break;

    case DISAS_EXIT:
        /* Exit TB */
        save_state(dc);
        tcg_gen_exit_tb(NULL, 0);
        break;

    default:
        g_assert_not_reached();
    }

    /* Deferred delay-slot exceptions: label, set pc/npc, raise, free. */
    for (e = dc->delay_excp_list; e ; e = e_next) {
        gen_set_label(e->lab);

        tcg_gen_movi_tl(cpu_pc, e->pc);
        /*
         * NOTE(review): an unaligned e->npc appears to mean "dynamic npc,
         * already stored" -- only static values are written back.  Confirm
         * against the producers of delay_excp_list.
         */
        if (e->npc % 4 == 0) {
            tcg_gen_movi_tl(cpu_npc, e->npc);
        }
        gen_helper_raise_exception(tcg_env, e->excp);

        e_next = e->next;
        g_free(e);
    }
}
5771 
5772 static void sparc_tr_disas_log(const DisasContextBase *dcbase,
5773                                CPUState *cpu, FILE *logfile)
5774 {
5775     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
5776     target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
5777 }
5778 
/* Hooks plugged into the generic translator loop (see translator_loop). */
static const TranslatorOps sparc_tr_ops = {
    .init_disas_context = sparc_tr_init_disas_context,
    .tb_start           = sparc_tr_tb_start,
    .insn_start         = sparc_tr_insn_start,
    .translate_insn     = sparc_tr_translate_insn,
    .tb_stop            = sparc_tr_tb_stop,
    .disas_log          = sparc_tr_disas_log,
};
5787 
5788 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
5789                            target_ulong pc, void *host_pc)
5790 {
5791     DisasContext dc = {};
5792 
5793     translator_loop(cs, tb, max_insns, pc, host_pc, &sparc_tr_ops, &dc.base);
5794 }
5795 
/*
 * Create the TCG globals that mirror CPUSPARCState fields.  Called once
 * at startup; fills in the file-scope cpu_* variables declared above.
 */
void sparc_tcg_init(void)
{
    static const char gregnames[32][4] = {
        "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
        "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
        "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
        "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
    };
    /* Even-numbered names only: each i64 global spans a register pair. */
    static const char fregnames[32][4] = {
        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
    };

    /* 32-bit globals backed directly by CPUSPARCState fields. */
    static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
#ifdef TARGET_SPARC64
        { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
        { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
#else
        { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
#endif
        { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
        { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
    };

    /* target_ulong-sized globals, likewise backed by env fields. */
    static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
#ifdef TARGET_SPARC64
        { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
        { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
        { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
        { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
          "hstick_cmpr" },
        { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
        { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
        { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
        { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
        { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
#endif
        { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
        { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
        { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
        { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
        { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
        { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
        { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
        { &cpu_y, offsetof(CPUSPARCState, y), "y" },
#ifndef CONFIG_USER_ONLY
        { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
#endif
    };

    unsigned int i;

    /* Pointer to the current register window; base for cpu_regs[8..31]. */
    cpu_regwptr = tcg_global_mem_new_ptr(tcg_env,
                                         offsetof(CPUSPARCState, regwptr),
                                         "regwptr");

    for (i = 0; i < ARRAY_SIZE(r32); ++i) {
        *r32[i].ptr = tcg_global_mem_new_i32(tcg_env, r32[i].off, r32[i].name);
    }

    for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
        *rtl[i].ptr = tcg_global_mem_new(tcg_env, rtl[i].off, rtl[i].name);
    }

    /* %g0 is hardwired to zero on SPARC; it gets no backing global. */
    cpu_regs[0] = NULL;
    for (i = 1; i < 8; ++i) {
        cpu_regs[i] = tcg_global_mem_new(tcg_env,
                                         offsetof(CPUSPARCState, gregs[i]),
                                         gregnames[i]);
    }

    /* Windowed registers (%o/%l/%i) are addressed via cpu_regwptr. */
    for (i = 8; i < 32; ++i) {
        cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
                                         (i - 8) * sizeof(target_ulong),
                                         gregnames[i]);
    }

    for (i = 0; i < TARGET_DPREGS; i++) {
        cpu_fpr[i] = tcg_global_mem_new_i64(tcg_env,
                                            offsetof(CPUSPARCState, fpr[i]),
                                            fregnames[i]);
    }
}
5881 
5882 void sparc_restore_state_to_opc(CPUState *cs,
5883                                 const TranslationBlock *tb,
5884                                 const uint64_t *data)
5885 {
5886     SPARCCPU *cpu = SPARC_CPU(cs);
5887     CPUSPARCState *env = &cpu->env;
5888     target_ulong pc = data[0];
5889     target_ulong npc = data[1];
5890 
5891     env->pc = pc;
5892     if (npc == DYNAMIC_PC) {
5893         /* dynamic NPC: already stored */
5894     } else if (npc & JUMP_PC) {
5895         /* jump PC: use 'cond' and the jump targets of the translation */
5896         if (env->cond) {
5897             env->npc = npc & ~3;
5898         } else {
5899             env->npc = pc + 4;
5900         }
5901     } else {
5902         env->npc = npc;
5903     }
5904 }
5905