xref: /openbmc/qemu/target/sparc/translate.c (revision 8a0816d6bee1f1f639429d1e468abd4845336689)
1 /*
2    SPARC translation
3 
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6 
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2.1 of the License, or (at your option) any later version.
11 
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16 
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg/tcg-op.h"
28 #include "exec/cpu_ldst.h"
29 
30 #include "exec/helper-gen.h"
31 
32 #include "exec/translator.h"
33 #include "exec/log.h"
34 #include "asi.h"
35 
36 
#define DEBUG_DISAS

/* Sentinel values stored in dc->pc / dc->npc in place of a real address.  */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */

#define DISAS_EXIT  DISAS_TARGET_0
44 
/* global register indexes */
static TCGv_ptr cpu_regwptr;
/* Operands of the last CC-setting operation, for lazy flag evaluation.  */
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;   /* which CC_OP_* the cc_src/cc_dst values encode */
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc;
static TCGv cpu_regs[32];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond;        /* boolean result of a pending conditional branch */
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
#else
static TCGv cpu_wim;
#endif
/* Floating point registers: two 32-bit F regs packed per 64-bit value
   (even reg in the high half, odd reg in the low half).  */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];
67 
68 #include "exec/gen-icount.h"
69 
/* Per-translation-block disassembly state.  */
typedef struct DisasContext {
    DisasContextBase base;
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int mem_idx;             /* MMU index for memory accesses */
    bool fpu_enabled;
    bool address_mask_32bit;
#ifndef CONFIG_USER_ONLY
    bool supervisor;
#ifdef TARGET_SPARC64
    bool hypervisor;
#endif
#endif

    uint32_t cc_op;  /* current CC operation */
    sparc_def_t *def;
#ifdef TARGET_SPARC64
    int fprs_dirty;  /* FPRS dirty bits already set in this TB; see
                        gen_update_fprs_dirty() */
    int asi;
#endif
} DisasContext;

/* A comparison ready for brcond/movcond: COND applied to (c1, c2).
   When is_bool is set, c1 already holds a 0/1 value and c2 is zero.  */
typedef struct {
    TCGCond cond;
    bool is_bool;
    TCGv c1, c2;
} DisasCompare;
98 
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants of the two extractors above.  */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
/* Decode the insn encoding of a double/quad FP register number
   (sparc64 folds bit 0 into bit 5 to address 64 registers).  */
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
120 
/*
 * Sign-extend the low LEN bits of X to a full 32-bit signed value.
 * The left shift is performed on an unsigned value: left-shifting a
 * signed int into (or out of) the sign bit is undefined behaviour.
 * The arithmetic right shift of a negative int is relied upon, as it
 * is throughout QEMU.
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (int)((uint32_t)x << len) >> len;
}
126 
/* Bit 13 of the instruction word selects the immediate operand form.  */
#define IS_IMM (insn & (1<<13))
128 
/* Mark the FPRS bank containing FP register RD as dirty (sparc64 only;
   a no-op on sparc32, which has no FPRS register).  */
static inline void gen_update_fprs_dirty(DisasContext *dc, int rd)
{
#if defined(TARGET_SPARC64)
    /* Lower bank (f0-f31) -> FPRS.DL (bit 0), upper bank -> FPRS.DU.  */
    int bit = (rd < 32) ? 1 : 2;
    /* If we know we've already set this bit within the TB,
       we can avoid setting it again.  */
    if (!(dc->fprs_dirty & bit)) {
        dc->fprs_dirty |= bit;
        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
    }
#endif
}
141 
142 /* floating point registers moves */
143 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
144 {
145     TCGv_i32 ret = tcg_temp_new_i32();
146     if (src & 1) {
147         tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
148     } else {
149         tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
150     }
151     return ret;
152 }
153 
154 static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
155 {
156     TCGv_i64 t = tcg_temp_new_i64();
157 
158     tcg_gen_extu_i32_i64(t, v);
159     tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
160                         (dst & 1 ? 0 : 32), 32);
161     gen_update_fprs_dirty(dc, dst);
162 }
163 
/* Allocate a fresh 32-bit temporary to receive a single-precision result.  */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return tcg_temp_new_i32();
}
168 
/* Return the 64-bit TCG value backing double-precision FP register SRC.  */
static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}
174 
175 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
176 {
177     dst = DFPREG(dst);
178     tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
179     gen_update_fprs_dirty(dc, dst);
180 }
181 
/* Return the 64-bit TCG value that will receive double-precision
   FP register DST (written in place, no temporary needed).  */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
186 
187 static void gen_op_load_fpr_QT0(unsigned int src)
188 {
189     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
190                    offsetof(CPU_QuadU, ll.upper));
191     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
192                    offsetof(CPU_QuadU, ll.lower));
193 }
194 
195 static void gen_op_load_fpr_QT1(unsigned int src)
196 {
197     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
198                    offsetof(CPU_QuadU, ll.upper));
199     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
200                    offsetof(CPU_QuadU, ll.lower));
201 }
202 
203 static void gen_op_store_QT0_fpr(unsigned int dst)
204 {
205     tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
206                    offsetof(CPU_QuadU, ll.upper));
207     tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
208                    offsetof(CPU_QuadU, ll.lower));
209 }
210 
211 static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
212                             TCGv_i64 v1, TCGv_i64 v2)
213 {
214     dst = QFPREG(dst);
215 
216     tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
217     tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
218     gen_update_fprs_dirty(dc, dst);
219 }
220 
#ifdef TARGET_SPARC64
/* Return the 64-bit value holding the upper half of quad register SRC.  */
static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2];
}

/* Return the 64-bit value holding the lower half of quad register SRC.  */
static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2 + 1];
}

/* Copy quad FP register RS into RD and mark FPRS dirty.  */
static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(dc, rd);
}
#endif
244 
/* moves */
#ifdef CONFIG_USER_ONLY
/* User-mode emulation never executes privileged code.  */
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->hypervisor)
/* Hypervisor mode implies supervisor privileges as well.  */
#define supervisor(dc) (dc->supervisor | dc->hypervisor)
#else
#define supervisor(dc) (dc->supervisor)
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
/* True when addresses must be truncated to 32 bits (PSTATE.AM).  */
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
/* A 32-bit ABI always masks addresses.  */
#define AM_CHECK(dc) (1)
#endif
#endif
267 
268 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
269 {
270 #ifdef TARGET_SPARC64
271     if (AM_CHECK(dc))
272         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
273 #endif
274 }
275 
276 static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
277 {
278     if (reg > 0) {
279         assert(reg < 32);
280         return cpu_regs[reg];
281     } else {
282         TCGv t = tcg_temp_new();
283         tcg_gen_movi_tl(t, 0);
284         return t;
285     }
286 }
287 
288 static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
289 {
290     if (reg > 0) {
291         assert(reg < 32);
292         tcg_gen_mov_tl(cpu_regs[reg], v);
293     }
294 }
295 
296 static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
297 {
298     if (reg > 0) {
299         assert(reg < 32);
300         return cpu_regs[reg];
301     } else {
302         return tcg_temp_new();
303     }
304 }
305 
306 static bool use_goto_tb(DisasContext *s, target_ulong pc, target_ulong npc)
307 {
308     return translator_use_goto_tb(&s->base, pc) &&
309            translator_use_goto_tb(&s->base, npc);
310 }
311 
312 static void gen_goto_tb(DisasContext *s, int tb_num,
313                         target_ulong pc, target_ulong npc)
314 {
315     if (use_goto_tb(s, pc, npc))  {
316         /* jump to same page: we can use a direct jump */
317         tcg_gen_goto_tb(tb_num);
318         tcg_gen_movi_tl(cpu_pc, pc);
319         tcg_gen_movi_tl(cpu_npc, npc);
320         tcg_gen_exit_tb(s->base.tb, tb_num);
321     } else {
322         /* jump to another page: currently not optimized */
323         tcg_gen_movi_tl(cpu_pc, pc);
324         tcg_gen_movi_tl(cpu_npc, npc);
325         tcg_gen_exit_tb(NULL, 0);
326     }
327 }
328 
// XXX suboptimal
/* Each helper below extracts one PSR flag bit from the 32-bit SRC
   into REG as a 0/1 value.  */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
353 
/* DST = SRC1 + SRC2, latching the operands and the result into the
   cc_* globals for later lazy condition-code evaluation.  The copies
   must happen first: DST may alias one of the sources.  */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
361 
/* Recover the 32-bit carry out of the previous CC_OP_ADD-style
   operation as a fresh (or aliased) i32 value.  */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    /* Compare only the low 32 bits of the latched operands.  */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
382 
/* Recover the 32-bit borrow out of the previous CC_OP_SUB-style
   operation as a fresh (or aliased) i32 value.  */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    /* Compare only the low 32 bits of the latched operands.  */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
403 
/* DST = SRC1 + SRC2 + icc.C (the ADDX/ADDC instruction), optionally
   updating the condition codes to CC_OP_ADDX.  The carry-in is taken
   from the lazily-tracked cc state, specialising on dc->cc_op to
   avoid the generic helper when possible.  */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 32-bit carry to the target word size if needed.  */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
468 
/* DST = SRC1 - SRC2, latching the operands and the result into the
   cc_* globals for later lazy condition-code evaluation.  The copies
   must happen first: DST may alias one of the sources.  */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
476 
/* DST = SRC1 - SRC2 - icc.C (the SUBX/SUBC instruction), optionally
   updating the condition codes to CC_OP_SUBX.  Mirror image of
   gen_op_addx_int: the borrow-in is recovered from the lazy cc state
   based on dc->cc_op.  */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 32-bit borrow to the target word size if needed.  */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
541 
/* One step of the sparc32 MULScc multiply-step instruction:
   shift the Y register, conditionally zero the second operand, and
   add into the latched cc state.  Statement order is significant —
   the cc_* globals double as scratch space.  */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
580 
/* 32x32 -> 64-bit multiply: low 32 bits of the product go to DST,
   the high 32 bits to the Y register.  SIGN_EXT selects signed vs
   unsigned interpretation of the truncated operands.  */
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    /* The host pair-multiply produces both halves directly.  */
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    /* 64-bit target: TCGv is i64, so widen, multiply, then split.  */
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
605 
/* UMUL: unsigned 32x32 multiply, high half to Y.  */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

/* SMUL: signed 32x32 multiply, high half to Y.  */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
617 
/*
 * Integer condition-code evaluators: each computes one SPARC icc
 * condition from the PSR value SRC into DST as a 0/1 value.  The
 * short comment above each helper is the flag formula it implements;
 * negated conditions reuse their positive counterpart and flip the
 * low bit.
 */

// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
731 
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
/* Extract FCC0 of the FSR condition field at FCC_OFFSET into REG (0/1).  */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Extract FCC1 of the FSR condition field at FCC_OFFSET into REG (0/1).  */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
752 
/*
 * Floating-point condition evaluators: each computes one SPARC fcc
 * condition into DST (0/1) from the FSR value SRC, using the FCC
 * pair selected by FCC_OFFSET.  The comment above each helper lists
 * the FCC encodings (see the table above) that make it true.
 */

// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
887 
888 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
889                                target_ulong pc2, TCGv r_cond)
890 {
891     TCGLabel *l1 = gen_new_label();
892 
893     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
894 
895     gen_goto_tb(dc, 0, pc1, pc1 + 4);
896 
897     gen_set_label(l1);
898     gen_goto_tb(dc, 1, pc2, pc2 + 4);
899 }
900 
/* Conditional branch with the annul bit set: when cpu_cond is true,
   execute the delay slot at npc then continue at PC1; when false,
   skip the delay slot entirely (npc + 4).  Ends the TB.  */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    /* Taken: delay slot at npc, then the branch target.  */
    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    /* Not taken: the delay slot is annulled.  */
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->base.is_jmp = DISAS_NORETURN;
}
915 
/* Conditional branch whose npc depends on cpu_cond.  When npc is known
   at translate time the decision is deferred via JUMP_PC/jump_pc[];
   otherwise the new npc is selected at run time with a movcond.  */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (likely(npc != DYNAMIC_PC)) {
        dc->pc = npc;
        dc->jump_pc[0] = pc1;      /* npc if the condition holds */
        dc->jump_pc[1] = npc + 4;  /* npc otherwise */
        dc->npc = JUMP_PC;
    } else {
        TCGv t, z;

        tcg_gen_mov_tl(cpu_pc, cpu_npc);

        /* cpu_npc = cpu_cond ? pc1 : cpu_npc + 4.  */
        tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
        t = tcg_const_tl(pc1);
        z = tcg_const_tl(0);
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, z, t, cpu_npc);

        dc->pc = DYNAMIC_PC;
    }
}
938 
939 static inline void gen_generic_branch(DisasContext *dc)
940 {
941     TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
942     TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
943     TCGv zero = tcg_const_tl(0);
944 
945     tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
946 }
947 
/* call this function before using the condition register as it may
   have been set for a jump */
/* Resolving the JUMP_PC here frees cpu_cond for reuse.  */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}
957 
/* Make cpu_npc hold the architectural npc: resolve a pending JUMP_PC,
   or store a translation-time-known value.  Already-dynamic npc needs
   no work.  */
static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
967 
968 static inline void update_psr(DisasContext *dc)
969 {
970     if (dc->cc_op != CC_OP_FLAGS) {
971         dc->cc_op = CC_OP_FLAGS;
972         gen_helper_compute_psr(cpu_env);
973     }
974 }
975 
/* Flush the translation-time pc/npc into the CPU state, e.g. before
   a call that may raise an exception.  */
static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
981 
982 static void gen_exception(DisasContext *dc, int which)
983 {
984     TCGv_i32 t;
985 
986     save_state(dc);
987     t = tcg_const_i32(which);
988     gen_helper_raise_exception(cpu_env, t);
989     dc->base.is_jmp = DISAS_NORETURN;
990 }
991 
992 static void gen_check_align(TCGv addr, int mask)
993 {
994     TCGv_i32 r_mask = tcg_const_i32(mask);
995     gen_helper_check_align(cpu_env, addr, r_mask);
996 }
997 
998 static inline void gen_mov_pc_npc(DisasContext *dc)
999 {
1000     if (dc->npc == JUMP_PC) {
1001         gen_generic_branch(dc);
1002         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1003         dc->pc = DYNAMIC_PC;
1004     } else if (dc->npc == DYNAMIC_PC) {
1005         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1006         dc->pc = DYNAMIC_PC;
1007     } else {
1008         dc->pc = dc->npc;
1009     }
1010 }
1011 
/* Sequential advance: pc <- npc, npc <- npc + 4.  */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1017 
1018 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1019                         DisasContext *dc)
1020 {
1021     static int subcc_cond[16] = {
1022         TCG_COND_NEVER,
1023         TCG_COND_EQ,
1024         TCG_COND_LE,
1025         TCG_COND_LT,
1026         TCG_COND_LEU,
1027         TCG_COND_LTU,
1028         -1, /* neg */
1029         -1, /* overflow */
1030         TCG_COND_ALWAYS,
1031         TCG_COND_NE,
1032         TCG_COND_GT,
1033         TCG_COND_GE,
1034         TCG_COND_GTU,
1035         TCG_COND_GEU,
1036         -1, /* pos */
1037         -1, /* no overflow */
1038     };
1039 
1040     static int logic_cond[16] = {
1041         TCG_COND_NEVER,
1042         TCG_COND_EQ,     /* eq:  Z */
1043         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1044         TCG_COND_LT,     /* lt:  N ^ V -> N */
1045         TCG_COND_EQ,     /* leu: C | Z -> Z */
1046         TCG_COND_NEVER,  /* ltu: C -> 0 */
1047         TCG_COND_LT,     /* neg: N */
1048         TCG_COND_NEVER,  /* vs:  V -> 0 */
1049         TCG_COND_ALWAYS,
1050         TCG_COND_NE,     /* ne:  !Z */
1051         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1052         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1053         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1054         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1055         TCG_COND_GE,     /* pos: !N */
1056         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1057     };
1058 
1059     TCGv_i32 r_src;
1060     TCGv r_dst;
1061 
1062 #ifdef TARGET_SPARC64
1063     if (xcc) {
1064         r_src = cpu_xcc;
1065     } else {
1066         r_src = cpu_psr;
1067     }
1068 #else
1069     r_src = cpu_psr;
1070 #endif
1071 
1072     switch (dc->cc_op) {
1073     case CC_OP_LOGIC:
1074         cmp->cond = logic_cond[cond];
1075     do_compare_dst_0:
1076         cmp->is_bool = false;
1077         cmp->c2 = tcg_const_tl(0);
1078 #ifdef TARGET_SPARC64
1079         if (!xcc) {
1080             cmp->c1 = tcg_temp_new();
1081             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1082             break;
1083         }
1084 #endif
1085         cmp->c1 = cpu_cc_dst;
1086         break;
1087 
1088     case CC_OP_SUB:
1089         switch (cond) {
1090         case 6:  /* neg */
1091         case 14: /* pos */
1092             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1093             goto do_compare_dst_0;
1094 
1095         case 7: /* overflow */
1096         case 15: /* !overflow */
1097             goto do_dynamic;
1098 
1099         default:
1100             cmp->cond = subcc_cond[cond];
1101             cmp->is_bool = false;
1102 #ifdef TARGET_SPARC64
1103             if (!xcc) {
1104                 /* Note that sign-extension works for unsigned compares as
1105                    long as both operands are sign-extended.  */
1106                 cmp->c1 = tcg_temp_new();
1107                 cmp->c2 = tcg_temp_new();
1108                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1109                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1110                 break;
1111             }
1112 #endif
1113             cmp->c1 = cpu_cc_src;
1114             cmp->c2 = cpu_cc_src2;
1115             break;
1116         }
1117         break;
1118 
1119     default:
1120     do_dynamic:
1121         gen_helper_compute_psr(cpu_env);
1122         dc->cc_op = CC_OP_FLAGS;
1123         /* FALLTHRU */
1124 
1125     case CC_OP_FLAGS:
1126         /* We're going to generate a boolean result.  */
1127         cmp->cond = TCG_COND_NE;
1128         cmp->is_bool = true;
1129         cmp->c1 = r_dst = tcg_temp_new();
1130         cmp->c2 = tcg_const_tl(0);
1131 
1132         switch (cond) {
1133         case 0x0:
1134             gen_op_eval_bn(r_dst);
1135             break;
1136         case 0x1:
1137             gen_op_eval_be(r_dst, r_src);
1138             break;
1139         case 0x2:
1140             gen_op_eval_ble(r_dst, r_src);
1141             break;
1142         case 0x3:
1143             gen_op_eval_bl(r_dst, r_src);
1144             break;
1145         case 0x4:
1146             gen_op_eval_bleu(r_dst, r_src);
1147             break;
1148         case 0x5:
1149             gen_op_eval_bcs(r_dst, r_src);
1150             break;
1151         case 0x6:
1152             gen_op_eval_bneg(r_dst, r_src);
1153             break;
1154         case 0x7:
1155             gen_op_eval_bvs(r_dst, r_src);
1156             break;
1157         case 0x8:
1158             gen_op_eval_ba(r_dst);
1159             break;
1160         case 0x9:
1161             gen_op_eval_bne(r_dst, r_src);
1162             break;
1163         case 0xa:
1164             gen_op_eval_bg(r_dst, r_src);
1165             break;
1166         case 0xb:
1167             gen_op_eval_bge(r_dst, r_src);
1168             break;
1169         case 0xc:
1170             gen_op_eval_bgu(r_dst, r_src);
1171             break;
1172         case 0xd:
1173             gen_op_eval_bcc(r_dst, r_src);
1174             break;
1175         case 0xe:
1176             gen_op_eval_bpos(r_dst, r_src);
1177             break;
1178         case 0xf:
1179             gen_op_eval_bvc(r_dst, r_src);
1180             break;
1181         }
1182         break;
1183     }
1184 }
1185 
/*
 * Describe in *cmp a comparison equivalent to the SPARC floating-point
 * condition "cond" evaluated against FSR.fcc[cc].  Unlike gen_compare,
 * this always computes a straight boolean into a fresh temporary.
 */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    /* Bit offset of fcc[cc] within the FSR, relative to fcc0.  */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1264 
1265 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1266                      DisasContext *dc)
1267 {
1268     DisasCompare cmp;
1269     gen_compare(&cmp, cc, cond, dc);
1270 
1271     /* The interface is to return a boolean in r_dst.  */
1272     if (cmp.is_bool) {
1273         tcg_gen_mov_tl(r_dst, cmp.c1);
1274     } else {
1275         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1276     }
1277 }
1278 
1279 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1280 {
1281     DisasCompare cmp;
1282     gen_fcompare(&cmp, cc, cond);
1283 
1284     /* The interface is to return a boolean in r_dst.  */
1285     if (cmp.is_bool) {
1286         tcg_gen_mov_tl(r_dst, cmp.c1);
1287     } else {
1288         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1289     }
1290 }
1291 
1292 #ifdef TARGET_SPARC64
// Inverted logic: table gives the *negation* of each BPr register
// condition; gen_compare_reg undoes this with tcg_invert_cond().
// The -1 entries (cond 0 and 4) are reserved encodings.
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1304 
/* Describe in *cmp a register-vs-zero comparison for a BPr-style
   condition.  NOTE(review): assumes the decoder never passes the
   reserved cond values 0/4 (table holds -1 there) -- confirm at callers.  */
static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->c1 = r_src;
    cmp->c2 = tcg_const_tl(0);
}
1312 
1313 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1314 {
1315     DisasCompare cmp;
1316     gen_compare_reg(&cmp, cond, r_src);
1317 
1318     /* The interface is to return a boolean in r_dst.  */
1319     tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1320 }
1321 #endif
1322 
/*
 * Translate an integer conditional branch (Bicc/BPcc).
 * cond and the annul bit are decoded from the insn; cc selects
 * icc vs xcc.  target is relative to the branch's own pc.
 */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        /* 32-bit address masking is in effect.  */
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* bn,a: the delay slot is annulled; skip over it.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            /* bn: fall through, delay slot executes normally.  */
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* ba,a: delay slot annulled; jump straight to target.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            /* ba: execute the delay slot, then go to target.  */
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* Conditional: evaluate into cpu_cond, then emit either an
           annulling or a normal delayed branch.  */
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1362 
/*
 * Translate a floating-point conditional branch (FBfcc/FBPfcc).
 * Same structure as do_branch, but the condition is evaluated
 * against FSR.fcc[cc] via gen_fcond.
 */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        /* 32-bit address masking is in effect.  */
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* fbn,a: the delay slot is annulled; skip over it.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            /* fbn: fall through, delay slot executes normally.  */
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* fba,a: delay slot annulled; jump straight to target.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            /* fba: execute the delay slot, then go to target.  */
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* Conditional: evaluate into cpu_cond, then emit either an
           annulling or a normal delayed branch.  */
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1402 
1403 #ifdef TARGET_SPARC64
/*
 * Translate a branch on integer register contents (BPr).  There are
 * no unconditional encodings here, so the condition is always
 * evaluated dynamically against r_reg.
 */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (unlikely(AM_CHECK(dc))) {
        /* 32-bit address masking is in effect.  */
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target);
    } else {
        gen_branch_n(dc, target);
    }
}
1421 
/* FCMPs: single-precision compare of rs1/rs2, updating FSR.fcc[fccno].  */
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1439 
/* FCMPd: double-precision compare of rs1/rs2, updating FSR.fcc[fccno].  */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1457 
/* FCMPq: quad-precision compare, updating FSR.fcc[fccno]; the operands
   are implicit via cpu_env (loaded into QT0/QT1 by the caller).  */
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}
1475 
/* FCMPEs: single-precision "compare and exception" variant of FCMPs,
   updating FSR.fcc[fccno] (semantics live in the helper).  */
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1493 
/* FCMPEd: double-precision "compare and exception" variant of FCMPd,
   updating FSR.fcc[fccno] (semantics live in the helper).  */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1511 
/* FCMPEq: quad-precision "compare and exception" variant of FCMPq,
   updating FSR.fcc[fccno]; operands implicit via cpu_env (QT0/QT1).  */
static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}
1529 
1530 #else
1531 
/* Pre-v9: there is only one fcc field, so fccno is ignored.  */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1536 
/* Pre-v9: there is only one fcc field, so fccno is ignored.  */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1541 
/* Pre-v9: there is only one fcc field, so fccno is ignored.  */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, cpu_env);
}
1546 
/* Pre-v9: there is only one fcc field, so fccno is ignored.  */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1551 
/* Pre-v9: there is only one fcc field, so fccno is ignored.  */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1556 
/* Pre-v9: there is only one fcc field, so fccno is ignored.  */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, cpu_env);
}
1561 #endif
1562 
/* Set FSR.ftt to fsr_flags (clearing the old value first) and raise
   an fp_exception trap.  */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1569 
/* If the FPU is disabled, raise an fp_disabled trap and return nonzero
   so the caller abandons the insn.  User-only always has the FPU.  */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1580 
/* Clear the FSR ftt and current-exception (cexc) fields.  */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1585 
/* frd = gen(env, frs) on single-precision regs, with IEEE exception
   check after the operation.  */
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1599 
/* frd = gen(frs) on single-precision regs; "ne" = no IEEE exception
   check is performed.  */
static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}
1612 
/* frd = gen(env, frs1, frs2) on single-precision regs, with IEEE
   exception check after the operation.  */
static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1627 
1628 #ifdef TARGET_SPARC64
/* frd = gen(frs1, frs2) on single-precision regs; no IEEE check.  */
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
1642 #endif
1643 
/* frd = gen(env, frs) on double-precision regs, with IEEE exception
   check after the operation.  */
static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1657 
1658 #ifdef TARGET_SPARC64
/* frd = gen(frs) on double-precision regs; no IEEE check.  */
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
1671 #endif
1672 
/* frd = gen(env, frs1, frs2) on double-precision regs, with IEEE
   exception check after the operation.  */
static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1687 
1688 #ifdef TARGET_SPARC64
/* frd = gen(frs1, frs2) on double-precision regs; no IEEE check.  */
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1702 
/* frd = gen(%gsr, frs1, frs2) on double-precision regs; used by VIS
   ops that take the GSR as an extra input.  No IEEE check.  */
static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1716 
/* frd = gen(frd, frs1, frs2): the destination register is also read
   as a third source operand.  No IEEE check.  */
static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1731 #endif
1732 
/* Quad-precision unary op: operand staged through QT1, result through
   QT0, with IEEE exception check.  */
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1744 
1745 #ifdef TARGET_SPARC64
/* Quad-precision unary op without the IEEE exception check.  */
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1756 #endif
1757 
/* Quad-precision binary op: operands staged through QT0/QT1, result
   through QT0, with IEEE exception check.  */
static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1770 
/* Double-precision result from two single-precision sources, with
   IEEE exception check (e.g. fsmuld).  */
static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1786 
/* Quad-precision result (via QT0) from two double-precision sources,
   with IEEE exception check.  */
static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1801 
1802 #ifdef TARGET_SPARC64
/* Double-precision result from a single-precision source, with IEEE
   exception check.  */
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1817 #endif
1818 
/* Double-precision result from a single-precision source; no IEEE
   check (the helper still receives cpu_env).  */
static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
1832 
/* Single-precision result from a double-precision source, with IEEE
   exception check.  */
static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1847 
/* Single-precision result from a quad-precision source (via QT1),
   with IEEE exception check.  */
static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1861 
/* Double-precision result from a quad-precision source (via QT1),
   with IEEE exception check.  */
static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1875 
/* Quad-precision result (via QT0) from a single-precision source;
   no IEEE check.  */
static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1888 
/* Quad-precision result (via QT0) from a double-precision source;
   no IEEE check.  */
static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1901 
/* SWAP: atomically exchange src with the memory word at addr, old
   value into dst.  */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, MemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop);
}
1908 
/* LDSTUB: atomically load the byte at addr into dst and store 0xff.  */
static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
{
    TCGv m1 = tcg_const_tl(0xff);
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
}
1915 
1916 /* asi moves */
1917 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* How a decoded ASI access should be translated.  */
typedef enum {
    GET_ASI_HELPER,   /* no inline form; dispatch to the slow asi helper */
    GET_ASI_EXCP,     /* exception already raised; emit no access */
    GET_ASI_DIRECT,   /* plain load/store using mem_idx/memop */
    GET_ASI_DTWINX,   /* twin/quad-doubleword asis */
    GET_ASI_BLOCK,    /* block load/store asis */
    GET_ASI_SHORT,    /* 8/16-bit "FL" fp load/store asis */
    GET_ASI_BCOPY,    /* sparc32 ASI_M_BCOPY block copy */
    GET_ASI_BFILL,    /* sparc32 ASI_M_BFILL block fill */
} ASIType;
1928 
/* Result of decoding an ASI: translation strategy plus the raw asi
   number and the mmu index / memop for direct accesses.  */
typedef struct {
    ASIType type;
    int asi;        /* raw asi number (immediate, or %asi for v9) */
    int mem_idx;
    MemOp memop;
} DisasASI;
1935 
/*
 * Decode the ASI of a load/store-alternate instruction: classify it
 * into an ASIType, select the TCG mmu index, and adjust the memop
 * (e.g. byte-swap for the little-endian asis).  If the access is
 * illegal or privileged, the exception is raised here and the result
 * type is GET_ASI_EXCP.
 */
static DisasASI get_asi(DisasContext *dc, int insn, MemOp memop)
{
    int asi = GET_FIELD(insn, 19, 26);
    ASIType type = GET_ASI_HELPER;
    int mem_idx = dc->mem_idx;

#ifndef TARGET_SPARC64
    /* Before v9, all asis are immediate and privileged.  */
    if (IS_IMM) {
        gen_exception(dc, TT_ILL_INSN);
        type = GET_ASI_EXCP;
    } else if (supervisor(dc)
               /* Note that LEON accepts ASI_USERDATA in user mode, for
                  use with CASA.  Also note that previous versions of
                  QEMU allowed (and old versions of gcc emitted) ASI_P
                  for LEON, which is incorrect.  */
               || (asi == ASI_USERDATA
                   && (dc->def->features & CPU_FEATURE_CASA))) {
        switch (asi) {
        case ASI_USERDATA:   /* User data access */
            mem_idx = MMU_USER_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_KERNELDATA: /* Supervisor data access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BYPASS:    /* MMU passthrough */
        case ASI_LEON_BYPASS: /* LEON MMU passthrough */
            mem_idx = MMU_PHYS_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BCOPY: /* Block copy, sta access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BCOPY;
            break;
        case ASI_M_BFILL: /* Block fill, stda access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BFILL;
            break;
        }

        /* MMU_PHYS_IDX is used when the MMU is disabled to passthrough the
         * permissions check in get_physical_address(..).
         */
        mem_idx = (dc->mem_idx == MMU_PHYS_IDX) ? MMU_PHYS_IDX : mem_idx;
    } else {
        gen_exception(dc, TT_PRIV_INSN);
        type = GET_ASI_EXCP;
    }
#else
    if (IS_IMM) {
        /* Register form: the asi comes from the %asi register.  */
        asi = dc->asi;
    }
    /* With v9, all asis below 0x80 are privileged.  */
    /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
       down that bit into DisasContext.  For the moment that's ok,
       since the direct implementations below doesn't have any ASIs
       in the restricted [0x30, 0x7f] range, and the check will be
       done properly in the helper.  */
    if (!supervisor(dc) && asi < 0x80) {
        gen_exception(dc, TT_PRIV_ACT);
        type = GET_ASI_EXCP;
    } else {
        /* First switch: select the mmu index.  */
        switch (asi) {
        case ASI_REAL:      /* Bypass */
        case ASI_REAL_IO:   /* Bypass, non-cacheable */
        case ASI_REAL_L:    /* Bypass LE */
        case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
        case ASI_TWINX_REAL:   /* Real address, twinx */
        case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
            mem_idx = MMU_PHYS_IDX;
            break;
        case ASI_N:  /* Nucleus */
        case ASI_NL: /* Nucleus LE */
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            if (hypervisor(dc)) {
                mem_idx = MMU_PHYS_IDX;
            } else {
                mem_idx = MMU_NUCLEUS_IDX;
            }
            break;
        case ASI_AIUP:  /* As if user primary */
        case ASI_AIUPL: /* As if user primary LE */
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
            mem_idx = MMU_USER_IDX;
            break;
        case ASI_AIUS:  /* As if user secondary */
        case ASI_AIUSL: /* As if user secondary LE */
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
            mem_idx = MMU_USER_SECONDARY_IDX;
            break;
        case ASI_S:  /* Secondary */
        case ASI_SL: /* Secondary LE */
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL16_S:
        case ASI_FL16_SL:
            if (mem_idx == MMU_USER_IDX) {
                mem_idx = MMU_USER_SECONDARY_IDX;
            } else if (mem_idx == MMU_KERNEL_IDX) {
                mem_idx = MMU_KERNEL_SECONDARY_IDX;
            }
            break;
        case ASI_P:  /* Primary */
        case ASI_PL: /* Primary LE */
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_P:
        case ASI_BLK_PL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            break;
        }
        /* Second switch: select the translation strategy.  */
        switch (asi) {
        case ASI_REAL:
        case ASI_REAL_IO:
        case ASI_REAL_L:
        case ASI_REAL_IO_L:
        case ASI_N:
        case ASI_NL:
        case ASI_AIUP:
        case ASI_AIUPL:
        case ASI_AIUS:
        case ASI_AIUSL:
        case ASI_S:
        case ASI_SL:
        case ASI_P:
        case ASI_PL:
            type = GET_ASI_DIRECT;
            break;
        case ASI_TWINX_REAL:
        case ASI_TWINX_REAL_L:
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            type = GET_ASI_DTWINX;
            break;
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_BLK_P:
        case ASI_BLK_PL:
            type = GET_ASI_BLOCK;
            break;
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
            memop = MO_UB;
            type = GET_ASI_SHORT;
            break;
        case ASI_FL16_S:
        case ASI_FL16_SL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            memop = MO_TEUW;
            type = GET_ASI_SHORT;
            break;
        }
        /* The little-endian asis all have bit 3 set.  */
        if (asi & 8) {
            memop ^= MO_BSWAP;
        }
    }
#endif

    return (DisasASI){ type, asi, mem_idx, memop };
}
2148 
/*
 * Generate code for an integer alternate-space load: load from ADDR
 * through the ASI encoded in INSN into DST, with access size/endianness
 * given by MEMOP.
 */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* ASI decode signalled an exception; emit nothing here.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        /* The ASI maps onto a normal MMU index: emit an inline load.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop);
        break;
    default:
        /* Everything else is handled out of line by the ASI helper.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(memop);

            /* The helper may fault; make CPU state consistent first.  */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_mop);
#else
            {
                /* The helper returns 64 bits; truncate for 32-bit targets.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
            }
#endif
        }
        break;
    }
}
2183 
/*
 * Generate code for an integer alternate-space store: store SRC to ADDR
 * through the ASI encoded in INSN, with access size/endianness given by
 * MEMOP.
 */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* ASI decode signalled an exception; emit nothing here.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
#endif
        /* fall through */
    case GET_ASI_DIRECT:
        /* Normal MMU index: emit an inline store.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_const_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }
        }
        break;
#endif
    default:
        /* Out-of-line path via the ASI store helper.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(memop & MO_SIZE);

            /* The helper may fault; make CPU state consistent first.  */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(cpu_env, addr, src, r_asi, r_mop);
#else
            {
                /* Widen the 32-bit source for the 64-bit helper argument.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            }
#endif

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2259 
2260 static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2261                          TCGv addr, int insn)
2262 {
2263     DisasASI da = get_asi(dc, insn, MO_TEUL);
2264 
2265     switch (da.type) {
2266     case GET_ASI_EXCP:
2267         break;
2268     case GET_ASI_DIRECT:
2269         gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2270         break;
2271     default:
2272         /* ??? Should be DAE_invalid_asi.  */
2273         gen_exception(dc, TT_DATA_ACCESS);
2274         break;
2275     }
2276 }
2277 
2278 static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2279                         int insn, int rd)
2280 {
2281     DisasASI da = get_asi(dc, insn, MO_TEUL);
2282     TCGv oldv;
2283 
2284     switch (da.type) {
2285     case GET_ASI_EXCP:
2286         return;
2287     case GET_ASI_DIRECT:
2288         oldv = tcg_temp_new();
2289         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2290                                   da.mem_idx, da.memop);
2291         gen_store_gpr(dc, rd, oldv);
2292         break;
2293     default:
2294         /* ??? Should be DAE_invalid_asi.  */
2295         gen_exception(dc, TT_DATA_ACCESS);
2296         break;
2297     }
2298 }
2299 
/*
 * Generate code for LDSTUBA: atomically load the byte at ADDR through
 * the ASI encoded in INSN and store 0xff back to that location.
 */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* ASI decode signalled an exception; emit nothing here.  */
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->base.tb) & CF_PARALLEL) {
            /* The helper-based load+store below is not atomic; punt to
               the exclusive-execution fallback when TBs run in parallel.  */
            gen_helper_exit_atomic(cpu_env);
        } else {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_UB);
            TCGv_i64 s64, t64;

            /* The helpers may fault; make CPU state consistent first.  */
            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);

            /* LDSTUB's defined store value is all-ones.  */
            s64 = tcg_const_i64(0xff);
            gen_helper_st_asi(cpu_env, addr, s64, r_asi, r_mop);

            tcg_gen_trunc_i64_tl(dst, t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2335 #endif
2336 
2337 #ifdef TARGET_SPARC64
/*
 * Generate code for LDFA/LDDFA/LDQFA: load a 4-, 8- or 16-byte (SIZE)
 * floating-point value from ADDR into FP register RD, through the ASI
 * encoded in INSN.
 */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* ASI decode signalled an exception; emit nothing here.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Load through a temp first so a fault on the second half
               does not leave cpu_fpr[rd/2] partially updated.  */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            /* 64-byte block load: eight consecutive 8-byte loads.  */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);

            /* The helper may fault; make CPU state consistent first.  */
            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], cpu_env, addr, r_asi, r_mop);
                break;
            case 16:
                /* As above: temp avoids a partial update if the second
                   helper call faults.  */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], cpu_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                break;
            default:
                g_assert_not_reached();
            }
        }
        break;
    }
}
2445 
/*
 * Generate code for STFA/STDFA/STQFA: store a 4-, 8- or 16-byte (SIZE)
 * floating-point value from FP register RD to ADDR, through the ASI
 * encoded in INSN.
 */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* ASI decode signalled an exception; emit nothing here.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            /* 64-byte block store: eight consecutive 8-byte stores.  */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2527 
/*
 * Generate code for the 64-bit LDDA: load a doubleword from ADDR through
 * the ASI encoded in INSN into the even/odd register pair RD, RD+1.
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* ASI decode signalled an exception; emit nothing here.  */
        return;

    case GET_ASI_DTWINX:
        /* Twin-doubleword load: two full 64-bit values.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            /* The helper may fault; make CPU state consistent first.  */
            save_state(dc);
            gen_helper_ld_asi(tmp, cpu_env, addr, r_asi, r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;
    }

    /* Write back both halves of the register pair.  */
    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2589 
/*
 * Generate code for the 64-bit STDA: store the even/odd register pair
 * RD (HI), RD+1 (LO) to ADDR through the ASI encoded in INSN.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* ASI decode signalled an exception; emit nothing here.  */
        break;

    case GET_ASI_DTWINX:
        /* Twin-doubleword store: two full 64-bit values.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            /* The helper may fault; make CPU state consistent first.  */
            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2645 
2646 static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2647                          int insn, int rd)
2648 {
2649     DisasASI da = get_asi(dc, insn, MO_TEUQ);
2650     TCGv oldv;
2651 
2652     switch (da.type) {
2653     case GET_ASI_EXCP:
2654         return;
2655     case GET_ASI_DIRECT:
2656         oldv = tcg_temp_new();
2657         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2658                                   da.mem_idx, da.memop);
2659         gen_store_gpr(dc, rd, oldv);
2660         break;
2661     default:
2662         /* ??? Should be DAE_invalid_asi.  */
2663         gen_exception(dc, TT_DATA_ACCESS);
2664         break;
2665     }
2666 }
2667 
2668 #elif !defined(CONFIG_USER_ONLY)
/*
 * Generate code for the 32-bit LDDA: load a 64-bit value from ADDR
 * through the ASI encoded in INSN, splitting it into the even/odd
 * register pair RD (high word), RD+1 (low word).
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEUQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* ASI decode signalled an exception; emit nothing here.  */
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop);
        break;
    default:
        /* Out-of-line path via the ASI load helper.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_UQ);

            /* The helper may fault; make CPU state consistent first.  */
            save_state(dc);
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
        }
        break;
    }

    /* Split the doubleword and write back both registers.  */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2702 
/*
 * Generate code for the 32-bit STDA: store the even/odd register pair
 * RD (HI), RD+1 (LO) as one 64-bit value to ADDR through the ASI
 * encoded in INSN.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    /* Assemble the doubleword from the register pair.  */
    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* ASI decode signalled an exception; emit nothing here.  */
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_const_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }
        }
        break;
    default:
        /* Out-of-line path via the ASI store helper.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_UQ);

            /* The helper may fault; make CPU state consistent first.  */
            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2748 #endif
2749 
2750 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2751 {
2752     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2753     return gen_load_gpr(dc, rs1);
2754 }
2755 
2756 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2757 {
2758     if (IS_IMM) { /* immediate */
2759         target_long simm = GET_FIELDs(insn, 19, 31);
2760         TCGv t = tcg_temp_new();
2761         tcg_gen_movi_tl(t, simm);
2762         return t;
2763     } else {      /* register */
2764         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2765         return gen_load_gpr(dc, rs2);
2766     }
2767 }
2768 
2769 #ifdef TARGET_SPARC64
/*
 * Conditional move of a single-precision FP register:
 * rd = (cmp holds) ? rs : rd.
 */
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the later.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        /* The comparison value is already 0/1; just narrow it.  */
        tcg_gen_extrl_i64_i32(c32, cmp->c1);
    } else {
        /* Materialize the comparison result, then narrow.  */
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_extrl_i64_i32(c32, c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F(dc);
    zero = tcg_const_i32(0);

    /* dst = (c32 != 0) ? s1 : s2 */
    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    gen_store_fpr_F(dc, rd, dst);
}
2795 
2796 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2797 {
2798     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2799     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2800                         gen_load_fpr_D(dc, rs),
2801                         gen_load_fpr_D(dc, rd));
2802     gen_store_fpr_D(dc, rd, dst);
2803 }
2804 
2805 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2806 {
2807     int qd = QFPREG(rd);
2808     int qs = QFPREG(rs);
2809 
2810     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2811                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2812     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2813                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2814 
2815     gen_update_fprs_dirty(dc, qd);
2816 }
2817 
2818 #ifndef CONFIG_USER_ONLY
/*
 * Set R_TSPTR to point at the trap state for the current trap level,
 * i.e. &env->ts[env->tl & MAXTL_MASK].
 */
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        /* Widen the 32-bit byte offset to pointer width before adding.  */
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
    }
}
2840 #endif
2841 
/*
 * Generate code for the VIS EDGE8/16/32 instructions (plus LITTLE and
 * no-CC variants): compute into DST a mask derived from the low bits of
 * S1 and S2, selected by operand WIDTH (8, 16 or 32) and direction LEFT.
 * When CC is set, also update the integer condition codes as for
 * "subcc s1, s2".  NOTE: this clobbers S1 and S2 (they are masked for
 * the address comparison below).
 */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2, t1, t2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        /* Record a subtraction for lazy condition-code evaluation.  */
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    /* lo1/lo2 = table index for each input, per the scheme above.  */
    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    /* Extract the left/right table entries.  */
    t1 = tcg_const_tl(tabl);
    t2 = tcg_const_tl(tabr);
    tcg_gen_shr_tl(lo1, t1, lo1);
    tcg_gen_shr_tl(lo2, t2, lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Mask the addresses to 8-byte granularity for comparison; with the
       32-bit address mask (AM) in effect, also truncate to 32 bits.  */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* We want to compute
        dst = (s1 == s2 ? lo1 : lo1 & lo2).
       We've already done dst = lo1, so this reduces to
        dst &= (s1 == s2 ? -1 : lo2)
       Which we perform by
        lo2 |= -(s1 == s2)
        dst &= lo2
    */
    tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
    tcg_gen_neg_tl(t1, t1);
    tcg_gen_or_tl(lo2, lo2, t1);
    tcg_gen_and_tl(dst, dst, lo2);
}
2939 
2940 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2941 {
2942     TCGv tmp = tcg_temp_new();
2943 
2944     tcg_gen_add_tl(tmp, s1, s2);
2945     tcg_gen_andi_tl(dst, tmp, -8);
2946     if (left) {
2947         tcg_gen_neg_tl(tmp, tmp);
2948     }
2949     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2950 }
2951 
/*
 * Generate code for FALIGNDATA: view S1:S2 as a 16-byte value and
 * extract into DST the 8 bytes starting at byte offset GSR.align, i.e.
 * dst = (s1 << (align * 8)) | (s2 >> (64 - align * 8)).
 */
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
{
    TCGv t1, t2, shift;

    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    shift = tcg_temp_new();

    /* shift = GSR.align * 8 (bit shift amount, 0..56).  */
    tcg_gen_andi_tl(shift, gsr, 7);
    tcg_gen_shli_tl(shift, shift, 3);
    tcg_gen_shl_tl(t1, s1, shift);

    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
       shift of (up to 63) followed by a constant shift of 1.  */
    tcg_gen_xori_tl(shift, shift, 63);
    tcg_gen_shr_tl(t2, s2, shift);
    tcg_gen_shri_tl(t2, t2, 1);

    tcg_gen_or_tl(dst, t1, t2);
}
2972 #endif
2973 
/* Bail out of disas_sparc_insn (via its illegal_insn / nfpu_insn labels)
   when the CPU model lacks the named integer-unit or FPU feature.  */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
2980 
2981 /* before an instruction, dc->pc must be static */
2982 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2983 {
2984     unsigned int opc, rs1, rs2, rd;
2985     TCGv cpu_src1, cpu_src2;
2986     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2987     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2988     target_long simm;
2989 
2990     opc = GET_FIELD(insn, 0, 1);
2991     rd = GET_FIELD(insn, 2, 6);
2992 
2993     switch (opc) {
2994     case 0:                     /* branches/sethi */
2995         {
2996             unsigned int xop = GET_FIELD(insn, 7, 9);
2997             int32_t target;
2998             switch (xop) {
2999 #ifdef TARGET_SPARC64
3000             case 0x1:           /* V9 BPcc */
3001                 {
3002                     int cc;
3003 
3004                     target = GET_FIELD_SP(insn, 0, 18);
3005                     target = sign_extend(target, 19);
3006                     target <<= 2;
3007                     cc = GET_FIELD_SP(insn, 20, 21);
3008                     if (cc == 0)
3009                         do_branch(dc, target, insn, 0);
3010                     else if (cc == 2)
3011                         do_branch(dc, target, insn, 1);
3012                     else
3013                         goto illegal_insn;
3014                     goto jmp_insn;
3015                 }
3016             case 0x3:           /* V9 BPr */
3017                 {
3018                     target = GET_FIELD_SP(insn, 0, 13) |
3019                         (GET_FIELD_SP(insn, 20, 21) << 14);
3020                     target = sign_extend(target, 16);
3021                     target <<= 2;
3022                     cpu_src1 = get_src1(dc, insn);
3023                     do_branch_reg(dc, target, insn, cpu_src1);
3024                     goto jmp_insn;
3025                 }
3026             case 0x5:           /* V9 FBPcc */
3027                 {
3028                     int cc = GET_FIELD_SP(insn, 20, 21);
3029                     if (gen_trap_ifnofpu(dc)) {
3030                         goto jmp_insn;
3031                     }
3032                     target = GET_FIELD_SP(insn, 0, 18);
3033                     target = sign_extend(target, 19);
3034                     target <<= 2;
3035                     do_fbranch(dc, target, insn, cc);
3036                     goto jmp_insn;
3037                 }
3038 #else
3039             case 0x7:           /* CBN+x */
3040                 {
3041                     goto ncp_insn;
3042                 }
3043 #endif
3044             case 0x2:           /* BN+x */
3045                 {
3046                     target = GET_FIELD(insn, 10, 31);
3047                     target = sign_extend(target, 22);
3048                     target <<= 2;
3049                     do_branch(dc, target, insn, 0);
3050                     goto jmp_insn;
3051                 }
3052             case 0x6:           /* FBN+x */
3053                 {
3054                     if (gen_trap_ifnofpu(dc)) {
3055                         goto jmp_insn;
3056                     }
3057                     target = GET_FIELD(insn, 10, 31);
3058                     target = sign_extend(target, 22);
3059                     target <<= 2;
3060                     do_fbranch(dc, target, insn, 0);
3061                     goto jmp_insn;
3062                 }
3063             case 0x4:           /* SETHI */
3064                 /* Special-case %g0 because that's the canonical nop.  */
3065                 if (rd) {
3066                     uint32_t value = GET_FIELD(insn, 10, 31);
3067                     TCGv t = gen_dest_gpr(dc, rd);
3068                     tcg_gen_movi_tl(t, value << 10);
3069                     gen_store_gpr(dc, rd, t);
3070                 }
3071                 break;
3072             case 0x0:           /* UNIMPL */
3073             default:
3074                 goto illegal_insn;
3075             }
3076             break;
3077         }
3078         break;
3079     case 1:                     /*CALL*/
3080         {
3081             target_long target = GET_FIELDs(insn, 2, 31) << 2;
3082             TCGv o7 = gen_dest_gpr(dc, 15);
3083 
3084             tcg_gen_movi_tl(o7, dc->pc);
3085             gen_store_gpr(dc, 15, o7);
3086             target += dc->pc;
3087             gen_mov_pc_npc(dc);
3088 #ifdef TARGET_SPARC64
3089             if (unlikely(AM_CHECK(dc))) {
3090                 target &= 0xffffffffULL;
3091             }
3092 #endif
3093             dc->npc = target;
3094         }
3095         goto jmp_insn;
3096     case 2:                     /* FPU & Logical Operations */
3097         {
3098             unsigned int xop = GET_FIELD(insn, 7, 12);
3099             TCGv cpu_dst = tcg_temp_new();
3100             TCGv cpu_tmp0;
3101 
3102             if (xop == 0x3a) {  /* generate trap */
3103                 int cond = GET_FIELD(insn, 3, 6);
3104                 TCGv_i32 trap;
3105                 TCGLabel *l1 = NULL;
3106                 int mask;
3107 
3108                 if (cond == 0) {
3109                     /* Trap never.  */
3110                     break;
3111                 }
3112 
3113                 save_state(dc);
3114 
3115                 if (cond != 8) {
3116                     /* Conditional trap.  */
3117                     DisasCompare cmp;
3118 #ifdef TARGET_SPARC64
3119                     /* V9 icc/xcc */
3120                     int cc = GET_FIELD_SP(insn, 11, 12);
3121                     if (cc == 0) {
3122                         gen_compare(&cmp, 0, cond, dc);
3123                     } else if (cc == 2) {
3124                         gen_compare(&cmp, 1, cond, dc);
3125                     } else {
3126                         goto illegal_insn;
3127                     }
3128 #else
3129                     gen_compare(&cmp, 0, cond, dc);
3130 #endif
3131                     l1 = gen_new_label();
3132                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3133                                       cmp.c1, cmp.c2, l1);
3134                 }
3135 
3136                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3137                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3138 
3139                 /* Don't use the normal temporaries, as they may well have
3140                    gone out of scope with the branch above.  While we're
3141                    doing that we might as well pre-truncate to 32-bit.  */
3142                 trap = tcg_temp_new_i32();
3143 
3144                 rs1 = GET_FIELD_SP(insn, 14, 18);
3145                 if (IS_IMM) {
3146                     rs2 = GET_FIELD_SP(insn, 0, 7);
3147                     if (rs1 == 0) {
3148                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3149                         /* Signal that the trap value is fully constant.  */
3150                         mask = 0;
3151                     } else {
3152                         TCGv t1 = gen_load_gpr(dc, rs1);
3153                         tcg_gen_trunc_tl_i32(trap, t1);
3154                         tcg_gen_addi_i32(trap, trap, rs2);
3155                     }
3156                 } else {
3157                     TCGv t1, t2;
3158                     rs2 = GET_FIELD_SP(insn, 0, 4);
3159                     t1 = gen_load_gpr(dc, rs1);
3160                     t2 = gen_load_gpr(dc, rs2);
3161                     tcg_gen_add_tl(t1, t1, t2);
3162                     tcg_gen_trunc_tl_i32(trap, t1);
3163                 }
3164                 if (mask != 0) {
3165                     tcg_gen_andi_i32(trap, trap, mask);
3166                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
3167                 }
3168 
3169                 gen_helper_raise_exception(cpu_env, trap);
3170 
3171                 if (cond == 8) {
3172                     /* An unconditional trap ends the TB.  */
3173                     dc->base.is_jmp = DISAS_NORETURN;
3174                     goto jmp_insn;
3175                 } else {
3176                     /* A conditional trap falls through to the next insn.  */
3177                     gen_set_label(l1);
3178                     break;
3179                 }
3180             } else if (xop == 0x28) {
3181                 rs1 = GET_FIELD(insn, 13, 17);
3182                 switch(rs1) {
3183                 case 0: /* rdy */
3184 #ifndef TARGET_SPARC64
3185                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
3186                                        manual, rdy on the microSPARC
3187                                        II */
3188                 case 0x0f:          /* stbar in the SPARCv8 manual,
3189                                        rdy on the microSPARC II */
3190                 case 0x10 ... 0x1f: /* implementation-dependent in the
3191                                        SPARCv8 manual, rdy on the
3192                                        microSPARC II */
3193                     /* Read Asr17 */
3194                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3195                         TCGv t = gen_dest_gpr(dc, rd);
3196                         /* Read Asr17 for a Leon3 monoprocessor */
3197                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3198                         gen_store_gpr(dc, rd, t);
3199                         break;
3200                     }
3201 #endif
3202                     gen_store_gpr(dc, rd, cpu_y);
3203                     break;
3204 #ifdef TARGET_SPARC64
3205                 case 0x2: /* V9 rdccr */
3206                     update_psr(dc);
3207                     gen_helper_rdccr(cpu_dst, cpu_env);
3208                     gen_store_gpr(dc, rd, cpu_dst);
3209                     break;
3210                 case 0x3: /* V9 rdasi */
3211                     tcg_gen_movi_tl(cpu_dst, dc->asi);
3212                     gen_store_gpr(dc, rd, cpu_dst);
3213                     break;
3214                 case 0x4: /* V9 rdtick */
3215                     {
3216                         TCGv_ptr r_tickptr;
3217                         TCGv_i32 r_const;
3218 
3219                         r_tickptr = tcg_temp_new_ptr();
3220                         r_const = tcg_const_i32(dc->mem_idx);
3221                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3222                                        offsetof(CPUSPARCState, tick));
3223                         if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3224                             gen_io_start();
3225                         }
3226                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3227                                                   r_const);
3228                         gen_store_gpr(dc, rd, cpu_dst);
3229                         if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3230                             /* I/O operations in icount mode must end the TB */
3231                             dc->base.is_jmp = DISAS_EXIT;
3232                         }
3233                     }
3234                     break;
3235                 case 0x5: /* V9 rdpc */
3236                     {
3237                         TCGv t = gen_dest_gpr(dc, rd);
3238                         if (unlikely(AM_CHECK(dc))) {
3239                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3240                         } else {
3241                             tcg_gen_movi_tl(t, dc->pc);
3242                         }
3243                         gen_store_gpr(dc, rd, t);
3244                     }
3245                     break;
3246                 case 0x6: /* V9 rdfprs */
3247                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3248                     gen_store_gpr(dc, rd, cpu_dst);
3249                     break;
3250                 case 0xf: /* V9 membar */
3251                     break; /* no effect */
3252                 case 0x13: /* Graphics Status */
3253                     if (gen_trap_ifnofpu(dc)) {
3254                         goto jmp_insn;
3255                     }
3256                     gen_store_gpr(dc, rd, cpu_gsr);
3257                     break;
3258                 case 0x16: /* Softint */
3259                     tcg_gen_ld32s_tl(cpu_dst, cpu_env,
3260                                      offsetof(CPUSPARCState, softint));
3261                     gen_store_gpr(dc, rd, cpu_dst);
3262                     break;
3263                 case 0x17: /* Tick compare */
3264                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
3265                     break;
3266                 case 0x18: /* System tick */
3267                     {
3268                         TCGv_ptr r_tickptr;
3269                         TCGv_i32 r_const;
3270 
3271                         r_tickptr = tcg_temp_new_ptr();
3272                         r_const = tcg_const_i32(dc->mem_idx);
3273                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3274                                        offsetof(CPUSPARCState, stick));
3275                         if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3276                             gen_io_start();
3277                         }
3278                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3279                                                   r_const);
3280                         gen_store_gpr(dc, rd, cpu_dst);
3281                         if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3282                             /* I/O operations in icount mode must end the TB */
3283                             dc->base.is_jmp = DISAS_EXIT;
3284                         }
3285                     }
3286                     break;
3287                 case 0x19: /* System tick compare */
3288                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
3289                     break;
3290                 case 0x1a: /* UltraSPARC-T1 Strand status */
3291                     /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3292                      * this ASR as impl. dep
3293                      */
3294                     CHECK_IU_FEATURE(dc, HYPV);
3295                     {
3296                         TCGv t = gen_dest_gpr(dc, rd);
3297                         tcg_gen_movi_tl(t, 1UL);
3298                         gen_store_gpr(dc, rd, t);
3299                     }
3300                     break;
3301                 case 0x10: /* Performance Control */
3302                 case 0x11: /* Performance Instrumentation Counter */
3303                 case 0x12: /* Dispatch Control */
3304                 case 0x14: /* Softint set, WO */
3305                 case 0x15: /* Softint clear, WO */
3306 #endif
3307                 default:
3308                     goto illegal_insn;
3309                 }
3310 #if !defined(CONFIG_USER_ONLY)
3311             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3312 #ifndef TARGET_SPARC64
3313                 if (!supervisor(dc)) {
3314                     goto priv_insn;
3315                 }
3316                 update_psr(dc);
3317                 gen_helper_rdpsr(cpu_dst, cpu_env);
3318 #else
3319                 CHECK_IU_FEATURE(dc, HYPV);
3320                 if (!hypervisor(dc))
3321                     goto priv_insn;
3322                 rs1 = GET_FIELD(insn, 13, 17);
3323                 switch (rs1) {
3324                 case 0: // hpstate
3325                     tcg_gen_ld_i64(cpu_dst, cpu_env,
3326                                    offsetof(CPUSPARCState, hpstate));
3327                     break;
3328                 case 1: // htstate
3329                     // gen_op_rdhtstate();
3330                     break;
3331                 case 3: // hintp
3332                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3333                     break;
3334                 case 5: // htba
3335                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
3336                     break;
3337                 case 6: // hver
3338                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
3339                     break;
3340                 case 31: // hstick_cmpr
3341                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3342                     break;
3343                 default:
3344                     goto illegal_insn;
3345                 }
3346 #endif
3347                 gen_store_gpr(dc, rd, cpu_dst);
3348                 break;
3349             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3350                 if (!supervisor(dc)) {
3351                     goto priv_insn;
3352                 }
3353                 cpu_tmp0 = tcg_temp_new();
3354 #ifdef TARGET_SPARC64
3355                 rs1 = GET_FIELD(insn, 13, 17);
3356                 switch (rs1) {
3357                 case 0: // tpc
3358                     {
3359                         TCGv_ptr r_tsptr;
3360 
3361                         r_tsptr = tcg_temp_new_ptr();
3362                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3363                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3364                                       offsetof(trap_state, tpc));
3365                     }
3366                     break;
3367                 case 1: // tnpc
3368                     {
3369                         TCGv_ptr r_tsptr;
3370 
3371                         r_tsptr = tcg_temp_new_ptr();
3372                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3373                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3374                                       offsetof(trap_state, tnpc));
3375                     }
3376                     break;
3377                 case 2: // tstate
3378                     {
3379                         TCGv_ptr r_tsptr;
3380 
3381                         r_tsptr = tcg_temp_new_ptr();
3382                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3383                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3384                                       offsetof(trap_state, tstate));
3385                     }
3386                     break;
3387                 case 3: // tt
3388                     {
3389                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3390 
3391                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3392                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3393                                          offsetof(trap_state, tt));
3394                     }
3395                     break;
3396                 case 4: // tick
3397                     {
3398                         TCGv_ptr r_tickptr;
3399                         TCGv_i32 r_const;
3400 
3401                         r_tickptr = tcg_temp_new_ptr();
3402                         r_const = tcg_const_i32(dc->mem_idx);
3403                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3404                                        offsetof(CPUSPARCState, tick));
3405                         if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3406                             gen_io_start();
3407                         }
3408                         gen_helper_tick_get_count(cpu_tmp0, cpu_env,
3409                                                   r_tickptr, r_const);
3410                         if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3411                             /* I/O operations in icount mode must end the TB */
3412                             dc->base.is_jmp = DISAS_EXIT;
3413                         }
3414                     }
3415                     break;
3416                 case 5: // tba
3417                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3418                     break;
3419                 case 6: // pstate
3420                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3421                                      offsetof(CPUSPARCState, pstate));
3422                     break;
3423                 case 7: // tl
3424                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3425                                      offsetof(CPUSPARCState, tl));
3426                     break;
3427                 case 8: // pil
3428                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3429                                      offsetof(CPUSPARCState, psrpil));
3430                     break;
3431                 case 9: // cwp
3432                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
3433                     break;
3434                 case 10: // cansave
3435                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3436                                      offsetof(CPUSPARCState, cansave));
3437                     break;
3438                 case 11: // canrestore
3439                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3440                                      offsetof(CPUSPARCState, canrestore));
3441                     break;
3442                 case 12: // cleanwin
3443                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3444                                      offsetof(CPUSPARCState, cleanwin));
3445                     break;
3446                 case 13: // otherwin
3447                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3448                                      offsetof(CPUSPARCState, otherwin));
3449                     break;
3450                 case 14: // wstate
3451                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3452                                      offsetof(CPUSPARCState, wstate));
3453                     break;
3454                 case 16: // UA2005 gl
3455                     CHECK_IU_FEATURE(dc, GL);
3456                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3457                                      offsetof(CPUSPARCState, gl));
3458                     break;
3459                 case 26: // UA2005 strand status
3460                     CHECK_IU_FEATURE(dc, HYPV);
3461                     if (!hypervisor(dc))
3462                         goto priv_insn;
3463                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3464                     break;
3465                 case 31: // ver
3466                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3467                     break;
3468                 case 15: // fq
3469                 default:
3470                     goto illegal_insn;
3471                 }
3472 #else
3473                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3474 #endif
3475                 gen_store_gpr(dc, rd, cpu_tmp0);
3476                 break;
3477 #endif
3478 #if defined(TARGET_SPARC64) || !defined(CONFIG_USER_ONLY)
3479             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3480 #ifdef TARGET_SPARC64
3481                 gen_helper_flushw(cpu_env);
3482 #else
3483                 if (!supervisor(dc))
3484                     goto priv_insn;
3485                 gen_store_gpr(dc, rd, cpu_tbr);
3486 #endif
3487                 break;
3488 #endif
3489             } else if (xop == 0x34) {   /* FPU Operations */
3490                 if (gen_trap_ifnofpu(dc)) {
3491                     goto jmp_insn;
3492                 }
3493                 gen_op_clear_ieee_excp_and_FTT();
3494                 rs1 = GET_FIELD(insn, 13, 17);
3495                 rs2 = GET_FIELD(insn, 27, 31);
3496                 xop = GET_FIELD(insn, 18, 26);
3497 
3498                 switch (xop) {
3499                 case 0x1: /* fmovs */
3500                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3501                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3502                     break;
3503                 case 0x5: /* fnegs */
3504                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3505                     break;
3506                 case 0x9: /* fabss */
3507                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3508                     break;
3509                 case 0x29: /* fsqrts */
3510                     CHECK_FPU_FEATURE(dc, FSQRT);
3511                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3512                     break;
3513                 case 0x2a: /* fsqrtd */
3514                     CHECK_FPU_FEATURE(dc, FSQRT);
3515                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3516                     break;
3517                 case 0x2b: /* fsqrtq */
3518                     CHECK_FPU_FEATURE(dc, FLOAT128);
3519                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3520                     break;
3521                 case 0x41: /* fadds */
3522                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3523                     break;
3524                 case 0x42: /* faddd */
3525                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3526                     break;
3527                 case 0x43: /* faddq */
3528                     CHECK_FPU_FEATURE(dc, FLOAT128);
3529                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3530                     break;
3531                 case 0x45: /* fsubs */
3532                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3533                     break;
3534                 case 0x46: /* fsubd */
3535                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3536                     break;
3537                 case 0x47: /* fsubq */
3538                     CHECK_FPU_FEATURE(dc, FLOAT128);
3539                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3540                     break;
3541                 case 0x49: /* fmuls */
3542                     CHECK_FPU_FEATURE(dc, FMUL);
3543                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3544                     break;
3545                 case 0x4a: /* fmuld */
3546                     CHECK_FPU_FEATURE(dc, FMUL);
3547                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3548                     break;
3549                 case 0x4b: /* fmulq */
3550                     CHECK_FPU_FEATURE(dc, FLOAT128);
3551                     CHECK_FPU_FEATURE(dc, FMUL);
3552                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3553                     break;
3554                 case 0x4d: /* fdivs */
3555                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3556                     break;
3557                 case 0x4e: /* fdivd */
3558                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3559                     break;
3560                 case 0x4f: /* fdivq */
3561                     CHECK_FPU_FEATURE(dc, FLOAT128);
3562                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3563                     break;
3564                 case 0x69: /* fsmuld */
3565                     CHECK_FPU_FEATURE(dc, FSMULD);
3566                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3567                     break;
3568                 case 0x6e: /* fdmulq */
3569                     CHECK_FPU_FEATURE(dc, FLOAT128);
3570                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3571                     break;
3572                 case 0xc4: /* fitos */
3573                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3574                     break;
3575                 case 0xc6: /* fdtos */
3576                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3577                     break;
3578                 case 0xc7: /* fqtos */
3579                     CHECK_FPU_FEATURE(dc, FLOAT128);
3580                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3581                     break;
3582                 case 0xc8: /* fitod */
3583                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3584                     break;
3585                 case 0xc9: /* fstod */
3586                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3587                     break;
3588                 case 0xcb: /* fqtod */
3589                     CHECK_FPU_FEATURE(dc, FLOAT128);
3590                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3591                     break;
3592                 case 0xcc: /* fitoq */
3593                     CHECK_FPU_FEATURE(dc, FLOAT128);
3594                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3595                     break;
3596                 case 0xcd: /* fstoq */
3597                     CHECK_FPU_FEATURE(dc, FLOAT128);
3598                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3599                     break;
3600                 case 0xce: /* fdtoq */
3601                     CHECK_FPU_FEATURE(dc, FLOAT128);
3602                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3603                     break;
3604                 case 0xd1: /* fstoi */
3605                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3606                     break;
3607                 case 0xd2: /* fdtoi */
3608                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3609                     break;
3610                 case 0xd3: /* fqtoi */
3611                     CHECK_FPU_FEATURE(dc, FLOAT128);
3612                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3613                     break;
3614 #ifdef TARGET_SPARC64
3615                 case 0x2: /* V9 fmovd */
3616                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3617                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3618                     break;
3619                 case 0x3: /* V9 fmovq */
3620                     CHECK_FPU_FEATURE(dc, FLOAT128);
3621                     gen_move_Q(dc, rd, rs2);
3622                     break;
3623                 case 0x6: /* V9 fnegd */
3624                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3625                     break;
3626                 case 0x7: /* V9 fnegq */
3627                     CHECK_FPU_FEATURE(dc, FLOAT128);
3628                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3629                     break;
3630                 case 0xa: /* V9 fabsd */
3631                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3632                     break;
3633                 case 0xb: /* V9 fabsq */
3634                     CHECK_FPU_FEATURE(dc, FLOAT128);
3635                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3636                     break;
3637                 case 0x81: /* V9 fstox */
3638                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3639                     break;
3640                 case 0x82: /* V9 fdtox */
3641                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3642                     break;
3643                 case 0x83: /* V9 fqtox */
3644                     CHECK_FPU_FEATURE(dc, FLOAT128);
3645                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3646                     break;
3647                 case 0x84: /* V9 fxtos */
3648                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3649                     break;
3650                 case 0x88: /* V9 fxtod */
3651                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3652                     break;
3653                 case 0x8c: /* V9 fxtoq */
3654                     CHECK_FPU_FEATURE(dc, FLOAT128);
3655                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3656                     break;
3657 #endif
3658                 default:
3659                     goto illegal_insn;
3660                 }
3661             } else if (xop == 0x35) {   /* FPU Operations */
                /* FPop2 group: V9 conditional FP moves (on register value,
                   on %fcc, on %icc/%xcc) and the FP compare instructions.
                   All of them trap first if the FPU is disabled. */
3662 #ifdef TARGET_SPARC64
3663                 int cond;
3664 #endif
3665                 if (gen_trap_ifnofpu(dc)) {
3666                     goto jmp_insn;
3667                 }
3668                 gen_op_clear_ieee_excp_and_FTT();
3669                 rs1 = GET_FIELD(insn, 13, 17);
3670                 rs2 = GET_FIELD(insn, 27, 31);
3671                 xop = GET_FIELD(insn, 18, 26);
3672 
3673 #ifdef TARGET_SPARC64
                /* FMOVR(sz): move the FP register of size sz (s/d/q) from
                   rs2 to rd when the integer register rs1 satisfies the
                   register condition encoded in insn bits 10..12. */
3674 #define FMOVR(sz)                                                  \
3675                 do {                                               \
3676                     DisasCompare cmp;                              \
3677                     cond = GET_FIELD_SP(insn, 10, 12);             \
3678                     cpu_src1 = get_src1(dc, insn);                 \
3679                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3680                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3681                 } while (0)
3682 
                /* The fmovr variants are distinguished by opf bits outside
                   the condition field, hence the & 0x11f mask. */
3683                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3684                     FMOVR(s);
3685                     break;
3686                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3687                     FMOVR(d);
3688                     break;
3689                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3690                     CHECK_FPU_FEATURE(dc, FLOAT128);
3691                     FMOVR(q);
3692                     break;
3693                 }
3694 #undef FMOVR
3695 #endif
3696                 switch (xop) {
3697 #ifdef TARGET_SPARC64
                    /* FMOVCC(fcc, sz): FP conditional move keyed on the
                       floating-point condition code field %fccN; the move
                       condition itself sits in insn bits 14..17. */
3698 #define FMOVCC(fcc, sz)                                                 \
3699                     do {                                                \
3700                         DisasCompare cmp;                               \
3701                         cond = GET_FIELD_SP(insn, 14, 17);              \
3702                         gen_fcompare(&cmp, fcc, cond);                  \
3703                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3704                     } while (0)
3705 
3706                     case 0x001: /* V9 fmovscc %fcc0 */
3707                         FMOVCC(0, s);
3708                         break;
3709                     case 0x002: /* V9 fmovdcc %fcc0 */
3710                         FMOVCC(0, d);
3711                         break;
3712                     case 0x003: /* V9 fmovqcc %fcc0 */
3713                         CHECK_FPU_FEATURE(dc, FLOAT128);
3714                         FMOVCC(0, q);
3715                         break;
3716                     case 0x041: /* V9 fmovscc %fcc1 */
3717                         FMOVCC(1, s);
3718                         break;
3719                     case 0x042: /* V9 fmovdcc %fcc1 */
3720                         FMOVCC(1, d);
3721                         break;
3722                     case 0x043: /* V9 fmovqcc %fcc1 */
3723                         CHECK_FPU_FEATURE(dc, FLOAT128);
3724                         FMOVCC(1, q);
3725                         break;
3726                     case 0x081: /* V9 fmovscc %fcc2 */
3727                         FMOVCC(2, s);
3728                         break;
3729                     case 0x082: /* V9 fmovdcc %fcc2 */
3730                         FMOVCC(2, d);
3731                         break;
3732                     case 0x083: /* V9 fmovqcc %fcc2 */
3733                         CHECK_FPU_FEATURE(dc, FLOAT128);
3734                         FMOVCC(2, q);
3735                         break;
3736                     case 0x0c1: /* V9 fmovscc %fcc3 */
3737                         FMOVCC(3, s);
3738                         break;
3739                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3740                         FMOVCC(3, d);
3741                         break;
3742                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3743                         CHECK_FPU_FEATURE(dc, FLOAT128);
3744                         FMOVCC(3, q);
3745                         break;
3746 #undef FMOVCC
                    /* Redefined: FMOVCC(xcc, sz) now keys on the integer
                       condition codes — xcc = 0 selects %icc, 1 selects
                       %xcc (see gen_compare). */
3747 #define FMOVCC(xcc, sz)                                                 \
3748                     do {                                                \
3749                         DisasCompare cmp;                               \
3750                         cond = GET_FIELD_SP(insn, 14, 17);              \
3751                         gen_compare(&cmp, xcc, cond, dc);               \
3752                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3753                     } while (0)
3754 
3755                     case 0x101: /* V9 fmovscc %icc */
3756                         FMOVCC(0, s);
3757                         break;
3758                     case 0x102: /* V9 fmovdcc %icc */
3759                         FMOVCC(0, d);
3760                         break;
3761                     case 0x103: /* V9 fmovqcc %icc */
3762                         CHECK_FPU_FEATURE(dc, FLOAT128);
3763                         FMOVCC(0, q);
3764                         break;
3765                     case 0x181: /* V9 fmovscc %xcc */
3766                         FMOVCC(1, s);
3767                         break;
3768                     case 0x182: /* V9 fmovdcc %xcc */
3769                         FMOVCC(1, d);
3770                         break;
3771                     case 0x183: /* V9 fmovqcc %xcc */
3772                         CHECK_FPU_FEATURE(dc, FLOAT128);
3773                         FMOVCC(1, q);
3774                         break;
3775 #undef FMOVCC
3776 #endif
                    /* FP compares: rd & 3 selects the destination %fcc
                       field (only %fcc0 exists pre-V9).  The fcmpe*
                       variants additionally signal on quiet NaNs. */
3777                     case 0x51: /* fcmps, V9 %fcc */
3778                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3779                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3780                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3781                         break;
3782                     case 0x52: /* fcmpd, V9 %fcc */
3783                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3784                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3785                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3786                         break;
3787                     case 0x53: /* fcmpq, V9 %fcc */
3788                         CHECK_FPU_FEATURE(dc, FLOAT128);
3789                         gen_op_load_fpr_QT0(QFPREG(rs1));
3790                         gen_op_load_fpr_QT1(QFPREG(rs2));
3791                         gen_op_fcmpq(rd & 3);
3792                         break;
3793                     case 0x55: /* fcmpes, V9 %fcc */
3794                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3795                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3796                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3797                         break;
3798                     case 0x56: /* fcmped, V9 %fcc */
3799                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3800                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3801                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3802                         break;
3803                     case 0x57: /* fcmpeq, V9 %fcc */
3804                         CHECK_FPU_FEATURE(dc, FLOAT128);
3805                         gen_op_load_fpr_QT0(QFPREG(rs1));
3806                         gen_op_load_fpr_QT1(QFPREG(rs2));
3807                         gen_op_fcmpeq(rd & 3);
3808                         break;
3809                     default:
3810                         goto illegal_insn;
3811                 }
3812             } else if (xop == 0x2) {
                /* or — with the classic SPARC idioms special-cased so no
                   TCG or-op is emitted when one operand is %g0:
                     or %g0, x, rd   -> mov/clr (rs1 == 0)
                     or x, %g0, rd   -> mov     (rs2 == 0)                */
3813                 TCGv dst = gen_dest_gpr(dc, rd);
3814                 rs1 = GET_FIELD(insn, 13, 17);
3815                 if (rs1 == 0) {
3816                     /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3817                     if (IS_IMM) {       /* immediate */
3818                         simm = GET_FIELDs(insn, 19, 31);
3819                         tcg_gen_movi_tl(dst, simm);
3820                         gen_store_gpr(dc, rd, dst);
3821                     } else {            /* register */
3822                         rs2 = GET_FIELD(insn, 27, 31);
3823                         if (rs2 == 0) {
                            /* or %g0, %g0, rd == clr rd */
3824                             tcg_gen_movi_tl(dst, 0);
3825                             gen_store_gpr(dc, rd, dst);
3826                         } else {
3827                             cpu_src2 = gen_load_gpr(dc, rs2);
3828                             gen_store_gpr(dc, rd, cpu_src2);
3829                         }
3830                     }
3831                 } else {
3832                     cpu_src1 = get_src1(dc, insn);
3833                     if (IS_IMM) {       /* immediate */
3834                         simm = GET_FIELDs(insn, 19, 31);
3835                         tcg_gen_ori_tl(dst, cpu_src1, simm);
3836                         gen_store_gpr(dc, rd, dst);
3837                     } else {            /* register */
3838                         rs2 = GET_FIELD(insn, 27, 31);
3839                         if (rs2 == 0) {
3840                             /* mov shortcut:  or x, %g0, y -> mov x, y */
3841                             gen_store_gpr(dc, rd, cpu_src1);
3842                         } else {
3843                             cpu_src2 = gen_load_gpr(dc, rs2);
3844                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3845                             gen_store_gpr(dc, rd, dst);
3846                         }
3847                     }
3848                 }
3849 #ifdef TARGET_SPARC64
            /* V9 shifts: insn bit 12 selects the 64-bit x-form (6-bit
               shift count) vs. the 32-bit form (5-bit count).  For the
               32-bit srl/sra the source is first narrowed (zero- resp.
               sign-extended) so the shift sees only the low 32 bits. */
3850             } else if (xop == 0x25) { /* sll, V9 sllx */
3851                 cpu_src1 = get_src1(dc, insn);
3852                 if (IS_IMM) {   /* immediate */
3853                     simm = GET_FIELDs(insn, 20, 31);
3854                     if (insn & (1 << 12)) {
3855                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3856                     } else {
                        /* sll: low 32 bits of the result are what matter,
                           so no pre-narrowing is needed for a left shift. */
3857                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3858                     }
3859                 } else {                /* register */
3860                     rs2 = GET_FIELD(insn, 27, 31);
3861                     cpu_src2 = gen_load_gpr(dc, rs2);
3862                     cpu_tmp0 = tcg_temp_new();
3863                     if (insn & (1 << 12)) {
3864                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3865                     } else {
3866                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3867                     }
3868                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3869                 }
3870                 gen_store_gpr(dc, rd, cpu_dst);
3871             } else if (xop == 0x26) { /* srl, V9 srlx */
3872                 cpu_src1 = get_src1(dc, insn);
3873                 if (IS_IMM) {   /* immediate */
3874                     simm = GET_FIELDs(insn, 20, 31);
3875                     if (insn & (1 << 12)) {
3876                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3877                     } else {
                        /* 32-bit srl: zero-extend before shifting right. */
3878                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3879                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3880                     }
3881                 } else {                /* register */
3882                     rs2 = GET_FIELD(insn, 27, 31);
3883                     cpu_src2 = gen_load_gpr(dc, rs2);
3884                     cpu_tmp0 = tcg_temp_new();
3885                     if (insn & (1 << 12)) {
3886                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3887                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3888                     } else {
3889                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3890                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3891                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3892                     }
3893                 }
3894                 gen_store_gpr(dc, rd, cpu_dst);
3895             } else if (xop == 0x27) { /* sra, V9 srax */
3896                 cpu_src1 = get_src1(dc, insn);
3897                 if (IS_IMM) {   /* immediate */
3898                     simm = GET_FIELDs(insn, 20, 31);
3899                     if (insn & (1 << 12)) {
3900                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3901                     } else {
                        /* 32-bit sra: sign-extend before shifting right. */
3902                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3903                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3904                     }
3905                 } else {                /* register */
3906                     rs2 = GET_FIELD(insn, 27, 31);
3907                     cpu_src2 = gen_load_gpr(dc, rs2);
3908                     cpu_tmp0 = tcg_temp_new();
3909                     if (insn & (1 << 12)) {
3910                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3911                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3912                     } else {
3913                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3914                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3915                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3916                     }
3917                 }
3918                 gen_store_gpr(dc, rd, cpu_dst);
3919 #endif
3920             } else if (xop < 0x36) {
3921                 if (xop < 0x20) {
                    /* Basic ALU ops.  Bit 4 of xop (0x10) selects the
                       condition-code-setting variant (addcc, andcc, ...),
                       so the switch dispatches on xop & ~0x10.  Flags are
                       tracked lazily: cpu_cc_op / dc->cc_op record how to
                       recompute the PSR flags from cpu_cc_{src,src2,dst}. */
3922                     cpu_src1 = get_src1(dc, insn);
3923                     cpu_src2 = get_src2(dc, insn);
3924                     switch (xop & ~0x10) {
3925                     case 0x0: /* add */
3926                         if (xop & 0x10) {
3927                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3928                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3929                             dc->cc_op = CC_OP_ADD;
3930                         } else {
3931                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3932                         }
3933                         break;
3934                     case 0x1: /* and */
3935                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3936                         if (xop & 0x10) {
                            /* Logic ops only need the result to derive
                               N/Z flags later: stash it in cc_dst. */
3937                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3938                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3939                             dc->cc_op = CC_OP_LOGIC;
3940                         }
3941                         break;
3942                     case 0x2: /* or */
3943                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3944                         if (xop & 0x10) {
3945                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3946                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3947                             dc->cc_op = CC_OP_LOGIC;
3948                         }
3949                         break;
3950                     case 0x3: /* xor */
3951                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3952                         if (xop & 0x10) {
3953                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3954                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3955                             dc->cc_op = CC_OP_LOGIC;
3956                         }
3957                         break;
3958                     case 0x4: /* sub */
3959                         if (xop & 0x10) {
3960                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3961                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3962                             dc->cc_op = CC_OP_SUB;
3963                         } else {
3964                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3965                         }
3966                         break;
3967                     case 0x5: /* andn */
3968                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3969                         if (xop & 0x10) {
3970                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3971                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3972                             dc->cc_op = CC_OP_LOGIC;
3973                         }
3974                         break;
3975                     case 0x6: /* orn */
3976                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3977                         if (xop & 0x10) {
3978                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3979                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3980                             dc->cc_op = CC_OP_LOGIC;
3981                         }
3982                         break;
3983                     case 0x7: /* xorn */
3984                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3985                         if (xop & 0x10) {
3986                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3987                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3988                             dc->cc_op = CC_OP_LOGIC;
3989                         }
3990                         break;
3991                     case 0x8: /* addx, V9 addc */
                        /* Add with carry-in; helper handles both the
                           plain and cc-setting forms via the last arg. */
3992                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3993                                         (xop & 0x10));
3994                         break;
3995 #ifdef TARGET_SPARC64
3996                     case 0x9: /* V9 mulx */
3997                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3998                         break;
3999 #endif
4000                     case 0xa: /* umul */
4001                         CHECK_IU_FEATURE(dc, MUL);
4002                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
4003                         if (xop & 0x10) {
4004                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4005                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4006                             dc->cc_op = CC_OP_LOGIC;
4007                         }
4008                         break;
4009                     case 0xb: /* smul */
4010                         CHECK_IU_FEATURE(dc, MUL);
4011                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
4012                         if (xop & 0x10) {
4013                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4014                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4015                             dc->cc_op = CC_OP_LOGIC;
4016                         }
4017                         break;
4018                     case 0xc: /* subx, V9 subc */
4019                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4020                                         (xop & 0x10));
4021                         break;
4022 #ifdef TARGET_SPARC64
4023                     case 0xd: /* V9 udivx */
                        /* Helper call: may raise division-by-zero trap. */
4024                         gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4025                         break;
4026 #endif
4027                     case 0xe: /* udiv */
4028                         CHECK_IU_FEATURE(dc, DIV);
4029                         if (xop & 0x10) {
                            /* NOTE(review): no tcg_gen_movi_i32(cpu_cc_op,
                               CC_OP_DIV) here — presumably the _cc helper
                               stores env->cc_op itself; confirm against
                               helper_udiv_cc before changing. */
4030                             gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
4031                                                cpu_src2);
4032                             dc->cc_op = CC_OP_DIV;
4033                         } else {
4034                             gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
4035                                             cpu_src2);
4036                         }
4037                         break;
4038                     case 0xf: /* sdiv */
4039                         CHECK_IU_FEATURE(dc, DIV);
4040                         if (xop & 0x10) {
4041                             gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
4042                                                cpu_src2);
4043                             dc->cc_op = CC_OP_DIV;
4044                         } else {
4045                             gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
4046                                             cpu_src2);
4047                         }
4048                         break;
4049                     default:
4050                         goto illegal_insn;
4051                     }
                    /* Single writeback point for the whole ALU group. */
4052                     gen_store_gpr(dc, rd, cpu_dst);
4053                 } else {
                    /* xop 0x20..0x35: tagged arithmetic, mulscc and (on
                       32-bit targets) the plain shifts.  Unlike the group
                       above, each case stores its own result. */
4054                     cpu_src1 = get_src1(dc, insn);
4055                     cpu_src2 = get_src2(dc, insn);
4056                     switch (xop) {
4057                     case 0x20: /* taddcc */
4058                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4059                         gen_store_gpr(dc, rd, cpu_dst);
4060                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
4061                         dc->cc_op = CC_OP_TADD;
4062                         break;
4063                     case 0x21: /* tsubcc */
4064                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4065                         gen_store_gpr(dc, rd, cpu_dst);
4066                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
4067                         dc->cc_op = CC_OP_TSUB;
4068                         break;
4069                     case 0x22: /* taddcctv */
                        /* Trapping variant goes through a helper (it can
                           raise a tag-overflow trap).  NOTE(review): no
                           movi to cpu_cc_op here — presumably the helper
                           updates env->cc_op; confirm before changing. */
4070                         gen_helper_taddcctv(cpu_dst, cpu_env,
4071                                             cpu_src1, cpu_src2);
4072                         gen_store_gpr(dc, rd, cpu_dst);
4073                         dc->cc_op = CC_OP_TADDTV;
4074                         break;
4075                     case 0x23: /* tsubcctv */
4076                         gen_helper_tsubcctv(cpu_dst, cpu_env,
4077                                             cpu_src1, cpu_src2);
4078                         gen_store_gpr(dc, rd, cpu_dst);
4079                         dc->cc_op = CC_OP_TSUBTV;
4080                         break;
4081                     case 0x24: /* mulscc */
                        /* mulscc reads the current flags, so force the
                           lazily-tracked PSR to be materialized first. */
4082                         update_psr(dc);
4083                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
4084                         gen_store_gpr(dc, rd, cpu_dst);
4085                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4086                         dc->cc_op = CC_OP_ADD;
4087                         break;
4088 #ifndef TARGET_SPARC64
                    /* 32-bit-only shifts; the V9 forms are handled in the
                       TARGET_SPARC64 branch earlier in this function. */
4089                     case 0x25:  /* sll */
4090                         if (IS_IMM) { /* immediate */
4091                             simm = GET_FIELDs(insn, 20, 31);
4092                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
4093                         } else { /* register */
4094                             cpu_tmp0 = tcg_temp_new();
4095                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4096                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
4097                         }
4098                         gen_store_gpr(dc, rd, cpu_dst);
4099                         break;
4100                     case 0x26:  /* srl */
4101                         if (IS_IMM) { /* immediate */
4102                             simm = GET_FIELDs(insn, 20, 31);
4103                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
4104                         } else { /* register */
4105                             cpu_tmp0 = tcg_temp_new();
4106                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4107                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
4108                         }
4109                         gen_store_gpr(dc, rd, cpu_dst);
4110                         break;
4111                     case 0x27:  /* sra */
4112                         if (IS_IMM) { /* immediate */
4113                             simm = GET_FIELDs(insn, 20, 31);
4114                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
4115                         } else { /* register */
4116                             cpu_tmp0 = tcg_temp_new();
4117                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4118                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
4119                         }
4120                         gen_store_gpr(dc, rd, cpu_dst);
4121                         break;
4122 #endif
4123                     case 0x30:
                    /* WRASR/wry: rd selects the ancillary state register.
                       Per the architectural WRASR definition the value
                       written is rs1 ^ rs2 (or rs1 ^ simm), hence the
                       xor in every case below. */
4124                         {
4125                             cpu_tmp0 = tcg_temp_new();
4126                             switch(rd) {
4127                             case 0: /* wry */
4128                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4129                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
4130                                 break;
4131 #ifndef TARGET_SPARC64
4132                             case 0x01 ... 0x0f: /* undefined in the
4133                                                    SPARCv8 manual, nop
4134                                                    on the microSPARC
4135                                                    II */
4136                             case 0x10 ... 0x1f: /* implementation-dependent
4137                                                    in the SPARCv8
4138                                                    manual, nop on the
4139                                                    microSPARC II */
4140                                 if ((rd == 0x13) && (dc->def->features &
4141                                                      CPU_FEATURE_POWERDOWN)) {
4142                                     /* LEON3 power-down */
4143                                     save_state(dc);
4144                                     gen_helper_power_down(cpu_env);
4145                                 }
4146                                 break;
4147 #else
4148                             case 0x2: /* V9 wrccr */
4149                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4150                                 gen_helper_wrccr(cpu_env, cpu_tmp0);
                                /* CCR now holds authoritative flags. */
4151                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4152                                 dc->cc_op = CC_OP_FLAGS;
4153                                 break;
4154                             case 0x3: /* V9 wrasi */
4155                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4156                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
4157                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4158                                                 offsetof(CPUSPARCState, asi));
4159                                 /* End TB to notice changed ASI.  */
4160                                 save_state(dc);
4161                                 gen_op_next_insn();
4162                                 tcg_gen_exit_tb(NULL, 0);
4163                                 dc->base.is_jmp = DISAS_NORETURN;
4164                                 break;
4165                             case 0x6: /* V9 wrfprs */
4166                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4167                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
                                /* FPRS changed: stop the TB so subsequent
                                   code re-checks FPU-enable state. */
4168                                 dc->fprs_dirty = 0;
4169                                 save_state(dc);
4170                                 gen_op_next_insn();
4171                                 tcg_gen_exit_tb(NULL, 0);
4172                                 dc->base.is_jmp = DISAS_NORETURN;
4173                                 break;
4174                             case 0xf: /* V9 sir, nop if user */
4175 #if !defined(CONFIG_USER_ONLY)
4176                                 if (supervisor(dc)) {
4177                                     ; // XXX
4178                                 }
4179 #endif
4180                                 break;
4181                             case 0x13: /* Graphics Status */
4182                                 if (gen_trap_ifnofpu(dc)) {
4183                                     goto jmp_insn;
4184                                 }
4185                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
4186                                 break;
4187                             case 0x14: /* Softint set */
4188                                 if (!supervisor(dc))
4189                                     goto illegal_insn;
4190                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4191                                 gen_helper_set_softint(cpu_env, cpu_tmp0);
4192                                 break;
4193                             case 0x15: /* Softint clear */
4194                                 if (!supervisor(dc))
4195                                     goto illegal_insn;
4196                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4197                                 gen_helper_clear_softint(cpu_env, cpu_tmp0);
4198                                 break;
4199                             case 0x16: /* Softint write */
4200                                 if (!supervisor(dc))
4201                                     goto illegal_insn;
4202                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4203                                 gen_helper_write_softint(cpu_env, cpu_tmp0);
4204                                 break;
4205                             case 0x17: /* Tick compare */
4206 #if !defined(CONFIG_USER_ONLY)
4207                                 if (!supervisor(dc))
4208                                     goto illegal_insn;
4209 #endif
4210                                 {
4211                                     TCGv_ptr r_tickptr;
4212 
4213                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
4214                                                    cpu_src2);
4215                                     r_tickptr = tcg_temp_new_ptr();
4216                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4217                                                    offsetof(CPUSPARCState, tick));
                                    /* Timer access is an I/O op under
                                       icount; bracket it accordingly. */
4218                                     if (tb_cflags(dc->base.tb) &
4219                                            CF_USE_ICOUNT) {
4220                                         gen_io_start();
4221                                     }
4222                                     gen_helper_tick_set_limit(r_tickptr,
4223                                                               cpu_tick_cmpr);
4224                                     /* End TB to handle timer interrupt */
4225                                     dc->base.is_jmp = DISAS_EXIT;
4226                                 }
4227                                 break;
4228                             case 0x18: /* System tick */
4229 #if !defined(CONFIG_USER_ONLY)
4230                                 if (!supervisor(dc))
4231                                     goto illegal_insn;
4232 #endif
4233                                 {
4234                                     TCGv_ptr r_tickptr;
4235 
4236                                     tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
4237                                                    cpu_src2);
4238                                     r_tickptr = tcg_temp_new_ptr();
4239                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4240                                                    offsetof(CPUSPARCState, stick));
4241                                     if (tb_cflags(dc->base.tb) &
4242                                            CF_USE_ICOUNT) {
4243                                         gen_io_start();
4244                                     }
4245                                     gen_helper_tick_set_count(r_tickptr,
4246                                                               cpu_tmp0);
4247                                     /* End TB to handle timer interrupt */
4248                                     dc->base.is_jmp = DISAS_EXIT;
4249                                 }
4250                                 break;
4251                             case 0x19: /* System tick compare */
4252 #if !defined(CONFIG_USER_ONLY)
4253                                 if (!supervisor(dc))
4254                                     goto illegal_insn;
4255 #endif
4256                                 {
4257                                     TCGv_ptr r_tickptr;
4258 
4259                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
4260                                                    cpu_src2);
4261                                     r_tickptr = tcg_temp_new_ptr();
4262                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4263                                                    offsetof(CPUSPARCState, stick));
4264                                     if (tb_cflags(dc->base.tb) &
4265                                            CF_USE_ICOUNT) {
4266                                         gen_io_start();
4267                                     }
4268                                     gen_helper_tick_set_limit(r_tickptr,
4269                                                               cpu_stick_cmpr);
4270                                     /* End TB to handle timer interrupt */
4271                                     dc->base.is_jmp = DISAS_EXIT;
4272                                 }
4273                                 break;
4274 
4275                             case 0x10: /* Performance Control */
4276                             case 0x11: /* Performance Instrumentation
4277                                           Counter */
4278                             case 0x12: /* Dispatch Control */
4279 #endif
4280                             default:
4281                                 goto illegal_insn;
4282                             }
4283                         }
4284                         break;
4285 #if !defined(CONFIG_USER_ONLY)
4286                     case 0x31: /* wrpsr, V9 saved, restored */
4287                         {
4288                             if (!supervisor(dc))
4289                                 goto priv_insn;
4290 #ifdef TARGET_SPARC64
4291                             switch (rd) {
4292                             case 0:
4293                                 gen_helper_saved(cpu_env);
4294                                 break;
4295                             case 1:
4296                                 gen_helper_restored(cpu_env);
4297                                 break;
4298                             case 2: /* UA2005 allclean */
4299                             case 3: /* UA2005 otherw */
4300                             case 4: /* UA2005 normalw */
4301                             case 5: /* UA2005 invalw */
4302                                 // XXX
4303                             default:
4304                                 goto illegal_insn;
4305                             }
4306 #else
4307                             cpu_tmp0 = tcg_temp_new();
4308                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4309                             gen_helper_wrpsr(cpu_env, cpu_tmp0);
4310                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4311                             dc->cc_op = CC_OP_FLAGS;
4312                             save_state(dc);
4313                             gen_op_next_insn();
4314                             tcg_gen_exit_tb(NULL, 0);
4315                             dc->base.is_jmp = DISAS_NORETURN;
4316 #endif
4317                         }
4318                         break;
4319                     case 0x32: /* wrwim, V9 wrpr */
4320                         {
4321                             if (!supervisor(dc))
4322                                 goto priv_insn;
4323                             cpu_tmp0 = tcg_temp_new();
4324                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4325 #ifdef TARGET_SPARC64
4326                             switch (rd) {
4327                             case 0: // tpc
4328                                 {
4329                                     TCGv_ptr r_tsptr;
4330 
4331                                     r_tsptr = tcg_temp_new_ptr();
4332                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4333                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4334                                                   offsetof(trap_state, tpc));
4335                                 }
4336                                 break;
4337                             case 1: // tnpc
4338                                 {
4339                                     TCGv_ptr r_tsptr;
4340 
4341                                     r_tsptr = tcg_temp_new_ptr();
4342                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4343                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4344                                                   offsetof(trap_state, tnpc));
4345                                 }
4346                                 break;
4347                             case 2: // tstate
4348                                 {
4349                                     TCGv_ptr r_tsptr;
4350 
4351                                     r_tsptr = tcg_temp_new_ptr();
4352                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4353                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4354                                                   offsetof(trap_state,
4355                                                            tstate));
4356                                 }
4357                                 break;
4358                             case 3: // tt
4359                                 {
4360                                     TCGv_ptr r_tsptr;
4361 
4362                                     r_tsptr = tcg_temp_new_ptr();
4363                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4364                                     tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
4365                                                     offsetof(trap_state, tt));
4366                                 }
4367                                 break;
4368                             case 4: // tick
4369                                 {
4370                                     TCGv_ptr r_tickptr;
4371 
4372                                     r_tickptr = tcg_temp_new_ptr();
4373                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4374                                                    offsetof(CPUSPARCState, tick));
4375                                     if (tb_cflags(dc->base.tb) &
4376                                            CF_USE_ICOUNT) {
4377                                         gen_io_start();
4378                                     }
4379                                     gen_helper_tick_set_count(r_tickptr,
4380                                                               cpu_tmp0);
4381                                     /* End TB to handle timer interrupt */
4382                                     dc->base.is_jmp = DISAS_EXIT;
4383                                 }
4384                                 break;
4385                             case 5: // tba
4386                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
4387                                 break;
4388                             case 6: // pstate
4389                                 save_state(dc);
4390                                 if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
4391                                     gen_io_start();
4392                                 }
4393                                 gen_helper_wrpstate(cpu_env, cpu_tmp0);
4394                                 if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
4395                                     /* I/O ops in icount mode must end the TB */
4396                                     dc->base.is_jmp = DISAS_EXIT;
4397                                 }
4398                                 dc->npc = DYNAMIC_PC;
4399                                 break;
4400                             case 7: // tl
4401                                 save_state(dc);
4402                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4403                                                offsetof(CPUSPARCState, tl));
4404                                 dc->npc = DYNAMIC_PC;
4405                                 break;
4406                             case 8: // pil
4407                                 if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
4408                                     gen_io_start();
4409                                 }
4410                                 gen_helper_wrpil(cpu_env, cpu_tmp0);
4411                                 if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
4412                                     /* I/O ops in icount mode must end the TB */
4413                                     dc->base.is_jmp = DISAS_EXIT;
4414                                 }
4415                                 break;
4416                             case 9: // cwp
4417                                 gen_helper_wrcwp(cpu_env, cpu_tmp0);
4418                                 break;
4419                             case 10: // cansave
4420                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4421                                                 offsetof(CPUSPARCState,
4422                                                          cansave));
4423                                 break;
4424                             case 11: // canrestore
4425                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4426                                                 offsetof(CPUSPARCState,
4427                                                          canrestore));
4428                                 break;
4429                             case 12: // cleanwin
4430                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4431                                                 offsetof(CPUSPARCState,
4432                                                          cleanwin));
4433                                 break;
4434                             case 13: // otherwin
4435                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4436                                                 offsetof(CPUSPARCState,
4437                                                          otherwin));
4438                                 break;
4439                             case 14: // wstate
4440                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4441                                                 offsetof(CPUSPARCState,
4442                                                          wstate));
4443                                 break;
4444                             case 16: // UA2005 gl
4445                                 CHECK_IU_FEATURE(dc, GL);
4446                                 gen_helper_wrgl(cpu_env, cpu_tmp0);
4447                                 break;
4448                             case 26: // UA2005 strand status
4449                                 CHECK_IU_FEATURE(dc, HYPV);
4450                                 if (!hypervisor(dc))
4451                                     goto priv_insn;
4452                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
4453                                 break;
4454                             default:
4455                                 goto illegal_insn;
4456                             }
4457 #else
4458                             tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
4459                             if (dc->def->nwindows != 32) {
4460                                 tcg_gen_andi_tl(cpu_wim, cpu_wim,
4461                                                 (1 << dc->def->nwindows) - 1);
4462                             }
4463 #endif
4464                         }
4465                         break;
4466                     case 0x33: /* wrtbr, UA2005 wrhpr */
4467                         {
4468 #ifndef TARGET_SPARC64
4469                             if (!supervisor(dc))
4470                                 goto priv_insn;
4471                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
4472 #else
4473                             CHECK_IU_FEATURE(dc, HYPV);
4474                             if (!hypervisor(dc))
4475                                 goto priv_insn;
4476                             cpu_tmp0 = tcg_temp_new();
4477                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4478                             switch (rd) {
4479                             case 0: // hpstate
4480                                 tcg_gen_st_i64(cpu_tmp0, cpu_env,
4481                                                offsetof(CPUSPARCState,
4482                                                         hpstate));
4483                                 save_state(dc);
4484                                 gen_op_next_insn();
4485                                 tcg_gen_exit_tb(NULL, 0);
4486                                 dc->base.is_jmp = DISAS_NORETURN;
4487                                 break;
4488                             case 1: // htstate
4489                                 // XXX gen_op_wrhtstate();
4490                                 break;
4491                             case 3: // hintp
4492                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
4493                                 break;
4494                             case 5: // htba
4495                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
4496                                 break;
4497                             case 31: // hstick_cmpr
4498                                 {
4499                                     TCGv_ptr r_tickptr;
4500 
4501                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
4502                                     r_tickptr = tcg_temp_new_ptr();
4503                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4504                                                    offsetof(CPUSPARCState, hstick));
4505                                     if (tb_cflags(dc->base.tb) &
4506                                            CF_USE_ICOUNT) {
4507                                         gen_io_start();
4508                                     }
4509                                     gen_helper_tick_set_limit(r_tickptr,
4510                                                               cpu_hstick_cmpr);
4511                                     /* End TB to handle timer interrupt */
4512                                     dc->base.is_jmp = DISAS_EXIT;
4513                                 }
4514                                 break;
4515                             case 6: // hver readonly
4516                             default:
4517                                 goto illegal_insn;
4518                             }
4519 #endif
4520                         }
4521                         break;
4522 #endif
4523 #ifdef TARGET_SPARC64
4524                     case 0x2c: /* V9 movcc */
4525                         {
4526                             int cc = GET_FIELD_SP(insn, 11, 12);
4527                             int cond = GET_FIELD_SP(insn, 14, 17);
4528                             DisasCompare cmp;
4529                             TCGv dst;
4530 
4531                             if (insn & (1 << 18)) {
4532                                 if (cc == 0) {
4533                                     gen_compare(&cmp, 0, cond, dc);
4534                                 } else if (cc == 2) {
4535                                     gen_compare(&cmp, 1, cond, dc);
4536                                 } else {
4537                                     goto illegal_insn;
4538                                 }
4539                             } else {
4540                                 gen_fcompare(&cmp, cc, cond);
4541                             }
4542 
4543                             /* The get_src2 above loaded the normal 13-bit
4544                                immediate field, not the 11-bit field we have
4545                                in movcc.  But it did handle the reg case.  */
4546                             if (IS_IMM) {
4547                                 simm = GET_FIELD_SPs(insn, 0, 10);
4548                                 tcg_gen_movi_tl(cpu_src2, simm);
4549                             }
4550 
4551                             dst = gen_load_gpr(dc, rd);
4552                             tcg_gen_movcond_tl(cmp.cond, dst,
4553                                                cmp.c1, cmp.c2,
4554                                                cpu_src2, dst);
4555                             gen_store_gpr(dc, rd, dst);
4556                             break;
4557                         }
4558                     case 0x2d: /* V9 sdivx */
4559                         gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4560                         gen_store_gpr(dc, rd, cpu_dst);
4561                         break;
4562                     case 0x2e: /* V9 popc */
4563                         tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
4564                         gen_store_gpr(dc, rd, cpu_dst);
4565                         break;
4566                     case 0x2f: /* V9 movr */
4567                         {
4568                             int cond = GET_FIELD_SP(insn, 10, 12);
4569                             DisasCompare cmp;
4570                             TCGv dst;
4571 
4572                             gen_compare_reg(&cmp, cond, cpu_src1);
4573 
4574                             /* The get_src2 above loaded the normal 13-bit
4575                                immediate field, not the 10-bit field we have
4576                                in movr.  But it did handle the reg case.  */
4577                             if (IS_IMM) {
4578                                 simm = GET_FIELD_SPs(insn, 0, 9);
4579                                 tcg_gen_movi_tl(cpu_src2, simm);
4580                             }
4581 
4582                             dst = gen_load_gpr(dc, rd);
4583                             tcg_gen_movcond_tl(cmp.cond, dst,
4584                                                cmp.c1, cmp.c2,
4585                                                cpu_src2, dst);
4586                             gen_store_gpr(dc, rd, dst);
4587                             break;
4588                         }
4589 #endif
4590                     default:
4591                         goto illegal_insn;
4592                     }
4593                 }
4594             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4595 #ifdef TARGET_SPARC64
4596                 int opf = GET_FIELD_SP(insn, 5, 13);
4597                 rs1 = GET_FIELD(insn, 13, 17);
4598                 rs2 = GET_FIELD(insn, 27, 31);
4599                 if (gen_trap_ifnofpu(dc)) {
4600                     goto jmp_insn;
4601                 }
4602 
4603                 switch (opf) {
4604                 case 0x000: /* VIS I edge8cc */
4605                     CHECK_FPU_FEATURE(dc, VIS1);
4606                     cpu_src1 = gen_load_gpr(dc, rs1);
4607                     cpu_src2 = gen_load_gpr(dc, rs2);
4608                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4609                     gen_store_gpr(dc, rd, cpu_dst);
4610                     break;
4611                 case 0x001: /* VIS II edge8n */
4612                     CHECK_FPU_FEATURE(dc, VIS2);
4613                     cpu_src1 = gen_load_gpr(dc, rs1);
4614                     cpu_src2 = gen_load_gpr(dc, rs2);
4615                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4616                     gen_store_gpr(dc, rd, cpu_dst);
4617                     break;
4618                 case 0x002: /* VIS I edge8lcc */
4619                     CHECK_FPU_FEATURE(dc, VIS1);
4620                     cpu_src1 = gen_load_gpr(dc, rs1);
4621                     cpu_src2 = gen_load_gpr(dc, rs2);
4622                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4623                     gen_store_gpr(dc, rd, cpu_dst);
4624                     break;
4625                 case 0x003: /* VIS II edge8ln */
4626                     CHECK_FPU_FEATURE(dc, VIS2);
4627                     cpu_src1 = gen_load_gpr(dc, rs1);
4628                     cpu_src2 = gen_load_gpr(dc, rs2);
4629                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4630                     gen_store_gpr(dc, rd, cpu_dst);
4631                     break;
4632                 case 0x004: /* VIS I edge16cc */
4633                     CHECK_FPU_FEATURE(dc, VIS1);
4634                     cpu_src1 = gen_load_gpr(dc, rs1);
4635                     cpu_src2 = gen_load_gpr(dc, rs2);
4636                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4637                     gen_store_gpr(dc, rd, cpu_dst);
4638                     break;
4639                 case 0x005: /* VIS II edge16n */
4640                     CHECK_FPU_FEATURE(dc, VIS2);
4641                     cpu_src1 = gen_load_gpr(dc, rs1);
4642                     cpu_src2 = gen_load_gpr(dc, rs2);
4643                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4644                     gen_store_gpr(dc, rd, cpu_dst);
4645                     break;
4646                 case 0x006: /* VIS I edge16lcc */
4647                     CHECK_FPU_FEATURE(dc, VIS1);
4648                     cpu_src1 = gen_load_gpr(dc, rs1);
4649                     cpu_src2 = gen_load_gpr(dc, rs2);
4650                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4651                     gen_store_gpr(dc, rd, cpu_dst);
4652                     break;
4653                 case 0x007: /* VIS II edge16ln */
4654                     CHECK_FPU_FEATURE(dc, VIS2);
4655                     cpu_src1 = gen_load_gpr(dc, rs1);
4656                     cpu_src2 = gen_load_gpr(dc, rs2);
4657                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4658                     gen_store_gpr(dc, rd, cpu_dst);
4659                     break;
4660                 case 0x008: /* VIS I edge32cc */
4661                     CHECK_FPU_FEATURE(dc, VIS1);
4662                     cpu_src1 = gen_load_gpr(dc, rs1);
4663                     cpu_src2 = gen_load_gpr(dc, rs2);
4664                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4665                     gen_store_gpr(dc, rd, cpu_dst);
4666                     break;
4667                 case 0x009: /* VIS II edge32n */
4668                     CHECK_FPU_FEATURE(dc, VIS2);
4669                     cpu_src1 = gen_load_gpr(dc, rs1);
4670                     cpu_src2 = gen_load_gpr(dc, rs2);
4671                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4672                     gen_store_gpr(dc, rd, cpu_dst);
4673                     break;
4674                 case 0x00a: /* VIS I edge32lcc */
4675                     CHECK_FPU_FEATURE(dc, VIS1);
4676                     cpu_src1 = gen_load_gpr(dc, rs1);
4677                     cpu_src2 = gen_load_gpr(dc, rs2);
4678                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4679                     gen_store_gpr(dc, rd, cpu_dst);
4680                     break;
4681                 case 0x00b: /* VIS II edge32ln */
4682                     CHECK_FPU_FEATURE(dc, VIS2);
4683                     cpu_src1 = gen_load_gpr(dc, rs1);
4684                     cpu_src2 = gen_load_gpr(dc, rs2);
4685                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4686                     gen_store_gpr(dc, rd, cpu_dst);
4687                     break;
4688                 case 0x010: /* VIS I array8 */
4689                     CHECK_FPU_FEATURE(dc, VIS1);
4690                     cpu_src1 = gen_load_gpr(dc, rs1);
4691                     cpu_src2 = gen_load_gpr(dc, rs2);
4692                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4693                     gen_store_gpr(dc, rd, cpu_dst);
4694                     break;
4695                 case 0x012: /* VIS I array16 */
4696                     CHECK_FPU_FEATURE(dc, VIS1);
4697                     cpu_src1 = gen_load_gpr(dc, rs1);
4698                     cpu_src2 = gen_load_gpr(dc, rs2);
4699                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4700                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4701                     gen_store_gpr(dc, rd, cpu_dst);
4702                     break;
4703                 case 0x014: /* VIS I array32 */
4704                     CHECK_FPU_FEATURE(dc, VIS1);
4705                     cpu_src1 = gen_load_gpr(dc, rs1);
4706                     cpu_src2 = gen_load_gpr(dc, rs2);
4707                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4708                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4709                     gen_store_gpr(dc, rd, cpu_dst);
4710                     break;
4711                 case 0x018: /* VIS I alignaddr */
4712                     CHECK_FPU_FEATURE(dc, VIS1);
4713                     cpu_src1 = gen_load_gpr(dc, rs1);
4714                     cpu_src2 = gen_load_gpr(dc, rs2);
4715                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4716                     gen_store_gpr(dc, rd, cpu_dst);
4717                     break;
4718                 case 0x01a: /* VIS I alignaddrl */
4719                     CHECK_FPU_FEATURE(dc, VIS1);
4720                     cpu_src1 = gen_load_gpr(dc, rs1);
4721                     cpu_src2 = gen_load_gpr(dc, rs2);
4722                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4723                     gen_store_gpr(dc, rd, cpu_dst);
4724                     break;
4725                 case 0x019: /* VIS II bmask */
4726                     CHECK_FPU_FEATURE(dc, VIS2);
4727                     cpu_src1 = gen_load_gpr(dc, rs1);
4728                     cpu_src2 = gen_load_gpr(dc, rs2);
4729                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4730                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4731                     gen_store_gpr(dc, rd, cpu_dst);
4732                     break;
4733                 case 0x020: /* VIS I fcmple16 */
4734                     CHECK_FPU_FEATURE(dc, VIS1);
4735                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4736                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4737                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4738                     gen_store_gpr(dc, rd, cpu_dst);
4739                     break;
4740                 case 0x022: /* VIS I fcmpne16 */
4741                     CHECK_FPU_FEATURE(dc, VIS1);
4742                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4743                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4744                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4745                     gen_store_gpr(dc, rd, cpu_dst);
4746                     break;
4747                 case 0x024: /* VIS I fcmple32 */
4748                     CHECK_FPU_FEATURE(dc, VIS1);
4749                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4750                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4751                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4752                     gen_store_gpr(dc, rd, cpu_dst);
4753                     break;
4754                 case 0x026: /* VIS I fcmpne32 */
4755                     CHECK_FPU_FEATURE(dc, VIS1);
4756                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4757                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4758                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4759                     gen_store_gpr(dc, rd, cpu_dst);
4760                     break;
4761                 case 0x028: /* VIS I fcmpgt16 */
4762                     CHECK_FPU_FEATURE(dc, VIS1);
4763                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4764                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4765                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4766                     gen_store_gpr(dc, rd, cpu_dst);
4767                     break;
4768                 case 0x02a: /* VIS I fcmpeq16 */
4769                     CHECK_FPU_FEATURE(dc, VIS1);
4770                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4771                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4772                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4773                     gen_store_gpr(dc, rd, cpu_dst);
4774                     break;
4775                 case 0x02c: /* VIS I fcmpgt32 */
4776                     CHECK_FPU_FEATURE(dc, VIS1);
4777                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4778                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4779                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4780                     gen_store_gpr(dc, rd, cpu_dst);
4781                     break;
4782                 case 0x02e: /* VIS I fcmpeq32 */
4783                     CHECK_FPU_FEATURE(dc, VIS1);
4784                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4785                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4786                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4787                     gen_store_gpr(dc, rd, cpu_dst);
4788                     break;
4789                 case 0x031: /* VIS I fmul8x16 */
4790                     CHECK_FPU_FEATURE(dc, VIS1);
4791                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4792                     break;
4793                 case 0x033: /* VIS I fmul8x16au */
4794                     CHECK_FPU_FEATURE(dc, VIS1);
4795                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4796                     break;
4797                 case 0x035: /* VIS I fmul8x16al */
4798                     CHECK_FPU_FEATURE(dc, VIS1);
4799                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4800                     break;
4801                 case 0x036: /* VIS I fmul8sux16 */
4802                     CHECK_FPU_FEATURE(dc, VIS1);
4803                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4804                     break;
4805                 case 0x037: /* VIS I fmul8ulx16 */
4806                     CHECK_FPU_FEATURE(dc, VIS1);
4807                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4808                     break;
4809                 case 0x038: /* VIS I fmuld8sux16 */
4810                     CHECK_FPU_FEATURE(dc, VIS1);
4811                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4812                     break;
4813                 case 0x039: /* VIS I fmuld8ulx16 */
4814                     CHECK_FPU_FEATURE(dc, VIS1);
4815                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4816                     break;
4817                 case 0x03a: /* VIS I fpack32 */
4818                     CHECK_FPU_FEATURE(dc, VIS1);
4819                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4820                     break;
4821                 case 0x03b: /* VIS I fpack16 */
4822                     CHECK_FPU_FEATURE(dc, VIS1);
4823                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4824                     cpu_dst_32 = gen_dest_fpr_F(dc);
4825                     gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4826                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4827                     break;
                /* VIS partitioned ops (IMPDEP1 space).  The "s"-suffixed
                   variants operate on 32-bit (single) FP registers, the
                   rest on 64-bit (double) FP registers.  */
                case 0x03d: /* VIS I fpackfix */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03e: /* VIS I pdist */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
                    break;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
                    break;
                case 0x04c: /* VIS II bshuffle */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
                    break;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
                    break;
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
                    break;
                case 0x053: /* VIS I fpadd32s */
                    /* A single 32-bit add is just a plain i32 add.  */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
                    break;
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
                    tcg_gen_movi_i64(cpu_dst_64, 0);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    tcg_gen_movi_i32(cpu_dst_32, 0);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
                    break;
                case 0x064: /* VIS I fandnot2 */
                    /* fandnot2 = rs1 & ~rs2, i.e. andc(rs1, rs2).  */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
                    break;
                case 0x065: /* VIS I fandnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
                    break;
                case 0x066: /* VIS I fnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
                    break;
                case 0x067: /* VIS I fnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
                    break;
                case 0x068: /* VIS I fandnot1 */
                    /* Operands deliberately swapped: fandnot1 = rs2 & ~rs1. */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
                    break;
                case 0x069: /* VIS I fandnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
                    break;
                case 0x06a: /* VIS I fnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
                    break;
                case 0x06b: /* VIS I fnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
                    break;
                case 0x06c: /* VIS I fxor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
                    break;
                case 0x06d: /* VIS I fxors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
                    break;
                case 0x06e: /* VIS I fnand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
                    break;
                case 0x06f: /* VIS I fnands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
                    break;
                case 0x070: /* VIS I fand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
                    break;
                case 0x071: /* VIS I fands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
                    break;
                case 0x072: /* VIS I fxnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
                    break;
                case 0x073: /* VIS I fxnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
                    break;
                case 0x074: /* VIS I fsrc1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
                    break;
                case 0x075: /* VIS I fsrc1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
                    break;
                case 0x076: /* VIS I fornot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
                    break;
                case 0x077: /* VIS I fornot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
                    break;
                case 0x078: /* VIS I fsrc2 */
                    /* Note: cpu_src1_64 is just a scratch name; the value
                       copied is rs2.  */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
                    break;
                case 0x079: /* VIS I fsrc2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
                    break;
                case 0x07a: /* VIS I fornot1 */
                    /* Operands deliberately swapped: fornot1 = rs2 | ~rs1. */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
                    break;
                case 0x07b: /* VIS I fornot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
                    break;
                case 0x07c: /* VIS I for */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
                    break;
                case 0x07d: /* VIS I fors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
                    break;
                case 0x07e: /* VIS I fone */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
                    tcg_gen_movi_i64(cpu_dst_64, -1);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                case 0x07f: /* VIS I fones */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    tcg_gen_movi_i32(cpu_dst_32, -1);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x080: /* VIS I shutdown */
                case 0x081: /* VIS II siam */
                    /* Not implemented; treated as illegal.  */
                    // XXX
                    goto illegal_insn;
                default:
                    goto illegal_insn;
                }
5033                 }
5034 #else
5035                 goto ncp_insn;
5036 #endif
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
                goto illegal_insn;
#else
                goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
            } else if (xop == 0x39) { /* V9 return */
                save_state(dc);
                cpu_src1 = get_src1(dc, insn);
                cpu_tmp0 = tcg_temp_new();
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        cpu_src2 = gen_load_gpr(dc, rs2);
                        tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
                    } else {
                        tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
                    }
                }
                /* "return" restores the register window and transfers
                   control to rs1 + rs2/simm; gen_check_align traps if the
                   target is not 4-byte aligned.  */
                gen_helper_restore(cpu_env);
                gen_mov_pc_npc(dc);
                gen_check_align(cpu_tmp0, 3);
                tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
                dc->npc = DYNAMIC_PC;
                goto jmp_insn;
#endif
            } else {
                /* Remaining format-3 ops share the rs1 + rs2/simm13
                   effective-address computation below.  */
                cpu_src1 = get_src1(dc, insn);
                cpu_tmp0 = tcg_temp_new();
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        cpu_src2 = gen_load_gpr(dc, rs2);
                        tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
                    } else {
                        tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
                    }
                }
                switch (xop) {
                case 0x38:      /* jmpl */
                    {
                        /* rd receives the address of the jmpl itself.  */
                        TCGv t = gen_dest_gpr(dc, rd);
                        tcg_gen_movi_tl(t, dc->pc);
                        gen_store_gpr(dc, rd, t);

                        gen_mov_pc_npc(dc);
                        gen_check_align(cpu_tmp0, 3);
                        gen_address_mask(dc, cpu_tmp0);
                        tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
                        dc->npc = DYNAMIC_PC;
                    }
                    goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
                case 0x39:      /* rett, V9 return */
                    {
                        if (!supervisor(dc))
                            goto priv_insn;
                        gen_mov_pc_npc(dc);
                        gen_check_align(cpu_tmp0, 3);
                        tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
                        dc->npc = DYNAMIC_PC;
                        gen_helper_rett(cpu_env);
                    }
                    goto jmp_insn;
#endif
                case 0x3b: /* flush */
                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                        goto unimp_flush;
                    /* nop */
                    break;
                case 0x3c:      /* save */
                    /* The effective address was computed in the OLD window
                       above; after the window shift it is stored to rd in
                       the NEW window.  */
                    gen_helper_save(cpu_env);
                    gen_store_gpr(dc, rd, cpu_tmp0);
                    break;
                case 0x3d:      /* restore */
                    gen_helper_restore(cpu_env);
                    gen_store_gpr(dc, rd, cpu_tmp0);
                    break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
                case 0x3e:      /* V9 done/retry */
                    {
                        /* rd field selects done (0) or retry (1).  */
                        switch (rd) {
                        case 0:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
                                gen_io_start();
                            }
                            gen_helper_done(cpu_env);
                            goto jmp_insn;
                        case 1:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
                                gen_io_start();
                            }
                            gen_helper_retry(cpu_env);
                            goto jmp_insn;
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            }
            break;
        }
        break;
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            /* ??? gen_address_mask prevents us from using a source
               register directly.  Always generate a temporary.  */
            TCGv cpu_addr = tcg_temp_new();

            /* Effective address: rs1 + (simm13 or rs2); casa/casxa take
               no offset, and a zero immediate/%g0 adds nothing.  */
            tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
            if (xop == 0x3c || xop == 0x3e) {
                /* V9 casa/casxa : no offset */
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                if (simm != 0) {
                    tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
                }
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
                }
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                /* Integer loads (plus ldstub/swap); result goes to rd.  */
                TCGv cpu_val = gen_dest_gpr(dc, rd);

                switch (xop) {
                case 0x0:       /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1:       /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2:       /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3:       /* ldd, load double word */
                    /* rd must be even; low word goes to rd + 1 and the
                       high word to rd.  */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i64 t64;

                        gen_address_mask(dc, cpu_addr);
                        t64 = tcg_temp_new_i64();
                        tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_val, t64);
                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
                        gen_store_gpr(dc, rd + 1, cpu_val);
                        tcg_gen_shri_i64(t64, t64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, t64);
                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
                    }
                    break;
                case 0x9:       /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa:       /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd:       /* ldstub */
                    gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0f:
                    /* swap, swap register with memory. Also atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    cpu_src1 = gen_load_gpr(dc, rd);
                    gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
                             dc->mem_idx, MO_TEUL);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10:      /* lda, V9 lduwa, load word alternate */
                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
                    break;
                case 0x11:      /* lduba, load unsigned byte alternate */
                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
                    break;
                case 0x12:      /* lduha, load unsigned halfword alternate */
                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
                    break;
                case 0x13:      /* ldda, load double word alternate */
                    if (rd & 1) {
                        goto illegal_insn;
                    }
                    gen_ldda_asi(dc, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19:      /* ldsba, load signed byte alternate */
                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
                    break;
                case 0x1a:      /* ldsha, load signed halfword alternate */
                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
                    break;
                case 0x1d:      /* ldstuba -- XXX: should be atomically */
                    gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
                    break;
                case 0x1f:      /* swapa, swap reg with alt. memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    cpu_src1 = gen_load_gpr(dc, rd);
                    gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
                    break;

#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
                    break;
                case 0x1b: /* V9 ldxa */
                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
                    gen_update_fprs_dirty(dc, rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
                    gen_update_fprs_dirty(dc, DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
                    gen_update_fprs_dirty(dc, QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                /* Cases that wrote an FP register or had no destination
                   jumped to skip_move instead.  */
                gen_store_gpr(dc, rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                /* FP loads; trap first if the FPU is disabled.  */
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }
                switch (xop) {
                case 0x20:      /* ldf, load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
                                        dc->mem_idx, MO_TEUL);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x21:      /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    /* rd == 1 selects the 64-bit ldxfsr form.  */
                    if (rd == 1) {
                        TCGv_i64 t64 = tcg_temp_new_i64();
                        tcg_gen_qemu_ld_i64(t64, cpu_addr,
                                            dc->mem_idx, MO_TEUQ);
                        gen_helper_ldxfsr(cpu_fsr, cpu_env, cpu_fsr, t64);
                        break;
                    }
#endif
                    cpu_dst_32 = tcg_temp_new_i32();
                    tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
                                        dc->mem_idx, MO_TEUL);
                    gen_helper_ldfsr(cpu_fsr, cpu_env, cpu_fsr, cpu_dst_32);
                    break;
                case 0x22:      /* ldqf, load quad fpreg */
                    /* Quad = two 8-byte loads; only 4-byte alignment is
                       architecturally required.  */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_address_mask(dc, cpu_addr);
                    cpu_src1_64 = tcg_temp_new_i64();
                    tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
                                        MO_TEUQ | MO_ALIGN_4);
                    tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
                    cpu_src2_64 = tcg_temp_new_i64();
                    tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
                                        MO_TEUQ | MO_ALIGN_4);
                    gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
                    break;
                case 0x23:      /* lddf, load double fpreg */
                    gen_address_mask(dc, cpu_addr);
                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
                    tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
                                        MO_TEUQ | MO_ALIGN_4);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                /* Integer stores; the value stored comes from rd.  */
                TCGv cpu_val = gen_load_gpr(dc, rd);

                switch (xop) {
                case 0x4: /* st, store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* stb, store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* sth, store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* std, store double word */
                    /* rd must be even; rd supplies the high word and
                       rd + 1 the low word.  */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i64 t64;
                        TCGv lo;

                        gen_address_mask(dc, cpu_addr);
                        lo = gen_load_gpr(dc, rd + 1);
                        t64 = tcg_temp_new_i64();
                        tcg_gen_concat_tl_i64(t64, lo, cpu_val);
                        tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
                    }
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* sta, V9 stwa, store word alternate */
                    gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
                    break;
                case 0x15: /* stba, store byte alternate */
                    gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
                    break;
                case 0x16: /* stha, store halfword alternate */
                    gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
                    break;
                case 0x17: /* stda, store double word alternate */
                    if (rd & 1) {
                        goto illegal_insn;
                    }
                    gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x23 && xop < 0x28) {
                /* FP stores; trap first if the FPU is disabled.  */
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }
                switch (xop) {
                case 0x24: /* stf, store fpreg */
                    gen_address_mask(dc, cpu_addr);
                    cpu_src1_32 = gen_load_fpr_F(dc, rd);
                    tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
                                        dc->mem_idx, MO_TEUL);
                    break;
                case 0x25: /* stfsr, V9 stxfsr */
                    {
#ifdef TARGET_SPARC64
                        gen_address_mask(dc, cpu_addr);
                        /* rd == 1 selects the 64-bit stxfsr form.  */
                        if (rd == 1) {
                            tcg_gen_qemu_st64(cpu_fsr, cpu_addr, dc->mem_idx);
                            break;
                        }
#endif
                        tcg_gen_qemu_st32(cpu_fsr, cpu_addr, dc->mem_idx);
                    }
                    break;
5454                 case 0x26:
5455 #ifdef TARGET_SPARC64
5456                     /* V9 stqf, store quad fpreg */
5457                     CHECK_FPU_FEATURE(dc, FLOAT128);
5458                     gen_address_mask(dc, cpu_addr);
5459                     /* ??? While stqf only requires 4-byte alignment, it is
5460                        legal for the cpu to signal the unaligned exception.
5461                        The OS trap handler is then required to fix it up.
5462                        For qemu, this avoids having to probe the second page
5463                        before performing the first write.  */
5464                     cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5465                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5466                                         dc->mem_idx, MO_TEUQ | MO_ALIGN_16);
5467                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5468                     cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5469                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5470                                         dc->mem_idx, MO_TEUQ);
5471                     break;
5472 #else /* !TARGET_SPARC64 */
5473                     /* stdfq, store floating point queue */
5474 #if defined(CONFIG_USER_ONLY)
5475                     goto illegal_insn;
5476 #else
5477                     if (!supervisor(dc))
5478                         goto priv_insn;
5479                     if (gen_trap_ifnofpu(dc)) {
5480                         goto jmp_insn;
5481                     }
5482                     goto nfq_insn;
5483 #endif
5484 #endif
                case 0x27: /* stdf, store double fpreg */
                    gen_address_mask(dc, cpu_addr);
                    cpu_src1_64 = gen_load_fpr_D(dc, rd);
                    tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
                                        MO_TEUQ | MO_ALIGN_4);
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x33 && xop < 0x3f) {
                /* ASI FP stores and compare-and-swap.  */
                switch (xop) {
#ifdef TARGET_SPARC64
                case 0x34: /* V9 stfa */
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    gen_stf_asi(dc, cpu_addr, insn, 4, rd);
                    break;
                case 0x36: /* V9 stqfa */
                    {
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        if (gen_trap_ifnofpu(dc)) {
                            goto jmp_insn;
                        }
                        gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
                    }
                    break;
                case 0x37: /* V9 stdfa */
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
                    break;
                case 0x3e: /* V9 casxa */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
                    break;
#else
                case 0x34: /* stc */
                case 0x35: /* stcsr */
                case 0x36: /* stdcq */
                case 0x37: /* stdc */
                    goto ncp_insn;
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x3c: /* V9 or LEON3 casa */
#ifndef TARGET_SPARC64
                    CHECK_IU_FEATURE(dc, CASA);
#endif
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else {
                goto illegal_insn;
            }
        }
        break;
    }
5549     /* default case for non jump instructions */
5550     if (dc->npc == DYNAMIC_PC) {
5551         dc->pc = DYNAMIC_PC;
5552         gen_op_next_insn();
5553     } else if (dc->npc == JUMP_PC) {
5554         /* we can do a static jump */
5555         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5556         dc->base.is_jmp = DISAS_NORETURN;
5557     } else {
5558         dc->pc = dc->npc;
5559         dc->npc = dc->npc + 4;
5560     }
5561  jmp_insn:
5562     return;
5563  illegal_insn:
5564     gen_exception(dc, TT_ILL_INSN);
5565     return;
5566  unimp_flush:
5567     gen_exception(dc, TT_UNIMP_FLUSH);
5568     return;
5569 #if !defined(CONFIG_USER_ONLY)
5570  priv_insn:
5571     gen_exception(dc, TT_PRIV_INSN);
5572     return;
5573 #endif
5574  nfpu_insn:
5575     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5576     return;
5577 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5578  nfq_insn:
5579     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5580     return;
5581 #endif
5582 #ifndef TARGET_SPARC64
5583  ncp_insn:
5584     gen_exception(dc, TT_NCP_INSN);
5585     return;
5586 #endif
5587 }
5588 
5589 static void sparc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
5590 {
5591     DisasContext *dc = container_of(dcbase, DisasContext, base);
5592     CPUSPARCState *env = cs->env_ptr;
5593     int bound;
5594 
5595     dc->pc = dc->base.pc_first;
5596     dc->npc = (target_ulong)dc->base.tb->cs_base;
5597     dc->cc_op = CC_OP_DYNAMIC;
5598     dc->mem_idx = dc->base.tb->flags & TB_FLAG_MMU_MASK;
5599     dc->def = &env->def;
5600     dc->fpu_enabled = tb_fpu_enabled(dc->base.tb->flags);
5601     dc->address_mask_32bit = tb_am_enabled(dc->base.tb->flags);
5602 #ifndef CONFIG_USER_ONLY
5603     dc->supervisor = (dc->base.tb->flags & TB_FLAG_SUPER) != 0;
5604 #endif
5605 #ifdef TARGET_SPARC64
5606     dc->fprs_dirty = 0;
5607     dc->asi = (dc->base.tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
5608 #ifndef CONFIG_USER_ONLY
5609     dc->hypervisor = (dc->base.tb->flags & TB_FLAG_HYPER) != 0;
5610 #endif
5611 #endif
5612     /*
5613      * if we reach a page boundary, we stop generation so that the
5614      * PC of a TT_TFAULT exception is always in the right page
5615      */
5616     bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
5617     dc->base.max_insns = MIN(dc->base.max_insns, bound);
5618 }
5619 
/*
 * TranslatorOps tb_start hook: all per-TB setup is already done in
 * sparc_tr_init_disas_context, so this is intentionally a no-op.
 */
static void sparc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}
5623 
5624 static void sparc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
5625 {
5626     DisasContext *dc = container_of(dcbase, DisasContext, base);
5627 
5628     if (dc->npc & JUMP_PC) {
5629         assert(dc->jump_pc[1] == dc->pc + 4);
5630         tcg_gen_insn_start(dc->pc, dc->jump_pc[0] | JUMP_PC);
5631     } else {
5632         tcg_gen_insn_start(dc->pc, dc->npc);
5633     }
5634 }
5635 
5636 static void sparc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
5637 {
5638     DisasContext *dc = container_of(dcbase, DisasContext, base);
5639     CPUSPARCState *env = cs->env_ptr;
5640     unsigned int insn;
5641 
5642     insn = translator_ldl(env, &dc->base, dc->pc);
5643     dc->base.pc_next += 4;
5644     disas_sparc_insn(dc, insn);
5645 
5646     if (dc->base.is_jmp == DISAS_NORETURN) {
5647         return;
5648     }
5649     if (dc->pc != dc->base.pc_next) {
5650         dc->base.is_jmp = DISAS_TOO_MANY;
5651     }
5652 }
5653 
5654 static void sparc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
5655 {
5656     DisasContext *dc = container_of(dcbase, DisasContext, base);
5657 
5658     switch (dc->base.is_jmp) {
5659     case DISAS_NEXT:
5660     case DISAS_TOO_MANY:
5661         if (dc->pc != DYNAMIC_PC &&
5662             (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5663             /* static PC and NPC: we can use direct chaining */
5664             gen_goto_tb(dc, 0, dc->pc, dc->npc);
5665         } else {
5666             if (dc->pc != DYNAMIC_PC) {
5667                 tcg_gen_movi_tl(cpu_pc, dc->pc);
5668             }
5669             save_npc(dc);
5670             tcg_gen_exit_tb(NULL, 0);
5671         }
5672         break;
5673 
5674     case DISAS_NORETURN:
5675        break;
5676 
5677     case DISAS_EXIT:
5678         /* Exit TB */
5679         save_state(dc);
5680         tcg_gen_exit_tb(NULL, 0);
5681         break;
5682 
5683     default:
5684         g_assert_not_reached();
5685     }
5686 }
5687 
5688 static void sparc_tr_disas_log(const DisasContextBase *dcbase,
5689                                CPUState *cpu, FILE *logfile)
5690 {
5691     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
5692     target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
5693 }
5694 
/* Hook table consumed by the generic translator loop (translator_loop). */
static const TranslatorOps sparc_tr_ops = {
    .init_disas_context = sparc_tr_init_disas_context,
    .tb_start           = sparc_tr_tb_start,
    .insn_start         = sparc_tr_insn_start,
    .translate_insn     = sparc_tr_translate_insn,
    .tb_stop            = sparc_tr_tb_stop,
    .disas_log          = sparc_tr_disas_log,
};
5703 
/*
 * Entry point for TB translation: run the generic translator loop with
 * the SPARC hooks and a zero-initialized per-TB DisasContext.
 */
void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
                           target_ulong pc, void *host_pc)
{
    DisasContext dc = {};

    translator_loop(cs, tb, max_insns, pc, host_pc, &sparc_tr_ops, &dc.base);
}
5711 
/*
 * One-time allocation of the TCG globals backing CPUSPARCState fields.
 * The r32/rtl tables pair each file-scope TCGv* with its offset in the
 * CPU state structure and a debug name.
 */
void sparc_tcg_init(void)
{
    /* Names for the 32 windowed integer registers: global/out/local/in. */
    static const char gregnames[32][4] = {
        "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
        "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
        "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
        "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
    };
    /* FP registers are tracked as 64-bit pairs, hence even names only. */
    static const char fregnames[32][4] = {
        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
    };

    /* 32-bit globals (i32). */
    static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
#ifdef TARGET_SPARC64
        { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
        { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
#else
        { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
#endif
        { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
        { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
    };

    /* target_ulong-width globals. */
    static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
#ifdef TARGET_SPARC64
        { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
        { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
        { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
        { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
          "hstick_cmpr" },
        { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
        { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
        { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
        { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
        { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
#endif
        { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
        { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
        { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
        { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
        { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
        { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
        { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
        { &cpu_y, offsetof(CPUSPARCState, y), "y" },
#ifndef CONFIG_USER_ONLY
        { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
#endif
    };

    unsigned int i;

    /* regwptr points at the current register window inside env. */
    cpu_regwptr = tcg_global_mem_new_ptr(cpu_env,
                                         offsetof(CPUSPARCState, regwptr),
                                         "regwptr");

    for (i = 0; i < ARRAY_SIZE(r32); ++i) {
        *r32[i].ptr = tcg_global_mem_new_i32(cpu_env, r32[i].off, r32[i].name);
    }

    for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
        *rtl[i].ptr = tcg_global_mem_new(cpu_env, rtl[i].off, rtl[i].name);
    }

    /* %g0 always reads as zero; represented by a NULL TCGv. */
    cpu_regs[0] = NULL;
    for (i = 1; i < 8; ++i) {
        cpu_regs[i] = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUSPARCState, gregs[i]),
                                         gregnames[i]);
    }

    /* Windowed registers (%o/%l/%i) are addressed relative to regwptr. */
    for (i = 8; i < 32; ++i) {
        cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
                                         (i - 8) * sizeof(target_ulong),
                                         gregnames[i]);
    }

    for (i = 0; i < TARGET_DPREGS; i++) {
        cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUSPARCState, fpr[i]),
                                            fregnames[i]);
    }
}
5797 
5798 void sparc_restore_state_to_opc(CPUState *cs,
5799                                 const TranslationBlock *tb,
5800                                 const uint64_t *data)
5801 {
5802     SPARCCPU *cpu = SPARC_CPU(cs);
5803     CPUSPARCState *env = &cpu->env;
5804     target_ulong pc = data[0];
5805     target_ulong npc = data[1];
5806 
5807     env->pc = pc;
5808     if (npc == DYNAMIC_PC) {
5809         /* dynamic NPC: already stored */
5810     } else if (npc & JUMP_PC) {
5811         /* jump PC: use 'cond' and the jump targets of the translation */
5812         if (env->cond) {
5813             env->npc = npc & ~3;
5814         } else {
5815             env->npc = pc + 4;
5816         }
5817     } else {
5818         env->npc = npc;
5819     }
5820 }
5821